lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | 84ce72c2d81adff4d58e942f3a825979846a04ea | 0 | asciidocfx/AsciidocFX,asciidocfx/AsciidocFX,asciidocfx/AsciidocFX,asciidocfx/AsciidocFX | package com.kodedu.component;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.scene.control.Alert;
import javafx.scene.control.ButtonType;
import javafx.scene.control.DialogPane;
import javafx.scene.control.ListView;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.Priority;
import javafx.stage.Screen;
import java.nio.file.Path;
import java.util.List;
import java.util.Optional;
import java.util.stream.IntStream;
import static javafx.scene.control.Alert.AlertType;
/**
* Created by usta on 06.03.2015.
*/
public final class AlertHelper {
public static final ButtonType LOAD_FILE_SYSTEM_CHANGES = new ButtonType("Load File System Changes");
public static final ButtonType KEEP_MEMORY_CHANGES = new ButtonType("Keep Memory Changes");
public static final ButtonType OPEN_IN_APP = new ButtonType("Open anyway");
public static final ButtonType OPEN_EXTERNAL = new ButtonType("Open external");
static Alert buildDeleteAlertDialog(List<Path> pathsLabel) {
Alert deleteAlert = new Alert(Alert.AlertType.WARNING, null, ButtonType.YES, ButtonType.CANCEL);
deleteAlert.setHeaderText("Do you want to delete selected path(s)?");
DialogPane dialogPane = deleteAlert.getDialogPane();
ObservableList<Path> paths = Optional.ofNullable(pathsLabel)
.map(FXCollections::observableList)
.orElse(FXCollections.emptyObservableList());
if (paths.isEmpty()) {
dialogPane.setContentText("There are no files selected.");
deleteAlert.getButtonTypes().clear();
deleteAlert.getButtonTypes().add(ButtonType.CANCEL);
return deleteAlert;
}
ListView<Path> listView = new ListView<>(paths);
listView.setId("listOfPaths");
GridPane gridPane = new GridPane();
gridPane.addRow(0, listView);
GridPane.setHgrow(listView, Priority.ALWAYS);
double minWidth = 200.0;
double maxWidth = Screen.getScreens().stream()
.mapToDouble(s -> s.getBounds().getWidth()/3)
.min().orElse(minWidth);
double prefWidth = paths.stream()
.map(String::valueOf)
.mapToDouble(s->s.length() * 6)
.max()
.orElse(maxWidth);
double minHeight = IntStream.of(paths.size())
.map(e -> e * 40)
.filter(e -> e <= 300 && e >= 40)
.findFirst()
.orElse(200);
gridPane.setMinWidth(minWidth);
gridPane.setPrefWidth(prefWidth);
gridPane.setPrefHeight(minHeight);
dialogPane.setContent(gridPane);
return deleteAlert;
}
public static Optional<ButtonType> deleteAlert(List<Path> pathsLabel) {
return buildDeleteAlertDialog(pathsLabel).showAndWait();
}
public static Optional<ButtonType> showAlert(String alertMessage) {
AlertDialog deleteAlert = new AlertDialog(AlertType.WARNING, null, ButtonType.YES, ButtonType.CANCEL);
deleteAlert.setHeaderText(alertMessage);
return deleteAlert.showAndWait();
}
public static void okayAlert(String alertMessage) {
AlertDialog deleteAlert = new AlertDialog(AlertType.WARNING, null, ButtonType.OK);
deleteAlert.setHeaderText(alertMessage);
deleteAlert.show();
}
public static Optional<ButtonType> nullDirectoryAlert() {
AlertDialog deleteAlert = new AlertDialog(AlertType.WARNING, null, ButtonType.OK);
deleteAlert.setHeaderText("Please select directorie(s)");
return deleteAlert.showAndWait();
}
public static Optional<ButtonType> notImplementedDialog() {
AlertDialog alert = new AlertDialog(AlertType.WARNING, null, ButtonType.OK);
alert.setHeaderText("This feature is not available for Markdown.");
return alert.showAndWait();
}
public static Optional<ButtonType> saveAlert() {
AlertDialog saveAlert = new AlertDialog();
saveAlert.setHeaderText("This document is not saved. Do you want to close it?");
return saveAlert.showAndWait();
}
public static Optional<ButtonType> conflictAlert(Path path) {
Alert alert = new Alert(Alert.AlertType.WARNING);
alert.setTitle("File Cache Conflict");
alert.setHeaderText(String.format("Changes have been made to '%s' in memory and on disk", path));
alert.getButtonTypes().clear();
alert.getButtonTypes().addAll(LOAD_FILE_SYSTEM_CHANGES, KEEP_MEMORY_CHANGES, ButtonType.CANCEL);
return alert.showAndWait();
}
public static Optional<ButtonType> sizeHangAlert(Path path, int hangFileSizeLimit) {
Alert alert = new Alert(Alert.AlertType.WARNING);
alert.setTitle(String.format("File size > %dMB", hangFileSizeLimit));
alert.setHeaderText(String.format("It may cause application being unresponsive", path));
alert.getButtonTypes().clear();
alert.getButtonTypes().addAll(OPEN_IN_APP, OPEN_EXTERNAL, ButtonType.CANCEL);
return alert.showAndWait();
}
public static Optional<ButtonType> nosizeAlert(Path path, int hangFileSizeLimit) {
Alert alert = new Alert(Alert.AlertType.WARNING);
alert.setTitle("No file size");
alert.setHeaderText(String.format("It may cause application being unresponsive if it's real size > %dMB", path, hangFileSizeLimit));
alert.getButtonTypes().clear();
alert.getButtonTypes().addAll(OPEN_IN_APP, OPEN_EXTERNAL, ButtonType.CANCEL);
return alert.showAndWait();
}
public static void showDuplicateWarning(List<String> duplicatePaths, Path lib) {
Alert alert = new Alert(Alert.AlertType.WARNING);
DialogPane dialogPane = alert.getDialogPane();
ListView listView = new ListView();
listView.getStyleClass().clear();
ObservableList items = listView.getItems();
items.addAll(duplicatePaths);
listView.setEditable(false);
dialogPane.setContent(listView);
alert.setTitle("Duplicate JARs found");
alert.setHeaderText(String.format("Duplicate JARs found, it may cause unexpected behaviours.\n\n" +
"Please remove the older versions from these pair(s) manually. \n" +
"JAR files are located at %s directory.", lib));
alert.getButtonTypes().clear();
alert.getButtonTypes().addAll(ButtonType.OK);
alert.showAndWait();
}
}
| src/main/java/com/kodedu/component/AlertHelper.java | package com.kodedu.component;
import javafx.collections.ObservableList;
import javafx.scene.control.Alert;
import javafx.scene.control.ButtonType;
import javafx.scene.control.DialogPane;
import javafx.scene.control.ListView;
import java.nio.file.Path;
import java.util.List;
import java.util.Optional;
import static javafx.scene.control.Alert.AlertType;
/**
* Created by usta on 06.03.2015.
*/
public final class AlertHelper {
public static final ButtonType LOAD_FILE_SYSTEM_CHANGES = new ButtonType("Load File System Changes");
public static final ButtonType KEEP_MEMORY_CHANGES = new ButtonType("Keep Memory Changes");
public static final ButtonType OPEN_IN_APP = new ButtonType("Open anyway");
public static final ButtonType OPEN_EXTERNAL = new ButtonType("Open external");
public static Optional<ButtonType> deleteAlert(List<Path> pathsLabel) {
Alert deleteAlert = new Alert(Alert.AlertType.WARNING, null, ButtonType.YES, ButtonType.CANCEL);
deleteAlert.setHeaderText("Do you want to delete selected path(s)?");
DialogPane dialogPane = deleteAlert.getDialogPane();
ListView listView = new ListView();
listView.setMinHeight(40);
listView.getStyleClass().clear();
ObservableList items = listView.getItems();
items.addAll(pathsLabel);
listView.setEditable(false);
dialogPane.setContent(listView);
listView.setPrefHeight(Optional.ofNullable(pathsLabel)
.map(List::size)
.map(e -> e * 40)
.filter(e -> e <= 300 && e >= 40)
.orElse(300));
listView.refresh();
return deleteAlert.showAndWait();
}
public static Optional<ButtonType> showAlert(String alertMessage) {
AlertDialog deleteAlert = new AlertDialog(AlertType.WARNING, null, ButtonType.YES, ButtonType.CANCEL);
deleteAlert.setHeaderText(alertMessage);
return deleteAlert.showAndWait();
}
public static void okayAlert(String alertMessage) {
AlertDialog deleteAlert = new AlertDialog(AlertType.WARNING, null, ButtonType.OK);
deleteAlert.setHeaderText(alertMessage);
deleteAlert.show();
}
public static Optional<ButtonType> nullDirectoryAlert() {
AlertDialog deleteAlert = new AlertDialog(AlertType.WARNING, null, ButtonType.OK);
deleteAlert.setHeaderText("Please select directorie(s)");
return deleteAlert.showAndWait();
}
public static Optional<ButtonType> notImplementedDialog() {
AlertDialog alert = new AlertDialog(AlertType.WARNING, null, ButtonType.OK);
alert.setHeaderText("This feature is not available for Markdown.");
return alert.showAndWait();
}
public static Optional<ButtonType> saveAlert() {
AlertDialog saveAlert = new AlertDialog();
saveAlert.setHeaderText("This document is not saved. Do you want to close it?");
return saveAlert.showAndWait();
}
public static Optional<ButtonType> conflictAlert(Path path) {
Alert alert = new Alert(Alert.AlertType.WARNING);
alert.setTitle("File Cache Conflict");
alert.setHeaderText(String.format("Changes have been made to '%s' in memory and on disk", path));
alert.getButtonTypes().clear();
alert.getButtonTypes().addAll(LOAD_FILE_SYSTEM_CHANGES, KEEP_MEMORY_CHANGES, ButtonType.CANCEL);
return alert.showAndWait();
}
public static Optional<ButtonType> sizeHangAlert(Path path, int hangFileSizeLimit) {
Alert alert = new Alert(Alert.AlertType.WARNING);
alert.setTitle(String.format("File size > %dMB", hangFileSizeLimit));
alert.setHeaderText(String.format("It may cause application being unresponsive", path));
alert.getButtonTypes().clear();
alert.getButtonTypes().addAll(OPEN_IN_APP, OPEN_EXTERNAL, ButtonType.CANCEL);
return alert.showAndWait();
}
public static Optional<ButtonType> nosizeAlert(Path path, int hangFileSizeLimit) {
Alert alert = new Alert(Alert.AlertType.WARNING);
alert.setTitle("No file size");
alert.setHeaderText(String.format("It may cause application being unresponsive if it's real size > %dMB", path, hangFileSizeLimit));
alert.getButtonTypes().clear();
alert.getButtonTypes().addAll(OPEN_IN_APP, OPEN_EXTERNAL, ButtonType.CANCEL);
return alert.showAndWait();
}
public static void showDuplicateWarning(List<String> duplicatePaths, Path lib) {
Alert alert = new Alert(Alert.AlertType.WARNING);
DialogPane dialogPane = alert.getDialogPane();
ListView listView = new ListView();
listView.getStyleClass().clear();
ObservableList items = listView.getItems();
items.addAll(duplicatePaths);
listView.setEditable(false);
dialogPane.setContent(listView);
alert.setTitle("Duplicate JARs found");
alert.setHeaderText(String.format("Duplicate JARs found, it may cause unexpected behaviours.\n\n" +
"Please remove the older versions from these pair(s) manually. \n" +
"JAR files are located at %s directory.", lib));
alert.getButtonTypes().clear();
alert.getButtonTypes().addAll(ButtonType.OK);
alert.showAndWait();
}
}
| Scaling of ListView works now properly on Windows 8.1. | src/main/java/com/kodedu/component/AlertHelper.java | Scaling of ListView works now properly on Windows 8.1. |
|
Java | apache-2.0 | e4f0dae8407b022bec145c085f4143b2574b7f07 | 0 | abrayall/rameses | package javax.lang;
import static javax.lang.Strings.*;
public class System {
public static void print(Object... objects) {
println(strings(objects));
}
public static void println(Object... objects) {
println(strings(objects));
}
public static void print(String... strings) {
print(join(strings));
}
public static void println(String... strings) {
println(join(strings));
}
public static void print(String string) {
java.lang.System.out.print(string);
}
public static void println(String string) {
java.lang.System.out.println(string);
}
public static long now() {
return java.lang.System.currentTimeMillis();
}
public static String operatingSystem() {
return java.lang.System.getProperty("os.name").toLowerCase();
}
public static boolean isWindows() {
return operatingSystem().indexOf("win") >= 0;
}
public static boolean isMac() {
return operatingSystem().indexOf("mac") >= 0;
}
public static boolean isUnix() {
return operatingSystem().indexOf("nux") >= 0;
}
public static boolean isSolaris() {
return operatingSystem().contains("solaris") || operatingSystem().contains("sunos");
}
public static boolean isAndroid() {
return java.lang.System.getProperty("os.name").toLowerCase().contains("android");
}
}
| src/main/java/javax/lang/System.java | package javax.lang;
import static javax.lang.Strings.*;
public class System {
public static void print(Object... objects) {
println(strings(objects));
}
public static void println(Object... objects) {
println(strings(objects));
}
public static void print(String... strings) {
print(join(strings));
}
public static void println(String... strings) {
println(join(strings));
}
public static void print(String string) {
java.lang.System.out.print(string);
}
public static void println(String string) {
java.lang.System.out.println(string);
}
public static long now() {
return java.lang.System.currentTimeMillis();
}
}
| updating javax.lang.System with methods to help determine the current OS | src/main/java/javax/lang/System.java | updating javax.lang.System with methods to help determine the current OS |
|
Java | apache-2.0 | 177c65f1a9163a4bf21860cbc6a50adffb1035cc | 0 | leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere | /*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.core.api;
import io.shardingsphere.core.hint.HintManagerHolder;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/**
* The manager that use hint to inject sharding key directly through {@code ThreadLocal}.
*
* @author gaohongtao
* @author zhangliang
* @author panjun
*/
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class HintManager implements AutoCloseable {
/**
* Get a new instance for {@code HintManager}.
*
* @return {@code HintManager} instance
*/
public static HintManager getInstance() {
HintManager result = new HintManager();
HintManagerHolder.setHintManager(result);
return result;
}
/**
* Add sharding value for database sharding only.
*
* <p>The sharding operator is {@code =}</p>
* When you only need to sharding database, use this method to add database sharding value.
*
* @param values sharding value
*/
public void setDatabaseShardingValue(final Comparable<?>... values) {
HintManagerHolder.setDatabaseShardingValue(values);
}
/**
* Set CRUD operation force route to master database only.
*/
public void setMasterRouteOnly() {
HintManagerHolder.setMasterRouteOnly(true);
}
/**
* Add sharding value for database.
*
* <p>The sharding operator is {@code =}</p>
*
* @param logicTable logic table name
* @param value sharding value
*/
public void addDatabaseShardingValue(final String logicTable, final Comparable<?> value) {
HintManagerHolder.addDatabaseShardingValue(logicTable, value);
}
/**
* Add sharding value for table.
*
* <p>The sharding operator is {@code =}</p>
*
* @param logicTable logic table name
* @param value sharding value
*/
public void addTableShardingValue(final String logicTable, final Comparable<?> value) {
HintManagerHolder.addTableShardingValue(logicTable, value);
}
@Override
public void close() {
HintManagerHolder.clear();
}
}
| sharding-core/src/main/java/io/shardingsphere/core/api/HintManager.java | /*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.core.api;
import io.shardingsphere.core.hint.HintManagerHolder;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/**
* The manager that use hint to inject sharding key directly through {@code ThreadLocal}.
*
* @author gaohongtao
* @author zhangliang
* @author panjun
*/
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class HintManager implements AutoCloseable {
/**
* Get a new instance for {@code HintManager}.
*
* @return {@code HintManager} instance
*/
public static HintManager getInstance() {
HintManager result = new HintManager();
HintManagerHolder.setHintManager(result);
return result;
}
/**
* Add sharding value for database sharding only.
*
* <p>The sharding operator is {@code =}</p>
* When you only need to sharding database, use this method to add database sharding value.
*
* @param value sharding value
*/
public void setDatabaseShardingValue(final Comparable<?> value) {
HintManagerHolder.setDatabaseShardingValue(value);
}
/**
* Add sharding value for database.
*
* <p>The sharding operator is {@code =}</p>
*
* @param logicTable logic table name
* @param value sharding value
*/
public void addDatabaseShardingValue(final String logicTable, final Comparable<?> value) {
HintManagerHolder.addDatabaseShardingValue(logicTable, value);
}
/**
* Add sharding value for table.
*
* <p>The sharding operator is {@code =}</p>
*
* @param logicTable logic table name
* @param value sharding value
*/
public void addTableShardingValue(final String logicTable, final Comparable<?> value) {
HintManagerHolder.addTableShardingValue(logicTable, value);
}
/**
* Set CRUD operation force route to master database only.
*/
public void setMasterRouteOnly() {
HintManagerHolder.setMasterRouteOnly(true);
}
@Override
public void close() {
HintManagerHolder.clear();
}
}
| setDatabaseShardingValue()
| sharding-core/src/main/java/io/shardingsphere/core/api/HintManager.java | setDatabaseShardingValue() |
|
Java | apache-2.0 | da68800d0171975244c427bacbc1c445c88781c6 | 0 | melix/jmh-gradle-plugin,vyazelenko/jmh-gradle-plugin,vyazelenko/jmh-gradle-plugin,melix/jmh-gradle-plugin | /*
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.champeau.gradle;
import org.gradle.api.Project;
import org.gradle.api.file.DuplicatesStrategy;
import java.io.File;
import java.util.*;
import static org.codehaus.groovy.runtime.DefaultGroovyMethods.join;
import static org.codehaus.groovy.runtime.DefaultGroovyMethods.unique;
public class JMHPluginExtension {
private final Project project;
private String jmhVersion = "1.12";
private boolean includeTests = true;
private String include = "";
private String exclude;
private List<String> benchmarkMode;
private Integer iterations;
private Integer batchSize;
private Integer fork;
private Boolean failOnError;
private Boolean forceGC;
private String jvm;
private String jvmArgs;
private String jvmArgsAppend;
private String jvmArgsPrepend;
private File humanOutputFile;
private File resultsFile;
private Integer operationsPerInvocation;
private Map benchmarkParameters;
private List<String> profilers;
private String timeOnIteration;
private String resultExtension;
private String resultFormat;
private Boolean synchronizeIterations;
private Integer threads;
private List<Integer> threadGroups;
private String timeUnit;
private String verbosity;
private String timeout;
private String warmup;
private Integer warmupBatchSize;
private Integer warmupForks;
private Integer warmupIterations;
private String warmupMode;
private List<String> warmupBenchmarks;
private boolean zip64 = false;
private DuplicatesStrategy duplicateClassesStrategy = DuplicatesStrategy.FAIL;
public JMHPluginExtension(final Project project) {
this.project = project;
}
public List<String> buildArgs() {
resolveArgs();
List<String> args = new ArrayList<String>();
args.add(include);
addOption(args, exclude, "e");
addOption(args, iterations, "i");
addOption(args, benchmarkModeCSV(), "bm");
addOption(args, batchSize, "bs");
addOption(args, fork, "f");
addOption(args, failOnError, "foe");
addOption(args, forceGC, "gc");
addOption(args, jvm, "jvm");
addOption(args, jvmArgs, "jvmArgs");
addOption(args, jvmArgsAppend, "jvmArgsAppend");
addOption(args, jvmArgsPrepend, "jvmArgsPrepend");
addOption(args, humanOutputFile, "o");
addOption(args, operationsPerInvocation, "opi");
addOption(args, benchmarkParameters, "p");
addOption(args, profilers, "prof", " -prof ");
addOption(args, resultsFile, "rff");
addOption(args, timeOnIteration, "r");
addOption(args, resultFormat, "rf");
addOption(args, synchronizeIterations, "si");
addOption(args, threads, "t");
addOption(args, timeout, "to");
addOption(args, threadGroups, "tg");
addOption(args, timeUnit, "tu");
addOption(args, verbosity, "v");
addOption(args, warmup, "w");
addOption(args, warmupBatchSize, "wbs");
addOption(args, warmupForks, "wf");
addOption(args, warmupIterations, "wi");
addOption(args, warmupMode, "wm");
addOption(args, warmupBenchmarks, "wmb");
return args;
}
private String benchmarkModeCSV() {
if (benchmarkMode == null || benchmarkMode.isEmpty()) {
return null;
}
return join((Iterable<String>) new LinkedHashSet<String>(benchmarkMode), ",");
}
private void resolveArgs() {
resolveResultExtension();
resolveResultFormat();
resolveResultsFile();
}
private void resolveResultsFile() {
resultsFile = resultsFile != null ? resultsFile : project.file(String.valueOf(project.getBuildDir()) + "/reports/jmh/results." + resultExtension);
}
private void resolveResultExtension() {
resultExtension = resultFormat != null ? parseResultFormat() : "txt";
}
private void resolveResultFormat() {
resultFormat = resultFormat != null ? resultFormat : "text";
}
private String parseResultFormat() {
return ResultFormatType.translate(resultFormat);
}
private void addOption(List<String> options, String str, String option) {
if (str != null) {
options.add("-" + option);
options.add(str);
}
}
private void addOption(List<String> options, List values, String option) {
addOption(options, values, option, ",");
}
private void addOption(List<String> options, List values, String option, String separator) {
if (values != null) {
options.add("-" + option);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < values.size(); i++) {
final Object value = values.get(i);
sb.append(value);
if (i < values.size() - 1) {
sb.append(separator);
}
}
options.add(sb.toString());
}
}
private void addOption(List<String> options, Boolean b, String option) {
if (b != null) {
options.add("-" + option);
options.add(b ? "1" : "0");
}
}
private void addOption(List<String> options, Integer i, String option) {
if (i != null) {
options.add("-" + option);
options.add(String.valueOf(i));
}
}
private void addOption(List<String> options, File f, String option) {
if (f != null) {
options.add("-" + option);
options.add(project.relativePath(f));
}
}
@SuppressWarnings("unchecked")
private void addOption(List<String> options, Map params, String option) {
if (params != null && !params.isEmpty()) {
options.add("-" + option);
StringBuilder sb = new StringBuilder();
List<Map.Entry> values = new ArrayList<Map.Entry>(params.entrySet());
for (int i = 0; i < values.size(); i++) {
final Map.Entry entry = values.get(i);
sb.append(entry.getKey()).append("=").append(entry.getValue());
if (i < values.size() - 1) {
sb.append(" -" + option + " ");
}
}
options.add(sb.toString());
}
}
public String getInclude() {
return include;
}
public void setInclude(String include) {
this.include = include;
}
public String getExclude() {
return exclude;
}
public void setExclude(String exclude) {
this.exclude = exclude;
}
public List<String> getBenchmarkMode() {
return benchmarkMode;
}
public void setBenchmarkMode(List<String> benchmarkMode) {
this.benchmarkMode = benchmarkMode;
}
public Integer getIterations() {
return iterations;
}
public void setIterations(Integer iterations) {
this.iterations = iterations;
}
public Integer getBatchSize() {
return batchSize;
}
public void setBatchSize(Integer batchSize) {
this.batchSize = batchSize;
}
public Integer getFork() {
return fork;
}
public void setFork(Integer fork) {
this.fork = fork;
}
public Boolean getFailOnError() {
return failOnError;
}
public void setFailOnError(Boolean failOnError) {
this.failOnError = failOnError;
}
public Boolean getForceGC() {
return forceGC;
}
public void setForceGC(Boolean forceGC) {
this.forceGC = forceGC;
}
public String getJvm() {
return jvm;
}
public void setJvm(String jvm) {
this.jvm = jvm;
}
public String getJvmArgs() {
return jvmArgs;
}
public void setJvmArgs(String jvmArgs) {
this.jvmArgs = jvmArgs;
}
public String getJvmArgsAppend() {
return jvmArgsAppend;
}
public void setJvmArgsAppend(String jvmArgsAppend) {
this.jvmArgsAppend = jvmArgsAppend;
}
public String getJvmArgsPrepend() {
return jvmArgsPrepend;
}
public void setJvmArgsPrepend(String jvmArgsPrepend) {
this.jvmArgsPrepend = jvmArgsPrepend;
}
public File getHumanOutputFile() {
return humanOutputFile;
}
public void setHumanOutputFile(File humanOutputFile) {
this.humanOutputFile = humanOutputFile;
}
public File getResultsFile() {
return resultsFile;
}
public void setResultsFile(File resultsFile) {
this.resultsFile = resultsFile;
}
public Integer getOperationsPerInvocation() {
return operationsPerInvocation;
}
public void setOperationsPerInvocation(Integer operationsPerInvocation) {
this.operationsPerInvocation = operationsPerInvocation;
}
public Map getBenchmarkParameters() {
return benchmarkParameters;
}
public void setBenchmarkParameters(Map benchmarkParameters) {
this.benchmarkParameters = benchmarkParameters;
}
public List<String> getProfilers() {
return profilers;
}
public void setProfilers(List<String> profilers) {
this.profilers = profilers;
}
public String getTimeOnIteration() {
return timeOnIteration;
}
public void setTimeOnIteration(String timeOnIteration) {
this.timeOnIteration = timeOnIteration;
}
public String getResultFormat() {
return resultFormat;
}
public void setResultFormat(String resultFormat) {
this.resultFormat = resultFormat;
}
public Boolean getSynchronizeIterations() {
return synchronizeIterations;
}
public void setSynchronizeIterations(Boolean synchronizeIterations) {
this.synchronizeIterations = synchronizeIterations;
}
public Integer getThreads() {
return threads;
}
public void setThreads(Integer threads) {
this.threads = threads;
}
public List<Integer> getThreadGroups() {
return threadGroups;
}
public void setThreadGroups(List<Integer> threadGroups) {
this.threadGroups = threadGroups;
}
public String getTimeUnit() {
return timeUnit;
}
public void setTimeUnit(String timeUnit) {
this.timeUnit = timeUnit;
}
public String getVerbosity() {
return verbosity;
}
public void setVerbosity(String verbosity) {
this.verbosity = verbosity;
}
public String getWarmup() {
return warmup;
}
public void setWarmup(String warmup) {
this.warmup = warmup;
}
public Integer getWarmupBatchSize() {
return warmupBatchSize;
}
public void setWarmupBatchSize(Integer warmupBatchSize) {
this.warmupBatchSize = warmupBatchSize;
}
public Integer getWarmupForks() {
return warmupForks;
}
public void setWarmupForks(Integer warmupForks) {
this.warmupForks = warmupForks;
}
public Integer getWarmupIterations() {
return warmupIterations;
}
public void setWarmupIterations(Integer warmupIterations) {
this.warmupIterations = warmupIterations;
}
public String getWarmupMode() {
return warmupMode;
}
public void setWarmupMode(String warmupMode) {
this.warmupMode = warmupMode;
}
public List<String> getWarmupBenchmarks() {
return warmupBenchmarks;
}
public void setWarmupBenchmarks(List<String> warmupBenchmarks) {
this.warmupBenchmarks = warmupBenchmarks;
}
public boolean isZip64() {
return zip64;
}
public void setZip64(final boolean zip64) {
this.zip64 = zip64;
}
public String getJmhVersion() {
return jmhVersion;
}
public void setJmhVersion(String jmhVersion) {
this.jmhVersion = jmhVersion;
}
public boolean isIncludeTests() {
return includeTests;
}
public void setIncludeTests(boolean includeTests) {
this.includeTests = includeTests;
}
public String getTimeout() {
return timeout;
}
public void setTimeout(String timeout) {
this.timeout = timeout;
}
public DuplicatesStrategy getDuplicateClassesStrategy() {
return duplicateClassesStrategy;
}
public void setDuplicateClassesStrategy(DuplicatesStrategy duplicateClassesStrategy) {
this.duplicateClassesStrategy = duplicateClassesStrategy;
}
private enum ResultFormatType {
TEXT("txt"),
CSV("csv"),
SCSV("scsv"),
JSON("json"),
LATEX("tex");
private String extension;
ResultFormatType(String extension) {
this.extension = extension;
}
public static String translate(String resultFormat) {
return ResultFormatType.valueOf(resultFormat.toUpperCase()).extension;
}
}
}
| src/main/groovy/me/champeau/gradle/JMHPluginExtension.java | /*
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.champeau.gradle;
import org.gradle.api.Project;
import org.gradle.api.file.DuplicatesStrategy;
import java.io.File;
import java.util.*;
import static org.codehaus.groovy.runtime.DefaultGroovyMethods.join;
import static org.codehaus.groovy.runtime.DefaultGroovyMethods.unique;
public class JMHPluginExtension {
private final Project project;
private String jmhVersion = "1.12";
private boolean includeTests = true;
private String include = "";
private String exclude;
private List<String> benchmarkMode;
private Integer iterations;
private Integer batchSize;
private Integer fork;
private Boolean failOnError;
private Boolean forceGC;
private String jvm;
private String jvmArgs;
private String jvmArgsAppend;
private String jvmArgsPrepend;
private File humanOutputFile;
private File resultsFile;
private Integer operationsPerInvocation;
private Map benchmarkParameters;
private List<String> profilers;
private String timeOnIteration;
private String resultExtension;
private String resultFormat;
private Boolean synchronizeIterations;
private Integer threads;
private List<Integer> threadGroups;
private String timeUnit;
private String verbosity;
private String timeout;
private String warmup;
private Integer warmupBatchSize;
private Integer warmupForks;
private Integer warmupIterations;
private String warmupMode;
private List<String> warmupBenchmarks;
private boolean zip64 = false;
private DuplicatesStrategy duplicateClassesStrategy = DuplicatesStrategy.FAIL;
public JMHPluginExtension(final Project project) {
this.project = project;
}
public List<String> buildArgs() {
resolveArgs();
List<String> args = new ArrayList<String>();
args.add(include);
addOption(args, exclude, "e");
addOption(args, iterations, "i");
addOption(args, benchmarkModeCSV(), "bm");
addOption(args, batchSize, "bs");
addOption(args, fork, "f");
addOption(args, failOnError, "foe");
addOption(args, forceGC, "gc");
addOption(args, jvm, "jvm");
addOption(args, jvmArgs, "jvmArgs");
addOption(args, jvmArgsAppend, "jvmArgsAppend");
addOption(args, jvmArgsPrepend, "jvmArgsPrepend");
addOption(args, humanOutputFile, "o");
addOption(args, operationsPerInvocation, "opi");
addOption(args, benchmarkParameters, "p");
addOption(args, profilers, "prof", "-prof");
addOption(args, resultsFile, "rff");
addOption(args, timeOnIteration, "r");
addOption(args, resultFormat, "rf");
addOption(args, synchronizeIterations, "si");
addOption(args, threads, "t");
addOption(args, timeout, "to");
addOption(args, threadGroups, "tg");
addOption(args, timeUnit, "tu");
addOption(args, verbosity, "v");
addOption(args, warmup, "w");
addOption(args, warmupBatchSize, "wbs");
addOption(args, warmupForks, "wf");
addOption(args, warmupIterations, "wi");
addOption(args, warmupMode, "wm");
addOption(args, warmupBenchmarks, "wmb");
return args;
}
private String benchmarkModeCSV() {
if (benchmarkMode == null || benchmarkMode.isEmpty()) {
return null;
}
return join((Iterable<String>) new LinkedHashSet<String>(benchmarkMode), ",");
}
private void resolveArgs() {
resolveResultExtension();
resolveResultFormat();
resolveResultsFile();
}
private void resolveResultsFile() {
resultsFile = resultsFile != null ? resultsFile : project.file(String.valueOf(project.getBuildDir()) + "/reports/jmh/results." + resultExtension);
}
private void resolveResultExtension() {
resultExtension = resultFormat != null ? parseResultFormat() : "txt";
}
private void resolveResultFormat() {
resultFormat = resultFormat != null ? resultFormat : "text";
}
private String parseResultFormat() {
return ResultFormatType.translate(resultFormat);
}
private void addOption(List<String> options, String str, String option) {
if (str != null) {
options.add("-" + option);
options.add(str);
}
}
private void addOption(List<String> options, List values, String option) {
addOption(options, values, option, ",");
}
private void addOption(List<String> options, List values, String option, String separator) {
if (values != null) {
options.add("-" + option);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < values.size(); i++) {
final Object value = values.get(i);
sb.append(value);
if (i < values.size() - 1) {
sb.append(separator);
}
}
options.add(sb.toString());
}
}
private void addOption(List<String> options, Boolean b, String option) {
if (b != null) {
options.add("-" + option);
options.add(b ? "1" : "0");
}
}
private void addOption(List<String> options, Integer i, String option) {
if (i != null) {
options.add("-" + option);
options.add(String.valueOf(i));
}
}
private void addOption(List<String> options, File f, String option) {
if (f != null) {
options.add("-" + option);
options.add(project.relativePath(f));
}
}
@SuppressWarnings("unchecked")
private void addOption(List<String> options, Map params, String option) {
if (params != null && !params.isEmpty()) {
options.add("-" + option);
StringBuilder sb = new StringBuilder();
List<Map.Entry> values = new ArrayList<Map.Entry>(params.entrySet());
for (int i = 0; i < values.size(); i++) {
final Map.Entry entry = values.get(i);
sb.append(entry.getKey()).append("=").append(entry.getValue());
if (i < values.size() - 1) {
sb.append(" -" + option + " ");
}
}
options.add(sb.toString());
}
}
public String getInclude() {
return include;
}
public void setInclude(String include) {
this.include = include;
}
public String getExclude() {
return exclude;
}
public void setExclude(String exclude) {
this.exclude = exclude;
}
public List<String> getBenchmarkMode() {
return benchmarkMode;
}
public void setBenchmarkMode(List<String> benchmarkMode) {
this.benchmarkMode = benchmarkMode;
}
public Integer getIterations() {
return iterations;
}
public void setIterations(Integer iterations) {
this.iterations = iterations;
}
public Integer getBatchSize() {
return batchSize;
}
public void setBatchSize(Integer batchSize) {
this.batchSize = batchSize;
}
public Integer getFork() {
return fork;
}
public void setFork(Integer fork) {
this.fork = fork;
}
public Boolean getFailOnError() {
return failOnError;
}
public void setFailOnError(Boolean failOnError) {
this.failOnError = failOnError;
}
public Boolean getForceGC() {
return forceGC;
}
public void setForceGC(Boolean forceGC) {
this.forceGC = forceGC;
}
public String getJvm() {
return jvm;
}
public void setJvm(String jvm) {
this.jvm = jvm;
}
public String getJvmArgs() {
return jvmArgs;
}
public void setJvmArgs(String jvmArgs) {
this.jvmArgs = jvmArgs;
}
public String getJvmArgsAppend() {
return jvmArgsAppend;
}
public void setJvmArgsAppend(String jvmArgsAppend) {
this.jvmArgsAppend = jvmArgsAppend;
}
public String getJvmArgsPrepend() {
return jvmArgsPrepend;
}
public void setJvmArgsPrepend(String jvmArgsPrepend) {
this.jvmArgsPrepend = jvmArgsPrepend;
}
public File getHumanOutputFile() {
return humanOutputFile;
}
public void setHumanOutputFile(File humanOutputFile) {
this.humanOutputFile = humanOutputFile;
}
public File getResultsFile() {
return resultsFile;
}
public void setResultsFile(File resultsFile) {
this.resultsFile = resultsFile;
}
public Integer getOperationsPerInvocation() {
return operationsPerInvocation;
}
public void setOperationsPerInvocation(Integer operationsPerInvocation) {
this.operationsPerInvocation = operationsPerInvocation;
}
public Map getBenchmarkParameters() {
return benchmarkParameters;
}
public void setBenchmarkParameters(Map benchmarkParameters) {
this.benchmarkParameters = benchmarkParameters;
}
public List<String> getProfilers() {
return profilers;
}
public void setProfilers(List<String> profilers) {
this.profilers = profilers;
}
public String getTimeOnIteration() {
return timeOnIteration;
}
public void setTimeOnIteration(String timeOnIteration) {
this.timeOnIteration = timeOnIteration;
}
public String getResultFormat() {
return resultFormat;
}
public void setResultFormat(String resultFormat) {
this.resultFormat = resultFormat;
}
public Boolean getSynchronizeIterations() {
return synchronizeIterations;
}
public void setSynchronizeIterations(Boolean synchronizeIterations) {
this.synchronizeIterations = synchronizeIterations;
}
public Integer getThreads() {
return threads;
}
public void setThreads(Integer threads) {
this.threads = threads;
}
public List<Integer> getThreadGroups() {
return threadGroups;
}
public void setThreadGroups(List<Integer> threadGroups) {
this.threadGroups = threadGroups;
}
public String getTimeUnit() {
return timeUnit;
}
public void setTimeUnit(String timeUnit) {
this.timeUnit = timeUnit;
}
public String getVerbosity() {
return verbosity;
}
public void setVerbosity(String verbosity) {
this.verbosity = verbosity;
}
public String getWarmup() {
return warmup;
}
public void setWarmup(String warmup) {
this.warmup = warmup;
}
public Integer getWarmupBatchSize() {
return warmupBatchSize;
}
public void setWarmupBatchSize(Integer warmupBatchSize) {
this.warmupBatchSize = warmupBatchSize;
}
public Integer getWarmupForks() {
return warmupForks;
}
public void setWarmupForks(Integer warmupForks) {
this.warmupForks = warmupForks;
}
public Integer getWarmupIterations() {
return warmupIterations;
}
public void setWarmupIterations(Integer warmupIterations) {
this.warmupIterations = warmupIterations;
}
public String getWarmupMode() {
return warmupMode;
}
public void setWarmupMode(String warmupMode) {
this.warmupMode = warmupMode;
}
public List<String> getWarmupBenchmarks() {
return warmupBenchmarks;
}
public void setWarmupBenchmarks(List<String> warmupBenchmarks) {
this.warmupBenchmarks = warmupBenchmarks;
}
public boolean isZip64() {
return zip64;
}
public void setZip64(final boolean zip64) {
this.zip64 = zip64;
}
public String getJmhVersion() {
return jmhVersion;
}
public void setJmhVersion(String jmhVersion) {
this.jmhVersion = jmhVersion;
}
public boolean isIncludeTests() {
return includeTests;
}
public void setIncludeTests(boolean includeTests) {
this.includeTests = includeTests;
}
public String getTimeout() {
return timeout;
}
public void setTimeout(String timeout) {
this.timeout = timeout;
}
public DuplicatesStrategy getDuplicateClassesStrategy() {
return duplicateClassesStrategy;
}
public void setDuplicateClassesStrategy(DuplicatesStrategy duplicateClassesStrategy) {
this.duplicateClassesStrategy = duplicateClassesStrategy;
}
private enum ResultFormatType {
TEXT("txt"),
CSV("csv"),
SCSV("scsv"),
JSON("json"),
LATEX("tex");
private String extension;
ResultFormatType(String extension) {
this.extension = extension;
}
public static String translate(String resultFormat) {
return ResultFormatType.valueOf(resultFormat.toUpperCase()).extension;
}
}
}
| Add spaces to '-prof' separator. Fixes #74
| src/main/groovy/me/champeau/gradle/JMHPluginExtension.java | Add spaces to '-prof' separator. Fixes #74 |
|
Java | apache-2.0 | 4a8d48f2e965646523125431b3f9c61c221832d3 | 0 | magicdoom/Mycat-Server,ccvcd/Mycat-Server,magicdoom/Mycat-Server,magicdoom/Mycat-Server,ccvcd/Mycat-Server,ccvcd/Mycat-Server,ccvcd/Mycat-Server,magicdoom/Mycat-Server | package io.mycat.sqlengine.mpp;
/*
* Copyright (c) 2013, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
import io.mycat.MycatServer;
import io.mycat.backend.mysql.BufferUtil;
import io.mycat.backend.mysql.nio.handler.MultiNodeQueryHandler;
import io.mycat.net.mysql.EOFPacket;
import io.mycat.net.mysql.RowDataPacket;
import io.mycat.route.RouteResultset;
import io.mycat.route.RouteResultsetNode;
import io.mycat.server.ServerConnection;
import io.mycat.sqlengine.mpp.tmp.RowDataSorter;
import io.mycat.util.StringUtil;
import org.apache.log4j.Logger;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* Data merge service handle data Min,Max,AVG group 、order by 、limit
*
* @author wuzhih /modify by coder_czp/2015/11/2
*
* Fixbug: mycat sql timeout and hang problem.
* @author Uncle-pan
* @since 2016-03-23
*
*/
public class DataMergeService extends AbstractDataNodeMerge {
private RowDataSorter sorter;
private RowDataPacketGrouper grouper;
private Map<String, LinkedList<RowDataPacket>> result = new HashMap<String, LinkedList<RowDataPacket>>();
private static Logger LOGGER = Logger.getLogger(DataMergeService.class);
private ConcurrentHashMap<String, Boolean> canDiscard = new ConcurrentHashMap<String, Boolean>();
public DataMergeService(MultiNodeQueryHandler handler, RouteResultset rrs) {
super(handler,rrs);
for (RouteResultsetNode node : rrs.getNodes()) {
result.put(node.getName(), new LinkedList<RowDataPacket>());
}
}
/**
* @param columToIndx
* @param fieldCount
*/
public void onRowMetaData(Map<String, ColMeta> columToIndx, int fieldCount) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("field metadata keys:" + columToIndx.keySet());
LOGGER.debug("field metadata values:" + columToIndx.values());
}
int[] groupColumnIndexs = null;
this.fieldCount = fieldCount;
if (rrs.getGroupByCols() != null) {
groupColumnIndexs = toColumnIndex(rrs.getGroupByCols(), columToIndx);
}
if (rrs.getHavingCols() != null) {
ColMeta colMeta = columToIndx.get(rrs.getHavingCols().getLeft()
.toUpperCase());
if (colMeta != null) {
rrs.getHavingCols().setColMeta(colMeta);
}
}
if (rrs.isHasAggrColumn()) {
List<MergeCol> mergCols = new LinkedList<MergeCol>();
Map<String, Integer> mergeColsMap = rrs.getMergeCols();
if (mergeColsMap != null) {
for (Map.Entry<String, Integer> mergEntry : mergeColsMap
.entrySet()) {
String colName = mergEntry.getKey().toUpperCase();
int type = mergEntry.getValue();
if (MergeCol.MERGE_AVG == type) {
ColMeta sumColMeta = columToIndx.get(colName + "SUM");
ColMeta countColMeta = columToIndx.get(colName
+ "COUNT");
if (sumColMeta != null && countColMeta != null) {
ColMeta colMeta = new ColMeta(sumColMeta.colIndex,
countColMeta.colIndex,
sumColMeta.getColType());
mergCols.add(new MergeCol(colMeta, mergEntry
.getValue()));
}
} else {
ColMeta colMeta = columToIndx.get(colName);
mergCols.add(new MergeCol(colMeta, mergEntry.getValue()));
}
}
}
// add no alias merg column
for (Map.Entry<String, ColMeta> fieldEntry : columToIndx.entrySet()) {
String colName = fieldEntry.getKey();
int result = MergeCol.tryParseAggCol(colName);
if (result != MergeCol.MERGE_UNSUPPORT
&& result != MergeCol.MERGE_NOMERGE) {
mergCols.add(new MergeCol(fieldEntry.getValue(), result));
}
}
grouper = new RowDataPacketGrouper(groupColumnIndexs,
mergCols.toArray(new MergeCol[mergCols.size()]),
rrs.getHavingCols());
}
if (rrs.getOrderByCols() != null) {
LinkedHashMap<String, Integer> orders = rrs.getOrderByCols();
OrderCol[] orderCols = new OrderCol[orders.size()];
int i = 0;
for (Map.Entry<String, Integer> entry : orders.entrySet()) {
String key = StringUtil.removeBackquote(entry.getKey()
.toUpperCase());
ColMeta colMeta = columToIndx.get(key);
if (colMeta == null) {
throw new IllegalArgumentException(
"all columns in order by clause should be in the selected column list!"
+ entry.getKey());
}
orderCols[i++] = new OrderCol(colMeta, entry.getValue());
}
RowDataSorter tmp = new RowDataSorter(orderCols);
tmp.setLimit(rrs.getLimitStart(), rrs.getLimitSize());
sorter = tmp;
}
if (MycatServer.getInstance().
getConfig().getSystem().
getUseStreamOutput() == 1
&& grouper == null
&& sorter == null) {
setStreamOutputResult(true);
}else {
setStreamOutputResult(false);
}
}
/**
* release resources
*/
public void clear() {
result.clear();
grouper = null;
sorter = null;
}
@Override
public void run() {
// sort-or-group: no need for us to using multi-threads, because
//both sorter and group are synchronized!!
// @author Uncle-pan
// @since 2016-03-23
if(!running.compareAndSet(false, true)){
return;
}
// eof handler has been placed to "if (pack == END_FLAG_PACK){}" in for-statement
// @author Uncle-pan
// @since 2016-03-23
boolean nulpack = false;
try{
// loop-on-packs
for (; ; ) {
final PackWraper pack = packs.poll();
// async: handling row pack queue, this business thread should exit when no pack
// @author Uncle-pan
// @since 2016-03-23
if(pack == null){
nulpack = true;
break;
}
// eof: handling eof pack and exit
if (pack == END_FLAG_PACK) {
final int warningCount = 0;
final EOFPacket eofp = new EOFPacket();
final ByteBuffer eof = ByteBuffer.allocate(9);
BufferUtil.writeUB3(eof, eofp.calcPacketSize());
eof.put(eofp.packetId);
eof.put(eofp.fieldCount);
BufferUtil.writeUB2(eof, warningCount);
BufferUtil.writeUB2(eof, eofp.status);
final ServerConnection source = multiQueryHandler.getSession().getSource();
final byte[] array = eof.array();
multiQueryHandler.outputMergeResult(source, array, getResults(array));
break;
}
// merge: sort-or-group, or simple add
final RowDataPacket row = new RowDataPacket(fieldCount);
row.read(pack.rowData);
if (grouper != null) {
grouper.addRow(row);
} else if (sorter != null) {
if (!sorter.addRow(row)) {
canDiscard.put(pack.dataNode,true);
}
} else {
result.get(pack.dataNode).add(row);
}
}// rof
}catch(final Exception e){
multiQueryHandler.handleDataProcessException(e);
}finally{
running.set(false);
}
// try to check packs, it's possible that adding a pack after polling a null pack
//and before this time pointer!!
// @author Uncle-pan
// @since 2016-03-23
if(nulpack && !packs.isEmpty()){
this.run();
}
}
/**
* return merged data
* @return (最多i*(offset+size)行数据)
*/
public List<RowDataPacket> getResults(byte[] eof) {
List<RowDataPacket> tmpResult = null;
if (this.grouper != null) {
tmpResult = grouper.getResult();
grouper = null;
}
if (sorter != null) {
if (tmpResult != null) {
Iterator<RowDataPacket> itor = tmpResult.iterator();
while (itor.hasNext()) {
sorter.addRow(itor.next());
itor.remove();
}
}
tmpResult = sorter.getSortedResult();
sorter = null;
}
//no grouper and sorter
if(tmpResult == null){
tmpResult = new LinkedList<RowDataPacket>();
for (RouteResultsetNode node : rrs.getNodes()) {
tmpResult.addAll(result.get(node.getName()));
}
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("prepare mpp merge result for " + rrs.getStatement());
}
return tmpResult;
}
}
| src/main/java/io/mycat/sqlengine/mpp/DataMergeService.java | package io.mycat.sqlengine.mpp;
/*
* Copyright (c) 2013, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
import io.mycat.MycatServer;
import io.mycat.backend.mysql.BufferUtil;
import io.mycat.backend.mysql.nio.handler.MultiNodeQueryHandler;
import io.mycat.net.mysql.EOFPacket;
import io.mycat.net.mysql.RowDataPacket;
import io.mycat.route.RouteResultset;
import io.mycat.server.ServerConnection;
import io.mycat.sqlengine.mpp.tmp.RowDataSorter;
import io.mycat.util.StringUtil;
import org.apache.log4j.Logger;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* Data merge service handle data Min,Max,AVG group 、order by 、limit
*
* @author wuzhih /modify by coder_czp/2015/11/2
*
* Fixbug: mycat sql timeout and hang problem.
* @author Uncle-pan
* @since 2016-03-23
*
*/
public class DataMergeService extends AbstractDataNodeMerge {
private RowDataSorter sorter;
private RowDataPacketGrouper grouper;
private List<RowDataPacket> result = new Vector<RowDataPacket>();
private static Logger LOGGER = Logger.getLogger(DataMergeService.class);
private ConcurrentHashMap<String, Boolean> canDiscard = new ConcurrentHashMap<String, Boolean>();
public DataMergeService(MultiNodeQueryHandler handler, RouteResultset rrs) {
super(handler,rrs);
}
/**
* @param columToIndx
* @param fieldCount
*/
public void onRowMetaData(Map<String, ColMeta> columToIndx, int fieldCount) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("field metadata keys:" + columToIndx.keySet());
LOGGER.debug("field metadata values:" + columToIndx.values());
}
int[] groupColumnIndexs = null;
this.fieldCount = fieldCount;
if (rrs.getGroupByCols() != null) {
groupColumnIndexs = toColumnIndex(rrs.getGroupByCols(), columToIndx);
}
if (rrs.getHavingCols() != null) {
ColMeta colMeta = columToIndx.get(rrs.getHavingCols().getLeft()
.toUpperCase());
if (colMeta != null) {
rrs.getHavingCols().setColMeta(colMeta);
}
}
if (rrs.isHasAggrColumn()) {
List<MergeCol> mergCols = new LinkedList<MergeCol>();
Map<String, Integer> mergeColsMap = rrs.getMergeCols();
if (mergeColsMap != null) {
for (Map.Entry<String, Integer> mergEntry : mergeColsMap
.entrySet()) {
String colName = mergEntry.getKey().toUpperCase();
int type = mergEntry.getValue();
if (MergeCol.MERGE_AVG == type) {
ColMeta sumColMeta = columToIndx.get(colName + "SUM");
ColMeta countColMeta = columToIndx.get(colName
+ "COUNT");
if (sumColMeta != null && countColMeta != null) {
ColMeta colMeta = new ColMeta(sumColMeta.colIndex,
countColMeta.colIndex,
sumColMeta.getColType());
mergCols.add(new MergeCol(colMeta, mergEntry
.getValue()));
}
} else {
ColMeta colMeta = columToIndx.get(colName);
mergCols.add(new MergeCol(colMeta, mergEntry.getValue()));
}
}
}
// add no alias merg column
for (Map.Entry<String, ColMeta> fieldEntry : columToIndx.entrySet()) {
String colName = fieldEntry.getKey();
int result = MergeCol.tryParseAggCol(colName);
if (result != MergeCol.MERGE_UNSUPPORT
&& result != MergeCol.MERGE_NOMERGE) {
mergCols.add(new MergeCol(fieldEntry.getValue(), result));
}
}
grouper = new RowDataPacketGrouper(groupColumnIndexs,
mergCols.toArray(new MergeCol[mergCols.size()]),
rrs.getHavingCols());
}
if (rrs.getOrderByCols() != null) {
LinkedHashMap<String, Integer> orders = rrs.getOrderByCols();
OrderCol[] orderCols = new OrderCol[orders.size()];
int i = 0;
for (Map.Entry<String, Integer> entry : orders.entrySet()) {
String key = StringUtil.removeBackquote(entry.getKey()
.toUpperCase());
ColMeta colMeta = columToIndx.get(key);
if (colMeta == null) {
throw new IllegalArgumentException(
"all columns in order by clause should be in the selected column list!"
+ entry.getKey());
}
orderCols[i++] = new OrderCol(colMeta, entry.getValue());
}
RowDataSorter tmp = new RowDataSorter(orderCols);
tmp.setLimit(rrs.getLimitStart(), rrs.getLimitSize());
sorter = tmp;
}
if (MycatServer.getInstance().
getConfig().getSystem().
getUseStreamOutput() == 1
&& grouper == null
&& sorter == null) {
setStreamOutputResult(true);
}else {
setStreamOutputResult(false);
}
}
/**
* release resources
*/
public void clear() {
result.clear();
grouper = null;
sorter = null;
}
@Override
public void run() {
// sort-or-group: no need for us to using multi-threads, because
//both sorter and group are synchronized!!
// @author Uncle-pan
// @since 2016-03-23
if(!running.compareAndSet(false, true)){
return;
}
// eof handler has been placed to "if (pack == END_FLAG_PACK){}" in for-statement
// @author Uncle-pan
// @since 2016-03-23
boolean nulpack = false;
try{
// loop-on-packs
for (; ; ) {
final PackWraper pack = packs.poll();
// async: handling row pack queue, this business thread should exit when no pack
// @author Uncle-pan
// @since 2016-03-23
if(pack == null){
nulpack = true;
break;
}
// eof: handling eof pack and exit
if (pack == END_FLAG_PACK) {
final int warningCount = 0;
final EOFPacket eofp = new EOFPacket();
final ByteBuffer eof = ByteBuffer.allocate(9);
BufferUtil.writeUB3(eof, eofp.calcPacketSize());
eof.put(eofp.packetId);
eof.put(eofp.fieldCount);
BufferUtil.writeUB2(eof, warningCount);
BufferUtil.writeUB2(eof, eofp.status);
final ServerConnection source = multiQueryHandler.getSession().getSource();
final byte[] array = eof.array();
multiQueryHandler.outputMergeResult(source, array, getResults(array));
break;
}
// merge: sort-or-group, or simple add
final RowDataPacket row = new RowDataPacket(fieldCount);
row.read(pack.rowData);
if (grouper != null) {
grouper.addRow(row);
} else if (sorter != null) {
if (!sorter.addRow(row)) {
canDiscard.put(pack.dataNode,true);
}
} else {
result.add(row);
}
}// rof
}catch(final Exception e){
multiQueryHandler.handleDataProcessException(e);
}finally{
running.set(false);
}
// try to check packs, it's possible that adding a pack after polling a null pack
//and before this time pointer!!
// @author Uncle-pan
// @since 2016-03-23
if(nulpack && !packs.isEmpty()){
this.run();
}
}
/**
* return merged data
* @return (最多i*(offset+size)行数据)
*/
public List<RowDataPacket> getResults(byte[] eof) {
List<RowDataPacket> tmpResult = result;
if (this.grouper != null) {
tmpResult = grouper.getResult();
grouper = null;
}
if (sorter != null) {
if (tmpResult != null) {
Iterator<RowDataPacket> itor = tmpResult.iterator();
while (itor.hasNext()) {
sorter.addRow(itor.next());
itor.remove();
}
}
tmpResult = sorter.getSortedResult();
sorter = null;
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("prepare mpp merge result for " + rrs.getStatement());
}
return tmpResult;
}
}
| Update DataMergeService.java | src/main/java/io/mycat/sqlengine/mpp/DataMergeService.java | Update DataMergeService.java |
|
Java | bsd-3-clause | 6272739d9713218a236f61f5b96d2305803515f2 | 0 | NCIP/wustl-common-utilities | package edu.wustl.common.util;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
// TODO
// path related methods check when src == target
/**
* A weighted, directed graph. This class can be used to superimpose a graph
* structure on any set of classes that logically represent vertices and weights
* e.g. if an application has a class called <tt>City</tt>, then the
* <tt>Graph</tt> class can be used to create a route map across all cities by
* specifying vertices that of type <tt>City</tt> and weights that are of type
* <tt>Long</tt> (indicating distance).<br>
* Note that several methods in this class throw an
* <tt>IllegalArgumentException</tt> if a specified vertex is not present in
* the graph. All methods throw a <tt>NullPointerException</tt> if a specified
* vertex parameter is <tt>null</tt>.
*
* @param <V> the type of vertices
* @param <W> the type of weights on edges
*/
public class Graph<V, W> implements Serializable, Cloneable {
private static final long serialVersionUID = 2744129191470144562L;
private transient HashMap<V, Map<V, W>> incomingEdgeMap;
private HashMap<V, Map<V, W>> outgoingEdgeMap;
private int numEdges = 0;
public Graph() {
initMaps();
}
public Graph(Graph<? extends V, ? extends W> graph) {
assign(graph);
}
private void initMaps() {
incomingEdgeMap = new HashMap<V, Map<V, W>>();
outgoingEdgeMap = new HashMap<V, Map<V, W>>();
}
/* DATA-RELATED METHODS */
// ACCESSORS
/**
* @return set of all vertices present in graph.
*/
public Set<V> getVertices() {
return copy(outgoingEdgeMap.keySet());
}
public Map<V, W> getOutgoingEdges(V source) {
validateVertex(source);
return copy(outgoingEdgeMap.get(source));
}
public Map<V, W> getIncomingEdges(V target) {
validateVertex(target);
return copy(incomingEdgeMap.get(target));
}
/**
* Checks if specified vertex is present in this graph.
*
* @param vertex the vertex whose presence is to be checked.
* @return <tt>true</tt> if this vertex is present in this graph;
* <tt>false</tt> otherwise.
*/
public boolean containsVertex(V vertex) {
return outgoingEdgeMap.containsKey(vertex);
}
/**
* This method checks whether an edge lies between the source vertex and
* target vertex.
*
* @return true if the edge exists; false otherwise
*/
public boolean containsEdge(V source, V target) {
validateVertex(source);
validateVertex(target);
return outgoingEdgeMap.get(source).containsKey(target);
}
/**
* Get the edge from the list of edges, if one exists between source vertex
* and target vertex.
*
* @return edge object if it exists; null otherwise
*/
public W getEdge(V source, V target) {
validateVertex(source);
validateVertex(target);
return outgoingEdgeMap.get(source).get(target);
}
public int numEdges() {
return numEdges;
}
public int numVertices() {
return outgoingEdgeMap.size();
}
// MODIFIERS
/**
* Remove the edge if it exists in the list of edges.
*
* @param sourceVertex
* @param targetVertex
* @return removed edge if edge object is not null; null otherwise
*/
public W removeEdge(V source, V target) {
if (containsEdge(source, target)) {
numEdges--;
outgoingEdgeMap.get(source).remove(target);
return incomingEdgeMap.get(target).remove(source);
}
return null;
}
/**
* Put the edge into the list of edges if it does not exist. If the edge
* exists return the old edge and replace it with a new edge.
*
* @param sourceVertex
* @param targetVertex
* @param edge
* @return the old edge if it exists; null otherwise
*/
public W putEdge(V source, V target, W edge) {
addVertex(source);
addVertex(target);
if (!containsEdge(source, target)) {
numEdges++;
}
outgoingEdgeMap.get(source).put(target, edge);
return incomingEdgeMap.get(target).put(source, edge);
}
/**
* Add a vertex to the list of vertices if same does not exist in the list
*
* @param vertex the vertex to add to the graph.
* @return <tt>false</tt> if this vertex already existed.
* @throws NullPointerException if the specified vertex is null.
*/
public boolean addVertex(V vertex) {
checkNull(vertex);
if (containsVertex(vertex))
return false;
else {
incomingEdgeMap.put(vertex, new HashMap<V, W>());
outgoingEdgeMap.put(vertex, new HashMap<V, W>());
return true;
}
}
/**
* @param vertices the vertices to add to the graph
* @return <tt>true</tt> if atleast one new vertex was added in this call.
*/
public boolean addVertices(Set<? extends V> vertices) {
if (vertices == null) {
return false;
}
boolean modified = false;
for (V v : vertices) {
if (addVertex(v))
modified = true;
}
return modified;
}
/**
* Removes the specified vertex from the list of vertices if one exists
*
* @param vertex
* @return true upon removing specified existing vertex; false otherwise
*/
public boolean removeVertex(V vertex) {
if (!containsVertex(vertex)) {
return false;
}
for (V src : getInNeighbours(vertex)) {
removeEdge(src, vertex);
}
for (V target : getOutNeighbours(vertex)) {
removeEdge(vertex, target);
}
incomingEdgeMap.remove(vertex);
outgoingEdgeMap.remove(vertex);
return true;
}
public void clear() {
incomingEdgeMap.clear();
outgoingEdgeMap.clear();
}
// HELPERS
void checkNull(V vertex) {
if (vertex == null) {
throw new NullPointerException("null vertex.");
}
}
void validateVertex(V vertex) {
checkNull(vertex);
if (!containsVertex(vertex)) {
throw new IllegalArgumentException("specified vertex is not present in graph.");
}
}
private static <E> Set<E> copy(Set<E> set) {
return new HashSet<E>(set);
}
private static <K, V> Map<K, V> copy(Map<K, V> map) {
return new HashMap<K, V>(map);
}
// ////////////////////////////////////////////////////////////////////////////////
/* GRAPH STRUCTURE BASED OPERATIONS */
/**
* To get the list directly reachable Vertices from the given vertex.
*
* @return List of Vertices directly reachable from the given vertex.
* Returns null if vertex is not present in graph, Returns empty
* list if vertex has no directly reachable node.
*/
public Set<V> getOutNeighbours(V v) {
return getOutgoingEdges(v).keySet();
}
/**
* To get the list of vertices from which the given vertex is directly
* reachable.
*
* @return List of Vertices from which the given vertex is directly
* reachable. Returns null if vertex is not present in graph,
* Returns empty list if vertex has no incomming Edges.
*/
public Set<V> getInNeighbours(V v) {
return getIncomingEdges(v).keySet();
}
/**
* This method will return the list of vertices having no incomming Edges.
* The node having no incoming edges will be treated as Root node.
*
* @return list of vertices having no incomming Edges.
*/
public Set<V> getUnreachableVertices() {
Set<V> res = new HashSet<V>();
for (Map.Entry<V, Map<V, W>> entry : incomingEdgeMap.entrySet()) {
if (entry.getValue().isEmpty()) {
res.add(entry.getKey());
}
}
return res;
}
/**
* This method checks whether adding node from sourceVertex to targetVertex
* will result into cyclic graph or not. Adding an edge will result into
* cycle only when the target vertex is reachable from source vertex i.e.
* Vertex B is reachable from vertex A if and only if 1. There is direct
* edge from vertex C to B 2. and C is reachable from A.
*
* @param sourceVertex The source vertex of edge to be added.
* @param targetVertex The target vertex of edge to be added.
* @return true if the
*/
private boolean isReverseReachable(V sourceVertex, V targetVertex) {
if (sourceVertex.equals(targetVertex))
return true; // finally reached from source to target!!!
for (V v : incomingEdgeMap.get(sourceVertex).keySet()) {
if (isReverseReachable(v, targetVertex))
return true;
}
return false;
}
/**
* All possible (acyclic) paths between two vertices. If source and target
* are the same, then the resulting path will have no vertices.
*
* @param source the begining vertex.
* @param target the ending vertix.
* @return List of all paths, where path is again List of Vertices.
* @throws IllegalArgumentException when the fromVetrex or toVetrex is not
* in the graph.
*/
public Set<List<V>> getVertexPaths(V source, V target) {
validateVertex(source);
validateVertex(target);
if (source.equals(target)) {
return new HashSet<List<V>>();
}
return getVertexPaths(source, target, new HashSet<V>());
}
// TODO check source == target
private Set<List<V>> getVertexPaths(V source, V target, Set<V> verticesToIgnore) {
Set<List<V>> res = new HashSet<List<V>>();
verticesToIgnore.add(target);
for (Map.Entry<V, W> entry : incomingEdgeMap.get(target).entrySet()) {
V srcSrc = entry.getKey();
if (verticesToIgnore.contains(srcSrc)) {
continue;
}
if (source.equals(srcSrc)) {
List<V> path = new ArrayList<V>();
path.add(source);
path.add(target);
res.add(path);
continue;
}
Set<List<V>> thePaths = getVertexPaths(source, srcSrc, verticesToIgnore);
for (List<V> thePath : thePaths) {
thePath.add(target);
}
res.addAll(thePaths);
}
verticesToIgnore.remove(target);
return res;
}
/**
* All possible path Edges between two vertices.
*
* @param fromVertex the begining vertex.
* @param toVetrex the ending vertix.
* @return List of all path Edges, where path is again List of Vertices.
* @throws IllegalArgumentException when the fromVetrex or toVetrex is not
* in the graph.
*/
public Set<List<W>> getEdgePaths(V fromVertex, V toVetrex) {
Set<List<W>> edgePaths = new HashSet<List<W>>();
Set<List<V>> verticesPaths = getVertexPaths(fromVertex, toVetrex);
for (List<V> thePath : verticesPaths) {
List<W> theEdgePath = new ArrayList<W>();
for (int j = 1; j < thePath.size(); j++) {
theEdgePath.add(getEdge(thePath.get(j - 1), thePath.get(j)));
edgePaths.add(theEdgePath);
}
}
return edgePaths;
}
// ///////////////////////////////////////////////////////////
/* STRUCTURAL CONSTRAINT CHECKERS */
/**
* Checks if the graph is weakly connected. Graph will be connected if 1.
* after Depth first traversing (without considering edge Direction) through
* graph from (the only one)unreachable node, results into
*
* @return true if graph is connected; false if graph is disjoint
*/
public boolean isConnected() {
Set<V> vertices = getVertices();
if (vertices.isEmpty()) {
return false;
}
dfs(vertices.iterator().next(), vertices);
return vertices.isEmpty();
}
/**
* Method to traverse using Depth First algorithm. It removes the vertex
* from allVertexSet while visiting each vertex. dfs of connected graph
* should result into the allVetrexSet empty.
*
* @param vertex The vertex to be visited.
* @param allVertexSet Set of all nodes not visited yet.
*/
private void dfs(V vertex, Set<V> allVertexSet) {
allVertexSet.remove(vertex);
Set<V> adjacentVertices = new HashSet<V>();
adjacentVertices.addAll(outgoingEdgeMap.get(vertex).keySet());
adjacentVertices.addAll(incomingEdgeMap.get(vertex).keySet());
// insure against self-edge
adjacentVertices.remove(vertex);
for (V adjacentVertex : adjacentVertices) {
if (allVertexSet.contains(adjacentVertex))
dfs(adjacentVertex, allVertexSet);
}
}
public boolean isTree() {
if (!isConnected()) {
return false;
}
Set<V> roots = getUnreachableVertices();
if (roots.size() != 1) {
return false;
}
V root = roots.iterator().next();
Set<V> vertices = getVertices();
vertices.remove(root);
for (V vertex : vertices) {
Map<V, W> in = incomingEdgeMap.get(vertex);
if (in.size() > 1) {
return false;
}
}
return true;
}
/**
* Checks if adding specified edge will cause a new cycle in the graph.
*
* @param src src vertex
* @param target target vertex
* @return <tt>true</tt> if this edge will cause a new cycle in this
* graph;<tt>false</tt> otherwise.
*/
public boolean willCauseNewCycle(V src, V target) {
if (!(containsVertex(src) && containsVertex(target))) {
return false;
}
return isReverseReachable(src, target);
}
@Override
@SuppressWarnings("unchecked")
public Graph<V, W> clone() {
Graph<V, W> res = null;
try {
res = (Graph<V, W>) super.clone();
} catch (CloneNotSupportedException e) {
// can't occur
}
res.assign(this);
return res;
}
public void assign(Graph<? extends V, ? extends W> graph) {
assign2(graph);
}
private <V1 extends V, E1 extends W> void assign2(Graph<V1, E1> graph) {
initMaps();
for (V1 src : graph.getVertices()) {
for (Map.Entry<V1, E1> entry : graph.getOutgoingEdges(src).entrySet()) {
putEdge(src, entry.getKey(), entry.getValue());
}
}
}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return outgoingEdgeMap.toString();
}
@SuppressWarnings("unchecked")
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Graph)) {
return false;
}
Graph<V, W> other = (Graph<V, W>) obj;
return outgoingEdgeMap.equals(other.outgoingEdgeMap);
}
@Override
public int hashCode() {
return outgoingEdgeMap.hashCode();
}
private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException {
s.defaultReadObject();
incomingEdgeMap = new HashMap<V, Map<V, W>>();
for (V v : outgoingEdgeMap.keySet()) {
incomingEdgeMap.put(v, new HashMap<V, W>());
}
for (Map.Entry<V, Map<V, W>> outgoingEdgesEntry : outgoingEdgeMap.entrySet()) {
V src = outgoingEdgesEntry.getKey();
for (Map.Entry<V, W> outgoingEdge : outgoingEdgesEntry.getValue().entrySet()) {
V target = outgoingEdge.getKey();
W edge = outgoingEdge.getValue();
incomingEdgeMap.get(target).put(src, edge);
}
}
}
} | src/edu/wustl/common/util/Graph.java | package edu.wustl.common.util;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
// TODO
// path related methods check when src == target
/**
* A weighted, directed graph. This class can be used to superimpose a graph
* structure on any set of classes that logically represent vertices and weights
* e.g. if an application has a class called <tt>City</tt>, then the
* <tt>Graph</tt> class can be used to create a route map across all cities by
* specifying vertices that of type <tt>City</tt> and weights that are of type
* <tt>Long</tt> (indicating distance).<br>
* Note that several methods in this class throw an
* <tt>IllegalArgumentException</tt> if a specified vertex is not present in
* the graph. All methods throw a <tt>NullPointerException</tt> if a specified
* vertex parameter is <tt>null</tt>.
*
* @param <V> the type of vertices
* @param <W> the type of weights on edges
*/
public class Graph<V, W> implements Serializable, Cloneable {
private static final long serialVersionUID = 2744129191470144562L;
private transient HashMap<V, Map<V, W>> incomingEdgeMap;
private HashMap<V, Map<V, W>> outgoingEdgeMap;
private int numEdges = 0;
public Graph() {
initMaps();
}
public Graph(Graph<? extends V, ? extends W> graph) {
assign(graph);
}
private void initMaps() {
incomingEdgeMap = new HashMap<V, Map<V, W>>();
outgoingEdgeMap = new HashMap<V, Map<V, W>>();
}
/* DATA-RELATED METHODS */
// ACCESSORS
/**
* @return set of all vertices present in graph.
*/
public Set<V> getVertices() {
return copy(outgoingEdgeMap.keySet());
}
public Map<V, W> getOutgoingEdges(V source) {
validateVertex(source);
return copy(outgoingEdgeMap.get(source));
}
public Map<V, W> getIncomingEdges(V target) {
validateVertex(target);
return copy(incomingEdgeMap.get(target));
}
/**
* Checks if specified vertex is present in this graph.
*
* @param vertex the vertex whose presence is to be checked.
* @return <tt>true</tt> if this vertex is present in this graph;
* <tt>false</tt> otherwise.
*/
public boolean containsVertex(V vertex) {
return outgoingEdgeMap.containsKey(vertex);
}
/**
* This method checks whether an edge lies between the source vertex and
* target vertex.
*
* @return true if the edge exists; false otherwise
*/
public boolean containsEdge(V source, V target) {
validateVertex(source);
validateVertex(target);
return outgoingEdgeMap.get(source).containsKey(target);
}
/**
* Get the edge from the list of edges, if one exists between source vertex
* and target vertex.
*
* @return edge object if it exists; null otherwise
*/
public W getEdge(V source, V target) {
validateVertex(source);
validateVertex(target);
return outgoingEdgeMap.get(source).get(target);
}
public int numEdges() {
return numEdges;
}
public int numVertices() {
return outgoingEdgeMap.size();
}
// MODIFIERS
/**
* Remove the edge if it exists in the list of edges.
*
* @param sourceVertex
* @param targetVertex
* @return removed edge if edge object is not null; null otherwise
*/
public W removeEdge(V source, V target) {
if (containsEdge(source, target)) {
numEdges--;
outgoingEdgeMap.get(source).remove(target);
return incomingEdgeMap.get(target).remove(source);
}
return null;
}
/**
* Put the edge into the list of edges if it does not exist. If the edge
* exists return the old edge and replace it with a new edge.
*
* @param sourceVertex
* @param targetVertex
* @param edge
* @return the old edge if it exists; null otherwise
*/
public W putEdge(V source, V target, W edge) {
addVertex(source);
addVertex(target);
if (!containsEdge(source, target)) {
numEdges++;
}
outgoingEdgeMap.get(source).put(target, edge);
return incomingEdgeMap.get(target).put(source, edge);
}
/**
* Add a vertex to the list of vertices if same does not exist in the list
*
* @param vertex the vertex to add to the graph.
* @return <tt>false</tt> if this vertex already existed.
* @throws NullPointerException if the specified vertex is null.
*/
public boolean addVertex(V vertex) {
checkNull(vertex);
if (containsVertex(vertex))
return false;
else {
incomingEdgeMap.put(vertex, new HashMap<V, W>());
outgoingEdgeMap.put(vertex, new HashMap<V, W>());
return true;
}
}
/**
* @param vertices the vertices to add to the graph
* @return <tt>true</tt> if atleast one new vertex was added in this call.
*/
public boolean addVertices(Set<? extends V> vertices) {
if (vertices == null) {
return false;
}
boolean modified = false;
for (V v : vertices) {
if (addVertex(v))
modified = true;
}
return modified;
}
/**
* Removes the specified vertex from the list of vertices if one exists
*
* @param vertex
* @return true upon removing specified existing vertex; false otherwise
*/
public boolean removeVertex(V vertex) {
if (!containsVertex(vertex)) {
return false;
}
for (V src : incomingEdgeMap.get(vertex).keySet()) {
removeEdge(src, vertex);
}
for (V target : outgoingEdgeMap.get(vertex).keySet()) {
removeEdge(vertex, target);
}
incomingEdgeMap.remove(vertex);
outgoingEdgeMap.remove(vertex);
return true;
}
public void clear() {
incomingEdgeMap.clear();
outgoingEdgeMap.clear();
}
// HELPERS
void checkNull(V vertex) {
if (vertex == null) {
throw new NullPointerException("null vertex.");
}
}
void validateVertex(V vertex) {
checkNull(vertex);
if (!containsVertex(vertex)) {
throw new IllegalArgumentException("specified vertex is not present in graph.");
}
}
private static <E> Set<E> copy(Set<E> set) {
return new HashSet<E>(set);
}
private static <K, V> Map<K, V> copy(Map<K, V> map) {
return new HashMap<K, V>(map);
}
// ////////////////////////////////////////////////////////////////////////////////
/* GRAPH STRUCTURE BASED OPERATIONS */
/**
* To get the list directly reachable Vertices from the given vertex.
*
* @return List of Vertices directly reachable from the given vertex.
* Returns null if vertex is not present in graph, Returns empty
* list if vertex has no directly reachable node.
*/
public Set<V> getOutNeighbours(V v) {
return getOutgoingEdges(v).keySet();
}
/**
* To get the list of vertices from which the given vertex is directly
* reachable.
*
* @return List of Vertices from which the given vertex is directly
* reachable. Returns null if vertex is not present in graph,
* Returns empty list if vertex has no incomming Edges.
*/
public Set<V> getInNeighbours(V v) {
return getIncomingEdges(v).keySet();
}
/**
* This method will return the list of vertices having no incomming Edges.
* The node having no incoming edges will be treated as Root node.
*
* @return list of vertices having no incomming Edges.
*/
public Set<V> getUnreachableVertices() {
Set<V> res = new HashSet<V>();
for (Map.Entry<V, Map<V, W>> entry : incomingEdgeMap.entrySet()) {
if (entry.getValue().isEmpty()) {
res.add(entry.getKey());
}
}
return res;
}
/**
* This method checks whether adding node from sourceVertex to targetVertex
* will result into cyclic graph or not. Adding an edge will result into
* cycle only when the target vertex is reachable from source vertex i.e.
* Vertex B is reachable from vertex A if and only if 1. There is direct
* edge from vertex C to B 2. and C is reachable from A.
*
* @param sourceVertex The source vertex of edge to be added.
* @param targetVertex The target vertex of edge to be added.
* @return true if the
*/
private boolean isReverseReachable(V sourceVertex, V targetVertex) {
if (sourceVertex.equals(targetVertex))
return true; // finally reached from source to target!!!
for (V v : incomingEdgeMap.get(sourceVertex).keySet()) {
if (isReverseReachable(v, targetVertex))
return true;
}
return false;
}
/**
* All possible (acyclic) paths between two vertices. If source and target
* are the same, then the resulting path will have no vertices.
*
* @param source the begining vertex.
* @param target the ending vertix.
* @return List of all paths, where path is again List of Vertices.
* @throws IllegalArgumentException when the fromVetrex or toVetrex is not
* in the graph.
*/
public Set<List<V>> getVertexPaths(V source, V target) {
validateVertex(source);
validateVertex(target);
if (source.equals(target)) {
return new HashSet<List<V>>();
}
return getVertexPaths(source, target, new HashSet<V>());
}
// TODO check source == target
private Set<List<V>> getVertexPaths(V source, V target, Set<V> verticesToIgnore) {
Set<List<V>> res = new HashSet<List<V>>();
verticesToIgnore.add(target);
for (Map.Entry<V, W> entry : incomingEdgeMap.get(target).entrySet()) {
V srcSrc = entry.getKey();
if (verticesToIgnore.contains(srcSrc)) {
continue;
}
if (source.equals(srcSrc)) {
List<V> path = new ArrayList<V>();
path.add(source);
path.add(target);
res.add(path);
continue;
}
Set<List<V>> thePaths = getVertexPaths(source, srcSrc, verticesToIgnore);
for (List<V> thePath : thePaths) {
thePath.add(target);
}
res.addAll(thePaths);
}
verticesToIgnore.remove(target);
return res;
}
/**
* All possible path Edges between two vertices.
*
* @param fromVertex the begining vertex.
* @param toVetrex the ending vertix.
* @return List of all path Edges, where path is again List of Vertices.
* @throws IllegalArgumentException when the fromVetrex or toVetrex is not
* in the graph.
*/
public Set<List<W>> getEdgePaths(V fromVertex, V toVetrex) {
Set<List<W>> edgePaths = new HashSet<List<W>>();
Set<List<V>> verticesPaths = getVertexPaths(fromVertex, toVetrex);
for (List<V> thePath : verticesPaths) {
List<W> theEdgePath = new ArrayList<W>();
for (int j = 1; j < thePath.size(); j++) {
theEdgePath.add(getEdge(thePath.get(j - 1), thePath.get(j)));
edgePaths.add(theEdgePath);
}
}
return edgePaths;
}
// ///////////////////////////////////////////////////////////
/* STRUCTURAL CONSTRAINT CHECKERS */
/**
* Checks if the graph is weakly connected. Graph will be connected if 1.
* after Depth first traversing (without considering edge Direction) through
* graph from (the only one)unreachable node, results into
*
* @return true if graph is connected; false if graph is disjoint
*/
public boolean isConnected() {
Set<V> vertices = getVertices();
if (vertices.isEmpty()) {
return false;
}
dfs(vertices.iterator().next(), vertices);
return vertices.isEmpty();
}
/**
* Method to traverse using Depth First algorithm. It removes the vertex
* from allVertexSet while visiting each vertex. dfs of connected graph
* should result into the allVetrexSet empty.
*
* @param vertex The vertex to be visited.
* @param allVertexSet Set of all nodes not visited yet.
*/
private void dfs(V vertex, Set<V> allVertexSet) {
allVertexSet.remove(vertex);
Set<V> adjacentVertices = new HashSet<V>();
adjacentVertices.addAll(outgoingEdgeMap.get(vertex).keySet());
adjacentVertices.addAll(incomingEdgeMap.get(vertex).keySet());
// insure against self-edge
adjacentVertices.remove(vertex);
for (V adjacentVertex : adjacentVertices) {
if (allVertexSet.contains(adjacentVertex))
dfs(adjacentVertex, allVertexSet);
}
}
public boolean isTree() {
if (!isConnected()) {
return false;
}
Set<V> roots = getUnreachableVertices();
if (roots.size() != 1) {
return false;
}
V root = roots.iterator().next();
Set<V> vertices = getVertices();
vertices.remove(root);
for (V vertex : vertices) {
Map<V, W> in = incomingEdgeMap.get(vertex);
if (in.size() > 1) {
return false;
}
}
return true;
}
/**
* Checks if adding specified edge will cause a new cycle in the graph.
*
* @param src src vertex
* @param target target vertex
* @return <tt>true</tt> if this edge will cause a new cycle in this
* graph;<tt>false</tt> otherwise.
*/
public boolean willCauseNewCycle(V src, V target) {
if (!(containsVertex(src) && containsVertex(target))) {
return false;
}
return isReverseReachable(src, target);
}
protected Map<V, Map<V, W>> outMap() {
return outgoingEdgeMap;
}
@Override
@SuppressWarnings("unchecked")
public Graph<V, W> clone() {
Graph<V, W> res = null;
try {
res = (Graph<V, W>) super.clone();
} catch (CloneNotSupportedException e) {
// can't occur
}
res.assign(this);
return res;
}
public void assign(Graph<? extends V, ? extends W> graph) {
assign2(graph);
}
private <V1 extends V, E1 extends W> void assign2(Graph<V1, E1> graph) {
initMaps();
for (V1 src : graph.getVertices()) {
for (Map.Entry<V1, E1> entry : graph.getOutgoingEdges(src).entrySet()) {
putEdge(src, entry.getKey(), entry.getValue());
}
}
}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return outgoingEdgeMap.toString();
}
@SuppressWarnings("unchecked")
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Graph)) {
return false;
}
Graph<V, W> other = (Graph<V, W>) obj;
return outgoingEdgeMap.equals(other.outgoingEdgeMap);
}
@Override
public int hashCode() {
return outgoingEdgeMap.hashCode();
}
private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException {
s.defaultReadObject();
incomingEdgeMap = new HashMap<V, Map<V, W>>();
for (V v : outgoingEdgeMap.keySet()) {
incomingEdgeMap.put(v, new HashMap<V, W>());
}
for (Map.Entry<V, Map<V, W>> outgoingEdgesEntry : outgoingEdgeMap.entrySet()) {
V src = outgoingEdgesEntry.getKey();
for (Map.Entry<V, W> outgoingEdge : outgoingEdgesEntry.getValue().entrySet()) {
V target = outgoingEdge.getKey();
W edge = outgoingEdge.getValue();
incomingEdgeMap.get(target).put(src, edge);
}
}
}
} | bugfix removeVertex
SVN-Revision: 141
| src/edu/wustl/common/util/Graph.java | bugfix removeVertex |
|
Java | mit | 55e4e8c7280b200d6eda92c451bae7129e007d47 | 0 | konsultaner/word-clock-raspberry-pi-zero-neopixels,konsultaner/word-clock-raspberry-pi-zero-neopixels | package de.konsultaner.wordClock;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import static org.junit.Assert.*;
public class WordClockMatrixTest {
@Test
public void testMatrix() throws Exception {
WordClockMatrix matrix = new WordClockMatrix(
new String[]{
"ESKISTLRMITTAGSIABENDS",
"FRÜHAMLFÜNFZEHNZWANZIG",
"DREIVIERTELTGNACHVORJM",
"HALBQZWÖLFPZWEINSIEBEN",
"KDREIRHFÜNFELFNEUNVIER",
"WACHTZEHNRSBSECHSFMUHR",
"AFMOLDIWMIRDOPFRLSAKSO"
}
);
int[][] result1 = matrix.getMatrixBySentence("ES IST ABENDS FÜNFZEHN VOR ZWÖLF");
assertThat("Should return a fitting matrix",result1[0][0],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][1],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][3],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][4],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][5],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][6],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][7],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][8],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][9],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][10],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][11],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][12],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][13],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][14],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][16],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][17],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][18],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][19],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][20],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][21],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][0],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][1],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][3],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][4],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][5],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][6],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][7],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][8],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][9],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][10],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][11],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][12],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][13],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][14],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][16],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][17],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][18],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][19],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][20],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][21],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][0],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][1],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][3],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][4],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][5],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][6],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][7],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][8],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][9],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][10],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][11],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][12],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][13],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][14],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][16],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][17],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[2][18],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[2][19],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[2][20],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[2][21],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][0],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][1],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][3],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][4],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][5],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[3][6],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[3][7],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[3][8],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[3][9],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[3][10],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][11],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][12],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][13],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][14],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][16],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][17],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][18],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][19],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][20],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[3][21],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][0],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][1],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][3],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][4],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][5],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][6],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][7],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][8],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][9],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][10],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][11],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][12],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][13],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][14],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][16],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][17],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][18],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][19],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][20],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[4][21],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][0],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][1],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][3],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][4],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][5],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][6],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][7],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][8],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][9],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][10],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][11],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][12],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][13],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][14],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][16],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][17],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][18],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][19],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][20],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[5][21],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][0],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][1],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][3],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][4],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][5],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][6],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][7],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][8],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][9],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][10],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][11],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][12],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][13],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][14],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][16],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][17],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][18],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][19],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][20],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[6][21],CoreMatchers.equalTo(0));
}
} | src/test/de/konsultaner/wordClock/WordClockMatrixTest.java | package de.konsultaner.wordClock;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import static org.junit.Assert.*;
public class WordClockMatrixTest {
@Test
public void testMatrix() throws Exception {
WordClockMatrix matrix = new WordClockMatrix(
new String[]{
"ESKISTLRMITTAGSIABENDS",
"FRÜHAMLFÜNFZEHNZWANZIG",
"DREIVIERTELTGNACHVORJM",
"HALBQZWÖLFPZWEINSIEBEN",
"KDREIRHFÜNFELFNEUNVIER",
"WACHTZEHNRSBSECHSFMUHR",
"AFMOLDIWMIRDOPFRLSAKSO"
}
);
int[][] result1 = matrix.getMatrixBySentence("ES IST ABENDS FÜNFZEHN VOR ZWÖLF");
assertThat("Should return a fitting matrix",result1[0][0],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][1],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][3],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][4],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][5],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][6],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][7],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][8],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][9],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][10],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][11],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][12],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][13],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][14],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[0][16],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][17],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][18],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][19],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][20],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[0][21],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][0],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][1],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][2],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][3],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][4],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][5],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][6],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][7],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][8],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][9],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][10],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][11],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][12],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][13],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][14],CoreMatchers.equalTo(1));
assertThat("Should return a fitting matrix",result1[1][15],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][16],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][17],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][18],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][19],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][20],CoreMatchers.equalTo(0));
assertThat("Should return a fitting matrix",result1[1][21],CoreMatchers.equalTo(0));
}
} | completed test
| src/test/de/konsultaner/wordClock/WordClockMatrixTest.java | completed test |
|
Java | mit | 55151c544401be5c30395343909c7e04e4693cdd | 0 | Sometrik/framework,Sometrik/framework,Sometrik/framework | package com.sometrik.framework;
import java.util.ArrayList;
import com.sometrik.framework.NativeCommand.Selector;
import android.graphics.Bitmap;
import android.text.InputFilter;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
public class FWPicker extends Spinner implements NativeCommandHandler {
private FrameWork frame;
private ArrayAdapter<String> adapter;
private ArrayList<Integer> idList;
ViewStyleManager normalStyle, activeStyle, currentStyle;
public FWPicker(FrameWork frameWork) {
super(frameWork);
this.frame = frameWork;
adapter = new ArrayAdapter<String>(frame, android.R.layout.simple_spinner_item);
idList = new ArrayList<Integer>();
final float scale = getContext().getResources().getDisplayMetrics().density;
this.normalStyle = currentStyle = new ViewStyleManager(frame.bitmapCache, scale, true);
this.activeStyle = new ViewStyleManager(frame.bitmapCache, scale, false);
setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> view, View arg1, int position, long itemId) {
frame.sendNativeValueEvent(getId(), idList.get(position), 0);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
@Override
public int getElementId() {
return getId();
}
@Override
public void addChild(View view) {
System.out.println("Picker couldn't handle addChild");
//TODO
}
@Override
public void addOption(int optionId, String text) {
idList.add(optionId);
adapter.add(text);
setAdapter(adapter);
}
@Override
public void setValue(String v) {
}
@Override
public void setValue(int v) {
}
@Override
public void setStyle(Selector selector, String key, String value) {
if (selector == Selector.NORMAL) {
normalStyle.setStyle(key, value);
} else if (selector == Selector.ACTIVE) {
activeStyle.setStyle(key, value);
}
}
@Override
public void applyStyles() {
currentStyle.apply(this); }
@Override
public void setError(boolean hasError, String errorText) { }
@Override
public void onScreenOrientationChange(boolean isLandscape) {
// TODO Auto-generated method stub
}
@Override
public void addData(String text, int row, int column, int sheet) {
System.out.println("Picker couldn't handle command");
}
@Override
public void setViewVisibility(boolean visibility) {
if (visibility){
this.setVisibility(VISIBLE);
} else {
this.setVisibility(INVISIBLE);
}
}
@Override
public void clear() {
System.out.println("couldn't handle command");
}
@Override
public void flush() {
// TODO Auto-generated method stub
}
@Override
public void addColumn(String text, int columnType) {
// TODO Auto-generated method stub
}
@Override
public void reshape(int value, int size) {
// TODO Auto-generated method stub
}
@Override
public void setBitmap(Bitmap bitmap) { }
@Override
public void reshape(int size) {
// TODO Auto-generated method stub
}
@Override
public void deinitialize() {
// TODO Auto-generated method stub
}
@Override
public void addImageUrl(String url, int width, int height) {
// TODO Auto-generated method stub
}
}
| android/java/com/sometrik/framework/FWPicker.java | package com.sometrik.framework;
import java.util.ArrayList;
import com.sometrik.framework.NativeCommand.Selector;
import android.graphics.Bitmap;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
public class FWPicker extends Spinner implements NativeCommandHandler {
private FrameWork frame;
private ArrayAdapter<String> adapter;
private ArrayList<Integer> idList;
public FWPicker(FrameWork frameWork) {
super(frameWork);
this.frame = frameWork;
adapter = new ArrayAdapter<String>(frame, android.R.layout.simple_spinner_item);
idList = new ArrayList<Integer>();
setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> view, View arg1, int position, long itemId) {
frame.sendNativeValueEvent(getId(), idList.get(position), 0);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
@Override
public int getElementId() {
return getId();
}
@Override
public void addChild(View view) {
System.out.println("Picker couldn't handle addChild");
//TODO
}
@Override
public void addOption(int optionId, String text) {
idList.add(optionId);
adapter.add(text);
setAdapter(adapter);
}
@Override
public void setValue(String v) {
}
@Override
public void setValue(int v) {
}
@Override
public void setStyle(Selector selector, String key, String value) { }
@Override
public void applyStyles() { }
@Override
public void setError(boolean hasError, String errorText) { }
@Override
public void onScreenOrientationChange(boolean isLandscape) {
// TODO Auto-generated method stub
}
@Override
public void addData(String text, int row, int column, int sheet) {
System.out.println("Picker couldn't handle command");
}
@Override
public void setViewVisibility(boolean visibility) {
if (visibility){
this.setVisibility(VISIBLE);
} else {
this.setVisibility(INVISIBLE);
}
}
@Override
public void clear() {
System.out.println("couldn't handle command");
}
@Override
public void flush() {
// TODO Auto-generated method stub
}
@Override
public void addColumn(String text, int columnType) {
// TODO Auto-generated method stub
}
@Override
public void reshape(int value, int size) {
// TODO Auto-generated method stub
}
@Override
public void setBitmap(Bitmap bitmap) { }
@Override
public void reshape(int size) {
// TODO Auto-generated method stub
}
@Override
public void deinitialize() {
// TODO Auto-generated method stub
}
@Override
public void addImageUrl(String url, int width, int height) {
// TODO Auto-generated method stub
}
}
| Make FWPicker stylable | android/java/com/sometrik/framework/FWPicker.java | Make FWPicker stylable |
|
Java | mit | ca41d6bd86f545969866bd0b8b0aac0a4251a748 | 0 | martindisch/Design-Library | /*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.martin.designlibrary;
import android.os.Bundle;
import android.support.design.widget.NavigationView;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
public class MainActivity extends AppCompatActivity {
private DrawerLayout mDrawerLayout;
private int mSelected, mCurrent;
private NavigationView mNavigationView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (savedInstanceState != null) {
mSelected = savedInstanceState.getInt("mSelected");
mCurrent = savedInstanceState.getInt("mCurrent");
} else {
mCurrent = -1;
mSelected = 0;
}
mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
mDrawerLayout.setDrawerListener(new DrawerLayout.DrawerListener() {
@Override
public void onDrawerSlide(View drawerView, float slideOffset) {
}
@Override
public void onDrawerOpened(View drawerView) {
}
@Override
public void onDrawerStateChanged(int newState) {
}
@Override
public void onDrawerClosed(View drawerView) {
updateContainer();
}
});
mNavigationView = (NavigationView) findViewById(R.id.nav_view);
if (mNavigationView != null) {
setupDrawerContent(mNavigationView);
}
selectDrawerItem(mSelected);
}
private void selectDrawerItem(int i) {
mNavigationView.getMenu().getItem(i).setChecked(true);
updateContainer();
}
private void updateContainer() {
FragmentManager fragmentManager = getSupportFragmentManager();
Fragment fragment = null;
if (mSelected != mCurrent) {
switch (mSelected) {
case 0:
fragment = first_fragment.newInstance();
break;
default:
fragment = first_fragment.newInstance();
break;
}
mCurrent = mSelected;
fragmentManager.beginTransaction()
.replace(R.id.container, fragment)
.commit();
}
}
private void setupDrawerContent(final NavigationView navigationView) {
navigationView.setNavigationItemSelectedListener(
new NavigationView.OnNavigationItemSelectedListener() {
@Override
public boolean onNavigationItemSelected(MenuItem menuItem) {
switch (menuItem.getItemId()) {
case R.id.nav_home:
mSelected = 0;
break;
case R.id.nav_messages:
mSelected = 1;
break;
case R.id.nav_friends:
mSelected = 2;
break;
case R.id.nav_discussion:
mSelected = 3;
break;
case R.id.nav_sub1:
mSelected = 4;
break;
case R.id.nav_sub2:
mSelected = 5;
break;
}
menuItem.setChecked(true);
mDrawerLayout.closeDrawers();
return true;
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
mDrawerLayout.openDrawer(GravityCompat.START);
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putInt("mSelected", mSelected);
outState.putInt("mCurrent", mCurrent);
super.onSaveInstanceState(outState);
}
}
| app/src/main/java/com/martin/designlibrary/MainActivity.java | /*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.martin.designlibrary;
import android.os.Bundle;
import android.support.design.widget.NavigationView;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.FrameLayout;
public class MainActivity extends AppCompatActivity {
private DrawerLayout mDrawerLayout;
private FrameLayout mLayoutContainer;
private int mSelected, mCurrent;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mSelected = 0;
mCurrent = -1;
mLayoutContainer = (FrameLayout) findViewById(R.id.container);
mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
mDrawerLayout.setDrawerListener(new DrawerLayout.DrawerListener() {
@Override
public void onDrawerSlide(View drawerView, float slideOffset) {
}
@Override
public void onDrawerOpened(View drawerView) {
}
@Override
public void onDrawerClosed(View drawerView) {
FragmentManager fragmentManager = getSupportFragmentManager();
Fragment fragment = null;
if (mSelected != mCurrent) {
switch (mSelected) {
case 0:
fragment = first_fragment.newInstance();
break;
}
mCurrent = mSelected;
fragmentManager.beginTransaction()
.replace(R.id.container, fragment)
.commit();
}
}
@Override
public void onDrawerStateChanged(int newState) {
}
});
NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view);
if (navigationView != null) {
setupDrawerContent(navigationView);
}
}
private void setupDrawerContent(final NavigationView navigationView) {
navigationView.setNavigationItemSelectedListener(
new NavigationView.OnNavigationItemSelectedListener() {
@Override
public boolean onNavigationItemSelected(MenuItem menuItem) {
switch (menuItem.getItemId()) {
case R.id.nav_home:
mSelected = 0;
break;
case R.id.nav_messages:
mSelected = 0;
break;
case R.id.nav_friends:
mSelected = 0;
break;
case R.id.nav_discussion:
mSelected = 0;
break;
case R.id.nav_sub1:
mSelected = 0;
break;
case R.id.nav_sub2:
mSelected = 0;
break;
}
menuItem.setChecked(true);
mDrawerLayout.closeDrawers();
return true;
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
mDrawerLayout.openDrawer(GravityCompat.START);
return true;
}
return super.onOptionsItemSelected(item);
}
}
| Got a somewhat stable and performant NavigationDrawer
| app/src/main/java/com/martin/designlibrary/MainActivity.java | Got a somewhat stable and performant NavigationDrawer |
|
Java | mit | 7db570fcafc66172ee1eed66b46e8c909858bc59 | 0 | metal-crow/Sentry-Gun-Computer-Vision | package preprocessing;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.javatuples.Pair;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import processing.MassDetectionandObjectPriority;
public class ImagePartitioning {
//method options for determining priority of blobs. Used in method.
public static final int
BASIC_IDENTIFICATION=0,
LASER_IDENTIFICATION=1,
PERSON_IDENTIFICATION=2;
private static final int minBlobArea=35;
/**
* Get array of all blobs in image. Blob will be represented by bounding box.
* Give every blob to a thread which will generate an outline with it an nearby blobs
* THE MOVEMENT MAKES AN OUTLINE. find outline, fill in, thats a person.
* @return
*/
public static ArrayList<Pair<int[], Integer>> OutlineBlobDetection(Mat img, int identification){
//first we have to get the blobs in an array as bounding box forms.
ArrayList<Rect> blobs=new ArrayList<Rect>();
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(img.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL , Imgproc.CHAIN_APPROX_NONE);
for(MatOfPoint points:contours){
Rect blobbound=Imgproc.boundingRect(points);
if(blobbound.width*blobbound.height>minBlobArea){
blobs.add(blobbound);
}
}
//need a catch in case we dont find any blobs
if(blobs.isEmpty()){
return new ArrayList<Pair<int[], Integer>>();
}
else{
ExecutorService executor = Executors.newFixedThreadPool(blobs.size());
ArrayList<Future<int[]>> tasks = new ArrayList<Future<int[]>>(blobs.size());
//pass each starting blob and the rest of the blobs to a thread. 1 thread for every starting blob and potential outline
for(int i=0;i<blobs.size();i++){
@SuppressWarnings("unchecked")
GenerateBlobFromOutline t=new GenerateBlobFromOutline(i, (ArrayList<Rect>)blobs.clone(),img);
tasks.add(executor.submit(t));
}
ArrayList<int[]> blobsFromOutlinespreDup = new ArrayList<int[]>();
//join the threads
executor.shutdown();
for(Future<int[]> t:tasks){
try {
int[] store=t.get();
//check to make sure the thread didn't return that it couldn't use the blobs
if(store!=null){
blobsFromOutlinespreDup.add(store);
}
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
//get rid of duplicate blob outlines (have to do this after all threads are finished to prevent thread collisions)
ArrayList<int[]> blobsFromOutlines = new ArrayList<int[]>();
int color=7;//FIXME only works at this color and above. Probably important to find out why.
for(int[] startPointforOutlineBlob:blobsFromOutlinespreDup){
//check to make sure this point is in white space
if(img.get(startPointforOutlineBlob[1], startPointforOutlineBlob[0])[0]==255){
//avoid duplicates by floodfilling this blob so that other points in the same blob wont be in white space
Imgproc.floodFill(img, new Mat(), new Point(startPointforOutlineBlob[0],startPointforOutlineBlob[1]), new Scalar(color));
color++;
blobsFromOutlines.add(startPointforOutlineBlob);
}
}
//need this catch in case we couldn't get an outline b/c there were not enough blobs
if(blobsFromOutlines.isEmpty()){
return new ArrayList<Pair<int[], Integer>>();
}
else{
//pass each blob location and the mat to MassDetectionandObjectPriority thread
ExecutorService executorMSOPP = Executors.newFixedThreadPool(blobsFromOutlines.size());
ArrayList<Future<Pair<int[],Integer>>> tasksMSOP = new ArrayList<Future<Pair<int[],Integer>>>(blobsFromOutlines.size());
for(int[] point:blobsFromOutlines){
Callable<Pair<int[],Integer>> thread=new MassDetectionandObjectPriority(img, (int)img.get(point[1], point[0])[0], identification);
tasksMSOP.add(executorMSOPP.submit(thread));
}
//get the results of the threads
executorMSOPP.shutdown();
ArrayList<Pair<int[], Integer>> targets=new ArrayList<Pair<int[], Integer>>(tasksMSOP.size());
for(Future<Pair<int[],Integer>> task:tasksMSOP){
try {
targets.add(task.get());
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
//return threads results
return targets;
}
}
}
/**
* Basic discrete blob detection. Fragment the image and give each fragment to a thread.
* The thread will go through the image, when we find a white pixel floodfill the blob, and give it to a massdetection thread.
* Yes i know this is a lot of code repeat from FragmentationSplitting, this is easier than figuring out how to combine the two methods.
* @param img
* @param identification
* @return
*/
public static ArrayList<Pair<int[], Integer>> BasicBlobDetection(Mat img, int fragments, int identification){
ExecutorService executor = Executors.newFixedThreadPool((int) Math.pow(fragments,2));
ArrayList<Future<ArrayList<Pair<int[],Integer>>>> tasks = new ArrayList<Future<ArrayList<Pair<int[],Integer>>>>((int) Math.pow(fragments,2));
//truncating in worst case looses a pixel for each edge fragment
int fragmentWidth=(img.width()-1)/fragments;
int fragmentHeight=(img.height()-1)/fragments;
//need to give each thread a unique starting color
int colorincrement=256/(((int) Math.pow(fragments,2))+1);
int curcolor=1;
int y=0;
while(y+fragmentHeight<img.height()){
int x=0;
while(x+fragmentWidth<img.width()){
//>OpenCV >Rows are actually collums
Mat fragment=img.submat(y, y+fragmentHeight, x, x+fragmentWidth);
Callable<ArrayList<Pair<int[],Integer>>> thread =new GetDiscreteBlobsFromFragments(fragment,y,x,identification,img,curcolor);
tasks.add(executor.submit(thread));
x+=fragmentWidth;
curcolor+=colorincrement;
}
y+=fragmentHeight;
}
executor.shutdown();
ArrayList<Pair<int[], Integer>> targets=new ArrayList<Pair<int[], Integer>>(tasks.size());
for(Future<ArrayList<Pair<int[],Integer>>> task:tasks){
try {
targets.addAll(task.get());
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
return targets;
}
/**
* Fragment the image, give each fragment to a thread for at rest detection
* @param img the current frame (the normal color one)
* @param fragments The square root of whatever number of fragments you want.
* @return
*/
public static ArrayList<Pair<int[], Integer>> FragmentationSplitting(Mat img, int fragments){
ExecutorService executor = Executors.newFixedThreadPool((int) Math.pow(fragments,2));
ArrayList<Future<Pair<int[],Integer>>> tasks = new ArrayList<Future<Pair<int[],Integer>>>((int) Math.pow(fragments,2));
//truncating in worst case looses a pixel for each edge fragment
int fragmentWidth=(img.width()-1)/fragments;
int fragmentHeight=(img.height()-1)/fragments;
int y=0;
while(y+fragmentHeight<img.height()){
int x=0;
while(x+fragmentWidth<img.width()){
//>OpenCV >Rows are actually collums
Mat fragment=img.submat(y, y+fragmentHeight, x, x+fragmentWidth);
Callable<Pair<int[],Integer>> thread =new MassDetectionandObjectPriority(y,x,fragment);
tasks.add(executor.submit(thread));
x+=fragmentWidth;
}
y+=fragmentHeight;
}
executor.shutdown();
ArrayList<Pair<int[], Integer>> targets=new ArrayList<Pair<int[], Integer>>(tasks.size());
for(Future<Pair<int[],Integer>> task:tasks){
try {
targets.add(task.get());
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
return targets;
}
}
| src/preprocessing/ImagePartitioning.java | package preprocessing;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.javatuples.Pair;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import processing.MassDetectionandObjectPriority;
public class ImagePartitioning {
//method options for determining priority of blobs. Used in method.
public static final int
BASIC_IDENTIFICATION=0,
LASER_IDENTIFICATION=1,
PERSON_IDENTIFICATION=2;
private static final int minBlobArea=35;
/**
* Get array of all blobs in image. Blob will be represented by bounding box.
* Give every blob to a thread which will generate an outline with it an nearby blobs
* THE MOVEMENT MAKES AN OUTLINE. find outline, fill in, thats a person.
* @return
*/
public static ArrayList<Pair<int[], Integer>> OutlineBlobDetection(Mat img, int identification){
//first we have to get the blobs in an array as bounding box forms.
ArrayList<Rect> blobs=new ArrayList<Rect>();
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(img.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL , Imgproc.CHAIN_APPROX_NONE);
for(MatOfPoint points:contours){
Rect blobbound=Imgproc.boundingRect(points);
if(blobbound.width*blobbound.height>minBlobArea){
blobs.add(blobbound);
}
}
//need a catch in case we dont find any blobs
if(blobs.isEmpty()){
return new ArrayList<Pair<int[], Integer>>();
}
else{
ExecutorService executor = Executors.newFixedThreadPool(blobs.size());
ArrayList<Future<int[]>> tasks = new ArrayList<Future<int[]>>(blobs.size());
//pass each starting blob and the rest of the blobs to a thread. 1 thread for every starting blob and potential outline
for(int i=0;i<blobs.size();i++){
@SuppressWarnings("unchecked")
GenerateBlobFromOutline t=new GenerateBlobFromOutline(i, (ArrayList<Rect>)blobs.clone(),img);
tasks.add(executor.submit(t));
}
ArrayList<int[]> blobsFromOutlinespreDup = new ArrayList<int[]>();
//join the threads
executor.shutdown();
for(Future<int[]> t:tasks){
try {
int[] store=t.get();
//check to make sure the thread didn't return that it couldn't use the blobs
if(store!=null){
blobsFromOutlinespreDup.add(store);
}
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
//get rid of duplicate blob outlines (have to do this after all threads are finished to prevent thread collisions)
ArrayList<int[]> blobsFromOutlines = new ArrayList<int[]>();
int color=7;//FIXME only works at this color and above. Probably important to find out why.
for(int[] startPointforOutlineBlob:blobsFromOutlinespreDup){
//check to make sure this point is in white space
if(img.get(startPointforOutlineBlob[1], startPointforOutlineBlob[0])[0]==255){
//avoid duplicates by floodfilling this blob so that other points in the same blob wont be in white space
Imgproc.floodFill(img, new Mat(), new Point(startPointforOutlineBlob[0],startPointforOutlineBlob[1]), new Scalar(color));
color++;
blobsFromOutlines.add(startPointforOutlineBlob);
}
}
//need this catch in case we couldn't get an outline b/c there were not enough blobs or FIXME the point we returned wasn't in the blobs whitespace
if(blobsFromOutlines.isEmpty()){
return new ArrayList<Pair<int[], Integer>>();
}
else{
//pass each blob location and the mat to MassDetectionandObjectPriority thread
ExecutorService executorMSOPP = Executors.newFixedThreadPool(blobsFromOutlines.size());
ArrayList<Future<Pair<int[],Integer>>> tasksMSOP = new ArrayList<Future<Pair<int[],Integer>>>(blobsFromOutlines.size());
for(int[] point:blobsFromOutlines){
Callable<Pair<int[],Integer>> thread=new MassDetectionandObjectPriority(img, (int)img.get(point[1], point[0])[0], identification);
tasksMSOP.add(executorMSOPP.submit(thread));
}
//get the results of the threads
executorMSOPP.shutdown();
ArrayList<Pair<int[], Integer>> targets=new ArrayList<Pair<int[], Integer>>(tasksMSOP.size());
for(Future<Pair<int[],Integer>> task:tasksMSOP){
try {
targets.add(task.get());
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
//return threads results
return targets;
}
}
}
/**
* Basic discrete blob detection. Fragment the image and give each fragment to a thread.
* The thread will go through the image, when we find a white pixel floodfill the blob, and give it to a massdetection thread.
* Yes i know this is a lot of code repeat from FragmentationSplitting, this is easier than figuring out how to combine the two methods.
* @param img
* @param identification
* @return
*/
public static ArrayList<Pair<int[], Integer>> BasicBlobDetection(Mat img, int fragments, int identification){
ExecutorService executor = Executors.newFixedThreadPool((int) Math.pow(fragments,2));
ArrayList<Future<ArrayList<Pair<int[],Integer>>>> tasks = new ArrayList<Future<ArrayList<Pair<int[],Integer>>>>((int) Math.pow(fragments,2));
//truncating in worst case looses a pixel for each edge fragment
int fragmentWidth=(img.width()-1)/fragments;
int fragmentHeight=(img.height()-1)/fragments;
//need to give each thread a unique starting color
int colorincrement=256/(((int) Math.pow(fragments,2))+1);
int curcolor=1;
int y=0;
while(y+fragmentHeight<img.height()){
int x=0;
while(x+fragmentWidth<img.width()){
//>OpenCV >Rows are actually collums
Mat fragment=img.submat(y, y+fragmentHeight, x, x+fragmentWidth);
Callable<ArrayList<Pair<int[],Integer>>> thread =new GetDiscreteBlobsFromFragments(fragment,y,x,identification,img,curcolor);
tasks.add(executor.submit(thread));
x+=fragmentWidth;
curcolor+=colorincrement;
}
y+=fragmentHeight;
}
executor.shutdown();
ArrayList<Pair<int[], Integer>> targets=new ArrayList<Pair<int[], Integer>>(tasks.size());
for(Future<ArrayList<Pair<int[],Integer>>> task:tasks){
try {
targets.addAll(task.get());
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
return targets;
}
/**
* Fragment the image, give each fragment to a thread for at rest detection
* @param img the current frame (the normal color one)
* @param fragments The square root of whatever number of fragments you want.
* @return
*/
public static ArrayList<Pair<int[], Integer>> FragmentationSplitting(Mat img, int fragments){
ExecutorService executor = Executors.newFixedThreadPool((int) Math.pow(fragments,2));
ArrayList<Future<Pair<int[],Integer>>> tasks = new ArrayList<Future<Pair<int[],Integer>>>((int) Math.pow(fragments,2));
//truncating in worst case looses a pixel for each edge fragment
int fragmentWidth=(img.width()-1)/fragments;
int fragmentHeight=(img.height()-1)/fragments;
int y=0;
while(y+fragmentHeight<img.height()){
int x=0;
while(x+fragmentWidth<img.width()){
//>OpenCV >Rows are actually collums
Mat fragment=img.submat(y, y+fragmentHeight, x, x+fragmentWidth);
Callable<Pair<int[],Integer>> thread =new MassDetectionandObjectPriority(y,x,fragment);
tasks.add(executor.submit(thread));
x+=fragmentWidth;
}
y+=fragmentHeight;
}
executor.shutdown();
ArrayList<Pair<int[], Integer>> targets=new ArrayList<Pair<int[], Integer>>(tasks.size());
for(Future<Pair<int[],Integer>> task:tasks){
try {
targets.add(task.get());
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
return targets;
}
}
| forgot to remove a fixme | src/preprocessing/ImagePartitioning.java | forgot to remove a fixme |
|
Java | mit | 3d3055bffcda0f90e55174bad37441926da562bc | 0 | codingchili/chili-core | package com.codingchili.core.storage;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.unit.Async;
import io.vertx.ext.unit.TestContext;
import io.vertx.ext.unit.junit.VertxUnitRunner;
import org.junit.*;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.nio.file.Paths;
import com.codingchili.core.files.JsonFileStore;
import com.codingchili.core.protocol.Serializer;
import com.codingchili.core.testing.MapTestCases;
import com.codingchili.core.testing.StorageObject;
import static com.codingchili.core.configuration.CoreStrings.*;
/**
* @author Robin Duda
* <p>
* Tests for the storage providers in core. Reuse these tests when new
* storage subsystems are implemented using the StorageLoader.
*/
@Ignore("Travis: disabled to check if this one is problematic.")
@RunWith(VertxUnitRunner.class)
public class JsonMapTest extends MapTestCases {
@Before
public void setUp(TestContext test) {
super.setUp(test, JsonMap.class);
}
@After
public void tearDown(TestContext test) {
super.tearDown(test);
}
@Test
public void testMapSaved(TestContext test) {
Async async = test.async();
StorageObject storable = new StorageObject("the_id", 21);
store.put(storable, result -> context.timer(500, event -> {
try {
JsonObject db = JsonFileStore.readObject(
Paths.get(getDBPath(getDBIdentifier(DB, COLLECTION))).toString());
StorageObject second = Serializer.unpack(
db.getJsonObject(storable.getName()), StorageObject.class);
test.assertEquals(storable, second);
async.complete();
} catch (IOException e) {
test.fail(e);
}
}));
}
}
| core/test/java/com/codingchili/core/storage/JsonMapTest.java | package com.codingchili.core.storage;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.unit.Async;
import io.vertx.ext.unit.TestContext;
import io.vertx.ext.unit.junit.VertxUnitRunner;
import org.junit.*;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.nio.file.Paths;
import com.codingchili.core.files.JsonFileStore;
import com.codingchili.core.protocol.Serializer;
import com.codingchili.core.testing.MapTestCases;
import com.codingchili.core.testing.StorageObject;
import static com.codingchili.core.configuration.CoreStrings.*;
/**
* @author Robin Duda
* <p>
* Tests for the storage providers in core. Reuse these tests when new
* storage subsystems are implemented using the StorageLoader.
*/
@RunWith(VertxUnitRunner.class)
public class JsonMapTest extends MapTestCases {
@Before
public void setUp(TestContext test) {
super.setUp(test, JsonMap.class);
}
@After
public void tearDown(TestContext test) {
super.tearDown(test);
}
@Test
public void testMapSaved(TestContext test) {
Async async = test.async();
StorageObject storable = new StorageObject("the_id", 21);
store.put(storable, result -> context.timer(500, event -> {
try {
JsonObject db = JsonFileStore.readObject(
Paths.get(getDBPath(getDBIdentifier(DB, COLLECTION))).toString());
StorageObject second = Serializer.unpack(
db.getJsonObject(storable.getName()), StorageObject.class);
test.assertEquals(storable, second);
async.complete();
} catch (IOException e) {
test.fail(e);
}
}));
}
}
| Disable JsonMapTest for travis
| core/test/java/com/codingchili/core/storage/JsonMapTest.java | Disable JsonMapTest for travis |
|
Java | mit | 5470a1404bbddf165e97b68e465db9e0b06fbf89 | 0 | xtf-cz/xtf | package cz.xtf.openshift;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BooleanSupplier;
import java.util.stream.Collectors;
import cz.xtf.TestConfiguration;
import cz.xtf.openshift.builder.SecretBuilder;
import cz.xtf.openshift.builder.secret.SecretType;
import cz.xtf.wait.SimpleWaiter;
import cz.xtf.wait.Waiters;
import io.fabric8.kubernetes.api.model.ConfigMap;
import io.fabric8.kubernetes.api.model.Endpoints;
import io.fabric8.kubernetes.api.model.EnvVar;
import io.fabric8.kubernetes.api.model.EnvVarBuilder;
import io.fabric8.kubernetes.api.model.Event;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.HorizontalPodAutoscaler;
import io.fabric8.kubernetes.api.model.KubernetesList;
import io.fabric8.kubernetes.api.model.LocalObjectReference;
import io.fabric8.kubernetes.api.model.Node;
import io.fabric8.kubernetes.api.model.ObjectReference;
import io.fabric8.kubernetes.api.model.ObjectReferenceBuilder;
import io.fabric8.kubernetes.api.model.PersistentVolumeClaim;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.kubernetes.api.model.ResourceQuota;
import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceAccount;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.dsl.LogWatch;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildRequest;
import io.fabric8.openshift.api.model.BuildRequestBuilder;
import io.fabric8.openshift.api.model.DeploymentConfig;
import io.fabric8.openshift.api.model.ImageStream;
import io.fabric8.openshift.api.model.Project;
import io.fabric8.openshift.api.model.ProjectRequest;
import io.fabric8.openshift.api.model.ProjectRequestBuilder;
import io.fabric8.openshift.api.model.Role;
import io.fabric8.openshift.api.model.RoleBinding;
import io.fabric8.openshift.api.model.RoleBindingBuilder;
import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.api.model.Template;
import io.fabric8.openshift.client.DefaultOpenShiftClient;
import io.fabric8.openshift.client.NamespacedOpenShiftClient;
import io.fabric8.openshift.client.OpenShiftConfig;
import io.fabric8.openshift.client.OpenShiftConfigBuilder;
import io.fabric8.openshift.client.ParameterValue;
import lombok.extern.slf4j.Slf4j;
import rx.Observable;
import rx.observables.StringObservable;
@Slf4j
public class OpenShiftUtil implements AutoCloseable {
private final NamespacedOpenShiftClient client;
private final OpenShiftWaiters waiters;
private final String namespace;
public static final String KEEP_LABEL = "xtf.cz/keep";
public OpenShiftUtil(OpenShiftConfig openShiftConfig) {
if (openShiftConfig.getNamespace() == null) {
throw new IllegalArgumentException("Namespace in OpenShiftConfig must not be null!");
}
this.namespace = openShiftConfig.getNamespace();
this.client = new DefaultOpenShiftClient(openShiftConfig);
this.waiters = new OpenShiftWaiters(this);
}
public OpenShiftUtil(String masterUrl, String namespace, String username, String password) throws MalformedURLException {
new URL(masterUrl); // masterUrl validation
OpenShiftConfig openShiftConfig = new OpenShiftConfigBuilder()
.withMasterUrl(masterUrl)
.withTrustCerts(true)
.withRequestTimeout(120_000)
.withNamespace(namespace)
.withUsername(username)
.withPassword(password)
.build();
this.namespace = namespace;
this.client = new DefaultOpenShiftClient(openShiftConfig);
this.waiters = new OpenShiftWaiters(this);
}
public OpenShiftUtil(String masterUrl, String namespace, String token) throws MalformedURLException {
new URL(masterUrl); // masterUrl validation
OpenShiftConfig openShiftConfig = new OpenShiftConfigBuilder()
.withMasterUrl(masterUrl)
.withTrustCerts(true)
.withRequestTimeout(120_000)
.withNamespace(namespace)
.withOauthToken(token)
.build();
this.namespace = namespace;
this.client = new DefaultOpenShiftClient(openShiftConfig);
this.waiters = new OpenShiftWaiters(this);
}
public String namespace() {
return namespace;
}
public NamespacedOpenShiftClient client() {
return client;
}
// General functions
public KubernetesList createResources(HasMetadata... resources) {
return createResources(Arrays.asList(resources));
}
public KubernetesList createResources(List<HasMetadata> resources) {
KubernetesList list = new KubernetesList();
list.setItems(resources);
return createResources(list);
}
public KubernetesList createResources(KubernetesList resources) {
return client.lists().create(resources);
}
public boolean deleteResources(KubernetesList resources) {
return client.lists().delete(resources);
}
public void loadResource(InputStream is) {
client.load(is).deletingExisting().createOrReplace();
}
// Projects
public ProjectRequest createProjectRequest() {
return createProjectRequest(new ProjectRequestBuilder().withNewMetadata().withName(namespace).endMetadata().build());
}
public ProjectRequest createProjectRequest(String name) {
return createProjectRequest(new ProjectRequestBuilder().withNewMetadata().withName(name).endMetadata().build());
}
public ProjectRequest createProjectRequest(ProjectRequest projectRequest) {
return client.projectrequests().create(projectRequest);
}
/**
* Calls rectreateProject(namespace).
*
* @see OpenShiftUtil#recreateProject(String)
*/
public ProjectRequest recreateProject() throws TimeoutException {
return recreateProject(new ProjectRequestBuilder().withNewMetadata().withName(namespace).endMetadata().build());
}
/**
* Creates or recreates project specified by name.
*
* @param name name of a project to be created
* @return ProjectRequest instatnce
*/
public ProjectRequest recreateProject(String name) throws TimeoutException {
return recreateProject(new ProjectRequestBuilder().withNewMetadata().withName(name).endMetadata().build());
}
/**
* Creates or recreates project specified by projectRequest instance.
*
* @return ProjectRequest instatnce
*/
public ProjectRequest recreateProject(ProjectRequest projectRequest) throws TimeoutException {
deleteProject(projectRequest.getMetadata().getName());
Waiters.sleep(TimeUnit.SECONDS, 30);
AtomicReference<ProjectRequest> pr = new AtomicReference<>();
BooleanSupplier bs = () -> {
try {
ProjectRequest attempt = createProjectRequest(projectRequest);
pr.set(attempt);
return true;
} catch (KubernetesClientException e) {
log.warn("Failed to create project: {}", projectRequest.getMetadata().getName());
return false;
}
};
new SimpleWaiter(bs, TimeUnit.MILLISECONDS, TestConfiguration.defaultWaitTimeout(), "Waiting for successful project recreation").interval(TimeUnit.SECONDS, 10).execute();
return pr.get();
}
/**
* Tries to retreive project with name 'name'. Swallows KubernetesClientException
* if project doesn't exist or isn't accessible for user.
*
* @param name name of requested project.
* @return Project instance if accessible otherwise null.
*/
public Project getProject(String name) {
try {
return client.projects().withName(name).get();
} catch (KubernetesClientException e) {
return null;
}
}
public boolean deleteProject() {
return deleteProject(namespace);
}
public boolean deleteProject(String name) {
return getProject(name) != null ? client.projects().withName(name).delete() : false;
}
// ImageStreams
public ImageStream createImageStream(ImageStream imageStream) {
return client.imageStreams().create(imageStream);
}
public ImageStream getImageStream(String name) {
return client.imageStreams().withName(name).get();
}
public List<ImageStream> getImageStreams() {
return client.imageStreams().list().getItems();
}
public boolean deleteImageStream(ImageStream imageStream) {
return client.imageStreams().delete(imageStream);
}
// Pods
public Pod createPod(Pod pod) {
return client.pods().create(pod);
}
public Pod getPod(String name) {
return client.pods().withName(name).get();
}
public String getPodLog(Pod pod) {
return client.pods().withName(pod.getMetadata().getName()).getLog();
}
public Reader getPodLogReader(Pod pod) {
return client.pods().withName(pod.getMetadata().getName()).getLogReader();
}
public Observable<String> observePodLog(Pod pod) {
LogWatch watcher = client.pods().withName(pod.getMetadata().getName()).watchLog();
return StringObservable.byLine(StringObservable.from(new InputStreamReader(watcher.getOutput())));
}
public List<Pod> getPods() {
return client.pods().list().getItems();
}
/**
* @param deploymentConfigName name of deploymentConfig
* @return all active pods created by specified deploymentConfig
*/
public List<Pod> getPods(String deploymentConfigName) {
return getLabeledPods("deploymentconfig", deploymentConfigName);
}
/**
* @param deploymentConfigName name of deploymentConfig
* @param version deployment version to be retrieved
* @return active pods created by deploymentConfig with specified version
*/
public List<Pod> getPods(String deploymentConfigName, int version) {
return getLabeledPods("deployment", deploymentConfigName + "-" + version);
}
public List<Pod> getLabeledPods(String key, String value) {
return getLabeledPods(Collections.singletonMap(key, value));
}
public List<Pod> getLabeledPods(Map<String, String> labels) {
return client.pods().withLabels(labels).list().getItems();
}
public Pod getAnyPod(String deploymentConfigName) {
return getAnyPod("deploymentconfig", deploymentConfigName);
}
public Pod getAnyPod(String key, String value) {
return getAnyPod(Collections.singletonMap(key, value));
}
public Pod getAnyPod(Map<String, String> labels) {
List<Pod> pods = getLabeledPods(labels);
return pods.get(new Random().nextInt(pods.size()));
}
public boolean deletePod(Pod pod) {
return deletePod(pod, 0L);
}
public boolean deletePod(Pod pod, long gracePeriod) {
return client.pods().withName(pod.getMetadata().getName()).withGracePeriod(gracePeriod).delete();
}
/**
* Deletes pods with specified label.
*
* @param key key of the label
* @param value value of the label
* @return True if any pod has been deleted
*/
public boolean deletePods(String key, String value) {
return client.pods().withLabel(key, value).delete();
}
public boolean deletePods(Map<String, String> labels) {
return client.pods().withLabels(labels).delete();
}
// Secrets
public Secret createSecret(Secret secret) {
return client.secrets().create(secret);
}
public Secret getSecret(String name) {
return client.secrets().withName(name).get();
}
public List<Secret> getSecrets() {
return client.secrets().list().getItems();
}
/**
* Retrieves secrets that aren't considered default. Secrets that are left out contain type starting with 'kubernetes.io/'.
*
* @return List of secrets that aren't considered default.
*/
public List<Secret> getUserSecrets() {
return client.secrets().withoutLabel(KEEP_LABEL).list().getItems().stream()
.filter(s -> !s.getType().startsWith("kubernetes.io/"))
.collect(Collectors.toList());
}
public boolean deleteSecret(Secret secret) {
return client.secrets().delete(secret);
}
/**
* Creates the secret needed for an authenticated external registry (e.g. registry.redhat.io ) and adds it to the default and builder service accounts
* Uses the TestConfiguration.oregRegistry and TestConfiguration.oregAuth properties
*
* @return
*/
public Secret createORegSecret() {
if (TestConfiguration.oregRegistry() != null && !TestConfiguration.oregRegistry().trim().isEmpty()) {
final String secretName = TestConfiguration.oregRegistry();
Secret oregSecret = getSecret(secretName);
if (oregSecret != null) {
deleteSecret(oregSecret);
}
Secret dockerCfg = new SecretBuilder(secretName)
.setType(SecretType.DOCKERCFG)
.addData(".dockerconfigjson", ("{\"auths\":{\"" + TestConfiguration.oregRegistry() + "\":{\"auth\":\"" + TestConfiguration.oregAuth() + "\"}}}").getBytes())
.build();
dockerCfg.getMetadata().setLabels(Collections.singletonMap(KEEP_LABEL, "keep"));
dockerCfg = createSecret(dockerCfg);
client().serviceAccounts().withName("builder").edit().addToSecrets(new ObjectReferenceBuilder().withKind("Secret").withName(secretName).build()).done();
client().serviceAccounts().withName("default").edit().addToSecrets(new ObjectReferenceBuilder().withKind("Secret").withName(secretName).build()).addToImagePullSecrets(new LocalObjectReference(secretName)).done();
return dockerCfg;
}
return null;
}
// Services
public Service createService(Service service) {
return client.services().create(service);
}
public Service getService(String name) {
return client.services().withName(name).get();
}
public List<Service> getServices() {
return client.services().list().getItems();
}
public boolean deleteService(Service service) {
return client.services().delete(service);
}
// Endpoints
public Endpoints createEndpoint(Endpoints endpoint) {
return client.endpoints().create(endpoint);
}
public Endpoints getEndpoint(String name) {
return client.endpoints().withName(name).get();
}
public List<Endpoints> getEndpoints() {
return client.endpoints().list().getItems();
}
public boolean deleteEndpoint(Endpoints endpoint) {
return client.endpoints().delete(endpoint);
}
// Routes
public Route createRoute(Route route) {
return client.routes().create(route);
}
public Route getRoute(String name) {
return client.routes().withName(name).get();
}
public List<Route> getRoutes() {
return client.routes().list().getItems();
}
public boolean deleteRoute(Route route) {
return client.routes().delete(route);
}
// ReplicationControllers - Only for internal usage with clean
private List<ReplicationController> getReplicationControllers() {
return client.replicationControllers().list().getItems();
}
private boolean deleteReplicationController(ReplicationController replicationController) {
return client.replicationControllers().withName(replicationController.getMetadata().getName()).cascading(false).delete();
}
// DeploymentConfigs
public DeploymentConfig createDeploymentConfig(DeploymentConfig deploymentConfig) {
return client.deploymentConfigs().create(deploymentConfig);
}
public DeploymentConfig getDeploymentConfig(String name) {
return client.deploymentConfigs().withName(name).get();
}
public List<DeploymentConfig> getDeploymentConfigs() {
return client.deploymentConfigs().list().getItems();
}
/**
* Returns first container environment variables.
*
* @param name name of deploymentConfig
* @return Map of environment variables
*/
public Map<String, String> getDeploymentConfigEnvVars(String name) {
return getDeploymentConfig(name).getSpec().getTemplate().getSpec().getContainers().get(0).getEnv().stream().collect(Collectors.toMap(EnvVar::getName, EnvVar::getValue));
}
public DeploymentConfig updateDeploymentconfig(DeploymentConfig deploymentConfig) {
return client.deploymentConfigs().withName(deploymentConfig.getMetadata().getName()).replace(deploymentConfig);
}
/**
* Updates deployment config environment variables with envVars values.
*
* @param name name of deploymentConfig
* @param envVars environment variables
*/
public DeploymentConfig updateDeploymentConfigEnvVars(String name, Map<String, String> envVars) {
DeploymentConfig dc = getDeploymentConfig(name);
List<EnvVar> vars = envVars.entrySet().stream().map(x -> new EnvVarBuilder().withName(x.getKey()).withValue(x.getValue()).build()).collect(Collectors.toList());
dc.getSpec().getTemplate().getSpec().getContainers().get(0).getEnv().removeIf(x -> envVars.containsKey(x.getName()));
dc.getSpec().getTemplate().getSpec().getContainers().get(0).getEnv().addAll(vars);
return updateDeploymentconfig(dc);
}
public boolean deleteDeploymentConfig(DeploymentConfig deploymentConfig) {
return deleteDeploymentConfig(deploymentConfig, false);
}
public boolean deleteDeploymentConfig(DeploymentConfig deploymentConfig, boolean cascading) {
return client.deploymentConfigs().withName(deploymentConfig.getMetadata().getName()).cascading(cascading).delete();
}
/**
* Scales deployment config to specified number of replicas.
*
* @param name name of deploymentConfig
* @param replicas number of target replicas
*/
public void scale(String name, int replicas) {
client.deploymentConfigs().withName(name).scale(replicas);
}
/**
* Redeploys deployment config to latest version.
*
* @param name name of deploymentConfig
*/
public void deployLatest(String name) {
client.deploymentConfigs().withName(name).deployLatest();
}
// Builds
public Build getBuild(String name) {
return client.inNamespace(namespace).builds().withName(name).get();
}
public Build getLatestBuild(String buildConfigName) {
long lastVersion = client.buildConfigs().withName(buildConfigName).get().getStatus().getLastVersion();
return getBuild(buildConfigName + "-" + lastVersion);
}
public List<Build> getBuilds() {
return client.builds().list().getItems();
}
public String getBuildLog(Build build) {
return client.builds().withName(build.getMetadata().getName()).getLog();
}
public Reader getBuildLogReader(Build build) {
return client.builds().withName(build.getMetadata().getName()).getLogReader();
}
public boolean deleteBuild(Build build) {
return client.builds().delete(build);
}
public Build startBuild(String buildConfigName) {
BuildRequest request = new BuildRequestBuilder().withNewMetadata().withName(buildConfigName).endMetadata().build();
return client.buildConfigs().withName(buildConfigName).instantiate(request);
}
public Build startBinaryBuild(String buildConfigName, File file) {
return client.buildConfigs().withName(buildConfigName).instantiateBinary().fromFile(file);
}
// BuildConfigs
public BuildConfig createBuildConfig(BuildConfig buildConfig) {
return client.buildConfigs().create(buildConfig);
}
public BuildConfig getBuildConfig(String name) {
return client.buildConfigs().withName(name).get();
}
public List<BuildConfig> getBuildConfigs() {
return client.buildConfigs().list().getItems();
}
/**
* Returns environment variables of buildConfig specified under sourceStrategy.
*
* @param name name of buildConfig
* @return environment variables
*/
public Map<String, String> getBuildConfigEnvVars(String name) {
return getBuildConfig(name).getSpec().getStrategy().getSourceStrategy().getEnv().stream().collect(Collectors.toMap(EnvVar::getName, EnvVar::getValue));
}
public BuildConfig updateBuildConfig(BuildConfig buildConfig) {
return client.buildConfigs().withName(buildConfig.getMetadata().getName()).replace(buildConfig);
}
/**
* Updates build config with specified environment variables.
*
* @param name name of buildConfig
* @param envVars environment variables
*/
public BuildConfig updateBuildConfigEnvVars(String name, Map<String, String> envVars) {
BuildConfig bc = getBuildConfig(name);
List<EnvVar> vars = envVars.entrySet().stream().map(x -> new EnvVarBuilder().withName(x.getKey()).withValue(x.getValue()).build()).collect(Collectors.toList());
bc.getSpec().getStrategy().getSourceStrategy().getEnv().removeIf(x -> envVars.containsKey(x.getName()));
bc.getSpec().getStrategy().getSourceStrategy().getEnv().addAll(vars);
return updateBuildConfig(bc);
}
public boolean deleteBuildConfig(BuildConfig buildConfig) {
return client.buildConfigs().delete(buildConfig);
}
// ServiceAccounts
public ServiceAccount createServiceAccount(ServiceAccount serviceAccount) {
return client.serviceAccounts().create(serviceAccount);
}
public ServiceAccount getServiceAccount(String name) {
return client.serviceAccounts().withName(name).get();
}
public List<ServiceAccount> getServiceAccounts() {
return client.serviceAccounts().list().getItems();
}
/**
* Retrieves service accounts that aren't considered default.
* Service accounts that are left out from list:
* <ul>
* <li>builder</li>
* <li>default</li>
* <li>deployer</li>
* </ul>
*
* @return List of service accounts that aren't considered default.
*/
public List<ServiceAccount> getUserServiceAccounts() {
return client.serviceAccounts().withoutLabel(KEEP_LABEL).list().getItems().stream()
.filter(sa -> !sa.getMetadata().getName().matches("builder|default|deployer"))
.collect(Collectors.toList());
}
public boolean deleteServiceAccount(ServiceAccount serviceAccount) {
return client.serviceAccounts().delete(serviceAccount);
}
// RoleBindings
public RoleBinding createRoleBinding(RoleBinding roleBinding) {
return client.roleBindings().create(roleBinding);
}
public RoleBinding getRoleBinding(String name) {
return client.roleBindings().withName(name).get();
}
public List<RoleBinding> getRoleBindings() {
return client.roleBindings().list().getItems();
}
public List<Role> getRoles() {
return client.roles().list().getItems();
}
/**
* Retrieves role bindings that aren't considered default.
* Role bindings that are left out from list:
* <ul>
* <li>admin</li>
* <li>system:deployers</li>
* <li>system:image-builders</li>
* <li>system:image-pullers</li>
* </ul>
*
* @return List of role bindings that aren't considered default.
*/
public List<RoleBinding> getUserRoleBindings() {
return client.roleBindings().withoutLabel(KEEP_LABEL).list().getItems().stream()
.filter(rb -> !rb.getMetadata().getName().matches("admin|system:deployers|system:image-builders|system:image-pullers"))
.collect(Collectors.toList());
}
public boolean deleteRoleBinding(RoleBinding roleBinding) {
return client.roleBindings().delete(roleBinding);
}
public RoleBinding addRoleToUser(String roleName, String username) {
RoleBinding roleBinding = getOrCreateRoleBinding(roleName);
addSubjectToRoleBinding(roleBinding, "User", username);
addUserNameToRoleBinding(roleBinding, username);
return updateRoleBinding(roleBinding);
}
public RoleBinding addRoleToServiceAccount(String roleName, String serviceAccountName) {
RoleBinding roleBinding = getOrCreateRoleBinding(roleName);
addSubjectToRoleBinding(roleBinding, "ServiceAccount", serviceAccountName);
addUserNameToRoleBinding(roleBinding, String.format("system:serviceaccount:%s:%s", namespace, serviceAccountName));
return updateRoleBinding(roleBinding);
}
public RoleBinding addRoleToGroup(String roleName, String groupName) {
RoleBinding roleBinding = getOrCreateRoleBinding(roleName);
addSubjectToRoleBinding(roleBinding, "SystemGroup", groupName);
addGroupNameToRoleBinding(roleBinding, groupName);
return updateRoleBinding(roleBinding);
}
private RoleBinding getOrCreateRoleBinding(String name) {
RoleBinding roleBinding = getRoleBinding(name);
if (roleBinding == null) {
roleBinding = new RoleBindingBuilder()
.withNewMetadata().withName(name).endMetadata()
.withNewRoleRef().withName(name).endRoleRef()
.build();
createRoleBinding(roleBinding);
}
return roleBinding;
}
public RoleBinding updateRoleBinding(RoleBinding roleBinding) {
return client.roleBindings().withName(roleBinding.getMetadata().getName()).replace(roleBinding);
}
private void addSubjectToRoleBinding(RoleBinding roleBinding, String entityKind, String entityName) {
ObjectReference subject = new ObjectReferenceBuilder().withKind(entityKind).withName(entityName).build();
if (roleBinding.getSubjects().stream().noneMatch(x -> x.getName().equals(subject.getName()) && x.getKind().equals(subject.getKind()))) {
roleBinding.getSubjects().add(subject);
}
}
private void addUserNameToRoleBinding(RoleBinding roleBinding, String userName) {
if (roleBinding.getUserNames() == null) {
roleBinding.setUserNames(new ArrayList<>());
}
if (!roleBinding.getUserNames().contains(userName)) {
roleBinding.getUserNames().add(userName);
}
}
private void addGroupNameToRoleBinding(RoleBinding roleBinding, String groupName) {
if (roleBinding.getGroupNames() == null) {
roleBinding.setGroupNames(new ArrayList<>());
}
if (!roleBinding.getGroupNames().contains(groupName)) {
roleBinding.getGroupNames().add(groupName);
}
}
public RoleBinding removeRoleFromServiceAccount(String roleName, String serviceAccountName) {
return removeRoleFromEntity(roleName, "ServiceAccount", serviceAccountName, String.format("system:serviceaccount:%s:%s", namespace, serviceAccountName));
}
public RoleBinding removeRoleFromEntity(String roleName, String entityKind, String entityName, String userName) {
RoleBinding roleBinding = client.roleBindings().withName(roleName).get();
if (roleBinding != null) {
roleBinding.getSubjects().remove(new ObjectReferenceBuilder().withKind(entityKind).withName(entityName).withNamespace(namespace).build());
roleBinding.getUserNames().remove(userName);
return updateRoleBinding(roleBinding);
}
return null;
}
// ResourceQuotas
public ResourceQuota createResourceQuota(ResourceQuota resourceQuota) {
return client.resourceQuotas().create(resourceQuota);
}
public ResourceQuota getResourceQuota(String name) {
return client.resourceQuotas().withName(name).get();
}
public boolean deleteResourceQuota(ResourceQuota resourceQuota) {
return client.resourceQuotas().delete(resourceQuota);
}
// Persistent volume claims
public PersistentVolumeClaim createPersistentVolumeClaim(PersistentVolumeClaim pvc) {
return client.persistentVolumeClaims().create(pvc);
}
public PersistentVolumeClaim getPersistentVolumeClaim(String name) {
return client.persistentVolumeClaims().withName(name).get();
}
public List<PersistentVolumeClaim> getPersistentVolumeClaims() {
return client.persistentVolumeClaims().list().getItems();
}
public boolean deletePersistentVolumeClaim(PersistentVolumeClaim pvc) {
return client.persistentVolumeClaims().delete(pvc);
}
// HorizontalPodAutoscalers
public HorizontalPodAutoscaler createHorizontalPodAutoscaler(HorizontalPodAutoscaler hpa) {
return client.autoscaling().horizontalPodAutoscalers().create(hpa);
}
public HorizontalPodAutoscaler getHorizontalPodAutoscaler(String name) {
return client.autoscaling().horizontalPodAutoscalers().withName(name).get();
}
public List<HorizontalPodAutoscaler> getHorizontalPodAutoscalers() {
return client.autoscaling().horizontalPodAutoscalers().list().getItems();
}
public boolean deleteHorizontalPodAutoscaler(HorizontalPodAutoscaler hpa) {
return client.autoscaling().horizontalPodAutoscalers().delete(hpa);
}
// ConfigMaps
public ConfigMap createConfigMap(ConfigMap configMap) {
return client.configMaps().create(configMap);
}
public ConfigMap getConfigMap(String name) {
return client.configMaps().withName(name).get();
}
public List<ConfigMap> getConfigMaps() {
return client.configMaps().list().getItems();
}
public boolean deleteConfigMap(ConfigMap configMap) {
return client.configMaps().delete(configMap);
}
// Templates
public Template createTemplate(Template template) {
return client.templates().create(template);
}
public Template getTemplate(String name) {
return client.templates().withName(name).get();
}
public List<Template> getTemplates() {
return client.templates().list().getItems();
}
public boolean deleteTemplate(String name) {
return client.templates().withName(name).delete();
}
public boolean deleteTemplate(Template template) {
return client.templates().delete(template);
}
public Template loadAndCreateTemplate(InputStream is) {
Template t = client().templates().load(is).get();
deleteTemplate(t);
return createTemplate(t);
}
public KubernetesList recreateAndProcessTemplate(Template template, Map<String, String> parameters) {
deleteTemplate(template.getMetadata().getName());
createTemplate(template);
return processTemplate(template.getMetadata().getName(), parameters);
}
public KubernetesList recreateAndProcessAndDeployTemplate(Template template, Map<String, String> parameters) {
return createResources(recreateAndProcessTemplate(template, parameters));
}
public KubernetesList processTemplate(String name, Map<String, String> parameters) {
ParameterValue[] values = processParameters(parameters);
return client.templates().withName(name).process(values);
}
public KubernetesList processAndDeployTemplate(String name, Map<String, String> parameters) {
return createResources(processTemplate(name, parameters));
}
private ParameterValue[] processParameters(Map<String, String> parameters) {
return parameters.entrySet().stream().map(entry -> new ParameterValue(entry.getKey(), entry.getValue())).collect(Collectors.toList()).toArray(new ParameterValue[parameters.size()]);
}
// Nodes
public Node getNode(String name) {
return client.nodes().withName(name).get();
}
public List<Node> getNodes() {
return client.nodes().list().getItems();
}
public List<Node> getNodes(Map<String, String> labels) {
return client.nodes().withLabels(labels).list().getItems();
}
// Events
public List<Event> getEvents() {
return client.events().list().getItems();
}
// Clean up function
/**
* Deletes all* resources in namespace. Waits till all are deleted. <br/>
* <br/>
* <p>
* * Only user created secrets, service accounts and role bindings are deleted. Default will remain.
*
* @throws TimeoutException in case that some user resources will remain even after timeout.
* @see #getUserSecrets()
* @see #getUserServiceAccounts()
* @see #getUserRoleBindings()
*/
public void cleanAndWait() throws TimeoutException {
clean();
waiters.isProjectClean().execute();
}
/**
* Deletes all* resources in namespace. Waits till all are deleted. <br/>
* <br/>
* <p>
* * Only user created secrets, service accounts and role bindings are deleted. Default will remain.
*
* @throws AssertionError in case that some user resources will remain even after timeout.
* @see #getUserSecrets()
* @see #getUserServiceAccounts()
* @see #getUserRoleBindings()
*/
public void cleanAndAssert() {
clean();
waiters.isProjectClean().assertEventually();
}
/**
* Deletes all* resources in namespace. Doesn't wait till all are deleted. <br/>
* <br/>
* <p>
* * Only user created secrets, service accounts and role bindings are deleted. Default will remain.
*
* @see #getUserSecrets()
* @see #getUserServiceAccounts()
* @see #getUserRoleBindings()
*/
public void clean() {
// keep the order for deletion to prevent K8s creating resources again
client.templates().delete();
client.extensions().deployments().delete();
client.apps().statefulSets().delete();
client.extensions().jobs().delete();
getDeploymentConfigs().forEach(this::deleteDeploymentConfig);
getReplicationControllers().forEach(this::deleteReplicationController);
client.buildConfigs().delete();
client.imageStreams().delete();
client.endpoints().delete();
client.services().delete();
client.builds().delete();
client.routes().delete();
client.pods().withGracePeriod(0).delete();
client.persistentVolumeClaims().delete();
client.autoscaling().horizontalPodAutoscalers().delete();
client.configMaps().delete();
getUserSecrets().forEach(this::deleteSecret);
getUserServiceAccounts().forEach(this::deleteServiceAccount);
getUserRoleBindings().forEach(this::deleteRoleBinding);
client.roles().delete();
}
@Override
public void close() {
client.close();
}
// Logs storing
public Path storePodLog(Pod pod, Path dirPath, String fileName) throws IOException {
String log = getPodLog(pod);
return storeLog(log, dirPath, fileName);
}
public Path storeBuildLog(Build build, Path dirPath, String fileName) throws IOException {
String log = getBuildLog(build);
return storeLog(log, dirPath, fileName);
}
private Path storeLog(String log, Path dirPath, String fileName) throws IOException {
Path filePath = dirPath.resolve(fileName + ".log");
Files.createDirectories(dirPath);
Files.createFile(filePath);
Files.write(filePath, log.getBytes());
return filePath;
}
// Waiting
public OpenShiftWaiters waiters() {
return waiters;
}
}
| utilities/src/main/java/cz/xtf/openshift/OpenShiftUtil.java | package cz.xtf.openshift;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BooleanSupplier;
import java.util.stream.Collectors;
import cz.xtf.TestConfiguration;
import cz.xtf.openshift.builder.SecretBuilder;
import cz.xtf.openshift.builder.secret.SecretType;
import cz.xtf.wait.SimpleWaiter;
import cz.xtf.wait.Waiters;
import io.fabric8.kubernetes.api.model.ConfigMap;
import io.fabric8.kubernetes.api.model.Endpoints;
import io.fabric8.kubernetes.api.model.EnvVar;
import io.fabric8.kubernetes.api.model.EnvVarBuilder;
import io.fabric8.kubernetes.api.model.Event;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.HorizontalPodAutoscaler;
import io.fabric8.kubernetes.api.model.KubernetesList;
import io.fabric8.kubernetes.api.model.LocalObjectReference;
import io.fabric8.kubernetes.api.model.Node;
import io.fabric8.kubernetes.api.model.ObjectReference;
import io.fabric8.kubernetes.api.model.ObjectReferenceBuilder;
import io.fabric8.kubernetes.api.model.PersistentVolumeClaim;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.kubernetes.api.model.ResourceQuota;
import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceAccount;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.dsl.LogWatch;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildRequest;
import io.fabric8.openshift.api.model.BuildRequestBuilder;
import io.fabric8.openshift.api.model.DeploymentConfig;
import io.fabric8.openshift.api.model.ImageStream;
import io.fabric8.openshift.api.model.Project;
import io.fabric8.openshift.api.model.ProjectRequest;
import io.fabric8.openshift.api.model.ProjectRequestBuilder;
import io.fabric8.openshift.api.model.Role;
import io.fabric8.openshift.api.model.RoleBinding;
import io.fabric8.openshift.api.model.RoleBindingBuilder;
import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.api.model.Template;
import io.fabric8.openshift.client.DefaultOpenShiftClient;
import io.fabric8.openshift.client.NamespacedOpenShiftClient;
import io.fabric8.openshift.client.OpenShiftConfig;
import io.fabric8.openshift.client.OpenShiftConfigBuilder;
import io.fabric8.openshift.client.ParameterValue;
import lombok.extern.slf4j.Slf4j;
import rx.Observable;
import rx.observables.StringObservable;
@Slf4j
public class OpenShiftUtil implements AutoCloseable {
private final NamespacedOpenShiftClient client;
private final OpenShiftWaiters waiters;
private final String namespace;
public static final String KEEP_LABEL = "xtf.cz/keep";
public OpenShiftUtil(OpenShiftConfig openShiftConfig) {
if (openShiftConfig.getNamespace() == null) {
throw new IllegalArgumentException("Namespace in OpenShiftConfig must not be null!");
}
this.namespace = openShiftConfig.getNamespace();
this.client = new DefaultOpenShiftClient(openShiftConfig);
this.waiters = new OpenShiftWaiters(this);
}
public OpenShiftUtil(String masterUrl, String namespace, String username, String password) throws MalformedURLException {
new URL(masterUrl); // masterUrl validation
OpenShiftConfig openShiftConfig = new OpenShiftConfigBuilder()
.withMasterUrl(masterUrl)
.withTrustCerts(true)
.withRequestTimeout(120_000)
.withNamespace(namespace)
.withUsername(username)
.withPassword(password)
.build();
this.namespace = namespace;
this.client = new DefaultOpenShiftClient(openShiftConfig);
this.waiters = new OpenShiftWaiters(this);
}
public OpenShiftUtil(String masterUrl, String namespace, String token) throws MalformedURLException {
new URL(masterUrl); // masterUrl validation
OpenShiftConfig openShiftConfig = new OpenShiftConfigBuilder()
.withMasterUrl(masterUrl)
.withTrustCerts(true)
.withRequestTimeout(120_000)
.withNamespace(namespace)
.withOauthToken(token)
.build();
this.namespace = namespace;
this.client = new DefaultOpenShiftClient(openShiftConfig);
this.waiters = new OpenShiftWaiters(this);
}
public String namespace() {
return namespace;
}
public NamespacedOpenShiftClient client() {
return client;
}
// General functions
public KubernetesList createResources(HasMetadata... resources) {
return createResources(Arrays.asList(resources));
}
public KubernetesList createResources(List<HasMetadata> resources) {
KubernetesList list = new KubernetesList();
list.setItems(resources);
return createResources(list);
}
public KubernetesList createResources(KubernetesList resources) {
return client.lists().create(resources);
}
public boolean deleteResources(KubernetesList resources) {
return client.lists().delete(resources);
}
public void loadResource(InputStream is) {
client.load(is).deletingExisting().createOrReplace();
}
// Projects
public ProjectRequest createProjectRequest() {
return createProjectRequest(new ProjectRequestBuilder().withNewMetadata().withName(namespace).endMetadata().build());
}
public ProjectRequest createProjectRequest(String name) {
return createProjectRequest(new ProjectRequestBuilder().withNewMetadata().withName(name).endMetadata().build());
}
public ProjectRequest createProjectRequest(ProjectRequest projectRequest) {
return client.projectrequests().create(projectRequest);
}
/**
* Calls rectreateProject(namespace).
*
* @see OpenShiftUtil#recreateProject(String)
*/
public ProjectRequest recreateProject() throws TimeoutException {
return recreateProject(new ProjectRequestBuilder().withNewMetadata().withName(namespace).endMetadata().build());
}
/**
* Creates or recreates project specified by name.
*
* @param name name of a project to be created
* @return ProjectRequest instatnce
*/
public ProjectRequest recreateProject(String name) throws TimeoutException {
return recreateProject(new ProjectRequestBuilder().withNewMetadata().withName(name).endMetadata().build());
}
/**
* Creates or recreates project specified by projectRequest instance.
*
* @return ProjectRequest instatnce
*/
public ProjectRequest recreateProject(ProjectRequest projectRequest) throws TimeoutException {
deleteProject(projectRequest.getMetadata().getName());
Waiters.sleep(TimeUnit.SECONDS, 30);
AtomicReference<ProjectRequest> pr = new AtomicReference<>();
BooleanSupplier bs = () -> {
try {
ProjectRequest attempt = createProjectRequest(projectRequest);
pr.set(attempt);
return true;
} catch (KubernetesClientException e) {
log.warn("Failed to create project: {}", projectRequest.getMetadata().getName());
return false;
}
};
new SimpleWaiter(bs, TimeUnit.MINUTES, 3, "Waiting for successful project recreation").interval(TimeUnit.SECONDS, 10).execute();
return pr.get();
}
/**
* Tries to retreive project with name 'name'. Swallows KubernetesClientException
* if project doesn't exist or isn't accessible for user.
*
* @param name name of requested project.
* @return Project instance if accessible otherwise null.
*/
public Project getProject(String name) {
try {
return client.projects().withName(name).get();
} catch (KubernetesClientException e) {
return null;
}
}
public boolean deleteProject() {
return deleteProject(namespace);
}
public boolean deleteProject(String name) {
return getProject(name) != null ? client.projects().withName(name).delete() : false;
}
// ImageStreams
public ImageStream createImageStream(ImageStream imageStream) {
return client.imageStreams().create(imageStream);
}
public ImageStream getImageStream(String name) {
return client.imageStreams().withName(name).get();
}
public List<ImageStream> getImageStreams() {
return client.imageStreams().list().getItems();
}
public boolean deleteImageStream(ImageStream imageStream) {
return client.imageStreams().delete(imageStream);
}
// Pods
public Pod createPod(Pod pod) {
return client.pods().create(pod);
}
public Pod getPod(String name) {
return client.pods().withName(name).get();
}
public String getPodLog(Pod pod) {
return client.pods().withName(pod.getMetadata().getName()).getLog();
}
public Reader getPodLogReader(Pod pod) {
return client.pods().withName(pod.getMetadata().getName()).getLogReader();
}
public Observable<String> observePodLog(Pod pod) {
LogWatch watcher = client.pods().withName(pod.getMetadata().getName()).watchLog();
return StringObservable.byLine(StringObservable.from(new InputStreamReader(watcher.getOutput())));
}
public List<Pod> getPods() {
return client.pods().list().getItems();
}
/**
* @param deploymentConfigName name of deploymentConfig
* @return all active pods created by specified deploymentConfig
*/
public List<Pod> getPods(String deploymentConfigName) {
return getLabeledPods("deploymentconfig", deploymentConfigName);
}
/**
* @param deploymentConfigName name of deploymentConfig
* @param version deployment version to be retrieved
* @return active pods created by deploymentConfig with specified version
*/
public List<Pod> getPods(String deploymentConfigName, int version) {
return getLabeledPods("deployment", deploymentConfigName + "-" + version);
}
public List<Pod> getLabeledPods(String key, String value) {
return getLabeledPods(Collections.singletonMap(key, value));
}
public List<Pod> getLabeledPods(Map<String, String> labels) {
return client.pods().withLabels(labels).list().getItems();
}
public Pod getAnyPod(String deploymentConfigName) {
return getAnyPod("deploymentconfig", deploymentConfigName);
}
public Pod getAnyPod(String key, String value) {
return getAnyPod(Collections.singletonMap(key, value));
}
public Pod getAnyPod(Map<String, String> labels) {
List<Pod> pods = getLabeledPods(labels);
return pods.get(new Random().nextInt(pods.size()));
}
public boolean deletePod(Pod pod) {
return deletePod(pod, 0L);
}
public boolean deletePod(Pod pod, long gracePeriod) {
return client.pods().withName(pod.getMetadata().getName()).withGracePeriod(gracePeriod).delete();
}
/**
* Deletes pods with specified label.
*
* @param key key of the label
* @param value value of the label
* @return True if any pod has been deleted
*/
public boolean deletePods(String key, String value) {
return client.pods().withLabel(key, value).delete();
}
public boolean deletePods(Map<String, String> labels) {
return client.pods().withLabels(labels).delete();
}
// Secrets
public Secret createSecret(Secret secret) {
return client.secrets().create(secret);
}
public Secret getSecret(String name) {
return client.secrets().withName(name).get();
}
public List<Secret> getSecrets() {
return client.secrets().list().getItems();
}
/**
* Retrieves secrets that aren't considered default. Secrets that are left out contain type starting with 'kubernetes.io/'.
*
* @return List of secrets that aren't considered default.
*/
public List<Secret> getUserSecrets() {
return client.secrets().withoutLabel(KEEP_LABEL).list().getItems().stream()
.filter(s -> !s.getType().startsWith("kubernetes.io/"))
.collect(Collectors.toList());
}
public boolean deleteSecret(Secret secret) {
return client.secrets().delete(secret);
}
/**
* Creates the secret needed for an authenticated external registry (e.g. registry.redhat.io ) and adds it to the default and builder service accounts
* Uses the TestConfiguration.oregRegistry and TestConfiguration.oregAuth properties
* @return
*/
public Secret createORegSecret() {
if (TestConfiguration.oregRegistry() != null && !TestConfiguration.oregRegistry().trim().isEmpty()) {
final String secretName = TestConfiguration.oregRegistry();
Secret oregSecret = getSecret(secretName);
if (oregSecret != null) {
deleteSecret(oregSecret);
}
Secret dockerCfg = new SecretBuilder(secretName)
.setType(SecretType.DOCKERCFG)
.addData(".dockerconfigjson", ("{\"auths\":{\"" + TestConfiguration.oregRegistry() + "\":{\"auth\":\"" + TestConfiguration.oregAuth() + "\"}}}").getBytes())
.build();
dockerCfg.getMetadata().setLabels(Collections.singletonMap(KEEP_LABEL, "keep"));
dockerCfg = createSecret(dockerCfg);
client().serviceAccounts().withName("builder").edit().addToSecrets(new ObjectReferenceBuilder().withKind("Secret").withName(secretName).build()).done();
client().serviceAccounts().withName("default").edit().addToSecrets(new ObjectReferenceBuilder().withKind("Secret").withName(secretName).build()).addToImagePullSecrets(new LocalObjectReference(secretName)).done();
return dockerCfg;
}
return null;
}
// Services
public Service createService(Service service) {
return client.services().create(service);
}
public Service getService(String name) {
return client.services().withName(name).get();
}
public List<Service> getServices() {
return client.services().list().getItems();
}
public boolean deleteService(Service service) {
return client.services().delete(service);
}
// Endpoints
public Endpoints createEndpoint(Endpoints endpoint) {
return client.endpoints().create(endpoint);
}
public Endpoints getEndpoint(String name) {
return client.endpoints().withName(name).get();
}
public List<Endpoints> getEndpoints() {
return client.endpoints().list().getItems();
}
public boolean deleteEndpoint(Endpoints endpoint) {
return client.endpoints().delete(endpoint);
}
// Routes
public Route createRoute(Route route) {
return client.routes().create(route);
}
public Route getRoute(String name) {
return client.routes().withName(name).get();
}
public List<Route> getRoutes() {
return client.routes().list().getItems();
}
public boolean deleteRoute(Route route) {
return client.routes().delete(route);
}
// ReplicationControllers - Only for internal usage with clean
private List<ReplicationController> getReplicationControllers() {
return client.replicationControllers().list().getItems();
}
private boolean deleteReplicationController(ReplicationController replicationController) {
return client.replicationControllers().withName(replicationController.getMetadata().getName()).cascading(false).delete();
}
// DeploymentConfigs
public DeploymentConfig createDeploymentConfig(DeploymentConfig deploymentConfig) {
return client.deploymentConfigs().create(deploymentConfig);
}
public DeploymentConfig getDeploymentConfig(String name) {
return client.deploymentConfigs().withName(name).get();
}
public List<DeploymentConfig> getDeploymentConfigs() {
return client.deploymentConfigs().list().getItems();
}
/**
* Returns first container environment variables.
*
* @param name name of deploymentConfig
* @return Map of environment variables
*/
public Map<String, String> getDeploymentConfigEnvVars(String name) {
return getDeploymentConfig(name).getSpec().getTemplate().getSpec().getContainers().get(0).getEnv().stream().collect(Collectors.toMap(EnvVar::getName, EnvVar::getValue));
}
public DeploymentConfig updateDeploymentconfig(DeploymentConfig deploymentConfig) {
return client.deploymentConfigs().withName(deploymentConfig.getMetadata().getName()).replace(deploymentConfig);
}
/**
* Updates deployment config environment variables with envVars values.
*
* @param name name of deploymentConfig
* @param envVars environment variables
*/
public DeploymentConfig updateDeploymentConfigEnvVars(String name, Map<String, String> envVars) {
DeploymentConfig dc = getDeploymentConfig(name);
List<EnvVar> vars = envVars.entrySet().stream().map(x -> new EnvVarBuilder().withName(x.getKey()).withValue(x.getValue()).build()).collect(Collectors.toList());
dc.getSpec().getTemplate().getSpec().getContainers().get(0).getEnv().removeIf(x -> envVars.containsKey(x.getName()));
dc.getSpec().getTemplate().getSpec().getContainers().get(0).getEnv().addAll(vars);
return updateDeploymentconfig(dc);
}
public boolean deleteDeploymentConfig(DeploymentConfig deploymentConfig) {
return deleteDeploymentConfig(deploymentConfig, false);
}
public boolean deleteDeploymentConfig(DeploymentConfig deploymentConfig, boolean cascading) {
return client.deploymentConfigs().withName(deploymentConfig.getMetadata().getName()).cascading(cascading).delete();
}
/**
* Scales deployment config to specified number of replicas.
*
* @param name name of deploymentConfig
* @param replicas number of target replicas
*/
public void scale(String name, int replicas) {
client.deploymentConfigs().withName(name).scale(replicas);
}
/**
* Redeploys deployment config to latest version.
*
* @param name name of deploymentConfig
*/
public void deployLatest(String name) {
client.deploymentConfigs().withName(name).deployLatest();
}
// Builds
public Build getBuild(String name) {
return client.inNamespace(namespace).builds().withName(name).get();
}
public Build getLatestBuild(String buildConfigName) {
long lastVersion = client.buildConfigs().withName(buildConfigName).get().getStatus().getLastVersion();
return getBuild(buildConfigName + "-" + lastVersion);
}
public List<Build> getBuilds() {
return client.builds().list().getItems();
}
public String getBuildLog(Build build) {
return client.builds().withName(build.getMetadata().getName()).getLog();
}
public Reader getBuildLogReader(Build build) {
return client.builds().withName(build.getMetadata().getName()).getLogReader();
}
public boolean deleteBuild(Build build) {
return client.builds().delete(build);
}
public Build startBuild(String buildConfigName) {
BuildRequest request = new BuildRequestBuilder().withNewMetadata().withName(buildConfigName).endMetadata().build();
return client.buildConfigs().withName(buildConfigName).instantiate(request);
}
public Build startBinaryBuild(String buildConfigName, File file) {
return client.buildConfigs().withName(buildConfigName).instantiateBinary().fromFile(file);
}
// BuildConfigs
public BuildConfig createBuildConfig(BuildConfig buildConfig) {
return client.buildConfigs().create(buildConfig);
}
public BuildConfig getBuildConfig(String name) {
return client.buildConfigs().withName(name).get();
}
public List<BuildConfig> getBuildConfigs() {
return client.buildConfigs().list().getItems();
}
/**
* Returns environment variables of buildConfig specified under sourceStrategy.
*
* @param name name of buildConfig
* @return environment variables
*/
public Map<String, String> getBuildConfigEnvVars(String name) {
return getBuildConfig(name).getSpec().getStrategy().getSourceStrategy().getEnv().stream().collect(Collectors.toMap(EnvVar::getName, EnvVar::getValue));
}
public BuildConfig updateBuildConfig(BuildConfig buildConfig) {
return client.buildConfigs().withName(buildConfig.getMetadata().getName()).replace(buildConfig);
}
/**
* Updates build config with specified environment variables.
*
* @param name name of buildConfig
* @param envVars environment variables
*/
public BuildConfig updateBuildConfigEnvVars(String name, Map<String, String> envVars) {
BuildConfig bc = getBuildConfig(name);
List<EnvVar> vars = envVars.entrySet().stream().map(x -> new EnvVarBuilder().withName(x.getKey()).withValue(x.getValue()).build()).collect(Collectors.toList());
bc.getSpec().getStrategy().getSourceStrategy().getEnv().removeIf(x -> envVars.containsKey(x.getName()));
bc.getSpec().getStrategy().getSourceStrategy().getEnv().addAll(vars);
return updateBuildConfig(bc);
}
public boolean deleteBuildConfig(BuildConfig buildConfig) {
return client.buildConfigs().delete(buildConfig);
}
// ServiceAccounts
public ServiceAccount createServiceAccount(ServiceAccount serviceAccount) {
return client.serviceAccounts().create(serviceAccount);
}
public ServiceAccount getServiceAccount(String name) {
return client.serviceAccounts().withName(name).get();
}
public List<ServiceAccount> getServiceAccounts() {
return client.serviceAccounts().list().getItems();
}
/**
* Retrieves service accounts that aren't considered default.
* Service accounts that are left out from list:
* <ul>
* <li>builder</li>
* <li>default</li>
* <li>deployer</li>
* </ul>
*
* @return List of service accounts that aren't considered default.
*/
public List<ServiceAccount> getUserServiceAccounts() {
return client.serviceAccounts().withoutLabel(KEEP_LABEL).list().getItems().stream()
.filter(sa -> !sa.getMetadata().getName().matches("builder|default|deployer"))
.collect(Collectors.toList());
}
public boolean deleteServiceAccount(ServiceAccount serviceAccount) {
return client.serviceAccounts().delete(serviceAccount);
}
// RoleBindings
public RoleBinding createRoleBinding(RoleBinding roleBinding) {
return client.roleBindings().create(roleBinding);
}
public RoleBinding getRoleBinding(String name) {
return client.roleBindings().withName(name).get();
}
public List<RoleBinding> getRoleBindings() {
return client.roleBindings().list().getItems();
}
public List<Role> getRoles() {
return client.roles().list().getItems();
}
/**
* Retrieves role bindings that aren't considered default.
* Role bindings that are left out from list:
* <ul>
* <li>admin</li>
* <li>system:deployers</li>
* <li>system:image-builders</li>
* <li>system:image-pullers</li>
* </ul>
*
* @return List of role bindings that aren't considered default.
*/
public List<RoleBinding> getUserRoleBindings() {
return client.roleBindings().withoutLabel(KEEP_LABEL).list().getItems().stream()
.filter(rb -> !rb.getMetadata().getName().matches("admin|system:deployers|system:image-builders|system:image-pullers"))
.collect(Collectors.toList());
}
public boolean deleteRoleBinding(RoleBinding roleBinding) {
return client.roleBindings().delete(roleBinding);
}
public RoleBinding addRoleToUser(String roleName, String username) {
RoleBinding roleBinding = getOrCreateRoleBinding(roleName);
addSubjectToRoleBinding(roleBinding, "User", username);
addUserNameToRoleBinding(roleBinding, username);
return updateRoleBinding(roleBinding);
}
public RoleBinding addRoleToServiceAccount(String roleName, String serviceAccountName) {
RoleBinding roleBinding = getOrCreateRoleBinding(roleName);
addSubjectToRoleBinding(roleBinding, "ServiceAccount", serviceAccountName);
addUserNameToRoleBinding(roleBinding, String.format("system:serviceaccount:%s:%s", namespace, serviceAccountName));
return updateRoleBinding(roleBinding);
}
public RoleBinding addRoleToGroup(String roleName, String groupName) {
RoleBinding roleBinding = getOrCreateRoleBinding(roleName);
addSubjectToRoleBinding(roleBinding, "SystemGroup", groupName);
addGroupNameToRoleBinding(roleBinding, groupName);
return updateRoleBinding(roleBinding);
}
private RoleBinding getOrCreateRoleBinding(String name) {
RoleBinding roleBinding = getRoleBinding(name);
if (roleBinding == null) {
roleBinding = new RoleBindingBuilder()
.withNewMetadata().withName(name).endMetadata()
.withNewRoleRef().withName(name).endRoleRef()
.build();
createRoleBinding(roleBinding);
}
return roleBinding;
}
public RoleBinding updateRoleBinding(RoleBinding roleBinding) {
return client.roleBindings().withName(roleBinding.getMetadata().getName()).replace(roleBinding);
}
private void addSubjectToRoleBinding(RoleBinding roleBinding, String entityKind, String entityName) {
ObjectReference subject = new ObjectReferenceBuilder().withKind(entityKind).withName(entityName).build();
if (roleBinding.getSubjects().stream().noneMatch(x -> x.getName().equals(subject.getName()) && x.getKind().equals(subject.getKind()))) {
roleBinding.getSubjects().add(subject);
}
}
private void addUserNameToRoleBinding(RoleBinding roleBinding, String userName) {
if (roleBinding.getUserNames() == null) {
roleBinding.setUserNames(new ArrayList<>());
}
if (!roleBinding.getUserNames().contains(userName)) {
roleBinding.getUserNames().add(userName);
}
}
private void addGroupNameToRoleBinding(RoleBinding roleBinding, String groupName) {
if (roleBinding.getGroupNames() == null) {
roleBinding.setGroupNames(new ArrayList<>());
}
if (!roleBinding.getGroupNames().contains(groupName)) {
roleBinding.getGroupNames().add(groupName);
}
}
public RoleBinding removeRoleFromServiceAccount(String roleName, String serviceAccountName) {
return removeRoleFromEntity(roleName, "ServiceAccount", serviceAccountName, String.format("system:serviceaccount:%s:%s", namespace, serviceAccountName));
}
public RoleBinding removeRoleFromEntity(String roleName, String entityKind, String entityName, String userName) {
RoleBinding roleBinding = client.roleBindings().withName(roleName).get();
if (roleBinding != null) {
roleBinding.getSubjects().remove(new ObjectReferenceBuilder().withKind(entityKind).withName(entityName).withNamespace(namespace).build());
roleBinding.getUserNames().remove(userName);
return updateRoleBinding(roleBinding);
}
return null;
}
// ResourceQuotas
public ResourceQuota createResourceQuota(ResourceQuota resourceQuota) {
return client.resourceQuotas().create(resourceQuota);
}
public ResourceQuota getResourceQuota(String name) {
return client.resourceQuotas().withName(name).get();
}
public boolean deleteResourceQuota(ResourceQuota resourceQuota) {
return client.resourceQuotas().delete(resourceQuota);
}
// Persistent volume claims
public PersistentVolumeClaim createPersistentVolumeClaim(PersistentVolumeClaim pvc) {
return client.persistentVolumeClaims().create(pvc);
}
public PersistentVolumeClaim getPersistentVolumeClaim(String name) {
return client.persistentVolumeClaims().withName(name).get();
}
public List<PersistentVolumeClaim> getPersistentVolumeClaims() {
return client.persistentVolumeClaims().list().getItems();
}
public boolean deletePersistentVolumeClaim(PersistentVolumeClaim pvc) {
return client.persistentVolumeClaims().delete(pvc);
}
// HorizontalPodAutoscalers
public HorizontalPodAutoscaler createHorizontalPodAutoscaler(HorizontalPodAutoscaler hpa) {
return client.autoscaling().horizontalPodAutoscalers().create(hpa);
}
public HorizontalPodAutoscaler getHorizontalPodAutoscaler(String name) {
return client.autoscaling().horizontalPodAutoscalers().withName(name).get();
}
public List<HorizontalPodAutoscaler> getHorizontalPodAutoscalers() {
return client.autoscaling().horizontalPodAutoscalers().list().getItems();
}
public boolean deleteHorizontalPodAutoscaler(HorizontalPodAutoscaler hpa) {
return client.autoscaling().horizontalPodAutoscalers().delete(hpa);
}
// ConfigMaps
public ConfigMap createConfigMap(ConfigMap configMap) {
return client.configMaps().create(configMap);
}
public ConfigMap getConfigMap(String name) {
return client.configMaps().withName(name).get();
}
public List<ConfigMap> getConfigMaps() {
return client.configMaps().list().getItems();
}
public boolean deleteConfigMap(ConfigMap configMap) {
return client.configMaps().delete(configMap);
}
// Templates
public Template createTemplate(Template template) {
return client.templates().create(template);
}
public Template getTemplate(String name) {
return client.templates().withName(name).get();
}
public List<Template> getTemplates() {
return client.templates().list().getItems();
}
public boolean deleteTemplate(String name) {
return client.templates().withName(name).delete();
}
public boolean deleteTemplate(Template template) {
return client.templates().delete(template);
}
public Template loadAndCreateTemplate(InputStream is) {
Template t = client().templates().load(is).get();
deleteTemplate(t);
return createTemplate(t);
}
public KubernetesList recreateAndProcessTemplate(Template template, Map<String, String> parameters) {
deleteTemplate(template.getMetadata().getName());
createTemplate(template);
return processTemplate(template.getMetadata().getName(), parameters);
}
public KubernetesList recreateAndProcessAndDeployTemplate(Template template, Map<String, String> parameters) {
return createResources(recreateAndProcessTemplate(template, parameters));
}
public KubernetesList processTemplate(String name, Map<String, String> parameters) {
ParameterValue[] values = processParameters(parameters);
return client.templates().withName(name).process(values);
}
public KubernetesList processAndDeployTemplate(String name, Map<String, String> parameters) {
return createResources(processTemplate(name, parameters));
}
private ParameterValue[] processParameters(Map<String, String> parameters) {
return parameters.entrySet().stream().map(entry -> new ParameterValue(entry.getKey(), entry.getValue())).collect(Collectors.toList()).toArray(new ParameterValue[parameters.size()]);
}
// Nodes
public Node getNode(String name) {
return client.nodes().withName(name).get();
}
public List<Node> getNodes() {
return client.nodes().list().getItems();
}
public List<Node> getNodes(Map<String, String> labels) {
return client.nodes().withLabels(labels).list().getItems();
}
// Events
public List<Event> getEvents() {
return client.events().list().getItems();
}
// Clean up function
/**
* Deletes all* resources in namespace. Waits till all are deleted. <br/>
* <br/>
* <p>
* * Only user created secrets, service accounts and role bindings are deleted. Default will remain.
*
* @throws TimeoutException in case that some user resources will remain even after timeout.
* @see #getUserSecrets()
* @see #getUserServiceAccounts()
* @see #getUserRoleBindings()
*/
public void cleanAndWait() throws TimeoutException {
clean();
waiters.isProjectClean().execute();
}
/**
* Deletes all* resources in namespace. Waits till all are deleted. <br/>
* <br/>
* <p>
* * Only user created secrets, service accounts and role bindings are deleted. Default will remain.
*
* @throws AssertionError in case that some user resources will remain even after timeout.
* @see #getUserSecrets()
* @see #getUserServiceAccounts()
* @see #getUserRoleBindings()
*/
public void cleanAndAssert() {
clean();
waiters.isProjectClean().assertEventually();
}
/**
* Deletes all* resources in namespace. Doesn't wait till all are deleted. <br/>
* <br/>
* <p>
* * Only user created secrets, service accounts and role bindings are deleted. Default will remain.
*
* @see #getUserSecrets()
* @see #getUserServiceAccounts()
* @see #getUserRoleBindings()
*/
public void clean() {
// keep the order for deletion to prevent K8s creating resources again
client.templates().delete();
client.extensions().deployments().delete();
client.apps().statefulSets().delete();
client.extensions().jobs().delete();
getDeploymentConfigs().forEach(this::deleteDeploymentConfig);
getReplicationControllers().forEach(this::deleteReplicationController);
client.buildConfigs().delete();
client.imageStreams().delete();
client.endpoints().delete();
client.services().delete();
client.builds().delete();
client.routes().delete();
client.pods().withGracePeriod(0).delete();
client.persistentVolumeClaims().delete();
client.autoscaling().horizontalPodAutoscalers().delete();
client.configMaps().delete();
getUserSecrets().forEach(this::deleteSecret);
getUserServiceAccounts().forEach(this::deleteServiceAccount);
getUserRoleBindings().forEach(this::deleteRoleBinding);
client.roles().delete();
}
@Override
public void close() {
client.close();
}
// Logs storing
public Path storePodLog(Pod pod, Path dirPath, String fileName) throws IOException {
String log = getPodLog(pod);
return storeLog(log, dirPath, fileName);
}
public Path storeBuildLog(Build build, Path dirPath, String fileName) throws IOException {
String log = getBuildLog(build);
return storeLog(log, dirPath, fileName);
}
private Path storeLog(String log, Path dirPath, String fileName) throws IOException {
Path filePath = dirPath.resolve(fileName + ".log");
Files.createDirectories(dirPath);
Files.createFile(filePath);
Files.write(filePath, log.getBytes());
return filePath;
}
// Waiting
public OpenShiftWaiters waiters() {
return waiters;
}
}
| [fix] Use configurable wait timeout for recreating namespace
| utilities/src/main/java/cz/xtf/openshift/OpenShiftUtil.java | [fix] Use configurable wait timeout for recreating namespace |
|
Java | mit | 6ee625b44368ba7c4bbd9d827f93eba5d518201c | 0 | aterai/java-swing-tips,aterai/java-swing-tips,aterai/java-swing-tips,aterai/java-swing-tips | // -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
import javax.swing.*;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableColumn;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
public final class MainPanel extends JPanel {
private final String[] columnNames = {"No.", "Name", "Progress", ""};
private final DefaultTableModel model = new DefaultTableModel(null, columnNames);
private final JTable table = new JTable(model) {
@Override public void updateUI() {
super.updateUI();
removeColumn(getColumnModel().getColumn(3));
JProgressBar progress = new JProgressBar();
TableCellRenderer renderer = new DefaultTableCellRenderer();
TableColumn tc = getColumnModel().getColumn(2);
tc.setCellRenderer((tbl, value, isSelected, hasFocus, row, column) -> {
Component c;
progress.setValue(0);
if (value instanceof ProgressValue) {
ProgressValue pv = (ProgressValue) value;
Integer current = pv.getProgress();
Integer lengthOfTask = pv.getLengthOfTask();
if (current < 0) {
c = renderer.getTableCellRendererComponent(tbl, "Canceled", isSelected, hasFocus, row, column);
} else if (current < lengthOfTask) {
// progress.setMaximum(lengthOfTask);
// progress.setEnabled(true);
progress.setValue(current * 100 / lengthOfTask);
progress.setStringPainted(true);
progress.setString(String.format("%d/%d", current, lengthOfTask));
c = progress;
} else {
c = renderer.getTableCellRendererComponent(tbl, "Done", isSelected, hasFocus, row, column);
}
} else {
c = renderer.getTableCellRendererComponent(tbl, "Waiting...", isSelected, hasFocus, row, column);
}
return c;
});
}
};
private final Set<Integer> deletedRowSet = new TreeSet<>();
private int number;
private MainPanel() {
super(new BorderLayout());
table.setRowSorter(new TableRowSorter<>(model));
JScrollPane scrollPane = new JScrollPane(table);
scrollPane.getViewport().setBackground(Color.WHITE);
table.setComponentPopupMenu(new TablePopupMenu());
table.setFillsViewportHeight(true);
table.setIntercellSpacing(new Dimension());
table.setShowGrid(false);
table.putClientProperty("terminateEditOnFocusLost", Boolean.TRUE);
TableColumn column = table.getColumnModel().getColumn(0);
column.setMaxWidth(60);
column.setMinWidth(60);
column.setResizable(false);
JButton button = new JButton("add");
button.addActionListener(e -> addActionPerformed());
add(button, BorderLayout.SOUTH);
add(scrollPane);
setPreferredSize(new Dimension(320, 240));
}
public void addProgressValue(String name, ProgressValue value, SwingWorker<?, ?> worker) {
Object[] obj = {number, name, value, worker};
model.addRow(obj);
number++;
}
public void addActionPerformed() {
int key = model.getRowCount();
int lengthOfTask = new Random().nextInt(100) + 100;
SwingWorker<Integer, ProgressValue> worker = new BackgroundTask(lengthOfTask) {
@Override protected void process(List<ProgressValue> c) {
if (isCancelled()) {
return;
}
if (!isDisplayable()) {
System.out.println("process: DISPOSE_ON_CLOSE");
cancel(true);
// executor.shutdown();
return;
}
c.forEach(v -> model.setValueAt(v, key, 2));
}
@Override protected void done() {
if (!isDisplayable()) {
System.out.println("done: DISPOSE_ON_CLOSE");
cancel(true);
// executor.shutdown();
return;
}
String text;
int i = -1;
if (isCancelled()) {
text = "Cancelled";
} else {
try {
i = get();
text = i >= 0 ? "Done" : "Disposed";
} catch (InterruptedException | ExecutionException ex) {
text = ex.getMessage();
Thread.currentThread().interrupt();
}
}
System.out.format("%s:%s(%dms)%n", key, text, i);
}
};
addProgressValue("example(max: " + lengthOfTask + ")", new ProgressValue(lengthOfTask, 0), worker);
// executor.execute(worker);
worker.execute();
}
private class TablePopupMenu extends JPopupMenu {
private final JMenuItem cancelMenuItem;
private final JMenuItem deleteMenuItem;
protected TablePopupMenu() {
super();
add("add").addActionListener(e -> addActionPerformed());
addSeparator();
cancelMenuItem = add("cancel");
cancelMenuItem.addActionListener(e -> cancelActionPerformed());
deleteMenuItem = add("delete");
deleteMenuItem.addActionListener(e -> deleteActionPerformed());
}
@Override public void show(Component c, int x, int y) {
if (c instanceof JTable) {
boolean flag = ((JTable) c).getSelectedRowCount() > 0;
cancelMenuItem.setEnabled(flag);
deleteMenuItem.setEnabled(flag);
super.show(c, x, y);
}
}
private SwingWorker<?, ?> getSwingWorker(int identifier) {
return (SwingWorker<?, ?>) model.getValueAt(identifier, 3);
}
private void deleteActionPerformed() {
int[] selection = table.getSelectedRows();
if (selection.length == 0) {
return;
}
for (int i: selection) {
int mi = table.convertRowIndexToModel(i);
deletedRowSet.add(mi);
SwingWorker<?, ?> worker = getSwingWorker(mi);
if (Objects.nonNull(worker) && !worker.isDone()) {
worker.cancel(true);
}
// worker = null;
}
RowSorter<? extends TableModel> sorter = table.getRowSorter();
((TableRowSorter<? extends TableModel>) sorter).setRowFilter(new RowFilter<TableModel, Integer>() {
@Override public boolean include(Entry<? extends TableModel, ? extends Integer> entry) {
return !deletedRowSet.contains(entry.getIdentifier());
}
});
table.clearSelection();
table.repaint();
}
private void cancelActionPerformed() {
int[] selection = table.getSelectedRows();
for (int i: selection) {
int mi = table.convertRowIndexToModel(i);
SwingWorker<?, ?> worker = getSwingWorker(mi);
if (Objects.nonNull(worker) && !worker.isDone()) {
worker.cancel(true);
}
// worker = null;
}
table.repaint();
}
}
public static void main(String[] args) {
EventQueue.invokeLater(MainPanel::createAndShowGui);
}
private static void createAndShowGui() {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
ex.printStackTrace();
Toolkit.getDefaultToolkit().beep();
}
JFrame frame = new JFrame("@title@");
frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
// frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
frame.getContentPane().add(new MainPanel());
frame.pack();
frame.setLocationRelativeTo(null);
frame.setVisible(true);
}
}
class BackgroundTask extends SwingWorker<Integer, ProgressValue> {
private final int lengthOfTask;
private final int sleepDummy = new Random().nextInt(50) + 1;
protected BackgroundTask(int lengthOfTask) {
super();
this.lengthOfTask = lengthOfTask;
}
@SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
@Override protected Integer doInBackground() throws InterruptedException {
int current = 0;
while (current <= lengthOfTask && !isCancelled()) {
publish(new ProgressValue(lengthOfTask, current));
Thread.sleep(sleepDummy);
current++;
}
return sleepDummy * lengthOfTask;
}
}
class ProgressValue {
private final Integer progress;
private final Integer lengthOfTask;
protected ProgressValue(Integer lengthOfTask, Integer progress) {
this.progress = progress;
this.lengthOfTask = lengthOfTask;
}
public Integer getProgress() {
return progress;
}
public Integer getLengthOfTask() {
return lengthOfTask;
}
}
| StringPaintedCellProgressBar/src/java/example/MainPanel.java | // -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import javax.swing.*;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableColumn;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
public class MainPanel extends JPanel {
protected final WorkerModel<ProgressValue> model = new WorkerModel<>();
protected final JTable table = new JTable(model);
protected final transient TableRowSorter<? extends TableModel> sorter = new TableRowSorter<>(model);
protected final Set<Integer> deleteRowSet = new TreeSet<>();
public MainPanel() {
super(new BorderLayout());
table.setRowSorter(sorter);
JScrollPane scrollPane = new JScrollPane(table);
scrollPane.getViewport().setBackground(Color.WHITE);
table.setComponentPopupMenu(new TablePopupMenu());
table.setFillsViewportHeight(true);
table.setIntercellSpacing(new Dimension());
table.setShowGrid(false);
table.putClientProperty("terminateEditOnFocusLost", Boolean.TRUE);
TableColumn column = table.getColumnModel().getColumn(0);
column.setMaxWidth(60);
column.setMinWidth(60);
column.setResizable(false);
column = table.getColumnModel().getColumn(2);
column.setCellRenderer(new ProgressRenderer());
JButton button = new JButton("add");
button.addActionListener(e -> addActionPerformed());
add(button, BorderLayout.SOUTH);
add(scrollPane);
setPreferredSize(new Dimension(320, 240));
}
protected final void addActionPerformed() {
int key = model.getRowCount();
int lengthOfTask = new Random().nextInt(100) + 100;
SwingWorker<Integer, ProgressValue> worker = new BackgroundTask(lengthOfTask) {
@Override protected void process(List<ProgressValue> c) {
if (isCancelled()) {
return;
}
if (!isDisplayable()) {
System.out.println("process: DISPOSE_ON_CLOSE");
cancel(true);
// executor.shutdown();
return;
}
c.forEach(v -> model.setValueAt(v, key, 2));
}
@Override protected void done() {
if (!isDisplayable()) {
System.out.println("done: DISPOSE_ON_CLOSE");
cancel(true);
// executor.shutdown();
return;
}
String text;
int i = -1;
if (isCancelled()) {
text = "Cancelled";
} else {
try {
i = get();
text = i >= 0 ? "Done" : "Disposed";
} catch (InterruptedException | ExecutionException ex) {
text = ex.getMessage();
}
}
System.out.format("%s:%s(%dms)%n", key, text, i);
}
};
model.addProgressValue("example(max: " + lengthOfTask + ")", new ProgressValue(lengthOfTask, 0), worker);
// executor.execute(worker);
worker.execute();
}
protected final void cancelActionPerformed() {
int[] selection = table.getSelectedRows();
for (int i: selection) {
int midx = table.convertRowIndexToModel(i);
SwingWorker<Integer, ProgressValue> worker = model.getSwingWorker(midx);
if (Objects.nonNull(worker) && !worker.isDone()) {
worker.cancel(true);
}
worker = null;
}
table.repaint();
}
protected final void deleteActionPerformed() {
int[] selection = table.getSelectedRows();
if (selection.length == 0) {
return;
}
for (int i: selection) {
int midx = table.convertRowIndexToModel(i);
deleteRowSet.add(midx);
SwingWorker<Integer, ProgressValue> worker = model.getSwingWorker(midx);
if (Objects.nonNull(worker) && !worker.isDone()) {
worker.cancel(true);
}
worker = null;
}
sorter.setRowFilter(new RowFilter<TableModel, Integer>() {
@Override public boolean include(Entry<? extends TableModel, ? extends Integer> entry) {
return !deleteRowSet.contains(entry.getIdentifier());
}
});
table.clearSelection();
table.repaint();
}
private class TablePopupMenu extends JPopupMenu {
private final JMenuItem cancelMenuItem;
private final JMenuItem deleteMenuItem;
protected TablePopupMenu() {
super();
add("add").addActionListener(e -> addActionPerformed());
addSeparator();
cancelMenuItem = add("cancel");
cancelMenuItem.addActionListener(e -> cancelActionPerformed());
deleteMenuItem = add("delete");
deleteMenuItem.addActionListener(e -> deleteActionPerformed());
}
@Override public void show(Component c, int x, int y) {
if (c instanceof JTable) {
boolean flag = ((JTable) c).getSelectedRowCount() > 0;
cancelMenuItem.setEnabled(flag);
deleteMenuItem.setEnabled(flag);
super.show(c, x, y);
}
}
}
public static void main(String[] args) {
EventQueue.invokeLater(MainPanel::createAndShowGui);
}
private static void createAndShowGui() {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
ex.printStackTrace();
Toolkit.getDefaultToolkit().beep();
}
JFrame frame = new JFrame("@title@");
frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
// frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
frame.getContentPane().add(new MainPanel());
frame.pack();
frame.setLocationRelativeTo(null);
frame.setVisible(true);
}
}
class BackgroundTask extends SwingWorker<Integer, ProgressValue> {
private final int lengthOfTask;
private final int sleepDummy = new Random().nextInt(100) + 1;
protected BackgroundTask(int lengthOfTask) {
super();
this.lengthOfTask = lengthOfTask;
}
@SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
@Override protected Integer doInBackground() {
int current = 0;
while (current <= lengthOfTask && !isCancelled()) {
publish(new ProgressValue(lengthOfTask, current));
try {
Thread.sleep(sleepDummy);
} catch (InterruptedException ex) {
break;
}
current++;
}
return sleepDummy * lengthOfTask;
}
}
class WorkerModel<E extends ProgressValue> extends DefaultTableModel {
private static final ColumnContext[] COLUMN_ARRAY = {
new ColumnContext("No.", Integer.class, false),
new ColumnContext("Name", String.class, false),
new ColumnContext("Progress", ProgressValue.class, false)
};
private final Map<Integer, SwingWorker<Integer, E>> swmap = new ConcurrentHashMap<>();
private int number;
public void addProgressValue(String name, E t, SwingWorker<Integer, E> worker) {
Object[] obj = {number, name, t.getProgress()};
super.addRow(obj);
if (Objects.nonNull(worker)) {
swmap.put(number, worker);
}
number++;
}
public SwingWorker<Integer, E> getSwingWorker(int identifier) {
Integer key = (Integer) getValueAt(identifier, 0);
return swmap.get(key);
}
@Override public boolean isCellEditable(int row, int col) {
return COLUMN_ARRAY[col].isEditable;
}
@Override public Class<?> getColumnClass(int column) {
return COLUMN_ARRAY[column].columnClass;
}
@Override public int getColumnCount() {
return COLUMN_ARRAY.length;
}
@Override public String getColumnName(int column) {
return COLUMN_ARRAY[column].columnName;
}
private static class ColumnContext {
public final String columnName;
public final Class<?> columnClass;
public final boolean isEditable;
protected ColumnContext(String columnName, Class<?> columnClass, boolean isEditable) {
this.columnName = columnName;
this.columnClass = columnClass;
this.isEditable = isEditable;
}
}
}
class ProgressValue {
private final Integer progress;
private final Integer lengthOfTask;
protected ProgressValue(Integer lengthOfTask, Integer progress) {
this.progress = progress;
this.lengthOfTask = lengthOfTask;
}
public Integer getProgress() {
return progress;
}
public Integer getLengthOfTask() {
return lengthOfTask;
}
}
class ProgressRenderer extends DefaultTableCellRenderer {
private final JProgressBar progress = new JProgressBar();
private final JPanel renderer = new JPanel(new BorderLayout());
@Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
Component c;
renderer.removeAll();
progress.setValue(0);
if (value instanceof ProgressValue) {
ProgressValue pv = (ProgressValue) value;
Integer current = pv.getProgress();
Integer lengthOfTask = pv.getLengthOfTask();
if (current < 0) {
c = super.getTableCellRendererComponent(table, "Canceled", isSelected, hasFocus, row, column);
} else if (current < lengthOfTask) {
// progress.setMaximum(lengthOfTask);
// progress.setEnabled(true);
progress.setValue(current * 100 / lengthOfTask);
progress.setStringPainted(true);
progress.setString(String.format("%d/%d", current, lengthOfTask));
renderer.add(progress);
c = renderer;
} else {
c = super.getTableCellRendererComponent(table, "Done", isSelected, hasFocus, row, column);
}
} else {
// progress.setEnabled(false);
// progress.setValue(0);
// renderer.add(progress);
// c = renderer;
c = super.getTableCellRendererComponent(table, "Waiting...", isSelected, hasFocus, row, column);
}
return c;
}
@Override public void updateUI() {
super.updateUI();
setOpaque(true);
if (Objects.nonNull(renderer)) {
SwingUtilities.updateComponentTreeUI(renderer);
}
}
}
| SonarLint: make 'swmap' transient or serializable
| StringPaintedCellProgressBar/src/java/example/MainPanel.java | SonarLint: make 'swmap' transient or serializable |
|
Java | mit | f692e45e475ecfdb2862d0239c5073db76a1f332 | 0 | RSV2/lago | package com.thirdchannel.rabbitmq;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.rabbitmq.client.*;
import com.thirdchannel.rabbitmq.config.ExchangeConfig;
import com.thirdchannel.rabbitmq.config.RabbitMQConfig;
import com.thirdchannel.rabbitmq.exceptions.LagoDefaultExceptionHandler;
import com.thirdchannel.rabbitmq.interfaces.EventConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.*;
import java.util.concurrent.TimeoutException;
/**
* Will keep a main channel open for publishing, although one can publish with an additional channel
*
* @author Steve Pember
*/
public class Lago implements com.thirdchannel.rabbitmq.interfaces.Lago {
private Logger log = LoggerFactory.getLogger(this.getClass());
public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private Connection connection;
private Channel channel; // create a local channel just for Lago
private ConnectionFactory connectionFactory;
private ExceptionHandler exceptionHandler = new LagoDefaultExceptionHandler();
private final List<EventConsumer> registeredConsumers = new ArrayList<EventConsumer>();
private RabbitMQConfig config;
private PropertiesManager propertiesManager = new PropertiesManager();
public Lago() {
loadConfig();
}
protected void loadConfig() {
try {
config = propertiesManager.load();
} catch (FileNotFoundException e) {
log.error("No config file 'lago.yaml' found on the classpath");
}
}
public RabbitMQConfig getConfig() {
return config;
}
public void registerConsumer(EventConsumer consumer) {
if (consumer.isConfigured()) {
log.info(consumer.getClass().getSimpleName() +" appears to be already configured");
} else {
consumer.setConfig(config.findQueueConfig(consumer));
}
if (consumer.getConfig().getCount() > 0) {
log.debug("About to spin up " + consumer.getConfig().getCount() + " instances of " + consumer.getClass().getSimpleName());
bindConsumer(consumer, 0);
for (int i = 1; i < consumer.getConfig().getCount(); i++) {
bindConsumer(consumer.spawn(), i);
}
log.info("Registered Consumer: " + consumer.getClass().getSimpleName());
} else {
log.warn("Count of less then one provided for Consumer: " + consumer.getClass().getSimpleName());
}
}
private void bindConsumer(EventConsumer consumer, int count) {
consumer.setChannel(createChannel());
try {
log.debug("About to make queue with name: " + consumer.getQueueName());
consumer.getChannel().queueDeclare(
consumer.getQueueName(),
consumer.getConfig().isDurable(),
consumer.getConfig().getCount() > 1,
consumer.getConfig().isAutoDelete(),
null
);
consumer.setLago(this);
for(String key : consumer.getConfig().getKeys()) {
// bind the queue to each key
consumer.getChannel().queueBind(consumer.getQueueName(), consumer.getConfig().getExchangeName(), key);
}
// but ony one bind for the consumer in general
consumer.getChannel().basicConsume(consumer.getQueueName(), consumer.getConfig().isAutoAck(),
consumer.getClass().getSimpleName() + "-" + (count + 1), consumer);
registeredConsumers.add(consumer);
} catch (IOException e) {
log.error("Could not declare queue and bind to consumer: " + e.getMessage(), e);
}
}
public List<EventConsumer> getRegisteredConsumers() {
return registeredConsumers;
}
public Connection connect () {
// if environment variable present, use that
// otherwise, use config. if no config, then throw exception
String connectionUrl = config.getConnectionEnvironmentUrl();
if (!connectionUrl.isEmpty()) {
connect(connectionUrl);
} else if (config.hasConnectionConfig()) {
connect(config.getUsername(), config.getPassword(), config.getVirtualHost(), config.getHost(), config.getPort());
} else {
throw new RuntimeException("Could not located rabbit mq configuration in environment or config");
}
return getConnection();
}
public Connection connect(String url) {
ConnectionFactory factory = new ConnectionFactory();
try {
factory.setUri(url);
return connect(factory);
} catch (URISyntaxException | NoSuchAlgorithmException | KeyManagementException | NullPointerException e) {
log.error("Could not connect to Rabbit over url " + url + ": ", e);
return null;
}
}
public Connection connect(String userName, String password, String virtualHost, String host, int port) {
ConnectionFactory factory = new ConnectionFactory();
factory.setUsername(userName);
factory.setPassword(password);
factory.setVirtualHost(virtualHost);
factory.setHost(host);
factory.setPort(port);
factory.setConnectionTimeout(config.getConnectionTimeout());
return connect(factory);
}
/**
* Connects usinsee ConnectionFacory}, allowing for custom configuration by the service.
* Warning: no configuration will be provided. Make sure that you've set values like automatic recovery
*
* @param factory the factory
* @return a connection
*/
public Connection connect(ConnectionFactory factory) {
try {
defaultFactorySettings(factory, config);
connectionFactory = factory;
connection = factory.newConnection();
log.info("Connected to Rabbit");
channel = createChannel();
log.debug("Declaring exchanges");
for (ExchangeConfig exchangeConfig : config.getExchanges()) {
channel.exchangeDeclare(exchangeConfig.getName(), exchangeConfig.getType(), exchangeConfig.isDurable(), exchangeConfig.isAutoDelete(), null);
}
// todo: declare internal api rpc consumer
} catch(IOException | TimeoutException e) {
log.error(e.getMessage(), e);
}
return connection;
}
/**
* Sets initial defaults for the factory during connection.
*/
private void defaultFactorySettings(ConnectionFactory factory, RabbitMQConfig config) {
// the Java client for Rabbit has inconsistent settings for timing values. e.g. second vs milliseconds
factory.setRequestedHeartbeat(config.getHeartbeatInterval());
factory.setConnectionTimeout(config.getConnectionTimeout());
factory.setAutomaticRecoveryEnabled(config.isAutomaticRecoveryEnabled());
factory.setTopologyRecoveryEnabled(config.isTopologyRecoveryEnabled());
factory.setExceptionHandler(exceptionHandler);
}
public Channel createChannel() {
try {
return connection.createChannel();
} catch (IOException e) {
log.error("Could not create channel: ", e);
return null;
}
}
public Channel getChannel() {
return channel;
}
public void setExceptionHandler(ExceptionHandler handler) {
exceptionHandler = handler;
}
public void close() {
try {
channel.close();
connection.close();
} catch (IOException | TimeoutException | NullPointerException e) {
log.error("Could not close connection: ", e);
}
}
@Override
public Connection getConnection() {
return connection;
}
public void publish(String exchangeName, String key, Object message, AMQP.BasicProperties properties) {
publish(exchangeName, key, message, properties, this.channel);
}
/**
*
* @param message Object containing the information you want to transmit. Could be as simple as a single value, a Map, an Object, etc. This object will be serialized using Jackson, so Jackson Annotations will be respected
* @param key String The routing key for outgoing message
* @param properties BasicProperties Standard RabbitMQ Basic Properties
* @param channel Channel The Channel to transmit on
* @param exchangeName String The name of the exchange to transmit on
*/
public void publish(String exchangeName, String key, Object message, AMQP.BasicProperties properties, Channel channel) {
try {
log.debug("Publishing to exchange '{}' with key '{}'", exchangeName, key);
channel.basicPublish(exchangeName, key, properties, OBJECT_MAPPER.writeValueAsString(message).getBytes());
} catch(IOException ioe) {
log.error("Failed to publish message: ", ioe);
}
}
/**
*
* @param exchangeName The name of the exchange to publish on
* @param key String The routing key to publish on
* @param message Object representing the outgoing data. Will typically encapsulate some sort of query information
* @param clazz Clazz The class of the expected return data
* @param channel Channel Channel to broadcast on
* @return Object Will be an instance of clazz
* @throws IOException If unable to connect or bind the queuetion
*/
public Object rpc(String exchangeName, String key, Object message, Class clazz, Channel channel) throws IOException {
return rpc(exchangeName, key, message, clazz, channel, UUID.randomUUID().toString());
}
/**
*
* @param exchangeName The name of the exchange to publish on
* @param key String The routing key to publish on
* @param message Object representing the outgoing data. Will typically encapsulate some sort of query information
* @param clazz Clazz The class of the expected return data
* @param channel Channel Channel to broadcas
* @param traceId A unique identifier for tracing communicationst on
* @return Object Will be an instance of clazz
* @throws IOException If unable to connect or bind the queuetion
*/
public Object rpc(String exchangeName, String key, Object message, Class clazz, Channel channel, String traceId) throws IOException {
// to do an RPC (synchronous, in this case) in RabbitMQ, we must do the following:
// 1. create a unique response queue for the rpc call
// 2. create a new channel for the queue //todo: eventually make this optional
// 3. define a response correlation id. create a basic properties object with the response id
// 4. publish
// 5. wait for the response on the unique queue. if timeout, prepare empty response
// 6. destroy unique queue
// 7. return response
// Also, allow configuration for logging response times, or timeouts on rpc calls
//
//
// Ok, furthermore, the RabbitMq java library has implementations of RPC and AsyncRPC on the channel class.
// Assuming they do what I think they do, they would be amazing to use. However:
// * I cannot find any documentation on how to use them, all searches for things like 'rabbitmq java client channel rpc' result in
// documentation about how to programatically do an rpc call (e.g. what we do here).
// * The official java rabbitmq documentation also says to do what we do here.
RpcStopWatch stopWatch = null;
if (config.isLogRpcTime()) {stopWatch = new RpcStopWatch().start();}
ObjectReader objectReader = OBJECT_MAPPER.readerFor(clazz);
String replyQueueName = channel.queueDeclare("", false, false, true, null).getQueue();
log.info("Listening for rpc response on " + replyQueueName);
QueueingConsumer consumer = new QueueingConsumer(channel);
channel.queueBind(replyQueueName, exchangeName, replyQueueName);
channel.basicConsume(replyQueueName, true, consumer);
RabbitMQDeliveryDetails rpcDetails = buildRpcRabbitMQDeliveryDetails(exchangeName, key, replyQueueName, traceId);
// then publish the query
publish(exchangeName, key, message, rpcDetails.getBasicProperties(), channel);
log.debug("Waiting for rpc response delivery on " + key);
QueueingConsumer.Delivery delivery = null;
try {
delivery = consumer.nextDelivery(config.getRpcTimeout());
} catch (InterruptedException e) {
log.error("Thread interrupted while waiting for rpc response:", e);
delivery = null;
}
if (delivery != null) {
log.trace("RPC response received.");
if (delivery.getProperties().getCorrelationId().equals(rpcDetails.getBasicProperties().getCorrelationId())) {
log.trace("Correlation ids are equal.");
channel.basicCancel(consumer.getConsumerTag());
//
} else {
log.warn("Correlation ids not equal! key: " + key);
return null;
}
} else {
log.warn("Timeout occurred on RPC message to key: " + key);
return null;
}
// // we must clean up!
channel.queueUnbind(replyQueueName, exchangeName, replyQueueName);
channel.queueDelete(replyQueueName);
if (config.isLogRpcTime() && stopWatch != null) {
stopWatch.stopAndPublish(rpcDetails);
}
log.debug("Received: {}", new String(delivery.getBody()));
return objectReader.readValue(delivery.getBody());
}
private RabbitMQDeliveryDetails buildRpcRabbitMQDeliveryDetails(String exchangeName, String key, String replyQueueName, String traceId ) {
Map<String, Object> headers = new HashMap<>();
headers.put(RpcStopWatch.TRACE_ID, traceId);
AMQP.BasicProperties props = new AMQP.BasicProperties.Builder()
.correlationId(UUID.randomUUID().toString())
.replyTo(replyQueueName)
.headers(headers)
.build();
return new RabbitMQDeliveryDetails(new Envelope(0, true, exchangeName, key), props, "temp-rpc");
}
}
| src/main/java/com/thirdchannel/rabbitmq/Lago.java | package com.thirdchannel.rabbitmq;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.rabbitmq.client.*;
import com.thirdchannel.rabbitmq.config.ExchangeConfig;
import com.thirdchannel.rabbitmq.config.RabbitMQConfig;
import com.thirdchannel.rabbitmq.exceptions.LagoDefaultExceptionHandler;
import com.thirdchannel.rabbitmq.interfaces.EventConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.*;
import java.util.concurrent.TimeoutException;
/**
* Will keep a main channel open for publishing, although one can publish with an additional channel
*
* @author Steve Pember
*/
public class Lago implements com.thirdchannel.rabbitmq.interfaces.Lago {
private Logger log = LoggerFactory.getLogger(this.getClass());
public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private Connection connection;
private Channel channel; // create a local channel just for Lago
private ConnectionFactory connectionFactory;
private ExceptionHandler exceptionHandler = new LagoDefaultExceptionHandler();
private final List<EventConsumer> registeredConsumers = new ArrayList<EventConsumer>();
private RabbitMQConfig config;
private PropertiesManager propertiesManager = new PropertiesManager();
public Lago() {
loadConfig();
}
protected void loadConfig() {
try {
config = propertiesManager.load();
} catch (FileNotFoundException e) {
log.error("No config file 'lago.yaml' found on the classpath");
}
}
public RabbitMQConfig getConfig() {
return config;
}
public void registerConsumer(EventConsumer consumer) {
if (consumer.isConfigured()) {
log.info(consumer.getClass().getSimpleName() +" appears to be already configured");
} else {
consumer.setConfig(config.findQueueConfig(consumer));
}
if (consumer.getConfig().getCount() > 0) {
log.debug("About to spin up " + consumer.getConfig().getCount() + " instances of " + consumer.getClass().getSimpleName());
bindConsumer(consumer, 0);
for (int i = 1; i < consumer.getConfig().getCount(); i++) {
bindConsumer(consumer.spawn(), i);
}
log.info("Registered Consumer: " + consumer.getClass().getSimpleName());
} else {
log.warn("Count of less then one provided for Consumer: " + consumer.getClass().getSimpleName());
}
}
private void bindConsumer(EventConsumer consumer, int count) {
consumer.setChannel(createChannel());
try {
log.debug("About to make queue with name: " + consumer.getQueueName());
consumer.getChannel().queueDeclare(
consumer.getQueueName(),
consumer.getConfig().isDurable(),
consumer.getConfig().getCount() > 1,
consumer.getConfig().isAutoDelete(),
null
);
consumer.setLago(this);
for(String key : consumer.getConfig().getKeys()) {
// bind the queue to each key
consumer.getChannel().queueBind(consumer.getQueueName(), consumer.getConfig().getExchangeName(), key);
}
// but ony one bind for the consumer in general
consumer.getChannel().basicConsume(consumer.getQueueName(), consumer.getConfig().isAutoAck(),
consumer.getClass().getSimpleName() + "-" + (count + 1), consumer);
registeredConsumers.add(consumer);
} catch (IOException e) {
log.error("Could not declare queue and bind to consumer: " + e.getMessage(), e);
}
}
public List<EventConsumer> getRegisteredConsumers() {
return registeredConsumers;
}
public Connection connect () {
// if environment variable present, use that
// otherwise, use config. if no config, then throw exception
String connectionUrl = config.getConnectionEnvironmentUrl();
if (!connectionUrl.isEmpty()) {
connect(connectionUrl);
} else if (config.hasConnectionConfig()) {
connect(config.getUsername(), config.getPassword(), config.getVirtualHost(), config.getHost(), config.getPort());
} else {
throw new RuntimeException("Could not located rabbit mq configuration in environment or config");
}
return getConnection();
}
public Connection connect(String url) {
ConnectionFactory factory = new ConnectionFactory();
try {
factory.setUri(url);
return connect(factory);
} catch (URISyntaxException | NoSuchAlgorithmException | KeyManagementException | NullPointerException e) {
log.error("Could not connect to Rabbit over url " + url + ": ", e);
return null;
}
}
public Connection connect(String userName, String password, String virtualHost, String host, int port) {
ConnectionFactory factory = new ConnectionFactory();
factory.setUsername(userName);
factory.setPassword(password);
factory.setVirtualHost(virtualHost);
factory.setHost(host);
factory.setPort(port);
factory.setConnectionTimeout(config.getConnectionTimeout());
return connect(factory);
}
/**
* Connects usinsee ConnectionFacory}, allowing for custom configuration by the service.
* Warning: no configuration will be provided. Make sure that you've set values like automatic recovery
*
* @param factory the factory
* @return a connection
*/
public Connection connect(ConnectionFactory factory) {
try {
defaultFactorySettings(factory, config);
connectionFactory = factory;
connection = factory.newConnection();
log.info("Connected to Rabbit");
channel = createChannel();
log.debug("Declaring exchanges");
for (ExchangeConfig exchangeConfig : config.getExchanges()) {
channel.exchangeDeclare(exchangeConfig.getName(), exchangeConfig.getType(), exchangeConfig.isDurable(), exchangeConfig.isAutoDelete(), null);
}
// todo: declare internal api rpc consumer
} catch(IOException | TimeoutException e) {
log.error(e.getMessage(), e);
}
return connection;
}
/**
* Sets initial defaults for the factory during connection.
*/
private void defaultFactorySettings(ConnectionFactory factory, RabbitMQConfig config) {
// the Java client for Rabbit has inconsistent settings for timing values. e.g. second vs milliseconds
factory.setRequestedHeartbeat(config.getHeartbeatInterval());
factory.setConnectionTimeout(config.getConnectionTimeout());
factory.setAutomaticRecoveryEnabled(config.isAutomaticRecoveryEnabled());
factory.setTopologyRecoveryEnabled(config.isTopologyRecoveryEnabled());
factory.setExceptionHandler(exceptionHandler);
}
public Channel createChannel() {
try {
return connection.createChannel();
} catch (IOException e) {
log.error("Could not create channel: ", e);
return null;
}
}
public Channel getChannel() {
return channel;
}
public void setExceptionHandler(ExceptionHandler handler) {
exceptionHandler = handler;
}
public void close() {
try {
channel.close();
connection.close();
} catch (IOException | TimeoutException | NullPointerException e) {
log.error("Could not close connection: ", e);
}
}
@Override
public Connection getConnection() {
return connection;
}
public void publish(String exchangeName, String key, Object message, AMQP.BasicProperties properties) {
publish(exchangeName, key, message, properties, this.channel);
}
/**
*
* @param message Object containing the information you want to transmit. Could be as simple as a single value, a Map, an Object, etc. This object will be serialized using Jackson, so Jackson Annotations will be respected
* @param key String The routing key for outgoing message
* @param properties BasicProperties Standard RabbitMQ Basic Properties
* @param channel Channel The Channel to transmit on
* @param exchangeName String The name of the exchange to transmit on
*/
public void publish(String exchangeName, String key, Object message, AMQP.BasicProperties properties, Channel channel) {
try {
log.debug("Publishing to exchange '{}' with key '{}'", exchangeName, key);
channel.basicPublish(exchangeName, key, properties, OBJECT_MAPPER.writeValueAsString(message).getBytes());
} catch(IOException ioe) {
log.error("Failed to publish message: ", ioe);
}
}
/**
*
* @param exchangeName The name of the exchange to publish on
* @param key String The routing key to publish on
* @param message Object representing the outgoing data. Will typically encapsulate some sort of query information
* @param clazz Clazz The class of the expected return data
* @param channel Channel Channel to broadcast on
* @return Object Will be an instance of clazz
* @throws IOException If unable to connect or bind the queuetion
*/
public Object rpc(String exchangeName, String key, Object message, Class clazz, Channel channel) throws IOException {
return rpc(exchangeName, key, message, clazz, channel, UUID.randomUUID().toString());
}
/**
*
* @param exchangeName The name of the exchange to publish on
* @param key String The routing key to publish on
* @param message Object representing the outgoing data. Will typically encapsulate some sort of query information
* @param clazz Clazz The class of the expected return data
* @param channel Channel Channel to broadcas
* @param traceId A unique identifier for tracing communicationst on
* @return Object Will be an instance of clazz
* @throws IOException If unable to connect or bind the queuetion
*/
public Object rpc(String exchangeName, String key, Object message, Class clazz, Channel channel, String traceId) throws IOException {
// to do an RPC (synchronous, in this case) in RabbitMQ, we must do the following:
// 1. create a unique response queue for the rpc call
// 2. create a new channel for the queue //todo: eventually make this optional
// 3. define a response correlation id. create a basic properties object with the response id
// 4. publish
// 5. wait for the response on the unique queue. if timeout, prepare empty response
// 6. destroy unique queue
// 7. return response
// Also, allow configuration for logging response times, or timeouts on rpc calls
//
//
// Ok, furthermore, the RabbitMq java library has implementations of RPC and AsyncRPC on the channel class.
// Assuming they do what I think they do, they would be amazing to use. However:
// * I cannot find any documentation on how to use them, all searches for things like 'rabbitmq java client channel rpc' result in
// documentation about how to programatically do an rpc call (e.g. what we do here).
// * The official java rabbitmq documentation also says to do what we do here.
RpcStopWatch stopWatch = null;
if (config.isLogRpcTime()) {stopWatch = new RpcStopWatch().start();}
ObjectReader objectReader = OBJECT_MAPPER.readerFor(clazz);
String replyQueueName = channel.queueDeclare().getQueue();
log.info("Listening for rpc response on " + replyQueueName);
QueueingConsumer consumer = new QueueingConsumer(channel);
channel.queueBind(replyQueueName, exchangeName, replyQueueName);
channel.basicConsume(replyQueueName, true, consumer);
RabbitMQDeliveryDetails rpcDetails = buildRpcRabbitMQDeliveryDetails(exchangeName, key, replyQueueName, traceId);
// then publish the query
publish(exchangeName, key, message, rpcDetails.getBasicProperties(), channel);
log.debug("Waiting for rpc response delivery on " + key);
QueueingConsumer.Delivery delivery = null;
try {
delivery = consumer.nextDelivery(config.getRpcTimeout());
} catch (InterruptedException e) {
log.error("Thread interrupted while waiting for rpc response:", e);
delivery = null;
}
if (delivery != null) {
log.trace("RPC response received.");
if (delivery.getProperties().getCorrelationId().equals(rpcDetails.getBasicProperties().getCorrelationId())) {
log.trace("Correlation ids are equal.");
channel.basicCancel(consumer.getConsumerTag());
//
} else {
log.warn("Correlation ids not equal! key: " + key);
return null;
}
} else {
log.warn("Timeout occurred on RPC message to key: " + key);
return null;
}
// // we must clean up!
channel.queueUnbind(replyQueueName, exchangeName, replyQueueName);
channel.queueDelete(replyQueueName);
if (config.isLogRpcTime() && stopWatch != null) {
stopWatch.stopAndPublish(rpcDetails);
}
log.debug("Received: {}", new String(delivery.getBody()));
return objectReader.readValue(delivery.getBody());
}
private RabbitMQDeliveryDetails buildRpcRabbitMQDeliveryDetails(String exchangeName, String key, String replyQueueName, String traceId ) {
Map<String, Object> headers = new HashMap<>();
headers.put(RpcStopWatch.TRACE_ID, traceId);
AMQP.BasicProperties props = new AMQP.BasicProperties.Builder()
.correlationId(UUID.randomUUID().toString())
.replyTo(replyQueueName)
.headers(headers)
.build();
return new RabbitMQDeliveryDetails(new Envelope(0, true, exchangeName, key), props, "temp-rpc");
}
}
| TC-3290 - non-exclusive queues for rpc
| src/main/java/com/thirdchannel/rabbitmq/Lago.java | TC-3290 - non-exclusive queues for rpc |
|
Java | agpl-3.0 | 18f2081c4431054c28fd933ce537c8bb7c3850d3 | 0 | CompilerWorks/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine | /*
Derby - Class com.splicemachine.db.impl.drda.DRDAConnThread
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.splicemachine.db.impl.drda;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.OutputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DataTruncation;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Properties;
import java.util.TimeZone;
import java.util.Vector;
import com.splicemachine.db.catalog.SystemProcedures;
import com.splicemachine.db.iapi.error.StandardException;
import com.splicemachine.db.iapi.error.ExceptionSeverity;
import com.splicemachine.db.iapi.jdbc.*;
import com.splicemachine.db.iapi.reference.Attribute;
import com.splicemachine.db.iapi.reference.DRDAConstants;
import com.splicemachine.db.iapi.reference.JDBC30Translation;
import com.splicemachine.db.iapi.reference.Property;
import com.splicemachine.db.iapi.reference.SQLState;
import com.splicemachine.db.iapi.services.info.JVMInfo;
import com.splicemachine.db.iapi.services.monitor.Monitor;
import com.splicemachine.db.iapi.services.sanity.SanityManager;
import com.splicemachine.db.iapi.services.stream.HeaderPrintWriter;
import com.splicemachine.db.iapi.sql.conn.LanguageConnectionContext;
import com.splicemachine.db.iapi.types.SQLRowId;
import com.splicemachine.db.iapi.tools.i18n.LocalizedResource;
import com.splicemachine.db.impl.jdbc.EmbedConnection;
import com.splicemachine.db.impl.jdbc.EmbedSQLException;
import com.splicemachine.db.impl.jdbc.Util;
import com.splicemachine.db.jdbc.InternalDriver;
/**
* This class translates DRDA protocol from an application requester to JDBC
* for Derby and then translates the results from Derby to DRDA
* for return to the application requester.
*/
class DRDAConnThread extends Thread {
private static final String leftBrace = "{";
private static final String rightBrace = "}";
private static final byte NULL_VALUE = (byte)0xff;
private static final String SYNTAX_ERR = "42X01";
// Manager Level 3 constant.
private static final int MGRLVL_3 = 0x03;
// Manager Level 4 constant.
private static final int MGRLVL_4 = 0x04;
// Manager Level 5 constant.
private static final int MGRLVL_5 = 0x05;
// Manager level 6 constant.
private static final int MGRLVL_6 = 0x06;
// Manager Level 7 constant.
private static final int MGRLVL_7 = 0x07;
// Commit or rollback UOWDSP values
private static final int COMMIT = 1;
private static final int ROLLBACK = 2;
private int correlationID;
private InputStream sockis;
private OutputStream sockos;
private DDMReader reader;
private DDMWriter writer;
private DRDAXAProtocol xaProto;
private static int [] ACCRDB_REQUIRED = {CodePoint.RDBACCCL,
CodePoint.CRRTKN,
CodePoint.PRDID,
CodePoint.TYPDEFNAM,
CodePoint.TYPDEFOVR};
private static int MAX_REQUIRED_LEN = 5;
private int currentRequiredLength = 0;
private int [] required = new int[MAX_REQUIRED_LEN];
private NetworkServerControlImpl server; // server who created me
private Session session; // information about the session
private long timeSlice; // time slice for this thread
private Object timeSliceSync = new Object(); // sync object for updating time slice
private boolean logConnections; // log connections to databases
private boolean sendWarningsOnCNTQRY = false; // Send Warnings for SELECT if true
private Object logConnectionsSync = new Object(); // sync object for log connect
private boolean close; // end this thread
private Object closeSync = new Object(); // sync object for parent to close us down
private static HeaderPrintWriter logStream;
private AppRequester appRequester; // pointer to the application requester
// for the session being serviced
private Database database; // pointer to the current database
private int sqlamLevel; // SQLAM Level - determines protocol
// DRDA diagnostic level, DIAGLVL0 by default
private byte diagnosticLevel = (byte)0xF0;
// manager processing
private Vector unknownManagers;
private Vector knownManagers;
private Vector errorManagers;
private Vector errorManagersLevel;
// database accessed failed
private SQLException databaseAccessException;
// these fields are needed to feed back to jcc about a statement/procedure's PKGNAMCSN
/** The value returned by the previous call to
* <code>parsePKGNAMCSN()</code>. */
private Pkgnamcsn prevPkgnamcsn = null;
/** Current RDB Package Name. */
private DRDAString rdbnam = null;
/** Current RDB Collection Identifier. */
private DRDAString rdbcolid = null;
/** Current RDB Package Identifier. */
private DRDAString pkgid = null;
/** Current RDB Package Consistency Token. */
private DRDAString pkgcnstkn = null;
/** Current RDB Package Section Number. */
private int pkgsn;
private final static String TIMEOUT_STATEMENT = "SET STATEMENT_TIMEOUT ";
private int pendingStatementTimeout; // < 0 means no pending timeout to set
// this flag is for an execute statement/procedure which actually returns a result set;
// do not commit the statement, otherwise result set is closed
// for decryption
private static DecryptionManager decryptionManager;
// public key generated by Deffie-Hellman algorithm, to be passed to the encrypter,
// as well as used to initialize the cipher
private byte[] myPublicKey;
// generated target seed to be used to generate the password substitute
// as part of SECMEC_USRSSBPWD security mechanism
private byte[] myTargetSeed;
// Some byte[] constants that are frequently written into messages. It is more efficient to
// use these constants than to convert from a String each time
// (This replaces the qryscraft_ and notQryscraft_ static exception objects.)
private static final byte[] eod00000 = { '0', '0', '0', '0', '0' };
private static final byte[] eod02000 = { '0', '2', '0', '0', '0' };
private static final byte[] nullSQLState = { ' ', ' ', ' ', ' ', ' ' };
private static final byte[] errD4_D6 = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; // 12x0
private static final byte[] warn0_warnA = { ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ' }; // 11x ' '
private final static String AUTHENTICATION_PROVIDER_BUILTIN_CLASS =
"com.splicemachine.db.impl.jdbc.authentication.BasicAuthenticationServiceImpl";
private final static String AUTHENTICATION_PROVIDER_NONE_CLASS =
"com.splicemachine.db.impl.jdbc.authentication.NoneAuthenticationServiceImpl";
// Work around a classloader bug involving interrupt handling during
// class loading. If the first request to load the
// DRDAProtocolExceptionInfo class occurs during shutdown, the
// loading of the class may be aborted when the Network Server calls
// Thread.interrupt() on the DRDAConnThread. By including a static
// reference to the DRDAProtocolExceptionInfo class here, we ensure
// that it is loaded as soon as the DRDAConnThread class is loaded,
// and therefore we know we won't be trying to load the class during
// shutdown. See DERBY-1338 for more background, including pointers
// to the apparent classloader bug in the JVM.
private static final DRDAProtocolExceptionInfo dummy =
new DRDAProtocolExceptionInfo(0,0,0,false);
/**
* Tells if the reset / connect request is a deferred request.
* This information is used to work around a bug (DERBY-3596) in a
* compatible manner, which also avoids any changes in the client driver.
* <p>
* The bug manifests itself when a connection pool data source is used and
* logical connections are obtained from the physical connection associated
* with the data source. Each new logical connection causes a new physical
* connection on the server, including a new transaction. These connections
* and transactions are not closed / cleaned up.
*/
private boolean deferredReset = false;
// constructor
/**
* Create a new Thread for processing session requests
*
* @param session Session requesting processing
* @param server Server starting thread
* @param timeSlice timeSlice for thread
* @param logConnections
**/
DRDAConnThread(Session session, NetworkServerControlImpl server,
long timeSlice,
boolean logConnections) {
super();
// Create a more meaningful name for this thread (but preserve its
// thread id from the default name).
NetworkServerControlImpl.setUniqueThreadName(this, "DRDAConnThread");
this.session = session;
this.server = server;
this.timeSlice = timeSlice;
this.logConnections = logConnections;
this.pendingStatementTimeout = -1;
initialize();
}
/**
* Main routine for thread, loops until the thread is closed
* Gets a session, does work for the session
*/
public void run() {
if (SanityManager.DEBUG)
trace("Starting new connection thread");
Session prevSession;
while(!closed())
{
// get a new session
prevSession = session;
session = server.getNextSession(session);
if (session == null)
close();
if (closed())
break;
if (session != prevSession)
{
initializeForSession();
}
try {
long timeStart = System.currentTimeMillis();
switch (session.state)
{
case Session.INIT:
sessionInitialState();
if (session == null)
break;
// else fallthrough
case Session.ATTEXC:
case Session.SECACC:
case Session.CHKSEC:
long currentTimeSlice;
do {
try {
processCommands();
} catch (DRDASocketTimeoutException ste) {
// Just ignore the exception. This was
// a timeout on the read call in
// DDMReader.fill(), which will happen
// only when timeSlice is set.
}
currentTimeSlice = getTimeSlice();
} while ((currentTimeSlice <= 0) ||
(System.currentTimeMillis() - timeStart < currentTimeSlice));
break;
default:
// this is an error
agentError("Session in invalid state:" + session.state);
}
} catch (Exception e) {
if (e instanceof DRDAProtocolException &&
((DRDAProtocolException)e).isDisconnectException())
{
// client went away - this is O.K. here
closeSession();
}
else
{
handleException(e);
}
} catch (Error error) {
// Do as little as possible, but try to cut loose the client
// to avoid that it hangs in a socket read-call.
// TODO: Could make use of Throwable.addSuppressed here when
// compiled as Java 7 (or newer).
try {
closeSession();
} catch (Throwable t) {
// One last attempt...
try {
session.clientSocket.close();
} catch (IOException ioe) {
// Ignore, we're in deeper trouble already.
}
} finally {
// Rethrow the original error, ignore errors that happened
// when trying to close the socket to the client.
throw error;
}
}
}
if (SanityManager.DEBUG)
trace("Ending connection thread");
server.removeThread(this);
}
/**
* Get input stream
*
* @return input stream
*/
protected InputStream getInputStream()
{
return sockis;
}
/**
* Get output stream
*
* @return output stream
*/
protected OutputStream getOutputStream()
{
return sockos;
}
/**
* get DDMReader
* @return DDMReader for this thread
*/
protected DDMReader getReader()
{
return reader;
}
/**
* get DDMWriter
* @return DDMWriter for this thread
*/
protected DDMWriter getWriter()
{
return writer;
}
/**
* Get correlation id
*
* @return correlation id
*/
protected int getCorrelationID ()
{
return correlationID;
}
/**
* Get session we are working on
*
* @return session
*/
protected Session getSession()
{
return session;
}
/**
* Get Database we are working on
*
* @return database
*/
protected Database getDatabase()
{
return database;
}
/**
* Get server
*
* @return server
*/
protected NetworkServerControlImpl getServer()
{
return server;
}
/**
* Get correlation token
*
* @return crrtkn
*/
protected byte[] getCrrtkn()
{
if (database != null)
return database.crrtkn;
return null;
}
/**
* Get database name
*
* @return database name
*/
protected String getDbName()
{
if (database != null)
return database.getDatabaseName();
return null;
}
/**
* Close DRDA connection thread
*/
protected void close()
{
synchronized (closeSync)
{
close = true;
}
}
/**
* Set logging of connections
*
* @param value value to set for logging connections
*/
protected void setLogConnections(boolean value)
{
synchronized(logConnectionsSync) {
logConnections = value;
}
}
/**
* Set time slice value
*
* @param value new value for time slice
*/
protected void setTimeSlice(long value)
{
synchronized(timeSliceSync) {
timeSlice = value;
}
}
/**
* Indicate a communications failure
*
* @param arg1 - info about the communications failure
* @param arg2 - info about the communications failure
* @param arg3 - info about the communications failure
* @param arg4 - info about the communications failure
*
* @exception DRDAProtocolException disconnect exception always thrown
*/
protected void markCommunicationsFailure(String arg1, String arg2, String arg3,
String arg4) throws DRDAProtocolException
{
markCommunicationsFailure(null,arg1,arg2,arg3, arg4);
}
/**
* Indicate a communications failure. Log to db.log
*
* @param e - Source exception that was thrown
* @param arg1 - info about the communications failure
* @param arg2 - info about the communications failure
* @param arg3 - info about the communications failure
* @param arg4 - info about the communications failure
*
* @exception DRDAProtocolException disconnect exception always thrown
*/
protected void markCommunicationsFailure(Exception e, String arg1, String arg2, String arg3,
String arg4) throws DRDAProtocolException
{
String dbname = null;
if (database != null)
{
dbname = database.getDatabaseName();
}
if (e != null) {
println2Log(dbname,session.drdaID, e.getMessage());
server.consoleExceptionPrintTrace(e);
}
Object[] oa = {arg1,arg2,arg3,arg4};
throw DRDAProtocolException.newDisconnectException(this,oa);
}
/**
* Syntax error
*
* @param errcd Error code
* @param cpArg code point value
* @exception DRDAProtocolException
*/
protected void throwSyntaxrm(int errcd, int cpArg)
throws DRDAProtocolException
{
throw new
DRDAProtocolException(DRDAProtocolException.DRDA_Proto_SYNTAXRM,
this,
cpArg,
errcd);
}
/**
* Agent error - something very bad happened
*
* @param msg Message describing error
*
* @exception DRDAProtocolException newAgentError always thrown
*/
protected void agentError(String msg) throws DRDAProtocolException
{
String dbname = null;
if (database != null)
dbname = database.getDatabaseName();
throw DRDAProtocolException.newAgentError(this, CodePoint.SVRCOD_PRMDMG,
dbname, msg);
}
/**
* Missing code point
*
* @param codePoint code point value
* @exception DRDAProtocolException
*/
protected void missingCodePoint(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_REQ_OBJ_NOT_FOUND, codePoint);
}
/**
* Print a line to the DB2j log
*
* @param dbname database name
* @param drdaID DRDA identifier
* @param msg message
*/
protected static void println2Log(String dbname, String drdaID, String msg)
{
if (logStream == null)
logStream = Monitor.getStream();
if (dbname != null)
{
int endOfName = dbname.indexOf(';');
if (endOfName != -1)
dbname = dbname.substring(0, endOfName);
}
logStream.printlnWithHeader("(DATABASE = " + dbname + "), (DRDAID = " + drdaID + "), " + msg);
}
/**
* Write RDBNAM
*
* @param rdbnam database name
* @exception DRDAProtocolException
*/
protected void writeRDBNAM(String rdbnam)
throws DRDAProtocolException
{
CcsidManager currentManager = writer.getCurrentCcsidManager();
int len = currentManager.getByteLength(rdbnam);
if (len < CodePoint.RDBNAM_LEN)
len = CodePoint.RDBNAM_LEN;
/* Write the string padded */
writer.writeScalarPaddedString(CodePoint.RDBNAM, rdbnam, len);
}
/***************************************************************************
* Private methods
***************************************************************************/
/**
* Initialize class
*/
private void initialize()
{
// set input and output sockets
// this needs to be done before creating reader
sockis = session.sessionInput;
sockos = session.sessionOutput;
reader = new DDMReader(this, session.dssTrace);
writer = new DDMWriter(this, session.dssTrace);
/* At this stage we can initialize the strings as we have
* the CcsidManager for the DDMWriter. */
rdbnam = new DRDAString(writer);
rdbcolid = new DRDAString(writer);
pkgid = new DRDAString(writer);
pkgcnstkn = new DRDAString(writer);
}
/**
* Initialize for a new session
*/
private void initializeForSession()
{
// set input and output sockets
sockis = session.sessionInput;
sockos = session.sessionOutput;
// intialize reader and writer
reader.initialize(this, session.dssTrace);
writer.reset(session.dssTrace);
// initialize local pointers to session info
database = session.database;
appRequester = session.appRequester;
// set sqlamLevel
if (session.state == Session.ATTEXC)
sqlamLevel = appRequester.getManagerLevel(CodePoint.SQLAM);
/* All sessions MUST start as EBCDIC */
reader.setEbcdicCcsid();
writer.setEbcdicCcsid();
}
/**
* In initial state for a session,
* determine whether this is a command
* session or a DRDA protocol session. A command session is for changing
* the configuration of the Net server, e.g., turning tracing on
* If it is a command session, process the command and close the session.
* If it is a DRDA session, exchange server attributes and change session
* state.
*/
private void sessionInitialState()
throws Exception
{
// process NetworkServerControl commands - if it is not either valid protocol let the
// DRDA error handling handle it
if (reader.isCmd())
{
try {
server.processCommands(reader, writer, session);
// reset reader and writer
reader.initialize(this, null);
writer.reset(null);
closeSession();
} catch (Throwable t) {
if (t instanceof InterruptedException)
throw (InterruptedException)t;
else
{
server.consoleExceptionPrintTrace(t);
}
}
}
else
{
// exchange attributes with application requester
exchangeServerAttributes();
}
}
/**
* Cleans up and closes a result set if an exception is thrown
* when collecting QRYDTA in response to OPNQRY or CNTQRY.
*
* @param stmt the DRDA statement to clean up
* @param sqle the exception that was thrown
* @param writerMark start index for the first DSS to clear from
* the output buffer
* @exception DRDAProtocolException if a DRDA protocol error is
* detected
*/
private void cleanUpAndCloseResultSet(DRDAStatement stmt,
SQLException sqle,
int writerMark)
throws DRDAProtocolException
{
if (stmt != null) {
writer.clearDSSesBackToMark(writerMark);
if (!stmt.rsIsClosed()) {
try {
stmt.rsClose();
} catch (SQLException ec) {
if (SanityManager.DEBUG) {
trace("Warning: Error closing result set");
}
}
writeABNUOWRM();
writeSQLCARD(sqle, CodePoint.SVRCOD_ERROR, 0, 0);
}
} else {
writeSQLCARDs(sqle, 0);
}
errorInChain(sqle);
}
/**
* Process DRDA commands we can receive once server attributes have been
* exchanged.
*
* @exception DRDAProtocolException
*/
private void processCommands() throws DRDAProtocolException
{
DRDAStatement stmt = null;
int updateCount = 0;
boolean PRPSQLSTTfailed = false;
boolean checkSecurityCodepoint = session.requiresSecurityCodepoint();
int i = 0;
String chainedCheckpoint = null;
do
{
i++;
correlationID = reader.readDssHeader();
int codePoint = reader.readLengthAndCodePoint( false );
int writerMark = writer.markDSSClearPoint();
if (database != null && (reader.isChainedWithSameID() || reader.isChainedWithDiffID()) && i==1 && codePoint == CodePoint.EXCSQLSTT) {
EngineConnection ec = database.getConnection();
// System.out.println("connection ---> " + ec);
if (ec instanceof EmbedConnection) {
LanguageConnectionContext lcc = ((EmbedConnection) ec).getLanguageConnection();
chainedCheckpoint = lcc.getUniqueSavepointName();
try {
lcc.getTransactionExecute().setSavePoint(chainedCheckpoint, "BATCH_SAVEPOINT");
} catch (Exception e) {
e.printStackTrace();
throw DRDAProtocolException.newDisconnectException(this,null);
}
}
}
if (checkSecurityCodepoint)
verifyInOrderACCSEC_SECCHK(codePoint,session.getRequiredSecurityCodepoint());
switch(codePoint)
{
case CodePoint.CNTQRY:
try{
stmt = parseCNTQRY();
if (stmt != null)
{
writeQRYDTA(stmt);
if (stmt.rsIsClosed())
{
writeENDQRYRM(CodePoint.SVRCOD_WARNING);
writeNullSQLCARDobject();
}
// Send any warnings if JCC can handle them
checkWarning(null, null, stmt.getResultSet(), 0, false, sendWarningsOnCNTQRY);
writePBSD();
}
}
catch(SQLException e)
{
// if we got a SQLException we need to clean up and
// close the result set Beetle 4758
cleanUpAndCloseResultSet(stmt, e, writerMark);
}
break;
case CodePoint.EXCSQLIMM:
try {
updateCount = parseEXCSQLIMM();
// RESOLVE: checking updateCount is not sufficient
// since it will be 0 for creates, we need to know when
// any logged changes are made to the database
// Not getting this right for JCC is probably O.K., this
// will probably be a problem for ODBC and XA
// The problem is that JDBC doesn't provide this information
// so we would have to expand the JDBC API or call a
// builtin method to check(expensive)
// For now we will assume that every execute immediate
// does an update (that is the most conservative thing)
if (database.RDBUPDRM_sent == false)
{
writeRDBUPDRM();
}
// we need to set update count in SQLCARD
checkWarning(null, database.getDefaultStatement().getStatement(),
null, updateCount, true, true);
writePBSD();
} catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.EXCSQLSET:
try {
if (parseEXCSQLSET())
// all went well.
writeSQLCARDs(null,0);
}
catch (SQLWarning w)
{
writeSQLCARD(w, CodePoint.SVRCOD_WARNING, 0, 0);
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.PRPSQLSTT:
int sqldaType;
PRPSQLSTTfailed = false;
try {
database.getConnection().clearWarnings();
sqldaType = parsePRPSQLSTT();
database.getCurrentStatement().sqldaType = sqldaType;
if (sqldaType > 0) // do write SQLDARD
writeSQLDARD(database.getCurrentStatement(),
(sqldaType == CodePoint.TYPSQLDA_LIGHT_OUTPUT),
database.getConnection().getWarnings());
else
checkWarning(database.getConnection(), null, null, 0, true, true);
} catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
writeSQLCARDs(e, 0, true);
PRPSQLSTTfailed = true;
errorInChain(e);
}
break;
case CodePoint.OPNQRY:
PreparedStatement ps = null;
try {
if (PRPSQLSTTfailed) {
// read the command objects
// for ps with parameter
// Skip objects/parameters
skipRemainder(true);
// If we failed to prepare, then we fail
// to open, which means OPNQFLRM.
writeOPNQFLRM(null);
break;
}
Pkgnamcsn pkgnamcsn = parseOPNQRY();
if (pkgnamcsn != null)
{
stmt = database.getDRDAStatement(pkgnamcsn);
ps = stmt.getPreparedStatement();
ps.clearWarnings();
if (pendingStatementTimeout >= 0) {
ps.setQueryTimeout(pendingStatementTimeout);
pendingStatementTimeout = -1;
}
stmt.execute();
writeOPNQRYRM(false, stmt);
checkWarning(null, ps, null, 0, false, true);
long sentVersion = stmt.versionCounter;
long currentVersion =
((EnginePreparedStatement)stmt.ps).
getVersionCounter();
if (stmt.sqldaType ==
CodePoint.TYPSQLDA_LIGHT_OUTPUT &&
currentVersion != sentVersion) {
// DERBY-5459. The prepared statement has a
// result set and has changed on the server
// since we last informed the client about its
// shape, so re-send metadata.
//
// NOTE: This is an extension of the standard
// DRDA protocol since we send the SQLDARD
// even if it isn't requested in this case.
// This is OK because there is already code on the
// client to handle an unrequested SQLDARD at
// this point in the protocol.
writeSQLDARD(stmt, true, null);
}
writeQRYDSC(stmt, false);
stmt.rsSuspend();
if (stmt.getQryprctyp() == CodePoint.LMTBLKPRC &&
stmt.getQryrowset() != 0) {
// The DRDA spec allows us to send
// QRYDTA here if there are no LOB
// columns.
DRDAResultSet drdars =
stmt.getCurrentDrdaResultSet();
try {
if (drdars != null &&
!drdars.hasLobColumns()) {
writeQRYDTA(stmt);
}
} catch (SQLException sqle) {
cleanUpAndCloseResultSet(stmt, sqle,
writerMark);
}
}
}
writePBSD();
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
// The fix for DERBY-1196 removed code
// here to close the prepared statement
// if OPNQRY failed.
writeOPNQFLRM(e);
}
break;
case CodePoint.RDBCMM:
try
{
if (SanityManager.DEBUG)
trace("Received commit");
if (!database.getConnection().getAutoCommit())
{
database.getConnection().clearWarnings();
database.commit();
writeENDUOWRM(COMMIT);
checkWarning(database.getConnection(), null, null, 0, true, true);
}
// we only want to write one of these per transaction
// so set to false in preparation for next command
database.RDBUPDRM_sent = false;
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
// Even in case of error, we have to write the ENDUOWRM.
writeENDUOWRM(COMMIT);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.RDBRLLBCK:
try
{
if (SanityManager.DEBUG)
trace("Received rollback");
database.getConnection().clearWarnings();
database.rollback();
writeENDUOWRM(ROLLBACK);
checkWarning(database.getConnection(), null, null, 0, true, true);
// we only want to write one of these per transaction
// so set to false in preparation for next command
database.RDBUPDRM_sent = false;
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
// Even in case of error, we have to write the ENDUOWRM.
writeENDUOWRM(ROLLBACK);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.CLSQRY:
try{
stmt = parseCLSQRY();
stmt.rsClose();
writeSQLCARDs(null, 0);
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.EXCSAT:
parseEXCSAT();
writeEXCSATRD();
break;
case CodePoint.ACCSEC:
int securityCheckCode = parseACCSEC();
writeACCSECRD(securityCheckCode);
/* ACCSECRD is the last reply that is mandatorily in EBCDIC */
if (appRequester.supportsUtf8Ccsid()) {
switchToUtf8();
} else {
/* This thread might serve several requests.
* Revert if not supported by current client. */
switchToEbcdic();
}
checkSecurityCodepoint = true;
break;
case CodePoint.SECCHK:
if(parseDRDAConnection())
// security all checked and connection ok
checkSecurityCodepoint = false;
break;
/* since we don't support sqlj, we won't get bind commands from jcc, we
* might get it from ccc; just skip them.
*/
case CodePoint.BGNBND:
reader.skipBytes();
writeSQLCARDs(null, 0);
break;
case CodePoint.BNDSQLSTT:
reader.skipBytes();
parseSQLSTTDss();
writeSQLCARDs(null, 0);
break;
case CodePoint.SQLSTTVRB:
// optional
reader.skipBytes();
break;
case CodePoint.ENDBND:
reader.skipBytes();
writeSQLCARDs(null, 0);
break;
case CodePoint.DSCSQLSTT:
if (PRPSQLSTTfailed) {
reader.skipBytes();
writeSQLCARDs(null, 0);
break;
}
try {
boolean rtnOutput = parseDSCSQLSTT();
writeSQLDARD(database.getCurrentStatement(), rtnOutput,
null);
} catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
server.consoleExceptionPrint(e);
try {
writeSQLDARD(database.getCurrentStatement(), true, e);
} catch (SQLException e2) { // should not get here since doing nothing with ps
agentError("Why am I getting another SQLException?");
}
errorInChain(e);
}
break;
case CodePoint.EXCSQLSTT:
if (PRPSQLSTTfailed) {
// Skip parameters too if they are chained Beetle 4867
skipRemainder(true);
writeSQLCARDs(null, 0);
break;
}
try {
parseEXCSQLSTT();
DRDAStatement curStmt = database.getCurrentStatement();
if (curStmt != null)
curStmt.rsSuspend();
writePBSD();
} catch (SQLException e)
{
skipRemainder(true);
writer.clearDSSesBackToMark(writerMark);
if (SanityManager.DEBUG)
{
server.consoleExceptionPrint(e);
}
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.SYNCCTL:
if (xaProto == null)
xaProto = new DRDAXAProtocol(this);
xaProto.parseSYNCCTL();
try {
writePBSD();
} catch (SQLException se) {
server.consoleExceptionPrint(se);
errorInChain(se);
}
break;
default:
codePointNotSupported(codePoint);
}
if (SanityManager.DEBUG) {
String cpStr = CodePointNameTable.lookup(codePoint);
try {
PiggyBackedSessionData pbsd =
database.getPiggyBackedSessionData(false);
// DERBY-3596
// Don't perform this assert if a deferred reset is
// happening or has recently taken place, because the
// connection state has been changed under the feet of the
// piggy-backing mechanism.
if (!this.deferredReset && pbsd != null) {
// Session data has already been piggy-backed. Refresh
// the data from the connection, to make sure it has
// not changed behind our back.
pbsd.refresh();
SanityManager.ASSERT(!pbsd.isModified(),
"Unexpected PBSD modification: " + pbsd +
" after codePoint " + cpStr);
}
// Not having a pbsd here is ok. No data has been
// piggy-backed and the client has no cached values.
// If needed it will send an explicit request to get
// session data
} catch (SQLException sqle) {
server.consoleExceptionPrint(sqle);
SanityManager.THROWASSERT("Unexpected exception after " +
"codePoint "+cpStr, sqle);
}
}
// Set the correct chaining bits for whatever
// reply DSS(es) we just wrote. If we've reached
// the end of the chain, this method will send
// the DSS(es) across.
finalizeChain();
}
while (reader.isChainedWithSameID() || reader.isChainedWithDiffID());
if (chainedCheckpoint != null) {
EngineConnection ec = database.getConnection();
// System.out.println("afterChained ---> " + ec);
if (ec instanceof EmbedConnection) {
LanguageConnectionContext lcc = ((EmbedConnection) ec).getLanguageConnection();
try {
lcc.getTransactionExecute().releaseSavePoint(chainedCheckpoint, "BATCH_SAVEPOINT");
} catch (Exception e) {
e.printStackTrace();
// throw DRDAProtocolException.newDisconnectException(this, null);
}
}
}
}
/**
* If there's a severe error in the DDM chain, and if the header indicates
* "terminate chain on error", we stop processing further commands in the chain
* nor do we send any reply for them. In accordance to this, a SQLERRRM message
* indicating the severe error must have been sent! (otherwise application requestor,
* such as JCC, would not terminate the receiving of chain replies.)
*
* Each DRDA command is processed independently. DRDA defines no interdependencies
* across chained commands. A command is processed the same when received within
* a set of chained commands or received separately. The chaining was originally
* defined as a way to save network costs.
*
* @param e the SQLException raised
* @exception DRDAProtocolException
*/
private void errorInChain(SQLException e) throws DRDAProtocolException
{
if (reader.terminateChainOnErr() && (getExceptionSeverity(e) > CodePoint.SVRCOD_ERROR))
{
if (SanityManager.DEBUG) trace("terminating the chain on error...");
skipRemainder(false);
}
}
/**
* Exchange server attributes with application requester
*
* @exception DRDAProtocolException
*/
private void exchangeServerAttributes()
throws DRDAProtocolException
{
int codePoint;
correlationID = reader.readDssHeader();
if (SanityManager.DEBUG) {
if (correlationID == 0)
{
SanityManager.THROWASSERT(
"Unexpected value for correlationId = " + correlationID);
}
}
codePoint = reader.readLengthAndCodePoint( false );
// The first code point in the exchange of attributes must be EXCSAT
if (codePoint != CodePoint.EXCSAT)
{
//Throw PRCCNVRM
throw
new DRDAProtocolException(DRDAProtocolException.DRDA_Proto_PRCCNVRM,
this, codePoint,
CodePoint.PRCCNVCD_EXCSAT_FIRST_AFTER_CONN);
}
parseEXCSAT();
writeEXCSATRD();
finalizeChain();
session.setState(session.ATTEXC);
}
private boolean parseDRDAConnection() throws DRDAProtocolException
{
int codePoint;
boolean sessionOK = true;
int securityCheckCode = parseSECCHK();
if (SanityManager.DEBUG)
trace("*** SECCHKRM securityCheckCode is: "+securityCheckCode);
writeSECCHKRM(securityCheckCode);
//at this point if the security check failed, we're done, the session failed
if (securityCheckCode != 0)
{
return false;
}
correlationID = reader.readDssHeader();
codePoint = reader.readLengthAndCodePoint( false );
verifyRequiredObject(codePoint,CodePoint.ACCRDB);
int svrcod = parseACCRDB();
//If network server gets a null connection form InternalDriver, reply with
//RDBAFLRM and SQLCARD with null SQLException
if(database.getConnection() == null && databaseAccessException == null){
writeRDBfailure(CodePoint.RDBAFLRM);
return false;
}
//if earlier we couldn't access the database
if (databaseAccessException != null)
{
//if the Database was not found we will try DS
int failureType = getRdbAccessErrorCodePoint();
if (failureType == CodePoint.RDBNFNRM
|| failureType == CodePoint.RDBATHRM)
{
writeRDBfailure(failureType);
}
else
{
writeRDBfailure(CodePoint.RDBAFLRM);
}
return false;
}
else if (database.accessCount > 1 ) // already in conversation with database
{
writeRDBfailure(CodePoint.RDBACCRM);
return false;
}
else // everything is fine
writeACCRDBRM(svrcod);
// compare this application requester with previously stored
// application requesters and if we have already seen this one
// use stored application requester
session.appRequester = server.getAppRequester(appRequester);
return sessionOK;
}
/**
* Switch the DDMWriter and DDMReader to UTF8 IF supported
*/
private void switchToUtf8() {
writer.setUtf8Ccsid();
reader.setUtf8Ccsid();
}
/**
* Switch the DDMWriter and DDMReader to EBCDIC
*/
private void switchToEbcdic() {
writer.setEbcdicCcsid();
reader.setEbcdicCcsid();
}
/**
* Write RDB Failure
*
* Instance Variables
* SVRCOD - Severity Code - required
* RDBNAM - Relational Database name - required
* SRVDGN - Server Diagnostics - optional (not sent for now)
*
* @param codePoint codepoint of failure
*/
private void writeRDBfailure(int codePoint) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(codePoint);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_ERROR);
writeRDBNAM(database.getDatabaseName());
writer.endDdmAndDss();
switch(codePoint){
case CodePoint.RDBAFLRM:
//RDBAFLRM requires TYPDEFNAM and TYPDEFOVR
writer.createDssObject();
writer.writeScalarString(CodePoint.TYPDEFNAM,
CodePoint.TYPDEFNAM_QTDSQLASC);
writeTYPDEFOVR();
writer.endDss();
case CodePoint.RDBNFNRM:
case CodePoint.RDBATHRM:
writeSQLCARD(databaseAccessException,CodePoint.SVRCOD_ERROR,0,0);
case CodePoint.RDBACCRM:
//Ignore anything that was chained to the ACCRDB.
skipRemainder(false);
// Finalize chain state for whatever we wrote in
// response to ACCRDB.
finalizeChain();
break;
}
}
/* Check the database access exception and return the appropriate
error codepoint.
RDBNFNRM - Database not found
RDBATHRM - Not Authorized
RDBAFLRM - Access failure
@return RDB Access codepoint
*/
private int getRdbAccessErrorCodePoint()
{
String sqlState = databaseAccessException.getSQLState();
// These tests are ok since DATABASE_NOT_FOUND and
// AUTH_INVALID_USER_NAME are not ambigious error codes (on the first
// five characters) in SQLState. If they were, we would have to
// perform a similar check as done in method isAuthenticationException
if (sqlState.regionMatches(0,SQLState.DATABASE_NOT_FOUND,0,5)) {
// RDB not found codepoint
return CodePoint.RDBNFNRM;
} else {
if (isAuthenticationException(databaseAccessException) ||
sqlState.regionMatches(0,SQLState.AUTH_INVALID_USER_NAME,0,5)) {
// Not Authorized To RDB reply message codepoint
return CodePoint.RDBATHRM;
} else {
// RDB Access Failed Reply Message codepoint
return CodePoint.RDBAFLRM;
}
}
}
/**
* There are multiple reasons for not getting a connection, and
* all these should throw SQLExceptions with SQL state 08004
* according to the SQL standard. Since only one of these SQL
* states indicate that an authentication error has occurred, it
* is not enough to check that the SQL state is 08004 and conclude
* that authentication caused the exception to be thrown.
*
* This method tries to cast the exception to an EmbedSQLException
* and use getMessageId on that object to check for authentication
* error instead of the SQL state we get from
* SQLExceptions#getSQLState. getMessageId returns the entire id
* as defined in SQLState (e.g. 08004.C.1), while getSQLState only
* return the 5 first characters (i.e. 08004 instead of 08004.C.1)
*
* If the cast to EmbedSQLException is not successful, the
* assumption that SQL State 08004 is caused by an authentication
* failure is followed even though this is not correct. This was
* the pre DERBY-3060 way of solving the issue.
*
* @param sqlException The exception that is checked to see if
* this is really caused by an authentication failure
* @return true if sqlException is (or has to be assumed to be)
* caused by an authentication failure, false otherwise.
* @see SQLState
*/
private boolean isAuthenticationException (SQLException sqlException) {
boolean authFail = false;
// get exception which carries Derby messageID and args
SQLException se = Util.getExceptionFactory().
getArgumentFerry(sqlException);
if (se instanceof EmbedSQLException) {
// DERBY-3060: if this is an EmbedSQLException, we can
// check the messageId to find out what caused the
// exception.
String msgId = ((EmbedSQLException)se).getMessageId();
// Of the 08004.C.x messages, only
// SQLState.NET_CONNECT_AUTH_FAILED is an authentication
// exception
if (msgId.equals(SQLState.NET_CONNECT_AUTH_FAILED)) {
authFail = true;
}
} else {
String sqlState = se.getSQLState();
if (sqlState.regionMatches(0,SQLState.LOGIN_FAILED,0,5)) {
// Unchanged by DERBY-3060: This is not an
// EmbedSQLException, so we cannot check the
// messageId. As before DERBY-3060, we assume that all
// 08004 error codes are due to an authentication
// failure, even though this ambigious
authFail = true;
}
}
return authFail;
}
/**
* Verify userId and password
*
* Username and password is verified by making a connection to the
* database
*
* @return security check code, 0 is O.K.
* @exception DRDAProtocolException
*/
private int verifyUserIdPassword() throws DRDAProtocolException
{
databaseAccessException = null;
int retSecChkCode = 0;
String realName = database.getDatabaseName(); //first strip off properties
int endOfName = realName.indexOf(';');
if (endOfName != -1)
realName = realName.substring(0, endOfName);
retSecChkCode = getConnFromDatabaseName();
return retSecChkCode;
}
/**
* Get connection from a database name
*
* Username and password is verified by making a connection to the
* database
*
* @return security check code, 0 is O.K.
* @exception DRDAProtocolException
*/
private int getConnFromDatabaseName() throws DRDAProtocolException
{
Properties p = new Properties();
databaseAccessException = null;
//if we haven't got the correlation token yet, use session number for drdaID
if (session.drdaID == null)
session.drdaID = leftBrace + session.connNum + rightBrace;
p.put(Attribute.DRDAID_ATTR, session.drdaID);
// We pass extra property information for the authentication provider
// to successfully re-compute the substitute (hashed) password and
// compare it with what we've got from the requester (source).
//
// If a password attribute appears as part of the connection URL
// attributes, we then don't use the substitute hashed password
// to authenticate with the engine _as_ the one (if any) as part
// of the connection URL attributes, will be used to authenticate
// against Derby's BUILT-IN authentication provider - As a reminder,
// Derby allows password to be mentioned as part of the connection
// URL attributes, as this extra capability could be useful to pass
// passwords to external authentication providers for Derby; hence
// a password defined as part of the connection URL attributes cannot
// be substituted (single-hashed) as it is not recoverable.
if ((database.securityMechanism == CodePoint.SECMEC_USRSSBPWD) &&
(database.getDatabaseName().indexOf(Attribute.PASSWORD_ATTR) == -1))
{
p.put(Attribute.DRDA_SECMEC,
String.valueOf(database.securityMechanism));
p.put(Attribute.DRDA_SECTKN_IN,
DecryptionManager.toHexString(database.secTokenIn, 0,
database.secTokenIn.length));
p.put(Attribute.DRDA_SECTKN_OUT,
DecryptionManager.toHexString(database.secTokenOut, 0,
database.secTokenOut.length));
}
try {
database.makeConnection(p);
} catch (SQLException se) {
String sqlState = se.getSQLState();
databaseAccessException = se;
for (; se != null; se = se.getNextException())
{
if (SanityManager.DEBUG)
trace(se.getMessage());
println2Log(database.getDatabaseName(), session.drdaID, se.getMessage());
}
if (isAuthenticationException(databaseAccessException)) {
// need to set the security check code based on the
// reason the connection was denied, Derby doesn't say
// whether the userid or password caused the problem,
// so we will just return userid invalid
return CodePoint.SECCHKCD_USERIDINVALID;
} else {
return 0;
}
}
catch (Exception e)
{
// If Derby has shut down for some reason,
// we will send an agent error and then try to
// get the driver loaded again. We have to get
// rid of the client first in case they are holding
// the DriverManager lock.
println2Log(database.getDatabaseName(), session.drdaID,
"Driver not loaded"
+ e.getMessage());
try {
agentError("Driver not loaded");
}
catch (DRDAProtocolException dpe)
{
// Retry starting the server before rethrowing
// the protocol exception. Then hopfully all
// will be well when they try again.
try {
server.startNetworkServer();
} catch (Exception re) {
println2Log(database.getDatabaseName(), session.drdaID, "Failed attempt to reload driver " +re.getMessage() );
}
throw dpe;
}
}
// Everything worked so log connection to the database.
if (getLogConnections())
println2Log(database.getDatabaseName(), session.drdaID,
"Apache Derby Network Server connected to database " +
database.getDatabaseName());
return 0;
}
/**
* Parses EXCSAT (Exchange Server Attributes)
* Instance variables
* EXTNAM(External Name) - optional
* MGRLVLLS(Manager Levels) - optional
* SPVNAM(Supervisor Name) - optional
* SRVCLSNM(Server Class Name) - optional
* SRVNAM(Server Name) - optional, ignorable
* SRVRLSLV(Server Product Release Level) - optional, ignorable
*
* @exception DRDAProtocolException
*/
private void parseEXCSAT() throws DRDAProtocolException
{
int codePoint;
String strVal;
// There are three kinds of EXCSAT's we might get.
// 1) Initial Exchange attributes.
// For this we need to initialize the apprequester.
// Session state is set to ATTEXC and then the AR must
// follow up with ACCSEC and SECCHK to get the connection.
// 2) Send of EXCSAT as ping or mangager level adjustment.
// (see parseEXCSAT2())
// For this we just ignore the EXCSAT objects that
// are already set.
// 3) Send of EXCSAT for connection reset. (see parseEXCSAT2())
// This is treated just like ping and will be followed up
// by an ACCSEC request if in fact it is a connection reset.
// If we have already exchanged attributes once just
// process any new manager levels and return (case 2 and 3 above)
this.deferredReset = false; // Always reset, only set to true below.
if (appRequester != null)
{
// DERBY-3596
// Don't mess with XA requests, as the logic for these are handled
// by the server side (embedded) objects. Note that XA requests
// results in a different database object implementation, and it
// does not have the bug we are working around.
if (!appRequester.isXARequester()) {
this.deferredReset = true; // Non-XA deferred reset detected.
}
parseEXCSAT2();
return;
}
// set up a new Application Requester to store information about the
// application requester for this session
appRequester = new AppRequester();
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.EXTNAM:
appRequester.extnam = reader.readString();
if (SanityManager.DEBUG)
trace("extName = " + appRequester.extnam);
if (appRequester.extnam.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.EXTNAM);
break;
// optional
case CodePoint.MGRLVLLS:
parseMGRLVLLS(1);
break;
// optional
case CodePoint.SPVNAM:
appRequester.spvnam = reader.readString();
// This is specified as a null parameter so length should
// be zero
if (appRequester.spvnam != null)
badObjectLength(CodePoint.SPVNAM);
break;
// optional
case CodePoint.SRVNAM:
appRequester.srvnam = reader.readString();
if (SanityManager.DEBUG)
trace("serverName = " + appRequester.srvnam);
if (appRequester.srvnam.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.SRVNAM);
break;
// optional
case CodePoint.SRVRLSLV:
appRequester.srvrlslv = reader.readString();
if (SanityManager.DEBUG)
trace("serverlslv = " + appRequester.srvrlslv);
if (appRequester.srvrlslv.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.SRVRLSLV);
break;
// optional
case CodePoint.SRVCLSNM:
appRequester.srvclsnm = reader.readString();
if (SanityManager.DEBUG)
trace("serverClassName = " + appRequester.srvclsnm);
if (appRequester.srvclsnm.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.SRVCLSNM);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
}
/**
* Parses EXCSAT2 (Exchange Server Attributes)
* Instance variables
* EXTNAM(External Name) - optional
* MGRLVLLS(Manager Levels) - optional
* SPVNAM(Supervisor Name) - optional
* SRVCLSNM(Server Class Name) - optional
* SRVNAM(Server Name) - optional, ignorable
* SRVRLSLV(Server Product Release Level) - optional, ignorable
*
* @exception DRDAProtocolException
*
* This parses a second occurrence of an EXCSAT command
* The target must ignore the values for extnam, srvclsnm, srvnam and srvrlslv.
* I am also going to ignore spvnam since it should be null anyway.
* Only new managers can be added.
*/
private void parseEXCSAT2() throws DRDAProtocolException
{
int codePoint;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.EXTNAM:
case CodePoint.SRVNAM:
case CodePoint.SRVRLSLV:
case CodePoint.SRVCLSNM:
case CodePoint.SPVNAM:
reader.skipBytes();
break;
// optional
case CodePoint.MGRLVLLS:
parseMGRLVLLS(2);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
}
/**
* Parse manager levels
* Instance variables
* MGRLVL - repeatable, required
* CODEPOINT
* CCSIDMGR - CCSID Manager
* CMNAPPC - LU 6.2 Conversational Communications Manager
* CMNSYNCPT - SNA LU 6.2 SyncPoint Conversational Communications Manager
* CMNTCPIP - TCP/IP Communication Manager
* DICTIONARY - Dictionary
* RDB - Relational Database
* RSYNCMGR - Resynchronization Manager
* SECMGR - Security Manager
* SQLAM - SQL Application Manager
* SUPERVISOR - Supervisor
* SYNCPTMGR - Sync Point Manager
* VALUE
*
* On the second appearance of this codepoint, it can only add managers
*
* @param time 1 for first time this is seen, 2 for subsequent ones
* @exception DRDAProtocolException
*
*/
private void parseMGRLVLLS(int time) throws DRDAProtocolException
{
int manager, managerLevel;
int currentLevel;
// set up vectors to keep track of manager information
unknownManagers = new Vector();
knownManagers = new Vector();
errorManagers = new Vector();
errorManagersLevel = new Vector();
if (SanityManager.DEBUG)
trace("Manager Levels");
while (reader.moreDdmData())
{
manager = reader.readNetworkShort();
managerLevel = reader.readNetworkShort();
if (CodePoint.isKnownManager(manager))
{
knownManagers.add(new Integer(manager));
//if the manager level hasn't been set, set it
currentLevel = appRequester.getManagerLevel(manager);
if (currentLevel == appRequester.MGR_LEVEL_UNKNOWN)
appRequester.setManagerLevel(manager, managerLevel);
else
{
//if the level is still the same we'll ignore it
if (currentLevel != managerLevel)
{
//keep a list of conflicting managers
errorManagers.add(new Integer(manager));
errorManagersLevel.add(new Integer (managerLevel));
}
}
}
else
unknownManagers.add(new Integer(manager));
if (SanityManager.DEBUG)
trace("Manager = " + java.lang.Integer.toHexString(manager) +
" ManagerLevel " + managerLevel);
}
sqlamLevel = appRequester.getManagerLevel(CodePoint.SQLAM);
// did we have any errors
if (errorManagers.size() > 0)
{
Object [] oa = new Object[errorManagers.size()*2];
int j = 0;
for (int i = 0; i < errorManagers.size(); i++)
{
oa[j++] = errorManagers.get(i);
oa[j++] = errorManagersLevel.get(i);
}
throw new DRDAProtocolException(DRDAProtocolException.DRDA_Proto_MGRLVLRM,
this, 0,
0, oa);
}
}
/**
* Write reply to EXCSAT command
* Instance Variables
* EXTNAM - External Name (optional)
* MGRLVLLS - Manager Level List (optional)
* SRVCLSNM - Server Class Name (optional) - used by JCC
* SRVNAM - Server Name (optional)
* SRVRLSLV - Server Product Release Level (optional)
*
* @exception DRDAProtocolException
*/
private void writeEXCSATRD() throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.EXCSATRD);
writer.writeScalarString(CodePoint.EXTNAM, server.att_extnam);
//only reply with manager levels if we got sent some
if (knownManagers != null && knownManagers.size() > 0)
writeMGRLEVELS();
writer.writeScalarString(CodePoint.SRVCLSNM, server.att_srvclsnm);
writer.writeScalarString(CodePoint.SRVNAM, server.ATT_SRVNAM);
writer.writeScalarString(CodePoint.SRVRLSLV, server.att_srvrlslv);
writer.endDdmAndDss();
}
/**
* Write manager levels
* The target server must not provide information for any target
* managers unless the source explicitly requests it.
* For each manager class, if the target server's support level
* is greater than or equal to the source server's level, then the source
* server's level is returned for that class if the target server can operate
* at the source's level; otherwise a level 0 is returned. If the target
* server's support level is less than the source server's level, the
* target server's level is returned for that class. If the target server
* does not recognize the code point of a manager class or does not support
* that class, it returns a level of 0. The target server then waits
* for the next command or for the source server to terminate communications.
* When the source server receives EXCSATRD, it must compare each of the entries
* in the mgrlvlls parameter it received to the corresponding entries in the mgrlvlls
* parameter it sent. If any level mismatches, the source server must decide
* whether it can use or adjust to the lower level of target support for that manager
* class. There are no architectural criteria for making this decision.
* The source server can terminate communications or continue at the target
* servers level of support. It can also attempt to use whatever
* commands its user requests while receiving error reply messages for real
* functional mismatches.
* The manager levels the source server specifies or the target server
* returns must be compatible with the manager-level dependencies of the specified
* manangers. Incompatible manager levels cannot be specified.
* Instance variables
* MGRLVL - repeatable, required
* CODEPOINT
* CCSIDMGR - CCSID Manager
* CMNAPPC - LU 6.2 Conversational Communications Manager
* CMNSYNCPT - SNA LU 6.2 SyncPoint Conversational Communications Manager
* CMNTCPIP - TCP/IP Communication Manager
* DICTIONARY - Dictionary
* RDB - Relational Database
* RSYNCMGR - Resynchronization Manager
* SECMGR - Security Manager
* SQLAM - SQL Application Manager
* SUPERVISOR - Supervisor
* SYNCPTMGR - Sync Point Manager
* XAMGR - XA manager
* VALUE
*/
private void writeMGRLEVELS() throws DRDAProtocolException
{
int manager;
int appLevel;
int serverLevel;
writer.startDdm(CodePoint.MGRLVLLS);
for (int i = 0; i < knownManagers.size(); i++)
{
manager = ((Integer)knownManagers.get(i)).intValue();
appLevel = appRequester.getManagerLevel(manager);
serverLevel = server.getManagerLevel(manager);
if (serverLevel >= appLevel)
{
//Note appLevel has already been set to 0 if we can't support
//the original app Level
writer.writeCodePoint4Bytes(manager, appLevel);
}
else
{
writer.writeCodePoint4Bytes(manager, serverLevel);
// reset application manager level to server level
appRequester.setManagerLevel(manager, serverLevel);
}
}
// write 0 for all unknown managers
for (int i = 0; i < unknownManagers.size(); i++)
{
manager = ((Integer)unknownManagers.get(i)).intValue();
writer.writeCodePoint4Bytes(manager, 0);
}
writer.endDdm();
}
/**
* Parse Access Security
*
* If the target server supports the SECMEC requested by the application requester
* then a single value is returned and it is identical to the SECMEC value
* in the ACCSEC command. If the target server does not support the SECMEC
* requested, then one or more values are returned and the application requester
* must choose one of these values for the security mechanism.
* We currently support
* - user id and password (default for JCC)
* - encrypted user id and password
* - strong password substitute (USRSSBPWD w/
* Derby network client only)
*
* Instance variables
* SECMGRNM - security manager name - optional
* SECMEC - security mechanism - required
* RDBNAM - relational database name - optional
* SECTKN - security token - optional, (required if sec mech. needs it)
*
* @return security check code - 0 if everything O.K.
*/
private int parseACCSEC() throws DRDAProtocolException
{
int securityCheckCode = 0;
int securityMechanism = 0;
byte [] secTokenIn = null;
reader.markCollection();
int codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch(codePoint)
{
//optional
case CodePoint.SECMGRNM:
// this is defined to be 0 length
if (reader.getDdmLength() != 0)
badObjectLength(CodePoint.SECMGRNM);
break;
//required
case CodePoint.SECMEC:
checkLength(CodePoint.SECMEC, 2);
securityMechanism = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("parseACCSEC - Security mechanism = " + securityMechanism);
// if Property.DRDA_PROP_SECURITYMECHANISM has been set, then
// network server only accepts connections which use that
// security mechanism. No other types of connections
// are accepted.
// Make check to see if this property has been set.
// if set, and if the client requested security mechanism
// is not the same, then return a security check code
// that the server does not support/allow this security
// mechanism
if ( (server.getSecurityMechanism() !=
NetworkServerControlImpl.INVALID_OR_NOTSET_SECURITYMECHANISM)
&& securityMechanism != server.getSecurityMechanism())
{
securityCheckCode = CodePoint.SECCHKCD_NOTSUPPORTED;
if (SanityManager.DEBUG) {
trace("parseACCSEC - SECCHKCD_NOTSUPPORTED [1] - " +
securityMechanism + " <> " +
server.getSecurityMechanism() + "\n");
}
}
else
{
// for plain text userid,password USRIDPWD, and USRIDONL
// no need of decryptionManager
if (securityMechanism != CodePoint.SECMEC_USRIDPWD &&
securityMechanism != CodePoint.SECMEC_USRIDONL)
{
// These are the only other mechanisms we understand
if (((securityMechanism != CodePoint.SECMEC_EUSRIDPWD) ||
(securityMechanism == CodePoint.SECMEC_EUSRIDPWD &&
!server.supportsEUSRIDPWD())
) &&
(securityMechanism !=
CodePoint.SECMEC_USRSSBPWD))
//securityCheckCode = CodePoint.SECCHKCD_NOTSUPPORTED;
{
securityCheckCode = CodePoint.SECCHKCD_NOTSUPPORTED;
if (SanityManager.DEBUG) {
trace("parseACCSEC - SECCHKCD_NOTSUPPORTED [2]\n");
}
}
else
{
// We delay the initialization and required
// processing for SECMEC_USRSSBPWD as we need
// to ensure the database is booted so that
// we can verify that the current auth scheme
// is set to BUILT-IN or NONE. For this we need
// to have the RDBNAM codepoint available.
//
// See validateSecMecUSRSSBPWD() call below
if (securityMechanism ==
CodePoint.SECMEC_USRSSBPWD)
break;
// SECMEC_EUSRIDPWD initialization
try {
if (decryptionManager == null)
decryptionManager = new DecryptionManager();
myPublicKey = decryptionManager.obtainPublicKey();
} catch (SQLException e) {
println2Log(null, session.drdaID, e.getMessage());
// Local security service non-retryable error.
securityCheckCode = CodePoint.SECCHKCD_0A;
}
}
}
}
break;
//optional (currently required for Derby - needed for
// DERBY-528 as well)
case CodePoint.RDBNAM:
String dbname = parseRDBNAM();
Database d = session.getDatabase(dbname);
if (d == null)
initializeDatabase(dbname);
else
{
// reset database for connection re-use
// DERBY-3596
// If we are reusing resources for a new physical
// connection, reset the database object. If the client
// is in the process of creating a new logical
// connection only, don't reset the database object.
if (!deferredReset) {
d.reset();
}
database = d;
}
break;
//optional - depending on security Mechanism
case CodePoint.SECTKN:
secTokenIn = reader.readBytes();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for required CodePoint's
if (securityMechanism == 0)
missingCodePoint(CodePoint.SECMEC);
if (database == null)
initializeDatabase(null);
database.securityMechanism = securityMechanism;
database.secTokenIn = secTokenIn;
// If security mechanism is SECMEC_USRSSBPWD, then ensure it can be
// used for the database or system based on the client's connection
// URL and its identity.
if (securityCheckCode == 0 &&
(database.securityMechanism == CodePoint.SECMEC_USRSSBPWD))
{
if (SanityManager.DEBUG)
SanityManager.ASSERT((securityCheckCode == 0),
"SECMEC_USRSSBPWD: securityCheckCode should not " +
"already be set, found it initialized with " +
"a value of '" + securityCheckCode + "'.");
securityCheckCode = validateSecMecUSRSSBPWD();
}
// need security token
if (securityCheckCode == 0 &&
(database.securityMechanism == CodePoint.SECMEC_EUSRIDPWD ||
database.securityMechanism == CodePoint.SECMEC_USRSSBPWD) &&
database.secTokenIn == null)
securityCheckCode = CodePoint.SECCHKCD_SECTKNMISSING_OR_INVALID;
// shouldn't have security token
if (securityCheckCode == 0 &&
(database.securityMechanism == CodePoint.SECMEC_USRIDPWD ||
database.securityMechanism == CodePoint.SECMEC_USRIDONL) &&
database.secTokenIn != null)
securityCheckCode = CodePoint.SECCHKCD_SECTKNMISSING_OR_INVALID;
if (SanityManager.DEBUG)
trace("** ACCSECRD securityCheckCode is: " + securityCheckCode);
// If the security check was successful set the session state to
// security accesseed. Otherwise go back to attributes exchanged so we
// require another ACCSEC
if (securityCheckCode == 0)
session.setState(session.SECACC);
else
session.setState(session.ATTEXC);
return securityCheckCode;
}
/**
* Parse OPNQRY
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* QRYBLKSZ - Query Block Size - required
* QRYBLKCTL - Query Block Protocol Control - optional
* MAXBLKEXT - Maximum Number of Extra Blocks - optional - default value 0
* OUTOVROPT - Output Override Option
* QRYROWSET - Query Rowset Size - optional - level 7
* MONITOR - Monitor events - optional.
*
* @return RDB Package Name, Consistency Token, and Section Number
* @exception DRDAProtocolException
*/
private Pkgnamcsn parseOPNQRY() throws DRDAProtocolException, SQLException
{
Pkgnamcsn pkgnamcsn = null;
boolean gotQryblksz = false;
int blksize = 0;
int qryblkctl = CodePoint.QRYBLKCTL_DEFAULT;
int maxblkext = CodePoint.MAXBLKEXT_DEFAULT;
int qryrowset = CodePoint.QRYROWSET_DEFAULT;
int qryclsimp = DRDAResultSet.QRYCLSIMP_DEFAULT;
int outovropt = CodePoint.OUTOVRFRS;
reader.markCollection();
int codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch(codePoint)
{
//optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.OPNQRY);
break;
//required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
//required
case CodePoint.QRYBLKSZ:
blksize = parseQRYBLKSZ();
gotQryblksz = true;
break;
//optional
case CodePoint.QRYBLKCTL:
qryblkctl = reader.readNetworkShort();
//The only type of query block control we can specify here
//is forced fixed row
if (qryblkctl != CodePoint.FRCFIXROW)
invalidCodePoint(qryblkctl);
if (SanityManager.DEBUG)
trace("!!qryblkctl = "+Integer.toHexString(qryblkctl));
gotQryblksz = true;
break;
//optional
case CodePoint.MAXBLKEXT:
maxblkext = reader.readSignedNetworkShort();
if (SanityManager.DEBUG)
trace("maxblkext = "+maxblkext);
break;
// optional
case CodePoint.OUTOVROPT:
outovropt = parseOUTOVROPT();
break;
//optional
case CodePoint.QRYROWSET:
//Note minimum for OPNQRY is 0
qryrowset = parseQRYROWSET(0);
break;
case CodePoint.QRYCLSIMP:
// Implicitly close non-scrollable cursor
qryclsimp = parseQRYCLSIMP();
break;
case CodePoint.QRYCLSRLS:
// Ignore release of read locks. Nothing we can do here
parseQRYCLSRLS();
break;
// optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for required variables
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
if (!gotQryblksz)
missingCodePoint(CodePoint.QRYBLKSZ);
// get the statement we are opening
DRDAStatement stmt = database.getDRDAStatement(pkgnamcsn);
if (stmt == null)
{
//XXX should really throw a SQL Exception here
invalidValue(CodePoint.PKGNAMCSN);
}
// check that this statement is not already open
// commenting this check out for now
// it turns out that JCC doesn't send a close if executeQuery is
// done again without closing the previous result set
// this check can't be done since the second executeQuery should work
//if (stmt.state != DRDAStatement.NOT_OPENED)
//{
// writeQRYPOPRM();
// pkgnamcsn = null;
//}
//else
//{
stmt.setOPNQRYOptions(blksize,qryblkctl,maxblkext,outovropt,
qryrowset, qryclsimp);
//}
// read the command objects
// for ps with parameter
if (reader.isChainedWithSameID())
{
if (SanityManager.DEBUG)
trace("&&&&&& parsing SQLDTA");
parseOPNQRYobjects(stmt);
}
return pkgnamcsn;
}
/**
* Parse OPNQRY objects
* Objects
* TYPDEFNAM - Data type definition name - optional
* TYPDEFOVR - Type defintion overrides - optional
* SQLDTA- SQL Program Variable Data - optional
*
* If TYPDEFNAM and TYPDEFOVR are supplied, they apply to the objects
* sent with the statement. Once the statement is over, the default values
* sent in the ACCRDB are once again in effect. If no values are supplied,
* the values sent in the ACCRDB are used.
* Objects may follow in one DSS or in several DSS chained together.
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void parseOPNQRYobjects(DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
int codePoint;
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, stmt, parseTYPDEFNAM());
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(stmt);
break;
// optional
case CodePoint.SQLDTA:
parseSQLDTA(stmt);
break;
// optional
case CodePoint.EXTDTA:
readAndSetAllExtParams(stmt, false);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
}
/**
* Parse OUTOVROPT - this indicates whether output description can be
* overridden on just the first CNTQRY or on any CNTQRY
*
* @return output override option
* @exception DRDAProtocolException
*/
private int parseOUTOVROPT() throws DRDAProtocolException
{
checkLength(CodePoint.OUTOVROPT, 1);
int outovropt = reader.readUnsignedByte();
if (SanityManager.DEBUG)
trace("output override option: "+outovropt);
if (outovropt != CodePoint.OUTOVRFRS && outovropt != CodePoint.OUTOVRANY)
invalidValue(CodePoint.OUTOVROPT);
return outovropt;
}
/**
* Parse QRYBLSZ - this gives the maximum size of the query blocks that
* can be returned to the requester
*
* @return query block size
* @exception DRDAProtocolException
*/
private int parseQRYBLKSZ() throws DRDAProtocolException
{
checkLength(CodePoint.QRYBLKSZ, 4);
int blksize = reader.readNetworkInt();
if (SanityManager.DEBUG)
trace("qryblksz = "+blksize);
if (blksize < CodePoint.QRYBLKSZ_MIN || blksize > CodePoint.QRYBLKSZ_MAX)
invalidValue(CodePoint.QRYBLKSZ);
return blksize;
}
/**
* Parse QRYROWSET - this is the number of rows to return
*
* @param minVal - minimum value
* @return query row set size
* @exception DRDAProtocolException
*/
private int parseQRYROWSET(int minVal) throws DRDAProtocolException
{
checkLength(CodePoint.QRYROWSET, 4);
int qryrowset = reader.readNetworkInt();
if (SanityManager.DEBUG)
trace("qryrowset = " + qryrowset);
if (qryrowset < minVal || qryrowset > CodePoint.QRYROWSET_MAX)
invalidValue(CodePoint.QRYROWSET);
return qryrowset;
}
/** Parse a QRYCLSIMP - Implicitly close non-scrollable cursor
* after end of data.
* @return true to close on end of data
*/
private int parseQRYCLSIMP() throws DRDAProtocolException
{
checkLength(CodePoint.QRYCLSIMP, 1);
int qryclsimp = reader.readUnsignedByte();
if (SanityManager.DEBUG)
trace ("qryclsimp = " + qryclsimp);
if (qryclsimp != CodePoint.QRYCLSIMP_SERVER_CHOICE &&
qryclsimp != CodePoint.QRYCLSIMP_YES &&
qryclsimp != CodePoint.QRYCLSIMP_NO )
invalidValue(CodePoint.QRYCLSIMP);
return qryclsimp;
}
private int parseQRYCLSRLS() throws DRDAProtocolException
{
reader.skipBytes();
return 0;
}
/**
* Write a QRYPOPRM - Query Previously opened
* Instance Variables
* SVRCOD - Severity Code - required - 8 ERROR
* RDBNAM - Relational Database Name - required
* PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number - required
*
* @exception DRDAProtocolException
*/
private void writeQRYPOPRM() throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.QRYPOPRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_ERROR);
writeRDBNAM(database.getDatabaseName());
writePKGNAMCSN();
writer.endDdmAndDss();
}
/**
* Write a QRYNOPRM - Query Not Opened
* Instance Variables
* SVRCOD - Severity Code - required - 4 Warning 8 ERROR
* RDBNAM - Relational Database Name - required
* PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number - required
*
* @param svrCod Severity Code
* @exception DRDAProtocolException
*/
private void writeQRYNOPRM(int svrCod) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.QRYNOPRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, svrCod);
writeRDBNAM(database.getDatabaseName());
writePKGNAMCSN();
writer.endDdmAndDss();
}
/**
* Write a OPNQFLRM - Open Query Failure
* Instance Variables
* SVRCOD - Severity Code - required - 8 ERROR
* RDBNAM - Relational Database Name - required
*
* @param e Exception describing failure
*
* @exception DRDAProtocolException
*/
private void writeOPNQFLRM(SQLException e) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.OPNQFLRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_ERROR);
writeRDBNAM(database.getDatabaseName());
writer.endDdm();
writer.startDdm(CodePoint.SQLCARD);
writeSQLCAGRP(e, 0, 0);
writer.endDdmAndDss();
}
/**
* Write PKGNAMCSN
* Instance Variables
* NAMESYMDR - database name - not validated
* RDBCOLID - RDB Collection Identifier
* PKGID - RDB Package Identifier
* PKGCNSTKN - RDB Package Consistency Token
* PKGSN - RDB Package Section Number
*
* There are two possible formats, fixed and extended which includes length
* information for the strings
*
* @throws DRDAProtocolException
*/
private void writePKGNAMCSN(byte[] pkgcnstkn) throws DRDAProtocolException
{
writer.startDdm(CodePoint.PKGNAMCSN);
if (rdbnam.length() <= CodePoint.RDBNAM_LEN &&
rdbcolid.length() <= CodePoint.RDBCOLID_LEN &&
pkgid.length() <= CodePoint.PKGID_LEN)
{ // if none of RDBNAM, RDBCOLID and PKGID have a length of
// more than 18, use fixed format
writer.writeScalarPaddedString(rdbnam, CodePoint.RDBNAM_LEN);
writer.writeScalarPaddedString(rdbcolid, CodePoint.RDBCOLID_LEN);
writer.writeScalarPaddedString(pkgid, CodePoint.PKGID_LEN);
writer.writeScalarPaddedBytes(pkgcnstkn,
CodePoint.PKGCNSTKN_LEN, (byte) 0);
writer.writeShort(pkgsn);
}
else // extended format
{
int len = Math.max(CodePoint.RDBNAM_LEN, rdbnam.length());
writer.writeShort(len);
writer.writeScalarPaddedString(rdbnam, len);
len = Math.max(CodePoint.RDBCOLID_LEN, rdbcolid.length());
writer.writeShort(len);
writer.writeScalarPaddedString(rdbcolid, len);
len = Math.max(CodePoint.PKGID_LEN, pkgid.length());
writer.writeShort(len);
writer.writeScalarPaddedString(pkgid, len);
writer.writeScalarPaddedBytes(pkgcnstkn,
CodePoint.PKGCNSTKN_LEN, (byte) 0);
writer.writeShort(pkgsn);
}
writer.endDdm();
}
private void writePKGNAMCSN() throws DRDAProtocolException
{
writePKGNAMCSN(pkgcnstkn.getBytes());
}
/**
* Parse CNTQRY - Continue Query
* Instance Variables
* RDBNAM - Relational Database Name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number - required
* QRYBLKSZ - Query Block Size - required
* QRYRELSCR - Query Relative Scrolling Action - optional
* QRYSCRORN - Query Scroll Orientation - optional - level 7
* QRYROWNBR - Query Row Number - optional
* QRYROWSNS - Query Row Sensitivity - optional - level 7
* QRYBLKRST - Query Block Reset - optional - level 7
* QRYRTNDTA - Query Returns Data - optional - level 7
* QRYROWSET - Query Rowset Size - optional - level 7
* QRYRFRTBL - Query Refresh Answer Set Table - optional
* NBRROW - Number of Fetch or Insert Rows - optional
* MAXBLKEXT - Maximum number of extra blocks - optional
* RTNEXTDTA - Return of EXTDTA Option - optional
* MONITOR - Monitor events - optional.
*
* @return DRDAStatement we are continuing
* @throws DRDAProtocolException
* @throws SQLException
*/
private DRDAStatement parseCNTQRY() throws DRDAProtocolException, SQLException
{
byte val;
Pkgnamcsn pkgnamcsn = null;
boolean gotQryblksz = false;
boolean qryrelscr = true;
long qryrownbr = 1;
boolean qryrfrtbl = false;
int nbrrow = 1;
int blksize = 0;
int maxblkext = -1;
long qryinsid;
boolean gotQryinsid = false;
int qryscrorn = CodePoint.QRYSCRREL;
boolean qryrowsns = false;
boolean gotQryrowsns = false;
boolean qryblkrst = false;
boolean qryrtndta = true;
int qryrowset = CodePoint.QRYROWSET_DEFAULT;
int rtnextdta = CodePoint.RTNEXTROW;
reader.markCollection();
int codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch(codePoint)
{
//optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.CNTQRY);
break;
//required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
//required
case CodePoint.QRYBLKSZ:
blksize = parseQRYBLKSZ();
gotQryblksz = true;
break;
//optional
case CodePoint.QRYRELSCR:
qryrelscr = readBoolean(CodePoint.QRYRELSCR);
if (SanityManager.DEBUG)
trace("qryrelscr = "+qryrelscr);
break;
//optional
case CodePoint.QRYSCRORN:
checkLength(CodePoint.QRYSCRORN, 1);
qryscrorn = reader.readUnsignedByte();
if (SanityManager.DEBUG)
trace("qryscrorn = "+qryscrorn);
switch (qryscrorn)
{
case CodePoint.QRYSCRREL:
case CodePoint.QRYSCRABS:
case CodePoint.QRYSCRAFT:
case CodePoint.QRYSCRBEF:
break;
default:
invalidValue(CodePoint.QRYSCRORN);
}
break;
//optional
case CodePoint.QRYROWNBR:
checkLength(CodePoint.QRYROWNBR, 8);
qryrownbr = reader.readNetworkLong();
if (SanityManager.DEBUG)
trace("qryrownbr = "+qryrownbr);
break;
//optional
case CodePoint.QRYROWSNS:
checkLength(CodePoint.QRYROWSNS, 1);
qryrowsns = readBoolean(CodePoint.QRYROWSNS);
if (SanityManager.DEBUG)
trace("qryrowsns = "+qryrowsns);
gotQryrowsns = true;
break;
//optional
case CodePoint.QRYBLKRST:
checkLength(CodePoint.QRYBLKRST, 1);
qryblkrst = readBoolean(CodePoint.QRYBLKRST);
if (SanityManager.DEBUG)
trace("qryblkrst = "+qryblkrst);
break;
//optional
case CodePoint.QRYRTNDTA:
qryrtndta = readBoolean(CodePoint.QRYRTNDTA);
if (SanityManager.DEBUG)
trace("qryrtndta = "+qryrtndta);
break;
//optional
case CodePoint.QRYROWSET:
//Note minimum for CNTQRY is 1
qryrowset = parseQRYROWSET(1);
if (SanityManager.DEBUG)
trace("qryrowset = "+qryrowset);
break;
//optional
case CodePoint.QRYRFRTBL:
qryrfrtbl = readBoolean(CodePoint.QRYRFRTBL);
if (SanityManager.DEBUG)
trace("qryrfrtbl = "+qryrfrtbl);
break;
//optional
case CodePoint.NBRROW:
checkLength(CodePoint.NBRROW, 4);
nbrrow = reader.readNetworkInt();
if (SanityManager.DEBUG)
trace("nbrrow = "+nbrrow);
break;
//optional
case CodePoint.MAXBLKEXT:
checkLength(CodePoint.MAXBLKEXT, 2);
maxblkext = reader.readSignedNetworkShort();
if (SanityManager.DEBUG)
trace("maxblkext = "+maxblkext);
break;
//optional
case CodePoint.RTNEXTDTA:
checkLength(CodePoint.RTNEXTDTA, 1);
rtnextdta = reader.readUnsignedByte();
if (rtnextdta != CodePoint.RTNEXTROW &&
rtnextdta != CodePoint.RTNEXTALL)
invalidValue(CodePoint.RTNEXTDTA);
if (SanityManager.DEBUG)
trace("rtnextdta = "+rtnextdta);
break;
// required for SQLAM >= 7
case CodePoint.QRYINSID:
checkLength(CodePoint.QRYINSID, 8);
qryinsid = reader.readNetworkLong();
gotQryinsid = true;
if (SanityManager.DEBUG)
trace("qryinsid = "+qryinsid);
break;
// optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for required variables
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
if (!gotQryblksz)
missingCodePoint(CodePoint.QRYBLKSZ);
if (sqlamLevel >= MGRLVL_7 && !gotQryinsid)
missingCodePoint(CodePoint.QRYINSID);
// get the statement we are continuing
DRDAStatement stmt = database.getDRDAStatement(pkgnamcsn);
if (stmt == null)
{
//XXX should really throw a SQL Exception here
invalidValue(CodePoint.CNTQRY);
}
if (stmt.rsIsClosed())
{
writeQRYNOPRM(CodePoint.SVRCOD_ERROR);
skipRemainder(true);
return null;
}
stmt.setQueryOptions(blksize,qryrelscr,qryrownbr,qryrfrtbl,nbrrow,maxblkext,
qryscrorn,qryrowsns,qryblkrst,qryrtndta,qryrowset,
rtnextdta);
if (reader.isChainedWithSameID())
parseCNTQRYobjects(stmt);
return stmt;
}
/**
* Skip remainder of current DSS and all chained DSS'es
*
* @param onlySkipSameIds True if we _only_ want to skip DSS'es
* that are chained with the SAME id as the current DSS.
* False means skip ALL chained DSSes, whether they're
* chained with same or different ids.
* @exception DRDAProtocolException
*/
private void skipRemainder(boolean onlySkipSameIds) throws DRDAProtocolException
{
reader.skipDss();
while (reader.isChainedWithSameID() ||
(!onlySkipSameIds && reader.isChainedWithDiffID()))
{
reader.readDssHeader();
reader.skipDss();
}
}
/**
* Parse CNTQRY objects
* Instance Variables
* OUTOVR - Output Override Descriptor - optional
*
* @param stmt DRDA statement we are working on
* @exception DRDAProtocolException
*/
private void parseCNTQRYobjects(DRDAStatement stmt) throws DRDAProtocolException, SQLException
{
int codePoint;
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// optional
case CodePoint.OUTOVR:
parseOUTOVR(stmt);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
}
/**
* Parse OUTOVR - Output Override Descriptor
* This specifies the output format for data to be returned as output to a SQL
* statement or as output from a query.
*
* @param stmt DRDA statement this applies to
* @exception DRDAProtocolException
*/
private void parseOUTOVR(DRDAStatement stmt) throws DRDAProtocolException, SQLException
{
boolean first = true;
int numVars;
int dtaGrpLen;
int tripType;
int tripId;
int precision;
int start = 0;
while (true)
{
dtaGrpLen = reader.readUnsignedByte();
tripType = reader.readUnsignedByte();
tripId = reader.readUnsignedByte();
// check if we have reached the end of the data
if (tripType == FdocaConstants.RLO_TRIPLET_TYPE)
{
//read last part of footer
reader.skipBytes();
break;
}
numVars = (dtaGrpLen - 3) / 3;
if (SanityManager.DEBUG)
trace("num of vars is: "+numVars);
int[] outovr_drdaType = null;
if (first)
{
outovr_drdaType = new int[numVars];
first = false;
}
else
{
int[] oldoutovr_drdaType = stmt.getOutovr_drdaType();
int oldlen = oldoutovr_drdaType.length;
// create new array and copy over already read stuff
outovr_drdaType = new int[oldlen + numVars];
System.arraycopy(oldoutovr_drdaType, 0,
outovr_drdaType,0,
oldlen);
start = oldlen;
}
for (int i = start; i < numVars + start; i++)
{
int drdaType = reader.readUnsignedByte();
if (!database.supportsLocator()) {
// ignore requests for locator when it is not supported
if ((drdaType >= DRDAConstants.DRDA_TYPE_LOBLOC)
&& (drdaType <= DRDAConstants.DRDA_TYPE_NCLOBLOC)) {
if (SanityManager.DEBUG) {
trace("ignoring drdaType: " + drdaType);
}
reader.readNetworkShort(); // Skip rest
continue;
}
}
outovr_drdaType[i] = drdaType;
if (SanityManager.DEBUG)
trace("drdaType is: "+ outovr_drdaType[i]);
precision = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("drdaLength is: "+precision);
outovr_drdaType[i] |= (precision << 8);
}
stmt.setOutovr_drdaType(outovr_drdaType);
}
}
/**
* Piggy-back any modified session attributes on the current message. Writes
* a PBSD conataining one or both of PBSD_ISO and PBSD_SCHEMA. PBSD_ISO is
* followed by the jdbc isolation level as an unsigned byte. PBSD_SCHEMA is
* followed by the name of the current schema as an UTF-8 String.
* @throws java.sql.SQLException
* @throws com.splicemachine.db.impl.drda.DRDAProtocolException
*/
private void writePBSD() throws SQLException, DRDAProtocolException
{
if (!appRequester.supportsSessionDataCaching()) {
return;
}
PiggyBackedSessionData pbsd = database.getPiggyBackedSessionData(true);
if (SanityManager.DEBUG) {
SanityManager.ASSERT(pbsd != null, "pbsd is not expected to be null");
}
// DERBY-3596
// Reset the flag. In sane builds it is used to avoid an assert, but
// we want to reset it as soon as possible to avoid masking real bugs.
// We have to do this because we are changing the connection state
// at an unexpected time (deferred reset, see parseSECCHK). This was
// done to avoid having to change the client code.
this.deferredReset = false;
pbsd.refresh();
if (pbsd.isModified()) {
writer.createDssReply();
writer.startDdm(CodePoint.PBSD);
if (pbsd.isIsoModified()) {
writer.writeScalar1Byte(CodePoint.PBSD_ISO, pbsd.getIso());
}
if (pbsd.isSchemaModified()) {
writer.startDdm(CodePoint.PBSD_SCHEMA);
writer.writeString(pbsd.getSchema());
writer.endDdm();
}
writer.endDdmAndDss();
}
pbsd.setUnmodified();
if (SanityManager.DEBUG) {
PiggyBackedSessionData pbsdNew =
database.getPiggyBackedSessionData(true);
SanityManager.ASSERT(pbsdNew == pbsd,
"pbsdNew and pbsd are expected to reference " +
"the same object");
pbsd.refresh();
SanityManager.ASSERT
(!pbsd.isModified(),
"pbsd=("+pbsd+") is not expected to be modified");
}
}
/**
* Write OPNQRYRM - Open Query Complete
* Instance Variables
* SVRCOD - Severity Code - required
* QRYPRCTYP - Query Protocol Type - required
* SQLCSRHLD - Hold Cursor Position - optional
* QRYATTSCR - Query Attribute for Scrollability - optional - level 7
* QRYATTSNS - Query Attribute for Sensitivity - optional - level 7
* QRYATTUPD - Query Attribute for Updatability -optional - level 7
* QRYINSID - Query Instance Identifier - required - level 7
* SRVDGN - Server Diagnostic Information - optional
*
* @param isDssObject - return as a DSS object (part of a reply)
* @param stmt - DRDA statement we are processing
*
* @exception DRDAProtocolException
*/
private void writeOPNQRYRM(boolean isDssObject, DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
if (SanityManager.DEBUG)
trace("WriteOPNQRYRM");
if (isDssObject)
writer.createDssObject();
else
writer.createDssReply();
writer.startDdm(CodePoint.OPNQRYRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD,CodePoint.SVRCOD_INFO);
// There is currently a problem specifying LMTBLKPRC for LOBs with JCC
// JCC will throw an ArrayOutOfBounds exception. Once this is fixed, we
// don't need to pass the two arguments for getQryprctyp.
int prcType = stmt.getQryprctyp();
if (SanityManager.DEBUG)
trace("sending QRYPRCTYP: " + prcType);
writer.writeScalar2Bytes(CodePoint.QRYPRCTYP, prcType);
//pass the SQLCSRHLD codepoint only if statement producing the ResultSet has
//hold cursors over commit set. In case of stored procedures which use server-side
//JDBC, the holdability of the ResultSet will be the holdability of the statement
//in the stored procedure, not the holdability of the calling statement.
if (stmt.getCurrentDrdaResultSet().withHoldCursor == ResultSet.HOLD_CURSORS_OVER_COMMIT)
writer.writeScalar1Byte(CodePoint.SQLCSRHLD, CodePoint.TRUE);
if (sqlamLevel >= MGRLVL_7)
{
writer.writeScalarHeader(CodePoint.QRYINSID, 8);
//This is implementer defined. DB2 uses this for the nesting level
//of the query. A query from an application would be nesting level 0,
//from a stored procedure, nesting level 1, from a recursive call of
//a stored procedure, nesting level 2, etc.
writer.writeInt(0);
//This is a unique sequence number per session
writer.writeInt(session.qryinsid++);
//Write the scroll attributes if they are set
if (stmt.isScrollable())
{
writer.writeScalar1Byte(CodePoint.QRYATTSCR, CodePoint.TRUE);
if ((stmt.getConcurType() == ResultSet.CONCUR_UPDATABLE) &&
(stmt.getResultSet().getType() ==
ResultSet.TYPE_SCROLL_INSENSITIVE)) {
writer.writeScalar1Byte(CodePoint.QRYATTSNS,
CodePoint.QRYSNSSTC);
} else {
writer.writeScalar1Byte(CodePoint.QRYATTSNS,
CodePoint.QRYINS);
}
}
if (stmt.getConcurType() == ResultSet.CONCUR_UPDATABLE) {
if (stmt.getResultSet() != null) {
// Resultset concurrency can be less than statement
// concurreny if the underlying language resultset
// is not updatable.
if (stmt.getResultSet().getConcurrency() ==
ResultSet.CONCUR_UPDATABLE) {
writer.writeScalar1Byte(CodePoint.QRYATTUPD,
CodePoint.QRYUPD);
} else {
writer.writeScalar1Byte(CodePoint.QRYATTUPD,
CodePoint.QRYRDO);
}
} else {
writer.writeScalar1Byte(CodePoint.QRYATTUPD,
CodePoint.QRYUPD);
}
} else {
writer.writeScalar1Byte(CodePoint.QRYATTUPD, CodePoint.QRYRDO);
}
}
writer.endDdmAndDss ();
}
/**
* Write ENDQRYRM - query process has terminated in such a manner that the
* query or result set is now closed. It cannot be resumed with the CNTQRY
* command or closed with the CLSQRY command
* @param svrCod Severity code - WARNING or ERROR
* @exception DRDAProtocolException
*/
private void writeENDQRYRM(int svrCod) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.ENDQRYRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD,svrCod);
writer.endDdmAndDss();
}
/**
* Write ABNUOWRM - query process has terminated in an error condition
* such as deadlock or lock timeout.
* Severity code is always error
* * @exception DRDAProtocolException
*/
private void writeABNUOWRM() throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.ABNUOWRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD,CodePoint.SVRCOD_ERROR);
writeRDBNAM(database.getDatabaseName());
writer.endDdmAndDss();
}
/**
* Parse database name
*
* @return database name
*
* @exception DRDAProtocolException
*/
private String parseRDBNAM() throws DRDAProtocolException
{
String name;
byte [] rdbName = reader.readBytes();
if (rdbName.length == 0)
{
// throw RDBNFNRM
rdbNotFound(null);
}
//SQLAM level 7 allows db name up to 255, level 6 fixed len 18
if (rdbName.length < CodePoint.RDBNAM_LEN || rdbName.length > CodePoint.MAX_NAME)
badObjectLength(CodePoint.RDBNAM);
name = reader.convertBytes(rdbName);
// trim trailing blanks from the database name
name = name.trim();
if (SanityManager.DEBUG)
trace("RdbName " + name);
return name;
}
/**
* Write ACCSECRD
* If the security mechanism is known, we just send it back along with
* the security token if encryption is going to be used.
* If the security mechanism is not known, we send a list of the ones
* we know.
* Instance Variables
* SECMEC - security mechanism - required
* SECTKN - security token - optional (required if security mechanism
* uses encryption)
* SECCHKCD - security check code - error occurred in processing ACCSEC
*
* @param securityCheckCode
*
* @exception DRDAProtocolException
*/
private void writeACCSECRD(int securityCheckCode)
throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.ACCSECRD);
if (securityCheckCode != CodePoint.SECCHKCD_NOTSUPPORTED)
writer.writeScalar2Bytes(CodePoint.SECMEC, database.securityMechanism);
else
{
// if server doesnt recognize or allow the client requested security mechanism,
// then need to return the list of security mechanisms supported/allowed by the server
// check if server is set to accept connections from client at a certain
// security mechanism, if so send only the security mechanism that the
// server will accept, to the client
if ( server.getSecurityMechanism() != NetworkServerControlImpl.INVALID_OR_NOTSET_SECURITYMECHANISM )
writer.writeScalar2Bytes(CodePoint.SECMEC, server.getSecurityMechanism());
else
{
// note: per the DDM manual , ACCSECRD response is of
// form SECMEC (value{value..})
// Need to fix the below to send a list of supported security
// mechanisms for value of one SECMEC codepoint (JIRA 926)
// these are the ones we know about
writer.writeScalar2Bytes(CodePoint.SECMEC, CodePoint.SECMEC_USRIDPWD);
// include EUSRIDPWD in the list of supported secmec only if
// server can truely support it in the jvm that is running in
if ( server.supportsEUSRIDPWD())
writer.writeScalar2Bytes(CodePoint.SECMEC, CodePoint.SECMEC_EUSRIDPWD);
writer.writeScalar2Bytes(CodePoint.SECMEC, CodePoint.SECMEC_USRIDONL);
writer.writeScalar2Bytes(CodePoint.SECMEC, CodePoint.SECMEC_USRSSBPWD);
}
}
if (securityCheckCode != 0)
{
writer.writeScalar1Byte(CodePoint.SECCHKCD, securityCheckCode);
}
else
{
// we need to send back the key if encryption is being used
if (database.securityMechanism == CodePoint.SECMEC_EUSRIDPWD)
writer.writeScalarBytes(CodePoint.SECTKN, myPublicKey);
else if (database.securityMechanism == CodePoint.SECMEC_USRSSBPWD)
writer.writeScalarBytes(CodePoint.SECTKN, myTargetSeed);
}
writer.endDdmAndDss ();
if (securityCheckCode != 0) {
// then we have an error and so can ignore the rest of the
// DSS request chain.
skipRemainder(false);
}
finalizeChain();
}
/**
* Parse security check
* Instance Variables
* SECMGRNM - security manager name - optional, ignorable
* SECMEC - security mechanism - required
* SECTKN - security token - optional, (required if encryption used)
* PASSWORD - password - optional, (required if security mechanism uses it)
* NEWPASSWORD - new password - optional, (required if sec mech. uses it)
* USRID - user id - optional, (required if sec mec. uses it)
* RDBNAM - database name - optional (required if databases can have own sec.)
*
*
* @return security check code
* @exception DRDAProtocolException
*/
private int parseSECCHK() throws DRDAProtocolException
{
int codePoint, securityCheckCode = 0;
int securityMechanism = 0;
databaseAccessException = null;
reader.markCollection();
codePoint = reader.getCodePoint();
if (this.deferredReset) {
// Skip the SECCHK, but assure a minimal degree of correctness.
while (codePoint != -1) {
switch (codePoint) {
// Note the fall-through.
// Minimal level of checking to detect protocol errors.
// NOTE: SECMGR level 8 code points are not handled.
case CodePoint.SECMGRNM:
case CodePoint.SECMEC:
case CodePoint.SECTKN:
case CodePoint.PASSWORD:
case CodePoint.NEWPASSWORD:
case CodePoint.USRID:
case CodePoint.RDBNAM:
reader.skipBytes();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
} else {
while (codePoint != -1)
{
switch (codePoint)
{
//optional, ignorable
case CodePoint.SECMGRNM:
reader.skipBytes();
break;
//required
case CodePoint.SECMEC:
checkLength(CodePoint.SECMEC, 2);
securityMechanism = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("parseSECCHK - Security mechanism = " + securityMechanism);
//RESOLVE - spec is not clear on what should happen
//in this case
if (securityMechanism != database.securityMechanism)
invalidValue(CodePoint.SECMEC);
break;
//optional - depending on security Mechanism
case CodePoint.SECTKN:
if ((database.securityMechanism !=
CodePoint.SECMEC_EUSRIDPWD) &&
(database.securityMechanism !=
CodePoint.SECMEC_USRSSBPWD))
{
securityCheckCode = CodePoint.SECCHKCD_SECTKNMISSING_OR_INVALID;
reader.skipBytes();
}
else if (database.securityMechanism ==
CodePoint.SECMEC_EUSRIDPWD)
{
if (database.decryptedUserId == null)
{
try {
database.decryptedUserId =
reader.readEncryptedString(
decryptionManager,
database.securityMechanism,
myPublicKey,
database.secTokenIn);
} catch (SQLException se) {
println2Log(database.getDatabaseName(), session.drdaID,
se.getMessage());
if (securityCheckCode == 0)
//userid invalid
securityCheckCode = CodePoint.SECCHKCD_13;
}
database.userId = database.decryptedUserId;
if (SanityManager.DEBUG)
trace("**decrypted userid is: "+database.userId);
}
else if (database.decryptedPassword == null)
{
try {
database.decryptedPassword =
reader.readEncryptedString(
decryptionManager,
database.securityMechanism,
myPublicKey,
database.secTokenIn);
} catch (SQLException se) {
println2Log(database.getDatabaseName(), session.drdaID,
se.getMessage());
if (securityCheckCode == 0)
//password invalid
securityCheckCode = CodePoint.SECCHKCD_0F;
}
database.password = database.decryptedPassword;
if (SanityManager.DEBUG)
trace("**decrypted password is: " +
database.password);
}
}
else if (database.securityMechanism ==
CodePoint.SECMEC_USRSSBPWD)
{
if (database.passwordSubstitute == null)
{
database.passwordSubstitute = reader.readBytes();
if (SanityManager.DEBUG)
trace("** Substitute Password is:" +
DecryptionManager.toHexString(
database.passwordSubstitute, 0,
database.passwordSubstitute.length));
database.password =
DecryptionManager.toHexString(
database.passwordSubstitute, 0,
database.passwordSubstitute.length);
}
}
else
{
tooMany(CodePoint.SECTKN);
}
break;
//optional - depending on security Mechanism
case CodePoint.PASSWORD:
database.password = reader.readString();
if (SanityManager.DEBUG) trace("PASSWORD " + database.password);
break;
//optional - depending on security Mechanism
//we are not supporting this method so we'll skip bytes
case CodePoint.NEWPASSWORD:
reader.skipBytes();
break;
//optional - depending on security Mechanism
case CodePoint.USRID:
database.userId = reader.readString();
if (SanityManager.DEBUG) trace("USERID " + database.userId);
break;
//optional - depending on security Mechanism
case CodePoint.RDBNAM:
String dbname = parseRDBNAM();
if (database != null)
{
if (database.getDatabaseName() == null) {
// we didn't get the RDBNAM on ACCSEC. Set it here
database.setDatabaseName(dbname);
session.addDatabase(database);
session.database = database;
}
else if (!database.getDatabaseName().equals(dbname))
rdbnamMismatch(CodePoint.SECCHK);
}
else
{
// we should already have added the database in ACCSEC
// added code here in case we make the SECMEC session rather
// than database wide
initializeDatabase(dbname);
}
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for SECMEC which is required
if (securityMechanism == 0)
missingCodePoint(CodePoint.SECMEC);
// Check that we have a database name.
if (database == null || database.getDatabaseName() == null)
missingCodePoint(CodePoint.RDBNAM);
//check if we have a userid and password when we need it
if (securityCheckCode == 0 &&
(database.securityMechanism == CodePoint.SECMEC_USRIDPWD||
database.securityMechanism == CodePoint.SECMEC_USRIDONL ))
{
if (database.userId == null)
securityCheckCode = CodePoint.SECCHKCD_USERIDMISSING;
else if (database.securityMechanism == CodePoint.SECMEC_USRIDPWD)
{
if (database.password == null)
securityCheckCode = CodePoint.SECCHKCD_PASSWORDMISSING;
}
//Note, we'll ignore encryptedUserId and encryptedPassword if they
//are also set
}
if (securityCheckCode == 0 &&
database.securityMechanism == CodePoint.SECMEC_USRSSBPWD)
{
if (database.userId == null)
securityCheckCode = CodePoint.SECCHKCD_USERIDMISSING;
else if (database.passwordSubstitute == null)
securityCheckCode = CodePoint.SECCHKCD_PASSWORDMISSING;
}
if (securityCheckCode == 0 &&
database.securityMechanism == CodePoint.SECMEC_EUSRIDPWD)
{
if (database.decryptedUserId == null)
securityCheckCode = CodePoint.SECCHKCD_USERIDMISSING;
else if (database.decryptedPassword == null)
securityCheckCode = CodePoint.SECCHKCD_PASSWORDMISSING;
}
// RESOLVE - when we do security we need to decrypt encrypted userid & password
// before proceeding
} // End "if (deferredReset) ... else ..." block
// verify userid and password, if we haven't had any errors thus far.
if ((securityCheckCode == 0) && (databaseAccessException == null))
{
// DERBY-3596: Reset server side (embedded) physical connection for
// use with a new logical connection on the client.
if (this.deferredReset) {
// Reset the existing connection here.
try {
database.getConnection().resetFromPool();
database.getConnection().setHoldability(
ResultSet.HOLD_CURSORS_OVER_COMMIT);
// Reset isolation level to default, as the client is in
// the process of creating a new logical connection.
database.getConnection().setTransactionIsolation(
Connection.TRANSACTION_READ_COMMITTED);
} catch (SQLException sqle) {
handleException(sqle);
}
} else {
securityCheckCode = verifyUserIdPassword();
}
}
// Security all checked
if (securityCheckCode == 0)
session.setState(session.CHKSEC);
return securityCheckCode;
}
/**
* Write security check reply
* Instance variables
* SVRCOD - serverity code - required
* SECCHKCD - security check code - required
* SECTKN - security token - optional, ignorable
* SVCERRNO - security service error number
* SRVDGN - Server Diagnostic Information
*
* @exception DRDAProtocolException
*/
private void writeSECCHKRM(int securityCheckCode) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.SECCHKRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, svrcodFromSecchkcd(securityCheckCode));
writer.writeScalar1Byte(CodePoint.SECCHKCD, securityCheckCode);
writer.endDdmAndDss ();
if (securityCheckCode != 0) {
// then we have an error and are going to end up ignoring the rest
// of the DSS request chain.
skipRemainder(false);
}
finalizeChain();
}
/**
* Calculate SVRCOD value from SECCHKCD
*
* @param securityCheckCode
* @return SVRCOD value
*/
private int svrcodFromSecchkcd(int securityCheckCode)
{
if (securityCheckCode == 0 || securityCheckCode == 2 ||
securityCheckCode == 5 || securityCheckCode == 8)
return CodePoint.SVRCOD_INFO;
else
return CodePoint.SVRCOD_ERROR;
}
/**
* Parse access RDB
* Instance variables
* RDBACCCL - RDB Access Manager Class - required must be SQLAM
* CRRTKN - Correlation Token - required
* RDBNAM - Relational database name -required
* PRDID - Product specific identifier - required
* TYPDEFNAM - Data Type Definition Name -required
* TYPDEFOVR - Type definition overrides -required
* RDBALWUPD - RDB Allow Updates optional
* PRDDTA - Product Specific Data - optional - ignorable
* STTDECDEL - Statement Decimal Delimiter - optional
* STTSTRDEL - Statement String Delimiter - optional
* TRGDFTRT - Target Default Value Return - optional
*
* @return severity code
*
* @exception DRDAProtocolException
*/
private int parseACCRDB() throws DRDAProtocolException
{
int codePoint;
int svrcod = 0;
copyToRequired(ACCRDB_REQUIRED);
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
//required
case CodePoint.RDBACCCL:
checkLength(CodePoint.RDBACCCL, 2);
int sqlam = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("RDBACCCL = " + sqlam);
// required to be SQLAM
if (sqlam != CodePoint.SQLAM)
invalidValue(CodePoint.RDBACCCL);
removeFromRequired(CodePoint.RDBACCCL);
break;
//required
case CodePoint.CRRTKN:
database.crrtkn = reader.readBytes();
if (SanityManager.DEBUG)
trace("crrtkn " + convertToHexString(database.crrtkn));
removeFromRequired(CodePoint.CRRTKN);
int l = database.crrtkn.length;
if (l > CodePoint.MAX_NAME)
tooBig(CodePoint.CRRTKN);
// the format of the CRRTKN is defined in the DRDA reference
// x.yz where x is 1 to 8 bytes (variable)
// y is 1 to 8 bytes (variable)
// x is 6 bytes fixed
// size is variable between 9 and 23
if (l < 9 || l > 23)
invalidValue(CodePoint.CRRTKN);
byte[] part1 = new byte[l - 6];
for (int i = 0; i < part1.length; i++)
part1[i] = database.crrtkn[i];
long time = SignedBinary.getLong(database.crrtkn,
l-8, SignedBinary.BIG_ENDIAN); // as "long" as unique
session.drdaID = reader.convertBytes(part1) +
time + leftBrace + session.connNum + rightBrace;
if (SanityManager.DEBUG)
trace("******************************************drdaID is: " + session.drdaID);
database.setDrdaID(session.drdaID);
break;
//required
case CodePoint.RDBNAM:
String dbname = parseRDBNAM();
if (database != null)
{
if (!database.getDatabaseName().equals(dbname))
rdbnamMismatch(CodePoint.ACCRDB);
}
else
{
//first time we have seen a database name
Database d = session.getDatabase(dbname);
if (d == null)
initializeDatabase(dbname);
else
{
database = d;
database.accessCount++;
}
}
removeFromRequired(CodePoint.RDBNAM);
break;
//required
case CodePoint.PRDID:
appRequester.setClientVersion(reader.readString());
if (SanityManager.DEBUG)
trace("prdId " + appRequester.prdid);
if (appRequester.prdid.length() > CodePoint.PRDID_MAX)
tooBig(CodePoint.PRDID);
if (appRequester.getClientType() != appRequester.DNC_CLIENT) {
invalidClient(appRequester.prdid);
}
// All versions of DNC,the only client supported, handle
// warnings on CNTQRY
sendWarningsOnCNTQRY = true;
// The client can not request DIAGLVL because when run with
// an older server it will cause an exception. Older version
// of the server do not recognize requests for DIAGLVL.
if ((appRequester.getClientType() == appRequester.DNC_CLIENT) &&
appRequester.greaterThanOrEqualTo(10, 2, 0)) {
diagnosticLevel = CodePoint.DIAGLVL1;
}
removeFromRequired(CodePoint.PRDID);
break;
//required
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(true, null, parseTYPDEFNAM());
removeFromRequired(CodePoint.TYPDEFNAM);
break;
//required
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(null);
removeFromRequired(CodePoint.TYPDEFOVR);
break;
//optional
case CodePoint.RDBALWUPD:
checkLength(CodePoint.RDBALWUPD, 1);
database.rdbAllowUpdates = readBoolean(CodePoint.RDBALWUPD);
if (SanityManager.DEBUG)
trace("rdbAllowUpdates = "+database.rdbAllowUpdates);
break;
//optional, ignorable
case CodePoint.PRDDTA:
// check that it fits in maximum but otherwise ignore for now
if (reader.getDdmLength() > CodePoint.MAX_NAME)
tooBig(CodePoint.PRDDTA);
reader.skipBytes();
break;
case CodePoint.TRGDFTRT:
byte b = reader.readByte();
if (b == (byte)0xF1)
database.sendTRGDFTRT = true;
break;
//optional - not used in JCC so skip for now
case CodePoint.STTDECDEL:
case CodePoint.STTSTRDEL:
codePointNotSupported(codePoint);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
checkRequired(CodePoint.ACCRDB);
// check that we can support the double-byte and mixed-byte CCSIDS
// set svrcod to warning if they are not supported
if ((database.ccsidDBC != 0 && !server.supportsCCSID(database.ccsidDBC)) ||
(database.ccsidMBC != 0 && !server.supportsCCSID(database.ccsidMBC)))
svrcod = CodePoint.SVRCOD_WARNING;
return svrcod;
}
/**
* Parse TYPDEFNAM
*
* @return typdefnam
* @exception DRDAProtocolException
*/
private String parseTYPDEFNAM() throws DRDAProtocolException
{
String typDefNam = reader.readString();
if (SanityManager.DEBUG) trace("typeDefName " + typDefNam);
if (typDefNam.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.TYPDEFNAM);
checkValidTypDefNam(typDefNam);
// check if the typedef is one we support
if (!typDefNam.equals(CodePoint.TYPDEFNAM_QTDSQLASC) &&
!typDefNam.equals(CodePoint.TYPDEFNAM_QTDSQLJVM) &&
!typDefNam.equals(CodePoint.TYPDEFNAM_QTDSQLX86))
valueNotSupported(CodePoint.TYPDEFNAM);
return typDefNam;
}
/**
* Set a statement or the database' byte order, depending on the arguments
*
* @param setDatabase if true, set database' byte order, otherwise set statement's
* @param stmt DRDAStatement, used when setDatabase is false
* @param typDefNam TYPDEFNAM value
*/
private void setStmtOrDbByteOrder(boolean setDatabase, DRDAStatement stmt, String typDefNam)
{
int byteOrder = (typDefNam.equals(CodePoint.TYPDEFNAM_QTDSQLX86) ?
SignedBinary.LITTLE_ENDIAN : SignedBinary.BIG_ENDIAN);
if (setDatabase)
{
database.typDefNam = typDefNam;
database.byteOrder = byteOrder;
}
else
{
stmt.typDefNam = typDefNam;
stmt.byteOrder = byteOrder;
}
}
/**
* Write Access to RDB Completed
* Instance Variables
* SVRCOD - severity code - 0 info, 4 warning -required
* PRDID - product specific identifier -required
* TYPDEFNAM - type definition name -required
* TYPDEFOVR - type definition overrides - required
* RDBINTTKN - token which can be used to interrupt DDM commands - optional
* CRRTKN - correlation token - only returned if we didn't get one from requester
* SRVDGN - server diagnostic information - optional
* PKGDFTCST - package default character subtype - optional
* USRID - User ID at the target system - optional
* SRVLST - Server List
*
* @exception DRDAProtocolException
*/
private void writeACCRDBRM(int svrcod) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.ACCRDBRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, svrcod);
writer.writeScalarString(CodePoint.PRDID, server.prdId);
//TYPDEFNAM -required - JCC doesn't support QTDSQLJVM so for now we
// just use ASCII, though we should eventually be able to use QTDSQLJVM
// at level 7
writer.writeScalarString(CodePoint.TYPDEFNAM,
CodePoint.TYPDEFNAM_QTDSQLASC);
writeTYPDEFOVR();
writer.endDdmAndDss ();
// Write the initial piggy-backed data, currently the isolation level
// and the schema name. Only write it if the client supports session
// data caching.
// Sending the session data on connection initialization was introduced
// in Derby 10.7.
if ((appRequester.getClientType() == appRequester.DNC_CLIENT) &&
appRequester.greaterThanOrEqualTo(10, 7, 0)) {
try {
writePBSD();
} catch (SQLException se) {
server.consoleExceptionPrint(se);
errorInChain(se);
}
}
finalizeChain();
}
private void writeTYPDEFOVR() throws DRDAProtocolException
{
//TYPDEFOVR - required - only single byte and mixed byte are specified
writer.startDdm(CodePoint.TYPDEFOVR);
writer.writeScalar2Bytes(CodePoint.CCSIDSBC, server.CCSIDSBC);
writer.writeScalar2Bytes(CodePoint.CCSIDMBC, server.CCSIDMBC);
// PKGDFTCST - Send character subtype and userid if requested
if (database.sendTRGDFTRT)
{
// default to multibyte character
writer.startDdm(CodePoint.PKGDFTCST);
writer.writeShort(CodePoint.CSTMBCS);
writer.endDdm();
// userid
writer.startDdm(CodePoint.USRID);
writer.writeString(database.userId);
writer.endDdm();
}
writer.endDdm();
}
/**
* Parse Type Defintion Overrides
* TYPDEF Overrides specifies the Coded Character SET Identifiers (CCSIDs)
* that are in a named TYPDEF.
* Instance Variables
* CCSIDSBC - CCSID for Single-Byte - optional
* CCSIDDBC - CCSID for Double-Byte - optional
* CCSIDMBC - CCSID for Mixed-byte characters -optional
*
* @param st Statement this TYPDEFOVR applies to
*
* @exception DRDAProtocolException
*/
private void parseTYPDEFOVR(DRDAStatement st) throws DRDAProtocolException
{
int codePoint;
int ccsidSBC = 0;
int ccsidDBC = 0;
int ccsidMBC = 0;
String ccsidSBCEncoding = null;
String ccsidDBCEncoding = null;
String ccsidMBCEncoding = null;
reader.markCollection();
codePoint = reader.getCodePoint();
// at least one of the following instance variable is required
// if the TYPDEFOVR is specified in a command object
if (codePoint == -1 && st != null)
missingCodePoint(CodePoint.CCSIDSBC);
while (codePoint != -1)
{
switch (codePoint)
{
case CodePoint.CCSIDSBC:
checkLength(CodePoint.CCSIDSBC, 2);
ccsidSBC = reader.readNetworkShort();
try {
ccsidSBCEncoding =
CharacterEncodings.getJavaEncoding(ccsidSBC);
} catch (Exception e) {
valueNotSupported(CodePoint.CCSIDSBC);
}
if (SanityManager.DEBUG)
trace("ccsidsbc = " + ccsidSBC + " encoding = " + ccsidSBCEncoding);
break;
case CodePoint.CCSIDDBC:
checkLength(CodePoint.CCSIDDBC, 2);
ccsidDBC = reader.readNetworkShort();
try {
ccsidDBCEncoding =
CharacterEncodings.getJavaEncoding(ccsidDBC);
} catch (Exception e) {
// we write a warning later for this so no error
// unless for a statement
ccsidDBCEncoding = null;
if (st != null)
valueNotSupported(CodePoint.CCSIDSBC);
}
if (SanityManager.DEBUG)
trace("ccsiddbc = " + ccsidDBC + " encoding = " + ccsidDBCEncoding);
break;
case CodePoint.CCSIDMBC:
checkLength(CodePoint.CCSIDMBC, 2);
ccsidMBC = reader.readNetworkShort();
try {
ccsidMBCEncoding =
CharacterEncodings.getJavaEncoding(ccsidMBC);
} catch (Exception e) {
// we write a warning later for this so no error
ccsidMBCEncoding = null;
if (st != null)
valueNotSupported(CodePoint.CCSIDMBC);
}
if (SanityManager.DEBUG)
trace("ccsidmbc = " + ccsidMBC + " encoding = " + ccsidMBCEncoding);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
if (st == null)
{
if (ccsidSBC != 0)
{
database.ccsidSBC = ccsidSBC;
database.ccsidSBCEncoding = ccsidSBCEncoding;
}
if (ccsidDBC != 0)
{
database.ccsidDBC = ccsidDBC;
database.ccsidDBCEncoding = ccsidDBCEncoding;
}
if (ccsidMBC != 0)
{
database.ccsidMBC = ccsidMBC;
database.ccsidMBCEncoding = ccsidMBCEncoding;
}
}
else
{
if (ccsidSBC != 0)
{
st.ccsidSBC = ccsidSBC;
st.ccsidSBCEncoding = ccsidSBCEncoding;
}
if (ccsidDBC != 0)
{
st.ccsidDBC = ccsidDBC;
st.ccsidDBCEncoding = ccsidDBCEncoding;
}
if (ccsidMBC != 0)
{
st.ccsidMBC = ccsidMBC;
st.ccsidMBCEncoding = ccsidMBCEncoding;
}
}
}
/**
* Parse PRPSQLSTT - Prepare SQL Statement
* Instance Variables
* RDBNAM - Relational Database Name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number - required
* RTNSQLDA - Return SQL Descriptor Area - optional
* MONITOR - Monitor events - optional.
*
* @return return 0 - don't return sqlda, 1 - return input sqlda,
* 2 - return output sqlda
* @throws DRDAProtocolException
* @throws SQLException
*/
private int parsePRPSQLSTT() throws DRDAProtocolException,SQLException
{
int codePoint;
boolean rtnsqlda = false;
boolean rtnOutput = true; // Return output SQLDA is default
String typdefnam;
Pkgnamcsn pkgnamcsn = null;
DRDAStatement stmt = null;
Database databaseToSet = null;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.PRPSQLSTT);
databaseToSet = database;
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
//optional
case CodePoint.RTNSQLDA:
// Return SQLDA with description of statement
rtnsqlda = readBoolean(CodePoint.RTNSQLDA);
break;
//optional
case CodePoint.TYPSQLDA:
rtnOutput = parseTYPSQLDA();
break;
//optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
stmt = database.newDRDAStatement(pkgnamcsn);
String sqlStmt = parsePRPSQLSTTobjects(stmt);
if (databaseToSet != null)
stmt.setDatabase(database);
stmt.explicitPrepare(sqlStmt);
// set the statement as the current statement
database.setCurrentStatement(stmt);
if (!rtnsqlda)
return 0;
else if (rtnOutput)
return 2;
else
return 1;
}
/**
* Parse PRPSQLSTT objects
* Objects
* TYPDEFNAM - Data type definition name - optional
* TYPDEFOVR - Type defintion overrides - optional
* SQLSTT - SQL Statement required
* SQLATTR - Cursor attributes on prepare - optional - level 7
*
* If TYPDEFNAM and TYPDEFOVR are supplied, they apply to the objects
* sent with the statement. Once the statement is over, the default values
* sent in the ACCRDB are once again in effect. If no values are supplied,
* the values sent in the ACCRDB are used.
* Objects may follow in one DSS or in several DSS chained together.
*
* @return SQL statement
* @throws DRDAProtocolException
* @throws SQLException
*/
private String parsePRPSQLSTTobjects(DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
String sqlStmt = null;
int codePoint;
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// required
case CodePoint.SQLSTT:
sqlStmt = parseEncodedString();
if (SanityManager.DEBUG)
trace("sqlStmt = " + sqlStmt);
break;
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, stmt, parseTYPDEFNAM());
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(stmt);
break;
// optional
case CodePoint.SQLATTR:
parseSQLATTR(stmt);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
if (sqlStmt == null)
missingCodePoint(CodePoint.SQLSTT);
return sqlStmt;
}
/**
* Parse TYPSQLDA - Type of the SQL Descriptor Area
*
* @return true if for output; false otherwise
* @exception DRDAProtocolException
*/
private boolean parseTYPSQLDA() throws DRDAProtocolException
{
checkLength(CodePoint.TYPSQLDA, 1);
byte sqldaType = reader.readByte();
if (SanityManager.DEBUG)
trace("typSQLDa " + sqldaType);
if (sqldaType == CodePoint.TYPSQLDA_STD_OUTPUT ||
sqldaType == CodePoint.TYPSQLDA_LIGHT_OUTPUT ||
sqldaType == CodePoint.TYPSQLDA_X_OUTPUT)
return true;
else if (sqldaType == CodePoint.TYPSQLDA_STD_INPUT ||
sqldaType == CodePoint.TYPSQLDA_LIGHT_INPUT ||
sqldaType == CodePoint.TYPSQLDA_X_INPUT)
return false;
else
invalidValue(CodePoint.TYPSQLDA);
// shouldn't get here but have to shut up compiler
return false;
}
/**
* Parse SQLATTR - Cursor attributes on prepare
* This is an encoded string. Can have combination of following, eg INSENSITIVE SCROLL WITH HOLD
* Possible strings are
* SENSITIVE DYNAMIC SCROLL [FOR UPDATE]
* SENSITIVE STATIC SCROLL [FOR UPDATE]
* INSENSITIVE SCROLL
* FOR UPDATE
* WITH HOLD
*
* @param stmt DRDAStatement
* @exception DRDAProtocolException
*/
protected void parseSQLATTR(DRDAStatement stmt) throws DRDAProtocolException
{
String attrs = parseEncodedString();
if (SanityManager.DEBUG)
trace("sqlattr = '" + attrs+"'");
//let Derby handle any errors in the types it doesn't support
//just set the attributes
boolean validAttribute = false;
if (attrs.indexOf("INSENSITIVE SCROLL") != -1 || attrs.indexOf("SCROLL INSENSITIVE") != -1) //CLI
{
stmt.scrollType = ResultSet.TYPE_SCROLL_INSENSITIVE;
stmt.concurType = ResultSet.CONCUR_READ_ONLY;
validAttribute = true;
}
if ((attrs.indexOf("SENSITIVE DYNAMIC SCROLL") != -1) || (attrs.indexOf("SENSITIVE STATIC SCROLL") != -1))
{
stmt.scrollType = ResultSet.TYPE_SCROLL_SENSITIVE;
validAttribute = true;
}
if ((attrs.indexOf("FOR UPDATE") != -1))
{
validAttribute = true;
stmt.concurType = ResultSet.CONCUR_UPDATABLE;
}
if (attrs.indexOf("WITH HOLD") != -1)
{
stmt.withHoldCursor = ResultSet.HOLD_CURSORS_OVER_COMMIT;
validAttribute = true;
}
if (!validAttribute)
{
invalidValue(CodePoint.SQLATTR);
}
}
/**
* Parse DSCSQLSTT - Describe SQL Statement previously prepared
* Instance Variables
* TYPSQLDA - sqlda type expected (output or input)
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* MONITOR - Monitor events - optional.
*
* @return expect "output sqlda" or not
* @throws DRDAProtocolException
* @throws SQLException
*/
private boolean parseDSCSQLSTT() throws DRDAProtocolException,SQLException
{
int codePoint;
boolean rtnOutput = true; // default
Pkgnamcsn pkgnamcsn = null;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.TYPSQLDA:
rtnOutput = parseTYPSQLDA();
break;
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.DSCSQLSTT);
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
DRDAStatement stmt = database.getDRDAStatement(pkgnamcsn);
if (stmt == null)
{
invalidValue(CodePoint.PKGNAMCSN);
}
break;
//optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
return rtnOutput;
}
/**
* Parse EXCSQLSTT - Execute non-cursor SQL Statement previously prepared
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* OUTEXP - Output expected
* NBRROW - Number of rows to be inserted if it's an insert
* PRCNAM - procedure name if specified by host variable, not needed for Derby
* QRYBLKSZ - query block size
* MAXRSLCNT - max resultset count
* MAXBLKEXT - Max number of extra blocks
* RSLSETFLG - resultset flag
* RDBCMTOK - RDB Commit Allowed - optional
* OUTOVROPT - output override option
* QRYROWSET - Query Rowset Size - Level 7
* MONITOR - Monitor events - optional.
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void parseEXCSQLSTT() throws DRDAProtocolException,SQLException
{
int codePoint;
String strVal;
reader.markCollection();
codePoint = reader.getCodePoint();
boolean outputExpected = false;
Pkgnamcsn pkgnamcsn = null;
int numRows = 1; // default value
int blkSize = 0;
int maxrslcnt = 0; // default value
int maxblkext = CodePoint.MAXBLKEXT_DEFAULT;
int qryrowset = CodePoint.QRYROWSET_DEFAULT;
int outovropt = CodePoint.OUTOVRFRS;
byte [] rslsetflg = null;
String procName = null;
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.EXCSQLSTT);
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
// optional
case CodePoint.OUTEXP:
outputExpected = readBoolean(CodePoint.OUTEXP);
if (SanityManager.DEBUG)
trace("outexp = "+ outputExpected);
break;
// optional
case CodePoint.NBRROW:
checkLength(CodePoint.NBRROW, 4);
numRows = reader.readNetworkInt();
if (SanityManager.DEBUG)
trace("# of rows: "+numRows);
break;
// optional
case CodePoint.PRCNAM:
procName = reader.readString();
if (SanityManager.DEBUG)
trace("Procedure Name = " + procName);
break;
// optional
case CodePoint.QRYBLKSZ:
blkSize = parseQRYBLKSZ();
break;
// optional
case CodePoint.MAXRSLCNT:
// this is the maximum result set count
// values are 0 - requester is not capabable of receiving result
// sets as reply data in the response to EXCSQLSTT
// -1 - requester is able to receive all result sets
checkLength(CodePoint.MAXRSLCNT, 2);
maxrslcnt = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("max rs count: "+maxrslcnt);
break;
// optional
case CodePoint.MAXBLKEXT:
// number of extra qury blocks of answer set data per result set
// 0 - no extra query blocks
// -1 - can receive entire result set
checkLength(CodePoint.MAXBLKEXT, 2);
maxblkext = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("max extra blocks: "+maxblkext);
break;
// optional
case CodePoint.RSLSETFLG:
//Result set flags
rslsetflg = reader.readBytes();
for (int i=0;i<rslsetflg.length;i++)
if (SanityManager.DEBUG)
trace("rslsetflg: "+rslsetflg[i]);
break;
// optional
case CodePoint.RDBCMTOK:
parseRDBCMTOK();
break;
// optional
case CodePoint.OUTOVROPT:
outovropt = parseOUTOVROPT();
break;
// optional
case CodePoint.QRYROWSET:
//Note minimum for OPNQRY is 0, we'll assume it is the same
//for EXCSQLSTT though the standard doesn't say
qryrowset = parseQRYROWSET(0);
break;
//optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
DRDAStatement stmt;
boolean needPrepareCall = false;
stmt = database.getDRDAStatement(pkgnamcsn);
boolean isProcedure = (procName !=null ||
(stmt != null &&
stmt.wasExplicitlyPrepared() &&
stmt.isCall));
if (isProcedure) // stored procedure call
{
if ( stmt == null || !(stmt.wasExplicitlyPrepared()))
{
stmt = database.newDRDAStatement(pkgnamcsn);
stmt.setQryprctyp(CodePoint.QRYBLKCTL_DEFAULT);
needPrepareCall = true;
}
stmt.procName = procName;
stmt.outputExpected = outputExpected;
}
else
{
// we can't find the statement
if (stmt == null)
{
invalidValue(CodePoint.PKGNAMCSN);
}
stmt.setQryprctyp(CodePoint.QRYBLKCTL_DEFAULT);
}
stmt.nbrrow = numRows;
stmt.qryrowset = qryrowset;
stmt.blksize = blkSize;
stmt.maxblkext = maxblkext;
stmt.maxrslcnt = maxrslcnt;
stmt.outovropt = outovropt;
stmt.rslsetflg = rslsetflg;
if (pendingStatementTimeout >= 0) {
stmt.getPreparedStatement().setQueryTimeout(pendingStatementTimeout);
pendingStatementTimeout = -1;
}
// set the statement as the current statement
database.setCurrentStatement(stmt);
boolean hasResultSet;
if (reader.isChainedWithSameID())
{
hasResultSet = parseEXCSQLSTTobjects(stmt);
} else
{
if (isProcedure && (needPrepareCall))
{
// if we had parameters the callable statement would
// be prepared with parseEXCQLSTTobjects, otherwise we
// have to do it here
String prepareString = "call " + stmt.procName +"()";
if (SanityManager.DEBUG)
trace ("$$$prepareCall is: "+prepareString);
database.getConnection().clearWarnings();
CallableStatement cs = (CallableStatement) stmt.prepare(prepareString);
}
stmt.ps.clearWarnings();
hasResultSet = stmt.execute();
}
ResultSet rs = null;
if (hasResultSet)
{
rs = stmt.getResultSet();
}
// temp until ps.execute() return value fixed
hasResultSet = (rs != null);
int numResults = 0;
if (hasResultSet)
{
numResults = stmt.getNumResultSets();
writeRSLSETRM(stmt);
}
// First of all, we send if there really are output params. Otherwise
// CLI (.Net driver) fails. DRDA spec (page 151,152) says send SQLDTARD
// if server has output param data to send.
boolean sendSQLDTARD = stmt.hasOutputParams() && outputExpected;
if (isProcedure)
{
if (sendSQLDTARD) {
writer.createDssObject();
writer.startDdm(CodePoint.SQLDTARD);
writer.startDdm(CodePoint.FDODSC);
writeQRYDSC(stmt, true);
writer.endDdm();
writer.startDdm(CodePoint.FDODTA);
writeFDODTA(stmt);
writer.endDdm();
writer.endDdmAndDss();
if (stmt.getExtDtaObjects() != null)
{
// writeScalarStream() ends the dss
writeEXTDTA(stmt);
}
}
else if (hasResultSet)
// DRDA spec says that we MUST return either an
// SQLDTARD or an SQLCARD--the former when we have
// output parameters, the latter when we don't.
// If we have a result set, then we have to write
// the SQLCARD _now_, since it is expected before
// we send the result set info below; if we don't
// have a result set and we don't send SQLDTARD,
// then we can wait until we reach the call to
// checkWarning() below, which will write an
// SQLCARD for us.
writeNullSQLCARDobject();
}
//We need to marke that params are finished so that we know we
// are ready to send resultset info.
stmt.finishParams();
PreparedStatement ps = stmt.getPreparedStatement();
int rsNum = 0;
do {
if (hasResultSet)
{
stmt.setCurrentDrdaResultSet(rsNum);
//indicate that we are going to return data
stmt.setQryrtndta(true);
if (! isProcedure)
checkWarning(null, ps, null, -1, true, true);
if (rsNum == 0)
writeSQLRSLRD(stmt);
writeOPNQRYRM(true, stmt);
writeSQLCINRD(stmt);
writeQRYDSC(stmt, false);
stmt.rsSuspend();
/* Currently, if LMTBLKPRC is used, a pre-condition is that no lob columns.
* But in the future, when we do support LOB in LMTBLKPRC, the drda spec still
* does not allow LOB to be sent with OPNQRYRM. So this "if" here will have
* to add "no lob columns".
*/
if (stmt.getQryprctyp() == CodePoint.LMTBLKPRC)
writeQRYDTA(stmt);
}
else if (! sendSQLDTARD)
{
int updateCount = ps.getUpdateCount();
if (false && (database.RDBUPDRM_sent == false) &&
! isProcedure)
{
writeRDBUPDRM();
}
checkWarning(database.getConnection(), stmt.ps, null, updateCount, true, true);
}
} while(hasResultSet && (++rsNum < numResults));
return; // we are done
}
/**
* Parse RDBCMTOK - tells the database whether to allow commits or rollbacks
* to be executed as part of the command
* Since we don't have a SQL commit or rollback command, we will just ignore
* this for now
*
* @exception DRDAProtocolException
*/
private void parseRDBCMTOK() throws DRDAProtocolException
{
boolean rdbcmtok = readBoolean(CodePoint.RDBCMTOK);
if (SanityManager.DEBUG)
trace("rdbcmtok = " + rdbcmtok);
}
/**
* Parse EXCSQLSTT command objects
* Command Objects
* TYPDEFNAM - Data Type Definition Name - optional
* TYPDEFOVR - TYPDEF Overrides -optional
* SQLDTA - optional, variable data, specified if prpared statement has input parameters
* EXTDTA - optional, externalized FD:OCA data
* OUTOVR - output override descriptor, not allowed for stored procedure calls
*
* If TYPDEFNAM and TYPDEFOVR are supplied, they apply to the objects
* sent with the statement. Once the statement is over, the default values
* sent in the ACCRDB are once again in effect. If no values are supplied,
* the values sent in the ACCRDB are used.
* Objects may follow in one DSS or in several DSS chained together.
*
* @param stmt the DRDAStatement to execute
* @throws DRDAProtocolException
* @throws SQLException
*/
private boolean parseEXCSQLSTTobjects(DRDAStatement stmt) throws DRDAProtocolException, SQLException
{
int codePoint;
boolean gotSQLDTA = false, gotEXTDTA = false;
boolean result = false;
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( true );
switch(codePoint)
{
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, stmt, parseTYPDEFNAM());
stmt.setTypDefValues();
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(stmt);
stmt.setTypDefValues();
break;
// required
case CodePoint.SQLDTA:
parseSQLDTA(stmt);
gotSQLDTA = true;
break;
// optional
case CodePoint.EXTDTA:
readAndSetAllExtParams(stmt, true);
stmt.ps.clearWarnings();
result = stmt.execute();
gotEXTDTA = true;
break;
// optional
case CodePoint.OUTOVR:
parseOUTOVR(stmt);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
// SQLDTA is required
if (! gotSQLDTA)
missingCodePoint(CodePoint.SQLDTA);
if (! gotEXTDTA) {
stmt.ps.clearWarnings();
result = stmt.execute();
}
return result;
}
/**
* Write SQLCINRD - result set column information
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLCINRD(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
ResultSet rs = stmt.getResultSet();
writer.createDssObject();
writer.startDdm(CodePoint.SQLCINRD);
if (sqlamLevel >= MGRLVL_7)
writeSQLDHROW(((EngineResultSet) rs).getHoldability());
ResultSetMetaData rsmeta = rs.getMetaData();
int ncols = rsmeta.getColumnCount();
writer.writeShort(ncols); // num of columns
if (sqlamLevel >= MGRLVL_7)
{
for (int i = 0; i < ncols; i++)
writeSQLDAGRP (rsmeta, null, i, true);
}
else
{
for (int i = 0; i < ncols; i++)
{
writeVCMorVCS(rsmeta.getColumnName(i+1));
writeVCMorVCS(rsmeta.getColumnLabel(i+1));
writeVCMorVCS(null);
}
}
writer.endDdmAndDss();
}
/**
* Write SQLRSLRD - result set reply data
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLRSLRD(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
int numResults = stmt.getNumResultSets();
writer.createDssObject();
writer.startDdm(CodePoint.SQLRSLRD);
writer.writeShort(numResults); // num of result sets
for (int i = 0; i < numResults; i ++)
{
writer.writeInt(i); // rsLocator
writeVCMorVCS(stmt.getResultSetCursorName(i));
writer.writeInt(1); // num of rows XXX resolve, it doesn't matter for now
}
writer.endDdmAndDss();
}
/**
* Write RSLSETRM
* Instance variables
* SVRCOD - Severity code - Information only - required
* PKGSNLST - list of PKGNAMCSN -required
* SRVDGN - Server Diagnostic Information -optional
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeRSLSETRM(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
int numResults = stmt.getNumResultSets();
writer.createDssReply();
writer.startDdm(CodePoint.RSLSETRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, 0);
writer.startDdm(CodePoint.PKGSNLST);
for (int i = 0; i < numResults; i++)
writePKGNAMCSN(stmt.getResultSetPkgcnstkn(i).getBytes());
writer.endDdm();
writer.endDdmAndDss();
}
/**
* Parse SQLDTA - SQL program variable data
* and handle exception.
* @see #parseSQLDTA_work
*/
private void parseSQLDTA(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
try {
parseSQLDTA_work(stmt);
}
catch (SQLException se)
{
skipRemainder(true);
throw se;
}
}
/**
* Parse SQLDTA - SQL program variable data
* Instance Variables
* FDODSC - FD:OCA data descriptor - required
* FDODTA - FD:OCA data - optional
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void parseSQLDTA_work(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
String strVal;
PreparedStatement ps = stmt.getPreparedStatement();
int codePoint;
ParameterMetaData pmeta = null;
// Clear params without releasing storage
stmt.clearDrdaParams();
int numVars = 0;
boolean rtnParam = false;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// required
case CodePoint.FDODSC:
while (reader.getDdmLength() > 6) //we get parameter info til last 6 byte
{
int dtaGrpLen = reader.readUnsignedByte();
int numVarsInGrp = (dtaGrpLen - 3) / 3;
if (SanityManager.DEBUG)
trace("num of vars in this group is: "+numVarsInGrp);
reader.readByte(); // tripletType
reader.readByte(); // id
for (int j = 0; j < numVarsInGrp; j++)
{
final byte t = reader.readByte();
if (SanityManager.DEBUG)
trace("drdaType is: "+ "0x" +
Integer.toHexString(t));
int drdaLength = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("drdaLength is: "+drdaLength);
stmt.addDrdaParam(t, drdaLength);
}
}
numVars = stmt.getDrdaParamCount();
if (SanityManager.DEBUG)
trace("numVars = " + numVars);
if (ps == null) // it is a CallableStatement under construction
{
StringBuffer marks = new StringBuffer(); // construct parameter marks
marks.append("(?");
for (int i = 1; i < numVars; i++)
marks.append(", ?");
String prepareString = "call " + stmt.procName + marks.toString() + ")";
if (SanityManager.DEBUG)
trace ("$$ prepareCall is: "+prepareString);
CallableStatement cs = null;
try {
cs = (CallableStatement)
stmt.prepare(prepareString);
stmt.registerAllOutParams();
} catch (SQLException se) {
if (! stmt.outputExpected ||
(!se.getSQLState().equals(SQLState.LANG_NO_METHOD_FOUND)))
throw se;
if (SanityManager.DEBUG)
trace("****** second try with return parameter...");
// Save first SQLException most likely suspect
if (numVars == 1)
prepareString = "? = call " + stmt.procName +"()";
else
prepareString = "? = call " + stmt.procName +"("+marks.substring(3) + ")";
if (SanityManager.DEBUG)
trace ("$$ prepareCall is: "+prepareString);
try {
cs = (CallableStatement) stmt.prepare(prepareString);
} catch (SQLException se2)
{
// The first exception is the most likely suspect
throw se;
}
rtnParam = true;
}
ps = cs;
stmt.ps = ps;
}
pmeta = stmt.getParameterMetaData();
reader.readBytes(6); // descriptor footer
break;
// optional
case CodePoint.FDODTA:
reader.readByte(); // row indicator
for (int i = 0; i < numVars; i++)
{
if ((stmt.getParamDRDAType(i+1) & 0x1) == 0x1) // nullable
{
int nullData = reader.readUnsignedByte();
if ((nullData & 0xFF) == FdocaConstants.NULL_DATA)
{
if (SanityManager.DEBUG)
trace("******param null");
if (pmeta.getParameterMode(i + 1)
!= JDBC30Translation.PARAMETER_MODE_OUT )
ps.setNull(i+1, pmeta.getParameterType(i+1));
if (stmt.isOutputParam(i+1))
stmt.registerOutParam(i+1);
continue;
}
}
// not null, read and set it
readAndSetParams(i, stmt, pmeta);
}
break;
case CodePoint.EXTDTA:
readAndSetAllExtParams(stmt, false);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
}
private int getByteOrder()
{
DRDAStatement stmt = database.getCurrentStatement();
return ((stmt != null && stmt.typDefNam != null) ? stmt.byteOrder : database.byteOrder);
}
/** A cached {@code Calendar} instance using the GMT time zone. */
private Calendar gmtCalendar;
/**
* Get a {@code Calendar} instance with time zone set to GMT. The instance
* is cached for reuse by this thread. This calendar can be used to
* consistently read and write date and time values using the same
* calendar. Since the local default calendar may not be able to represent
* all times (for instance because the time would fall into a non-existing
* hour of the day when switching to daylight saving time, see DERBY-4582),
* we use the GMT time zone which doesn't observe daylight saving time.
*
* @return a calendar in the GMT time zone
*/
private Calendar getGMTCalendar() {
if (gmtCalendar == null) {
TimeZone gmt = TimeZone.getTimeZone("GMT");
gmtCalendar = Calendar.getInstance(gmt);
}
return gmtCalendar;
}
/**
* Read different types of input parameters and set them in
* PreparedStatement
* @param i index of the parameter
* @param stmt drda statement
* @param pmeta parameter meta data
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void readAndSetParams(int i,
DRDAStatement stmt,
ParameterMetaData pmeta)
throws DRDAProtocolException, SQLException
{
PreparedStatement ps = stmt.getPreparedStatement();
// mask out null indicator
final int drdaType = ((stmt.getParamDRDAType(i+1) | 0x01) & 0xff);
final int paramLenNumBytes = stmt.getParamLen(i+1);
if (ps instanceof CallableStatement)
{
if (stmt.isOutputParam(i+1))
{
CallableStatement cs = (CallableStatement) ps;
cs.registerOutParameter(i+1, stmt.getOutputParamType(i+1));
}
}
switch (drdaType)
{
case DRDAConstants.DRDA_TYPE_NBOOLEAN:
{
boolean paramVal = (reader.readByte() == 1);
if (SanityManager.DEBUG)
trace("boolean parameter value is: " + paramVal);
ps.setBoolean(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NSMALL:
{
short paramVal = (short) reader.readShort(getByteOrder());
if (SanityManager.DEBUG)
trace("short parameter value is: "+paramVal);
ps.setShort(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NINTEGER:
{
int paramVal = reader.readInt(getByteOrder());
if (SanityManager.DEBUG)
trace("integer parameter value is: "+paramVal);
ps.setInt(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NINTEGER8:
{
long paramVal = reader.readLong(getByteOrder());
if (SanityManager.DEBUG)
trace("parameter value is: "+paramVal);
ps.setLong(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NFLOAT4:
{
float paramVal = reader.readFloat(getByteOrder());
if (SanityManager.DEBUG)
trace("parameter value is: "+paramVal);
ps.setFloat(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NFLOAT8:
{
double paramVal = reader.readDouble(getByteOrder());
if (SanityManager.DEBUG)
trace("nfloat8 parameter value is: "+paramVal);
ps.setDouble(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NDECIMAL:
{
int precision = (paramLenNumBytes >> 8) & 0xff;
int scale = paramLenNumBytes & 0xff;
BigDecimal paramVal = reader.readBigDecimal(precision, scale);
if (SanityManager.DEBUG)
trace("ndecimal parameter value is: "+paramVal);
ps.setBigDecimal(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NDATE:
{
String paramVal = reader.readStringData(10).trim(); //parameter may be char value
if (SanityManager.DEBUG)
trace("ndate parameter value is: \""+paramVal+"\"");
try {
Calendar cal = getGMTCalendar();
ps.setDate(i+1, parseDate(paramVal, cal), cal);
} catch (java.lang.IllegalArgumentException e) {
// Just use SQLSTATE as message since, if user wants to
// retrieve it, the message will be looked up by the
// sqlcamessage() proc, which will get the localized
// message based on SQLSTATE, and will ignore the
// the message we use here...
throw new SQLException(SQLState.LANG_DATE_SYNTAX_EXCEPTION,
SQLState.LANG_DATE_SYNTAX_EXCEPTION.substring(0,5));
}
break;
}
case DRDAConstants.DRDA_TYPE_NTIME:
{
String paramVal = reader.readStringData(8).trim(); //parameter may be char value
if (SanityManager.DEBUG)
trace("ntime parameter value is: "+paramVal);
try {
Calendar cal = getGMTCalendar();
ps.setTime(i+1, parseTime(paramVal, cal), cal);
} catch (java.lang.IllegalArgumentException e) {
throw new SQLException(SQLState.LANG_DATE_SYNTAX_EXCEPTION,
SQLState.LANG_DATE_SYNTAX_EXCEPTION.substring(0,5));
}
break;
}
case DRDAConstants.DRDA_TYPE_NTIMESTAMP:
{
// JCC represents ts in a slightly different format than Java standard, so
// we do the conversion to Java standard here.
int timestampLength = appRequester.getTimestampLength();
String paramVal = reader.readStringData( timestampLength ).trim(); //parameter may be char value
if (SanityManager.DEBUG)
trace("ntimestamp parameter value is: "+paramVal);
try {
Calendar cal = getGMTCalendar();
ps.setTimestamp(i+1, parseTimestamp(paramVal, cal), cal);
} catch (java.lang.IllegalArgumentException e1) {
// thrown by parseTimestamp(...) for bad syntax...
throw new SQLException(SQLState.LANG_DATE_SYNTAX_EXCEPTION,
SQLState.LANG_DATE_SYNTAX_EXCEPTION.substring(0,5));
}
break;
}
case DRDAConstants.DRDA_TYPE_NCHAR:
case DRDAConstants.DRDA_TYPE_NVARCHAR:
case DRDAConstants.DRDA_TYPE_NLONG:
case DRDAConstants.DRDA_TYPE_NVARMIX:
case DRDAConstants.DRDA_TYPE_NLONGMIX:
{
String paramVal = reader.readLDStringData(stmt.ccsidMBCEncoding);
if (SanityManager.DEBUG)
trace("char/varchar parameter value is: "+paramVal);
ps.setString(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NFIXBYTE:
{
byte[] paramVal = reader.readBytes();
if (SanityManager.DEBUG)
trace("fix bytes parameter value is: "+ convertToHexString(paramVal));
ps.setBytes(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NVARBYTE:
case DRDAConstants.DRDA_TYPE_NLONGVARBYTE:
{
int length = reader.readNetworkShort(); //protocol control data always follows big endian
if (SanityManager.DEBUG)
trace("===== binary param length is: " + length);
byte[] paramVal = reader.readBytes(length);
ps.setBytes(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NUDT:
{
Object paramVal = readUDT();
ps.setObject(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
case DRDAConstants.DRDA_TYPE_NLOBCSBCS:
case DRDAConstants.DRDA_TYPE_NLOBCDBCS:
{
long length = readLobLength(paramLenNumBytes);
if (length != 0) //can be -1 for CLI if "data at exec" mode, see clifp/exec test
{
stmt.addExtPosition(i);
}
else /* empty */
{
if (drdaType == DRDAConstants.DRDA_TYPE_NLOBBYTES)
ps.setBytes(i+1, new byte[0]);
else
ps.setString(i+1, "");
}
break;
}
case DRDAConstants.DRDA_TYPE_NLOBLOC:
{
//read the locator value
int paramVal = reader.readInt(getByteOrder());
if (SanityManager.DEBUG)
trace("locator value is: "+paramVal);
//Map the locator value to the Blob object in the
//Hash map.
java.sql.Blob blobFromLocator = (java.sql.Blob)
database.getConnection().getLOBMapping(paramVal);
//set the PreparedStatement parameter to the mapped
//Blob object.
ps.setBlob(i+1, blobFromLocator);
break;
}
case DRDAConstants.DRDA_TYPE_NCLOBLOC:
{
//read the locator value.
int paramVal = reader.readInt(getByteOrder());
if (SanityManager.DEBUG)
trace("locator value is: "+paramVal);
//Map the locator value to the Clob object in the
//Hash Map.
java.sql.Clob clobFromLocator = (java.sql.Clob)
database.getConnection().getLOBMapping(paramVal);
//set the PreparedStatement parameter to the mapped
//Clob object.
ps.setClob(i+1, clobFromLocator);
break;
}
case DRDAConstants.DRDA_TYPE_NROWID:
{
byte[] b = reader.readBytes();
SQLRowId paramVal = new SQLRowId(b);
ps.setRowId(i + 1, paramVal);
break;
}
default:
{
String paramVal = reader.readLDStringData(stmt.ccsidMBCEncoding);
if (SanityManager.DEBUG)
trace("default type parameter value is: "+paramVal);
ps.setObject(i+1, paramVal);
}
}
}
/** Read a UDT from the stream */
private Object readUDT() throws DRDAProtocolException
{
int length = reader.readNetworkShort(); //protocol control data always follows big endian
if (SanityManager.DEBUG) { trace("===== udt param length is: " + length); }
byte[] bytes = reader.readBytes(length);
try {
ByteArrayInputStream bais = new ByteArrayInputStream( bytes );
ObjectInputStream ois = new ObjectInputStream( bais );
return ois.readObject();
}
catch (Exception e)
{
markCommunicationsFailure
( e,"DRDAConnThread.readUDT()", "", e.getMessage(), "*" );
return null;
}
}
private long readLobLength(int extLenIndicator)
throws DRDAProtocolException
{
switch (extLenIndicator)
{
case 0x8002:
return (long) reader.readNetworkShort();
case 0x8004:
return (long) reader.readNetworkInt();
case 0x8006:
return (long) reader.readNetworkSixByteLong();
case 0x8008:
return (long) reader.readNetworkLong();
default:
throwSyntaxrm(CodePoint.SYNERRCD_INCORRECT_EXTENDED_LEN, extLenIndicator);
return 0L;
}
}
/**
* Parse a date string as it is received from the client.
*
* @param dateString the date string to parse
* @param cal the calendar in which the date is parsed
* @return a Date object representing the date in the specified calendar
* @see com.splicemachine.db.client.am.DateTime#dateToDateBytes
* @throws IllegalArgumentException if the date is not correctly formatted
*/
private java.sql.Date parseDate(String dateString, Calendar cal) {
// Get each component out of YYYY-MM-DD
String[] components = dateString.split("-");
if (components.length != 3) {
throw new IllegalArgumentException();
}
cal.clear();
// Set date components
cal.set(Calendar.YEAR, Integer.parseInt(components[0]));
cal.set(Calendar.MONTH, Integer.parseInt(components[1]) - 1);
cal.set(Calendar.DAY_OF_MONTH, Integer.parseInt(components[2]));
// Normalize time components as specified by java.sql.Date
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return new java.sql.Date(cal.getTimeInMillis());
}
/**
* Parse a time string as it is received from the client.
*
* @param timeString the time string to parse
* @param cal the calendar in which the time is parsed
* @return a Date object representing the time in the specified calendar
* @see com.splicemachine.db.client.am.DateTime#timeToTimeBytes
* @throws IllegalArgumentException if the time is not correctly formatted
*/
private Time parseTime(String timeString, Calendar cal) {
// Get each component out of HH:MM:SS
String[] components = timeString.split(":");
if (components.length != 3) {
throw new IllegalArgumentException();
}
cal.clear();
// Normalize date components as specified by java.sql.Time
cal.set(Calendar.YEAR, 1970);
cal.set(Calendar.MONTH, Calendar.JANUARY);
cal.set(Calendar.DAY_OF_MONTH, 1);
// Set time components
cal.set(Calendar.HOUR_OF_DAY, Integer.parseInt(components[0]));
cal.set(Calendar.MINUTE, Integer.parseInt(components[1]));
cal.set(Calendar.SECOND, Integer.parseInt(components[2]));
// No millisecond resolution for Time
cal.set(Calendar.MILLISECOND, 0);
return new Time(cal.getTimeInMillis());
}
/**
* Parse a timestamp string as it is received from the client.
*
* @param timeString the time string to parse
* @param cal the calendar in which the timestamp is parsed
* @return a Date object representing the timestamp in the specified
* calendar
* @see com.splicemachine.db.client.am.DateTime#timestampToTimestampBytes
* @throws IllegalArgumentException if the timestamp is not correctly
* formatted
*/
private Timestamp parseTimestamp(String timeString, Calendar cal) {
// Get each component out of YYYY-MM-DD-HH.MM.SS.fffffffff
String[] components = timeString.split("[-.]");
if (components.length != 7) {
throw new IllegalArgumentException();
}
cal.clear();
cal.set(Calendar.YEAR, Integer.parseInt(components[0]));
cal.set(Calendar.MONTH, Integer.parseInt(components[1]) - 1);
cal.set(Calendar.DAY_OF_MONTH, Integer.parseInt(components[2]));
cal.set(Calendar.HOUR_OF_DAY, Integer.parseInt(components[3]));
cal.set(Calendar.MINUTE, Integer.parseInt(components[4]));
cal.set(Calendar.SECOND, Integer.parseInt(components[5]));
int nanos = 0;
final int radix = 10;
String nanoString = components[6];
// Get up to nine digits from the nano second component
for (int i = 0; i < 9; i++) {
// Scale up the intermediate result
nanos *= radix;
// Add the next digit, if there is one. Continue the loop even if
// there are no more digits, since we still need to scale up the
// intermediate result as if the fraction part were padded with
// zeros.
if (i < nanoString.length()) {
int digit = Character.digit(nanoString.charAt(i), radix);
if (digit == -1) {
// not a digit
throw new IllegalArgumentException();
}
nanos += digit;
}
}
Timestamp ts = new Timestamp(cal.getTimeInMillis());
ts.setNanos(nanos);
return ts;
}
private void readAndSetAllExtParams(final DRDAStatement stmt, final boolean streamLOB)
throws SQLException, DRDAProtocolException
{
final int numExt = stmt.getExtPositionCount();
for (int i = 0; i < numExt; i++)
{
int paramPos = stmt.getExtPosition(i);
// Only the last EXTDTA is streamed. This is because all of
// the parameters have to be set before execution and are
// consecutive in the network server stream, so only the last
// one can be streamed.
final boolean doStreamLOB = (streamLOB && i == numExt -1);
readAndSetExtParam(paramPos,
stmt,
stmt.getParamDRDAType(paramPos+1),
stmt.getParamLen(paramPos+1),
doStreamLOB);
// Each extdta in it's own dss
if (i < numExt -1)
{
correlationID = reader.readDssHeader();
int codePoint = reader.readLengthAndCodePoint( true );
}
}
}
/**
* Read different types of input parameters and set them in PreparedStatement
* @param i zero-based index of the parameter
* @param stmt associated ps
* @param drdaType drda type of the parameter
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void readAndSetExtParam( int i, DRDAStatement stmt,
int drdaType, int extLen, boolean streamLOB)
throws DRDAProtocolException, SQLException
{
// Note the switch from zero-based to one-based index below.
drdaType = (drdaType & 0x000000ff); // need unsigned value
boolean checkNullability = false;
if (sqlamLevel >= MGRLVL_7 &&
FdocaConstants.isNullable(drdaType))
checkNullability = true;
final EXTDTAReaderInputStream stream =
reader.getEXTDTAReaderInputStream(checkNullability);
// Determine encoding first, mostly for debug/tracing purposes
String encoding = "na";
switch (drdaType) {
case DRDAConstants.DRDA_TYPE_LOBCSBCS:
case DRDAConstants.DRDA_TYPE_NLOBCSBCS:
encoding = stmt.ccsidSBCEncoding;
break;
case DRDAConstants.DRDA_TYPE_LOBCDBCS:
case DRDAConstants.DRDA_TYPE_NLOBCDBCS:
encoding = stmt.ccsidDBCEncoding;
break;
case DRDAConstants.DRDA_TYPE_LOBCMIXED:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
encoding = stmt.ccsidMBCEncoding;
break;
}
traceEXTDTARead(drdaType, i+1, stream, streamLOB, encoding);
try {
switch (drdaType)
{
case DRDAConstants.DRDA_TYPE_LOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
setAsBinaryStream(stmt, i+1, stream, streamLOB);
break;
case DRDAConstants.DRDA_TYPE_LOBCSBCS:
case DRDAConstants.DRDA_TYPE_NLOBCSBCS:
case DRDAConstants.DRDA_TYPE_LOBCDBCS:
case DRDAConstants.DRDA_TYPE_NLOBCDBCS:
case DRDAConstants.DRDA_TYPE_LOBCMIXED:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
setAsCharacterStream(stmt, i+1, stream, streamLOB,
encoding);
break;
default:
invalidValue(drdaType);
}
}
catch (java.io.UnsupportedEncodingException e) {
throw new SQLException (e.getMessage());
} catch( IOException e ){
throw new SQLException ( e.getMessage() );
}
}
/**
* Parse EXCSQLIMM - Execute Immediate Statement
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* RDBCMTOK - RDB Commit Allowed - optional
* MONITOR - Monitor Events - optional
*
* Command Objects
* TYPDEFNAM - Data Type Definition Name - optional
* TYPDEFOVR - TYPDEF Overrides -optional
* SQLSTT - SQL Statement -required
*
* @return update count
* @throws DRDAProtocolException
* @throws SQLException
*/
private int parseEXCSQLIMM() throws DRDAProtocolException,SQLException
{
int codePoint;
reader.markCollection();
Pkgnamcsn pkgnamcsn = null;
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.EXCSQLIMM);
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
case CodePoint.RDBCMTOK:
parseRDBCMTOK();
break;
//optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
DRDAStatement drdaStmt = database.getDefaultStatement(pkgnamcsn);
// initialize statement for reuse
drdaStmt.initialize();
String sqlStmt = parseEXECSQLIMMobjects();
Statement statement = drdaStmt.getStatement();
statement.clearWarnings();
if (pendingStatementTimeout >= 0) {
statement.setQueryTimeout(pendingStatementTimeout);
pendingStatementTimeout = -1;
}
int updCount = statement.executeUpdate(sqlStmt);
return updCount;
}
/**
* Parse EXCSQLSET - Execute Set SQL Environment
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCT - RDB Package Name, Consistency Token - optional
* MONITOR - Monitor Events - optional
*
* Command Objects
* TYPDEFNAM - Data Type Definition Name - required
* TYPDEFOVR - TYPDEF Overrides - required
* SQLSTT - SQL Statement - required (at least one; may be more)
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private boolean parseEXCSQLSET() throws DRDAProtocolException,SQLException
{
int codePoint;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.EXCSQLSET);
break;
// optional
case CodePoint.PKGNAMCT:
// we are going to ignore this for EXCSQLSET
// since we are just going to reuse an existing statement
String pkgnamct = parsePKGNAMCT();
break;
// optional
case CodePoint.MONITOR:
parseMONITOR();
break;
// required
case CodePoint.PKGNAMCSN:
// we are going to ignore this for EXCSQLSET.
// since we are just going to reuse an existing statement.
// NOTE: This codepoint is not in the DDM spec for 'EXCSQLSET',
// but since it DOES get sent by jcc1.2, we have to have
// a case for it...
Pkgnamcsn pkgnamcsn = parsePKGNAMCSN();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
parseEXCSQLSETobjects();
return true;
}
/**
* Parse EXCSQLIMM objects
* Objects
* TYPDEFNAM - Data type definition name - optional
* TYPDEFOVR - Type defintion overrides
* SQLSTT - SQL Statement required
*
* If TYPDEFNAM and TYPDEFOVR are supplied, they apply to the objects
* sent with the statement. Once the statement is over, the default values
* sent in the ACCRDB are once again in effect. If no values are supplied,
* the values sent in the ACCRDB are used.
* Objects may follow in one DSS or in several DSS chained together.
*
* @return SQL Statement
* @throws DRDAProtocolException
* @throws SQLException
*/
private String parseEXECSQLIMMobjects() throws DRDAProtocolException, SQLException
{
String sqlStmt = null;
int codePoint;
DRDAStatement stmt = database.getDefaultStatement();
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, stmt, parseTYPDEFNAM());
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(stmt);
break;
// required
case CodePoint.SQLSTT:
sqlStmt = parseEncodedString();
if (SanityManager.DEBUG)
trace("sqlStmt = " + sqlStmt);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
// SQLSTT is required
if (sqlStmt == null)
missingCodePoint(CodePoint.SQLSTT);
return sqlStmt;
}
/**
* Parse EXCSQLSET objects
* Objects
* TYPDEFNAM - Data type definition name - optional
* TYPDEFOVR - Type defintion overrides - optional
* SQLSTT - SQL Statement - required (a list of at least one)
*
* Objects may follow in one DSS or in several DSS chained together.
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void parseEXCSQLSETobjects()
throws DRDAProtocolException, SQLException
{
boolean gotSqlStt = false;
boolean hadUnrecognizedStmt = false;
String sqlStmt = null;
int codePoint;
DRDAStatement drdaStmt = database.getDefaultStatement();
drdaStmt.initialize();
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, drdaStmt, parseTYPDEFNAM());
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(drdaStmt);
break;
// required
case CodePoint.SQLSTT:
sqlStmt = parseEncodedString();
if (sqlStmt != null)
// then we have at least one SQL Statement.
gotSqlStt = true;
if (sqlStmt.startsWith(TIMEOUT_STATEMENT)) {
String timeoutString = sqlStmt.substring(TIMEOUT_STATEMENT.length());
pendingStatementTimeout = Integer.parseInt(timeoutString);
break;
}
if (canIgnoreStmt(sqlStmt)) {
// We _know_ Derby doesn't recognize this
// statement; don't bother trying to execute it.
// NOTE: at time of writing, this only applies
// to "SET CLIENT" commands, and it was decided
// that throwing a Warning for these commands
// would confuse people, so even though the DDM
// spec says to do so, we choose not to (but
// only for SET CLIENT cases). If this changes
// at some point in the future, simply remove
// the follwing line; we will then throw a
// warning.
// hadUnrecognizedStmt = true;
break;
}
if (SanityManager.DEBUG)
trace("sqlStmt = " + sqlStmt);
// initialize statement for reuse
drdaStmt.initialize();
drdaStmt.getStatement().clearWarnings();
try {
drdaStmt.getStatement().executeUpdate(sqlStmt);
} catch (SQLException e) {
// if this is a syntax error, then we take it
// to mean that the given SET statement is not
// recognized; take note (so we can throw a
// warning later), but don't interfere otherwise.
if (e.getSQLState().equals(SYNTAX_ERR))
hadUnrecognizedStmt = true;
else
// something else; assume it's serious.
throw e;
}
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
// SQLSTT is required.
if (!gotSqlStt)
missingCodePoint(CodePoint.SQLSTT);
// Now that we've processed all SET statements (assuming no
// severe exceptions), check for warnings and, if we had any,
// note this in the SQLCARD reply object (but DON'T cause the
// EXCSQLSET statement to fail).
if (hadUnrecognizedStmt) {
SQLWarning warn = new SQLWarning("One or more SET statements " +
"not recognized.", "01000");
throw warn;
} // end if.
return;
}
private boolean canIgnoreStmt(String stmt)
{
if (stmt.indexOf("SET CLIENT") != -1)
return true;
return false;
}
/**
* Write RDBUPDRM
* Instance variables
* SVRCOD - Severity code - Information only - required
* RDBNAM - Relational database name -required
* SRVDGN - Server Diagnostic Information -optional
*
* @exception DRDAProtocolException
*/
private void writeRDBUPDRM() throws DRDAProtocolException
{
database.RDBUPDRM_sent = true;
writer.createDssReply();
writer.startDdm(CodePoint.RDBUPDRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_INFO);
writeRDBNAM(database.getDatabaseName());
writer.endDdmAndDss();
}
private String parsePKGNAMCT() throws DRDAProtocolException
{
reader.skipBytes();
return null;
}
/**
* Parse PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number
* Instance Variables
* NAMESYMDR - database name - not validated
* RDBCOLID - RDB Collection Identifier
* PKGID - RDB Package Identifier
* PKGCNSTKN - RDB Package Consistency Token
* PKGSN - RDB Package Section Number
*
* @return <code>Pkgnamcsn</code> value
* @throws DRDAProtocolException
*/
private Pkgnamcsn parsePKGNAMCSN() throws DRDAProtocolException
{
if (reader.getDdmLength() == CodePoint.PKGNAMCSN_LEN)
{
// This is a scalar object with the following fields
reader.readString(rdbnam, CodePoint.RDBNAM_LEN, true);
if (SanityManager.DEBUG)
trace("rdbnam = " + rdbnam);
// A check that the rdbnam field corresponds to a database
// specified in a ACCRDB term.
// The check is not performed if the client is DNC_CLIENT
// with version before 10.3.0 because these clients
// are broken and send incorrect database name
// if multiple connections to different databases
// are created
// This check was added because of DERBY-1434
// check the client version first
if (appRequester.greaterThanOrEqualTo(10,3,0) ) {
// check the database name
if (!rdbnam.toString().equals(database.getDatabaseName()))
rdbnamMismatch(CodePoint.PKGNAMCSN);
}
reader.readString(rdbcolid, CodePoint.RDBCOLID_LEN, true);
if (SanityManager.DEBUG)
trace("rdbcolid = " + rdbcolid);
reader.readString(pkgid, CodePoint.PKGID_LEN, true);
if (SanityManager.DEBUG)
trace("pkgid = " + pkgid);
// we need to use the same UCS2 encoding, as this can be
// bounced back to jcc (or keep the byte array)
reader.readString(pkgcnstkn, CodePoint.PKGCNSTKN_LEN, false);
if (SanityManager.DEBUG)
trace("pkgcnstkn = " + pkgcnstkn);
pkgsn = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("pkgsn = " + pkgsn);
}
else // extended format
{
int length = reader.readNetworkShort();
if (length < CodePoint.RDBNAM_LEN || length > CodePoint.MAX_NAME)
badObjectLength(CodePoint.RDBNAM);
reader.readString(rdbnam, length, true);
if (SanityManager.DEBUG)
trace("rdbnam = " + rdbnam);
// A check that the rdbnam field corresponds to a database
// specified in a ACCRDB term.
// The check is not performed if the client is DNC_CLIENT
// with version before 10.3.0 because these clients
// are broken and send incorrect database name
// if multiple connections to different databases
// are created
// This check was added because of DERBY-1434
// check the client version first
if ( appRequester.getClientType() != AppRequester.DNC_CLIENT
|| appRequester.greaterThanOrEqualTo(10,3,0) ) {
// check the database name
if (!rdbnam.toString().equals(database.getDatabaseName()))
rdbnamMismatch(CodePoint.PKGNAMCSN);
}
//RDBCOLID can be variable length in this format
length = reader.readNetworkShort();
reader.readString(rdbcolid, length, true);
if (SanityManager.DEBUG)
trace("rdbcolid = " + rdbcolid);
length = reader.readNetworkShort();
if (length != CodePoint.PKGID_LEN)
badObjectLength(CodePoint.PKGID);
reader.readString(pkgid, CodePoint.PKGID_LEN, true);
if (SanityManager.DEBUG)
trace("pkgid = " + pkgid);
reader.readString(pkgcnstkn, CodePoint.PKGCNSTKN_LEN, false);
if (SanityManager.DEBUG)
trace("pkgcnstkn = " + pkgcnstkn);
pkgsn = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("pkgsn = " + pkgsn);
}
// In most cases, the pkgnamcsn object is equal to the
// previously returned object. To avoid allocation of a new
// object in these cases, we first check to see if the old
// object can be reused.
if ((prevPkgnamcsn == null) ||
rdbnam.wasModified() ||
rdbcolid.wasModified() ||
pkgid.wasModified() ||
pkgcnstkn.wasModified() ||
(prevPkgnamcsn.getPkgsn() != pkgsn))
{
// The byte array returned by pkgcnstkn.getBytes() might
// be modified by DDMReader.readString() later, so we have
// to create a copy of the array.
byte[] token = new byte[pkgcnstkn.length()];
System.arraycopy(pkgcnstkn.getBytes(), 0, token, 0, token.length);
prevPkgnamcsn =
new Pkgnamcsn(rdbnam.toString(), rdbcolid.toString(),
pkgid.toString(), pkgsn,
new ConsistencyToken(token));
}
return prevPkgnamcsn;
}
/**
* Parse SQLSTT Dss
* @exception DRDAProtocolException
*/
private String parseSQLSTTDss() throws DRDAProtocolException
{
correlationID = reader.readDssHeader();
int codePoint = reader.readLengthAndCodePoint( false );
String strVal = parseEncodedString();
if (SanityManager.DEBUG)
trace("SQL Statement = " + strVal);
return strVal;
}
/**
* Parse an encoded data string from the Application Requester
*
* @return string value
* @exception DRDAProtocolException
*/
private String parseEncodedString() throws DRDAProtocolException
{
if (sqlamLevel < 7)
return parseVCMorVCS();
else
return parseNOCMorNOCS();
}
/**
* Parse variable character mixed byte or variable character single byte
* Format
* I2 - VCM Length
* N bytes - VCM value
* I2 - VCS Length
* N bytes - VCS value
* Only 1 of VCM length or VCS length can be non-zero
*
* @return string value
*/
private String parseVCMorVCS() throws DRDAProtocolException
{
String strVal = null;
int vcm_length = reader.readNetworkShort();
if (vcm_length > 0)
strVal = parseCcsidMBC(vcm_length);
int vcs_length = reader.readNetworkShort();
if (vcs_length > 0)
{
if (strVal != null)
agentError ("Both VCM and VCS have lengths > 0");
strVal = parseCcsidSBC(vcs_length);
}
return strVal;
}
/**
* Parse nullable character mixed byte or nullable character single byte
* Format
* 1 byte - null indicator
* I4 - mixed character length
* N bytes - mixed character string
* 1 byte - null indicator
* I4 - single character length
* N bytes - single character length string
*
* @return string value
* @exception DRDAProtocolException
*/
private String parseNOCMorNOCS() throws DRDAProtocolException
{
byte nocm_nullByte = reader.readByte();
String strVal = null;
int length;
if (nocm_nullByte != NULL_VALUE)
{
length = reader.readNetworkInt();
strVal = parseCcsidMBC(length);
}
byte nocs_nullByte = reader.readByte();
if (nocs_nullByte != NULL_VALUE)
{
if (strVal != null)
agentError("Both CM and CS are non null");
length = reader.readNetworkInt();
strVal = parseCcsidSBC(length);
}
return strVal;
}
/**
* Parse mixed character string
*
* @return string value
* @exception DRDAProtocolException
*/
private String parseCcsidMBC(int length) throws DRDAProtocolException
{
String strVal = null;
DRDAStatement currentStatement;
currentStatement = database.getCurrentStatement();
if (currentStatement == null)
{
currentStatement = database.getDefaultStatement();
currentStatement.initialize();
}
String ccsidMBCEncoding = currentStatement.ccsidMBCEncoding;
if (length == 0)
return null;
byte [] byteStr = reader.readBytes(length);
if (ccsidMBCEncoding != null)
{
try {
strVal = new String(byteStr, 0, length, ccsidMBCEncoding);
} catch (UnsupportedEncodingException e) {
agentError("Unsupported encoding " + ccsidMBCEncoding +
"in parseCcsidMBC");
}
}
else
agentError("Attempt to decode mixed byte string without CCSID being set");
return strVal;
}
/**
* Parse single byte character string
*
* @return string value
* @exception DRDAProtocolException
*/
private String parseCcsidSBC(int length) throws DRDAProtocolException
{
String strVal = null;
DRDAStatement currentStatement;
currentStatement = database.getCurrentStatement();
if (currentStatement == null)
{
currentStatement = database.getDefaultStatement();
currentStatement.initialize();
}
String ccsidSBCEncoding = currentStatement.ccsidSBCEncoding;
System.out.println("ccsidSBCEncoding - " + ccsidSBCEncoding);
if (length == 0)
return null;
byte [] byteStr = reader.readBytes(length);
if (ccsidSBCEncoding != null)
{
try {
strVal = new String(byteStr, 0, length, ccsidSBCEncoding);
} catch (UnsupportedEncodingException e) {
agentError("Unsupported encoding " + ccsidSBCEncoding +
"in parseCcsidSBC");
}
}
else
agentError("Attempt to decode single byte string without CCSID being set");
return strVal;
}
/**
* Parse CLSQRY
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* QRYINSID - Query Instance Identifier - required - level 7
* MONITOR - Monitor events - optional.
*
* @return DRDAstatement being closed
* @throws DRDAProtocolException
* @throws SQLException
*/
private DRDAStatement parseCLSQRY() throws DRDAProtocolException, SQLException
{
Pkgnamcsn pkgnamcsn = null;
reader.markCollection();
long qryinsid = 0;
boolean gotQryinsid = false;
int codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.CLSQRY);
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
case CodePoint.QRYINSID:
qryinsid = reader.readNetworkLong();
gotQryinsid = true;
break;
// optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for required variables
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
if (sqlamLevel >= MGRLVL_7 && !gotQryinsid)
missingCodePoint(CodePoint.QRYINSID);
DRDAStatement stmt = database.getDRDAStatement(pkgnamcsn);
if (stmt == null)
{
//XXX should really throw a SQL Exception here
invalidValue(CodePoint.PKGNAMCSN);
}
if (stmt.wasExplicitlyClosed())
{
// JCC still sends a CLSQRY even though we have
// implicitly closed the resultSet.
// Then complains if we send the writeQRYNOPRM
// So for now don't send it
// Also metadata calls seem to get bound to the same
// PGKNAMCSN, so even for explicit closes we have
// to ignore.
//writeQRYNOPRM(CodePoint.SVRCOD_ERROR);
pkgnamcsn = null;
}
stmt.CLSQRY();
return stmt;
}
/**
* Parse MONITOR
* DRDA spec says this is optional. Since we
* don't currently support it, we just ignore.
*/
private void parseMONITOR()
throws DRDAProtocolException
{
// Just ignore it.
reader.skipBytes();
return;
}
private void writeSQLCARDs(SQLException e, int updateCount)
throws DRDAProtocolException
{
writeSQLCARDs(e, updateCount, false);
}
private void writeSQLCARDs(SQLException e, int updateCount, boolean sendSQLERRRM)
throws DRDAProtocolException
{
int severity = CodePoint.SVRCOD_INFO;
if (e == null)
{
writeSQLCARD(e,severity, updateCount, 0);
return;
}
// instead of writing a chain of sql error or warning, we send the first one, this is
// jcc/db2 limitation, see beetle 4629
// If it is a real SQL Error write a SQLERRRM first
severity = getExceptionSeverity(e);
if (severity > CodePoint.SVRCOD_ERROR)
{
// For a session ending error > CodePoint.SRVCOD_ERROR you cannot
// send a SQLERRRM. A CMDCHKRM is required. In XA if there is a
// lock timeout it ends the whole session. I am not sure this
// is the correct behaviour but if it occurs we have to send
// a CMDCHKRM instead of SQLERRM
writeCMDCHKRM(severity);
}
else if (sendSQLERRRM)
{
writeSQLERRRM(severity);
}
writeSQLCARD(e,severity, updateCount, 0);
}
private int getSqlCode(int severity)
{
if (severity == CodePoint.SVRCOD_WARNING) // warning
return 100; //CLI likes it
else if (severity == CodePoint.SVRCOD_INFO)
return 0;
else
return -1;
}
private void writeSQLCARD(SQLException e,int severity,
int updateCount, long rowCount ) throws DRDAProtocolException
{
writer.createDssObject();
writer.startDdm(CodePoint.SQLCARD);
writeSQLCAGRP(e, updateCount, rowCount);
writer.endDdmAndDss();
// If we have a shutdown exception, restart the server.
if (e != null) {
String sqlState = e.getSQLState();
if (sqlState.regionMatches(0,
SQLState.CLOUDSCAPE_SYSTEM_SHUTDOWN, 0, 5)) {
// then we're here because of a shutdown exception;
// "clean up" by restarting the server.
try {
server.startNetworkServer();
} catch (Exception restart)
// any error messages should have already been printed,
// so we ignore this exception here.
{}
}
}
}
/**
* Write a null SQLCARD as an object
*
* @exception DRDAProtocolException
*/
private void writeNullSQLCARDobject()
throws DRDAProtocolException
{
writer.createDssObject();
writer.startDdm(CodePoint.SQLCARD);
writeSQLCAGRP(nullSQLState, 0, 0, 0);
writer.endDdmAndDss();
}
/**
* Write SQLERRRM
*
* Instance Variables
* SVRCOD - Severity Code - required
*
* @param severity severity of error
*
* @exception DRDAProtocolException
*/
private void writeSQLERRRM(int severity) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.SQLERRRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, severity);
writer.endDdmAndDss ();
}
/**
* Write CMDCHKRM
*
* Instance Variables
* SVRCOD - Severity Code - required
*
* @param severity severity of error
*
* @exception DRDAProtocolException
*/
private void writeCMDCHKRM(int severity) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.CMDCHKRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, severity);
writer.endDdmAndDss ();
}
/**
* Translate from Derby exception severity to SVRCOD
*
* @param e SQLException
*/
private int getExceptionSeverity (SQLException e)
{
int severity= CodePoint.SVRCOD_INFO;
if (e == null)
return severity;
int ec = e.getErrorCode();
switch (ec)
{
case ExceptionSeverity.STATEMENT_SEVERITY:
case ExceptionSeverity.TRANSACTION_SEVERITY:
severity = CodePoint.SVRCOD_ERROR;
break;
case ExceptionSeverity.WARNING_SEVERITY:
severity = CodePoint.SVRCOD_WARNING;
break;
case ExceptionSeverity.SESSION_SEVERITY:
case ExceptionSeverity.DATABASE_SEVERITY:
case ExceptionSeverity.SYSTEM_SEVERITY:
severity = CodePoint.SVRCOD_SESDMG;
break;
default:
String sqlState = e.getSQLState();
if (sqlState != null && sqlState.startsWith("01")) // warning
severity = CodePoint.SVRCOD_WARNING;
else
severity = CodePoint.SVRCOD_ERROR;
}
return severity;
}
/**
* Write SQLCAGRP
*
* SQLCAGRP : FDOCA EARLY GROUP
* SQL Communcations Area Group Description
*
* FORMAT FOR SQLAM <= 6
* SQLCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLSTATE; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLERRPROC; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLCAXGRP; DRDA TYPE N-GDA; ENVLID 0x52; Length Override 0
*
* FORMAT FOR SQLAM >= 7
* SQLCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLSTATE; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLERRPROC; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLCAXGRP; DRDA TYPE N-GDA; ENVLID 0x52; Length Override 0
* SQLDIAGGRP; DRDA TYPE N-GDA; ENVLID 0x56; Length Override 0
*
* @param e SQLException encountered
*
* @exception DRDAProtocolException
*/
private void writeSQLCAGRP(SQLException e, int updateCount, long rowCount)
throws DRDAProtocolException
{
int sqlcode = 0;
if (e == null) {
// Forwarding to the optimized version when there is no
// exception object
writeSQLCAGRP(nullSQLState, sqlcode, updateCount, rowCount);
return;
}
// SQLWarnings should have warning severity, except if it's a
// DataTruncation warning for write operations (with SQLState 22001),
// which is supposed to be used as an exception even though it's a
// sub-class of SQLWarning.
if (e instanceof SQLWarning &&
!SQLState.LANG_STRING_TRUNCATION.equals(e.getSQLState())) {
sqlcode = ExceptionSeverity.WARNING_SEVERITY;
} else {
// Get the SQLCODE for exceptions. Note that this call will always
// return -1, so the real error code will be lost.
sqlcode = getSqlCode(getExceptionSeverity(e));
}
if (rowCount < 0 && updateCount < 0)
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
if (SanityManager.DEBUG && server.debugOutput && sqlcode < 0) {
trace("handle SQLException here");
trace("reason is: "+e.getMessage());
trace("SQLState is: "+e.getSQLState());
trace("vendorCode is: "+e.getErrorCode());
trace("nextException is: "+e.getNextException());
server.consoleExceptionPrint(e);
trace("wrapping SQLException into SQLCARD...");
}
//null indicator
writer.writeByte(0);
// SQLCODE
writer.writeInt(sqlcode);
// SQLSTATE
writer.writeString(e.getSQLState());
// SQLERRPROC
// Write the byte[] constant rather than the string, for efficiency
writer.writeBytes(server.prdIdBytes_);
// SQLCAXGRP
writeSQLCAXGRP(updateCount, rowCount, buildSqlerrmc(e), e.getNextException());
}
/**
* Same as writeSQLCAGRP, but optimized for the case
* when there is no real exception, i.e. the exception is null, or "End
* of data"
*
* SQLCAGRP : FDOCA EARLY GROUP
* SQL Communcations Area Group Description
*
* FORMAT FOR SQLAM <= 6
* SQLCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLSTATE; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLERRPROC; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLCAXGRP; DRDA TYPE N-GDA; ENVLID 0x52; Length Override 0
*
* FORMAT FOR SQLAM >= 7
* SQLCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLSTATE; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLERRPROC; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLCAXGRP; DRDA TYPE N-GDA; ENVLID 0x52; Length Override 0
* SQLDIAGGRP; DRDA TYPE N-GDA; ENVLID 0x56; Length Override 0
*
* @param sqlState SQLState (already converted to UTF8)
* @param sqlcode sqlcode
* @param updateCount
* @param rowCount
*
* @exception DRDAProtocolException
*/
private void writeSQLCAGRP(byte[] sqlState, int sqlcode,
int updateCount, long rowCount) throws DRDAProtocolException
{
if (rowCount < 0 && updateCount < 0) {
writer.writeByte(CodePoint.NULLDATA);
return;
}
//null indicator
writer.writeByte(0);
// SQLCODE
writer.writeInt(sqlcode);
// SQLSTATE
writer.writeBytes(sqlState);
// SQLERRPROC
writer.writeBytes(server.prdIdBytes_);
// SQLCAXGRP (Uses null as sqlerrmc since there is no error)
writeSQLCAXGRP(updateCount, rowCount, null, null);
}
// Delimiters for SQLERRMC values.
// The token delimiter value will be used to parse the MessageId from the
// SQLERRMC in MessageService.getLocalizedMessage and the MessageId will be
// used to retrive the localized message. If this delimiter value is changed
// please make sure to make appropriate changes in
// MessageService.getLocalizedMessage that gets called from
// SystemProcedures.SQLCAMESSAGE
/**
* <code>SQLERRMC_TOKEN_DELIMITER</code> separates message argument tokens
*/
private static String SQLERRMC_TOKEN_DELIMITER = new String(new char[] {(char)20});
/**
* <code>SQLERRMC_PREFORMATTED_MESSAGE_DELIMITER</code>, When full message text is
* sent for severe errors. This value separates the messages.
*/
private static String SQLERRMC_PREFORMATTED_MESSAGE_DELIMITER = "::";
/**
* Create error message or message argements to return to client.
* The SQLERRMC will normally be passed back to the server in a call
* to the SYSIBM.SQLCAMESSAGE but for severe exceptions the stored procedure
* call cannot be made. So for Severe messages we will just send the message text.
*
* This method will also truncate the value according the client capacity.
* CCC can only handle 70 characters.
*
* Server sends the sqlerrmc using UTF8 encoding to the client.
* To get the message, client sends back information to the server
* calling SYSIBM.SQLCAMESSAGE (see Sqlca.getMessage). Several parameters
* are sent to this procedure including the locale, the sqlerrmc that the
* client received from the server.
* On server side, the procedure SQLCAMESSAGE in SystemProcedures then calls
* the MessageService.getLocalizedMessage to retrieve the localized error message.
* In MessageService.getLocalizedMessage the sqlerrmc that is passed in,
* is parsed to retrieve the message id. The value it uses to parse the MessageId
* is char value of 20, otherwise it uses the entire sqlerrmc as the message id.
* This messageId is then used to retrieve the localized message if present, to
* the client.
*
* @param se SQLException to build SQLERRMC
*
* @return String which is either the message arguments to be passed to
* SYSIBM.SQLCAMESSAGE or just message text for severe errors.
*/
private String buildSqlerrmc (SQLException se)
{
boolean severe = (se.getErrorCode() >= ExceptionSeverity.SESSION_SEVERITY);
String sqlerrmc = null;
// get exception which carries Derby messageID and args, per DERBY-1178
se = Util.getExceptionFactory().getArgumentFerry( se );
if (se instanceof EmbedSQLException && ! severe)
sqlerrmc = buildTokenizedSqlerrmc(se);
else if (se instanceof DataTruncation)
sqlerrmc = buildDataTruncationSqlerrmc((DataTruncation) se);
else {
// If this is not an EmbedSQLException or is a severe excecption where
// we have no hope of succussfully calling the SYSIBM.SQLCAMESSAGE send
// preformatted message using the server locale
sqlerrmc = buildPreformattedSqlerrmc(se);
}
// Truncate the sqlerrmc to a length that the client can support.
int maxlen = (sqlerrmc == null) ? -1 : Math.min(sqlerrmc.length(),
appRequester.supportedMessageParamLength());
if ((maxlen >= 0) && (sqlerrmc.length() > maxlen))
// have to truncate so the client can handle it.
sqlerrmc = sqlerrmc.substring(0, maxlen);
return sqlerrmc;
}
/**
* Build preformatted SQLException text
* for severe exceptions or SQLExceptions that are not EmbedSQLExceptions.
* Just send the message text localized to the server locale.
*
* @param se SQLException for which to build SQLERRMC
* @return preformated message text
* with messages separted by SQLERRMC_PREFORMATED_MESSAGE_DELIMITER
*
*/
private String buildPreformattedSqlerrmc(SQLException se) {
if (se == null)
return "";
StringBuffer sb = new StringBuffer();
// String buffer to build up message
do {
sb.append(se.getLocalizedMessage());
se = se.getNextException();
if (se != null)
sb.append(SQLERRMC_PREFORMATTED_MESSAGE_DELIMITER +
"SQLSTATE: " + se.getSQLState());
} while (se != null);
return sb.toString();
}
/**
* Build Tokenized SQLERRMC to just send the tokenized arguments to the client.
* for a Derby SQLException or an SQLException thrown by user code.
* Message argument tokens are separated by SQLERRMC_TOKEN_DELIMITER
* Multiple messages are separated by SystemProcedures.SQLERRMC_MESSAGE_DELIMITER
*
* ...
* @param se SQLException to print
*
*/
private String buildTokenizedSqlerrmc(SQLException se) {
String sqlerrmc = "";
do {
if ( se instanceof EmbedSQLException)
{
String messageId = ((EmbedSQLException)se).getMessageId();
// arguments are variable part of a message
Object[] args = ((EmbedSQLException)se).getArguments();
for (int i = 0; args != null && i < args.length; i++)
sqlerrmc += args[i] + SQLERRMC_TOKEN_DELIMITER;
sqlerrmc += messageId;
se = se.getNextException();
}
else
{
// this could happen for instance if an SQLException was thrown
// from a stored procedure.
StringBuffer sb = new StringBuffer();
sb.append(se.getLocalizedMessage());
se = se.getNextException();
if (se != null)
sb.append(SQLERRMC_TOKEN_DELIMITER +
"SQLSTATE: " + se.getSQLState());
sqlerrmc += sb.toString();
}
if (se != null)
{
sqlerrmc += SystemProcedures.SQLERRMC_MESSAGE_DELIMITER + se.getSQLState() + ":";
}
} while (se != null);
return sqlerrmc;
}
/**
* Build the SQLERRMC for a {@code java.sql.DataTruncation} warning.
* Serialize all the fields of the {@code DataTruncation} instance in the
* order in which they appear in the parameter list of the constructor.
*
* @param dt the {@code DataTruncation} instance to serialize
* @return the SQLERRMC string with all fields of the warning
*/
private String buildDataTruncationSqlerrmc(DataTruncation dt) {
return dt.getIndex() + SQLERRMC_TOKEN_DELIMITER +
dt.getParameter() + SQLERRMC_TOKEN_DELIMITER +
dt.getRead() + SQLERRMC_TOKEN_DELIMITER +
dt.getDataSize() + SQLERRMC_TOKEN_DELIMITER +
dt.getTransferSize();
}
/**
* Write SQLCAXGRP
*
* SQLCAXGRP : EARLY FDOCA GROUP
* SQL Communications Area Exceptions Group Description
*
* FORMAT FOR SQLAM <= 6
* SQLRDBNME; DRDA TYPE FCS; ENVLID 0x30; Length Override 18
* SQLERRD1; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD2; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD3; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD4; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD5; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD6; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLWARN0; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN1; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN2; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN3; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN4; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN5; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN6; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN7; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN8; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN9; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARNA; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLERRMSG_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 70
* SQLERRMSG_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 70
*
* FORMAT FOR SQLAM >= 7
* SQLERRD1; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD2; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD3; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD4; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD5; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD6; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLWARN0; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN1; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN2; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN3; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN4; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN5; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN6; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN7; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN8; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN9; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARNA; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLRDBNAME; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* SQLERRMSG_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 70
* SQLERRMSG_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 70
* @param nextException SQLException encountered
* @param sqlerrmc sqlcode
*
* @exception DRDAProtocolException
*/
private void writeSQLCAXGRP(int updateCount, long rowCount, String sqlerrmc,
SQLException nextException) throws DRDAProtocolException
{
writer.writeByte(0); // SQLCAXGRP INDICATOR
if (sqlamLevel < 7)
{
writeRDBNAM(database.getDatabaseName());
writeSQLCAERRWARN(updateCount, rowCount);
}
else
{
// SQL ERRD1 - D6, WARN0-WARNA (35 bytes)
writeSQLCAERRWARN(updateCount, rowCount);
writer.writeShort(0); //CCC on Win does not take RDBNAME
}
writeVCMorVCS(sqlerrmc);
if (sqlamLevel >=7)
writeSQLDIAGGRP(nextException);
}
/**
* Write the ERR and WARN part of the SQLCA
*
* @param updateCount
* @param rowCount
*/
private void writeSQLCAERRWARN(int updateCount, long rowCount)
{
// SQL ERRD1 - ERRD2 - row Count
writer.writeInt((int)((rowCount>>>32)));
writer.writeInt((int)(rowCount & 0x0000000ffffffffL));
// SQL ERRD3 - updateCount
writer.writeInt(updateCount);
// SQL ERRD4 - D6 (12 bytes)
writer.writeBytes(errD4_D6); // byte[] constant
// WARN0-WARNA (11 bytes)
writer.writeBytes(warn0_warnA); // byte[] constant
}
/**
* Write SQLDIAGGRP: SQL Diagnostics Group Description - Identity 0xD1
* Nullable Group
* SQLDIAGSTT; DRDA TYPE N-GDA; ENVLID 0xD3; Length Override 0
* SQLDIAGCN; DRFA TYPE N-RLO; ENVLID 0xF6; Length Override 0
* SQLDIAGCI; DRDA TYPE N-RLO; ENVLID 0xF5; Length Override 0
*/
private void writeSQLDIAGGRP(SQLException nextException)
throws DRDAProtocolException
{
// for now we only want to send ROW_DELETED and ROW_UPDATED warnings
// as extended diagnostics
// move to first ROW_DELETED or ROW_UPDATED exception. These have been
// added to the end of the warning chain.
while (
nextException != null &&
nextException.getSQLState() != SQLState.ROW_UPDATED &&
nextException.getSQLState() != SQLState.ROW_DELETED) {
nextException = nextException.getNextException();
}
if ((nextException == null) ||
(diagnosticLevel == CodePoint.DIAGLVL0)) {
writer.writeByte(CodePoint.NULLDATA);
return;
}
writer.writeByte(0); // SQLDIAGGRP indicator
writeSQLDIAGSTT();
writeSQLDIAGCI(nextException);
writeSQLDIAGCN();
}
/*
* writeSQLDIAGSTT: Write NULLDATA for now
*/
private void writeSQLDIAGSTT()
throws DRDAProtocolException
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
/**
* writeSQLDIAGCI: SQL Diagnostics Condition Information Array - Identity 0xF5
* SQLNUMROW; ROW LID 0x68; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLDCIROW; ROW LID 0xE5; ELEMENT TAKEN 0(all); REP FACTOR 0(all)
*/
private void writeSQLDIAGCI(SQLException nextException)
throws DRDAProtocolException
{
SQLException se = nextException;
long rowNum = 1;
/* Write the number of next exceptions to expect */
writeSQLNUMROW(se);
while (se != null)
{
String sqlState = se.getSQLState();
// SQLCode > 0 -> Warning
// SQLCode = 0 -> Info
// SQLCode < 0 -> Error
int severity = getExceptionSeverity(se);
int sqlCode = -1;
if (severity == CodePoint.SVRCOD_WARNING)
sqlCode = 1;
else if (severity == CodePoint.SVRCOD_INFO)
sqlCode = 0;
String sqlerrmc = "";
if (diagnosticLevel == CodePoint.DIAGLVL1) {
sqlerrmc = se.getLocalizedMessage();
}
// arguments are variable part of a message
// only send arguments for diagnostic level 0
if (diagnosticLevel == CodePoint.DIAGLVL0) {
// we are only able to get arguments of EmbedSQLException
if (se instanceof EmbedSQLException) {
Object[] args = ((EmbedSQLException)se).getArguments();
for (int i = 0; args != null && i < args.length; i++)
sqlerrmc += args[i].toString() + SQLERRMC_TOKEN_DELIMITER;
}
}
String dbname = null;
if (database != null)
dbname = database.getDatabaseName();
writeSQLDCROW(rowNum++, sqlCode, sqlState, dbname, sqlerrmc);
se = se.getNextException();
}
return;
}
/**
* writeSQLNUMROW: Writes SQLNUMROW : FDOCA EARLY ROW
* SQL Number of Elements Row Description
* FORMAT FOR SQLAM LEVELS
* SQLNUMGRP; GROUP LID 0x58; ELEMENT TAKEN 0(all); REP FACTOR 1
*/
private void writeSQLNUMROW(SQLException nextException)
throws DRDAProtocolException
{
writeSQLNUMGRP(nextException);
}
/**
* writeSQLNUMGRP: Writes SQLNUMGRP : FDOCA EARLY GROUP
* SQL Number of Elements Group Description
* FORMAT FOR ALL SQLAM LEVELS
* SQLNUM; DRDA TYPE I2; ENVLID 0x04; Length Override 2
*/
private void writeSQLNUMGRP(SQLException nextException)
throws DRDAProtocolException
{
int i=0;
SQLException se;
/* Count the number of chained exceptions to be sent */
for (se = nextException; se != null; se = se.getNextException()) i++;
writer.writeShort(i);
}
/**
* writeSQLDCROW: SQL Diagnostics Condition Row - Identity 0xE5
* SQLDCGRP; GROUP LID 0xD5; ELEMENT TAKEN 0(all); REP FACTOR 1
*/
private void writeSQLDCROW(long rowNum, int sqlCode, String sqlState, String dbname,
String sqlerrmc) throws DRDAProtocolException
{
writeSQLDCGRP(rowNum, sqlCode, sqlState, dbname, sqlerrmc);
}
/**
* writeSQLDCGRP: SQL Diagnostics Condition Group Description
*
* SQLDCCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCSTATE; DRDA TYPE FCS; ENVLID Ox30; Lengeh Override 5
* SQLDCREASON; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCLINEN; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCROWN; DRDA TYPE FD; ENVLID 0x0E; Lengeh Override 31
* SQLDCER01; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCER02; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCER03; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCER04; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCPART; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCPPOP; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCMSGID; DRDA TYPE FCS; ENVLID 0x30; Length Override 10
* SQLDCMDE; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLDCPMOD; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLDCRDB; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* SQLDCTOKS; DRDA TYPE N-RLO; ENVLID 0xF7; Length Override 0
* SQLDCMSG_m; DRDA TYPE NVMC; ENVLID 0x3F; Length Override 32672
* SQLDCMSG_S; DRDA TYPE NVCS; ENVLID 0x33; Length Override 32672
* SQLDCCOLN_m; DRDA TYPE NVCM ; ENVLID 0x3F; Length Override 255
* SQLDCCOLN_s; DRDA TYPE NVCS; ENVLID 0x33; Length Override 255
* SQLDCCURN_m; DRDA TYPE NVCM; ENVLID 0x3F; Length Override 255
* SQLDCCURN_s; DRDA TYPE NVCS; ENVLID 0x33; Length Override 255
* SQLDCPNAM_m; DRDA TYPE NVCM; ENVLID 0x3F; Length Override 255
* SQLDCPNAM_s; DRDA TYPE NVCS; ENVLID 0x33; Length Override 255
* SQLDCXGRP; DRDA TYPE N-GDA; ENVLID 0xD3; Length Override 1
*/
private void writeSQLDCGRP(long rowNum, int sqlCode, String sqlState, String dbname,
String sqlerrmc) throws DRDAProtocolException
{
// SQLDCCODE
writer.writeInt(sqlCode);
// SQLDCSTATE
writer.writeString(sqlState);
writer.writeInt(0); // REASON_CODE
writer.writeInt(0); // LINE_NUMBER
writer.writeLong(rowNum); // ROW_NUMBER
byte[] byteArray = new byte[1];
writer.writeScalarPaddedBytes(byteArray, 47, (byte) 0);
writer.writeShort(0); // CCC on Win does not take RDBNAME
writer.writeByte(CodePoint.NULLDATA); // MESSAGE_TOKENS
writer.writeLDString(sqlerrmc); // MESSAGE_TEXT
writeVCMorVCS(null); // COLUMN_NAME
writeVCMorVCS(null); // PARAMETER_NAME
writeVCMorVCS(null); // EXTENDED_NAME
writer.writeByte(CodePoint.NULLDATA); // SQLDCXGRP
}
/*
* writeSQLDIAGCN: Write NULLDATA for now
*/
private void writeSQLDIAGCN()
throws DRDAProtocolException
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
/**
* Write SQLDARD
*
* SQLDARD : FDOCA EARLY ARRAY
* SQL Descriptor Area Row Description with SQL Communications Area
*
* FORMAT FOR SQLAM <= 6
* SQLCARD; ROW LID 0x64; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLNUMROW; ROW LID 0x68; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLDAROW; ROW LID 0x60; ELEMENT TAKEN 0(all); REP FACTOR 0(all)
*
* FORMAT FOR SQLAM >= 7
* SQLCARD; ROW LID 0x64; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLDHROW; ROW LID 0xE0; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLNUMROW; ROW LID 0x68; ELEMENT TAKEN 0(all); REP FACTOR 1
*
* @param stmt prepared statement
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLDARD(DRDAStatement stmt, boolean rtnOutput, SQLException e) throws DRDAProtocolException, SQLException
{
PreparedStatement ps = stmt.getPreparedStatement();
ResultSetMetaData rsmeta = ps.getMetaData();
ParameterMetaData pmeta = stmt.getParameterMetaData();
int numElems = 0;
if (e == null || e instanceof SQLWarning)
{
if (rtnOutput && (rsmeta != null))
numElems = rsmeta.getColumnCount();
else if ((! rtnOutput) && (pmeta != null))
numElems = pmeta.getParameterCount();
}
writer.createDssObject();
// all went well we will just write a null SQLCA
writer.startDdm(CodePoint.SQLDARD);
writeSQLCAGRP(e, 0, 0);
if (sqlamLevel >= MGRLVL_7)
writeSQLDHROW(ps.getResultSetHoldability());
//SQLNUMROW
if (SanityManager.DEBUG)
trace("num Elements = " + numElems);
writer.writeShort(numElems);
for (int i=0; i < numElems; i++)
writeSQLDAGRP (rsmeta, pmeta, i, rtnOutput);
writer.endDdmAndDss();
}
/**
* Write QRYDSC - Query Answer Set Description
*
* @param stmt DRDAStatement we are working on
* @param FDODSConly simply the FDODSC, without the wrap
*
* Instance Variables
* SQLDTAGRP - required
*
* Only 84 columns can be sent in a single QRYDSC. If there are more columns
* they must be sent in subsequent QRYDSC.
* If the QRYDSC will not fit into the current block, as many columns as can
* fit are sent and then the remaining are sent in the following blocks.
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeQRYDSC(DRDAStatement stmt, boolean FDODSConly)
throws DRDAProtocolException, SQLException
{
ResultSet rs = null;
ResultSetMetaData rsmeta = null;
ParameterMetaData pmeta = null;
if (!stmt.needsToSendParamData)
rs = stmt.getResultSet();
if (rs == null) // this is a CallableStatement, use parameter meta data
pmeta = stmt.getParameterMetaData();
else
rsmeta = rs.getMetaData();
int numCols = (rsmeta != null ? rsmeta.getColumnCount() : pmeta.getParameterCount());
int numGroups = 1;
int colStart = 1;
int colEnd = numCols;
int blksize = stmt.getBlksize() > 0 ? stmt.getBlksize() : CodePoint.QRYBLKSZ_MAX;
// check for remaining space in current query block
// Need to mod with blksize so remaining doesn't go negative. 4868
int remaining = blksize - (writer.getDSSLength() % blksize) - (3 +
FdocaConstants.SQLCADTA_SQLDTARD_RLO_SIZE);
// calcuate how may columns can be sent in the current query block
int firstcols = remaining/FdocaConstants.SQLDTAGRP_COL_DSC_SIZE;
// check if it doesn't all fit into the first block and
// under FdocaConstants.MAX_VARS_IN_NGDA
if (firstcols < numCols || numCols > FdocaConstants.MAX_VARS_IN_NGDA)
{
// we are limited to FdocaConstants.MAX_VARS_IN_NGDA
if (firstcols > FdocaConstants.MAX_VARS_IN_NGDA)
{
if (SanityManager.DEBUG)
SanityManager.ASSERT(numCols > FdocaConstants.MAX_VARS_IN_NGDA,
"Number of columns " + numCols +
" is less than MAX_VARS_IN_NGDA");
numGroups = numCols/FdocaConstants.MAX_VARS_IN_NGDA;
// some left over
if (FdocaConstants.MAX_VARS_IN_NGDA * numGroups < numCols)
numGroups++;
colEnd = FdocaConstants.MAX_VARS_IN_NGDA;
}
else
{
colEnd = firstcols;
numGroups += (numCols-firstcols)/FdocaConstants.MAX_VARS_IN_NGDA;
if (FdocaConstants.MAX_VARS_IN_NGDA * numGroups < numCols)
numGroups++;
}
}
if (! FDODSConly)
{
writer.createDssObject();
writer.startDdm(CodePoint.QRYDSC);
}
for (int i = 0; i < numGroups; i++)
{
writeSQLDTAGRP(stmt, rsmeta, pmeta, colStart, colEnd,
(i == 0 ? true : false));
colStart = colEnd + 1;
// 4868 - Limit range to MAX_VARS_IN_NGDA (used to have extra col)
colEnd = colEnd + FdocaConstants.MAX_VARS_IN_NGDA;
if (colEnd > numCols)
colEnd = numCols;
}
writer.writeBytes(FdocaConstants.SQLCADTA_SQLDTARD_RLO);
if (! FDODSConly)
writer.endDdmAndDss();
}
/**
* Write SQLDTAGRP
* SQLDAGRP : Late FDOCA GROUP
* SQL Data Value Group Descriptor
* LENGTH - length of the SQLDTAGRP
* TRIPLET_TYPE - NGDA for first, CPT for following
* ID - SQLDTAGRP_LID for first, NULL_LID for following
* For each column
* DRDA TYPE
* LENGTH OVERRIDE
* For numeric/decimal types
* PRECISON
* SCALE
* otherwise
* LENGTH or DISPLAY_WIDTH
*
* @param stmt drda statement
* @param rsmeta resultset meta data
* @param pmeta parameter meta data for CallableStatement
* @param colStart starting column for group to send
* @param colEnd end column to send
* @param first is this the first group
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLDTAGRP(DRDAStatement stmt, ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int colStart, int colEnd, boolean first)
throws DRDAProtocolException, SQLException
{
int length = (FdocaConstants.SQLDTAGRP_COL_DSC_SIZE *
((colEnd+1) - colStart)) + 3;
writer.writeByte(length);
if (first)
{
writer.writeByte(FdocaConstants.NGDA_TRIPLET_TYPE);
writer.writeByte(FdocaConstants.SQLDTAGRP_LID);
}
else
{
//continued
writer.writeByte(FdocaConstants.CPT_TRIPLET_TYPE);
writer.writeByte(FdocaConstants.NULL_LID);
}
boolean hasRs = (rsmeta != null); // if don't have result, then we look at parameter meta
for (int i = colStart; i <= colEnd; i++)
{
boolean nullable = (hasRs ? (rsmeta.isNullable(i) == rsmeta.columnNullable) :
(pmeta.isNullable(i) == JDBC30Translation.PARAMETER_NULLABLE));
int colType = (hasRs ? rsmeta.getColumnType(i) : pmeta.getParameterType(i));
int[] outlen = {-1};
int drdaType = FdocaConstants.mapJdbcTypeToDrdaType( colType, nullable, appRequester, outlen );
boolean isDecimal = ((drdaType | 1) == DRDAConstants.DRDA_TYPE_NDECIMAL);
int precision = 0, scale = 0;
if (hasRs)
{
precision = rsmeta.getPrecision(i);
scale = rsmeta.getScale(i);
stmt.setRsDRDAType(i,drdaType);
stmt.setRsPrecision(i, precision);
stmt.setRsScale(i,scale);
}
else if (isDecimal)
{
if (stmt.isOutputParam(i))
{
precision = pmeta.getPrecision(i);
scale = pmeta.getScale(i);
((CallableStatement) stmt.ps).registerOutParameter(i,Types.DECIMAL,scale);
}
}
if (SanityManager.DEBUG)
trace("jdbcType=" + colType + " \tdrdaType=" + Integer.toHexString(drdaType));
// Length or precision and scale for decimal values.
writer.writeByte(drdaType);
if (isDecimal)
{
writer.writeByte(precision);
writer.writeByte(scale);
}
else if (outlen[0] != -1)
writer.writeShort(outlen[0]);
else if (hasRs)
writer.writeShort(rsmeta.getColumnDisplaySize(i));
else
writer.writeShort(stmt.getParamLen(i));
}
}
/**
* Holdability passed in as it can represent the holdability of
* the statement or a specific result set.
* @param holdability HOLD_CURSORS_OVER_COMMIT or CLOSE_CURSORS_AT_COMMIT
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLDHROW(int holdability) throws DRDAProtocolException,SQLException
{
if (JVMInfo.JDK_ID < 2) //write null indicator for SQLDHROW because there is no holdability support prior to jdk1.3
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
writer.writeByte(0); // SQLDHROW INDICATOR
//SQLDHOLD
writer.writeShort(holdability);
//SQLDRETURN
writer.writeShort(0);
//SQLDSCROLL
writer.writeShort(0);
//SQLDSENSITIVE
writer.writeShort(0);
//SQLDFCODE
writer.writeShort(0);
//SQLDKEYTYPE
writer.writeShort(0);
//SQLRDBNAME
writer.writeShort(0); //CCC on Windows somehow does not take any dbname
//SQLDSCHEMA
writeVCMorVCS(null);
}
/**
* Write QRYDTA - Query Answer Set Data
* Contains some or all of the answer set data resulting from a query
* If the client is not using rowset processing, this routine attempts
* to pack as much data into the QRYDTA as it can. This may result in
* splitting the last row across the block, in which case when the
* client calls CNTQRY we will return the remainder of the row.
*
* Splitting a QRYDTA block is expensive, for several reasons:
* - extra logic must be run, on both client and server side
* - more network round-trips are involved
* - the QRYDTA block which contains the continuation of the split
* row is generally wasteful, since it contains the remainder of
* the split row but no additional rows.
* Since splitting is expensive, the server makes some attempt to
* avoid it. Currently, the server's algorithm for this is to
* compute the length of the current row, and to stop trying to pack
* more rows into this buffer if another row of that length would
* not fit. However, since rows can vary substantially in length,
* this algorithm is often ineffective at preventing splits. For
* example, if a short row near the end of the buffer is then
* followed by a long row, that long row will be split. It is possible
* to improve this algorithm substantially:
* - instead of just using the length of the previous row as a guide
* for whether to attempt packing another row in, use some sort of
* overall average row size computed over multiple rows (e.g., all
* the rows we've placed into this QRYDTA block, or all the rows
* we've process for this result set)
* - when we discover that the next row will not fit, rather than
* splitting the row across QRYDTA blocks, if it is relatively
* small, we could just hold the entire row in a buffer to place
* it entirely into the next QRYDTA block, or reset the result
* set cursor back one row to "unread" this row.
* - when splitting a row across QRYDTA blocks, we tend to copy
* data around multiple times. Careful coding could remove some
* of these copies.
* However, it is important not to over-complicate this code: it is
* better to be correct than to be efficient, and there have been
* several bugs in the split logic already.
*
* Instance Variables
* Byte string
*
* @param stmt DRDA statement we are processing
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeQRYDTA (DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
boolean getMoreData = true;
boolean sentExtData = false;
int startLength = 0;
writer.createDssObject();
if (SanityManager.DEBUG)
trace("Write QRYDTA");
writer.startDdm(CodePoint.QRYDTA);
// Check to see if there was leftover data from splitting
// the previous QRYDTA for this result set. If there was, and
// if we have now sent all of it, send any EXTDTA for that row
// and increment the rowCount which we failed to increment in
// writeFDODTA when we realized the row needed to be split.
if (processLeftoverQRYDTA(stmt))
{
if (stmt.getSplitQRYDTA() == null)
{
stmt.rowCount += 1;
if (stmt.getExtDtaObjects() != null)
writeEXTDTA(stmt);
}
return;
}
while(getMoreData)
{
sentExtData = false;
getMoreData = writeFDODTA(stmt);
if (stmt.getExtDtaObjects() != null &&
stmt.getSplitQRYDTA() == null)
{
writer.endDdmAndDss();
writeEXTDTA(stmt);
getMoreData=false;
sentExtData = true;
}
// if we don't have enough room for a row of the
// last row's size, don't try to cram it in.
// It would get split up but it is not very efficient.
if (getMoreData == true)
{
int endLength = writer.getDSSLength();
int rowsize = endLength - startLength;
if ((stmt.getBlksize() - endLength ) < rowsize)
getMoreData = false;
startLength = endLength;
}
}
// If we sent extDta we will rely on
// writeScalarStream to end the dss with the proper chaining.
// otherwise end it here.
if (! sentExtData)
writer.endDdmAndDss();
if (!stmt.hasdata()) {
final boolean qryclsOnLmtblkprc =
appRequester.supportsQryclsimpForLmtblkprc();
if (stmt.isRSCloseImplicit(qryclsOnLmtblkprc)) {
stmt.rsClose();
}
}
}
/**
* This routine places some data into the current QRYDTA block using
* FDODTA (Formatted Data Object DaTA rules).
*
* There are 3 basic types of processing flow for this routine:
* - In normal non-rowset, non-scrollable cursor flow, this routine
* places a single row into the QRYDTA block and returns TRUE,
* indicating that the caller can call us back to place another
* row into the result set if he wishes. (The caller may need to
* send Externalized Data, which would be a reason for him NOT to
* place any more rows into the QRYDTA).
* - In ROWSET processing, this routine places an entire ROWSET of
* rows into the QRYDTA block and returns FALSE, indicating that
* the QRYDTA block is full and should now be sent.
* - In callable statement processing, this routine places the
* results from the output parameters of the called procedure into
* the QRYDTA block. This code path is really dramatically
* different from the other two paths and shares only a very small
* amount of common code in this routine.
*
* In all cases, it is possible that the data we wish to return may
* not fit into the QRYDTA block, in which case we call splitQRYDTA
* to split the data and remember the remainder data in the result set.
* Splitting the data is relatively rare in the normal cursor case,
* because our caller (writeQRYDTA) uses a coarse estimation
* technique to avoid calling us if he thinks a split is likely.
*
* The overall structure of this routine is implemented as two
* loops:
* - the outer "do ... while ... " loop processes a ROWSET, one row
* at a time. For non-ROWSET cursors, and for callable statements,
* this loop executes only once.
* - the inner "for ... i < numCols ..." loop processes each column
* in the current row, or each output parmeter in the procedure.
*
* Most column data is written directly inline in the QRYDTA block.
* Some data, however, is written as Externalized Data. This is
* commonly used for Large Objects. In that case, an Externalized
* Data Pointer is written into the QRYDTA block, and the actual
* data flows in separate EXTDTA blocks which are returned
* after this QRYDTA block.
*/
private boolean writeFDODTA (DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
boolean hasdata = false;
int blksize = stmt.getBlksize() > 0 ? stmt.getBlksize() : CodePoint.QRYBLKSZ_MAX;
long rowCount = 0;
ResultSet rs =null;
boolean moreData = (stmt.getQryprctyp()
== CodePoint.LMTBLKPRC);
int numCols;
if (!stmt.needsToSendParamData)
{
rs = stmt.getResultSet();
}
if (rs != null)
{
numCols = stmt.getNumRsCols();
if (stmt.isScrollable())
hasdata = positionCursor(stmt, rs);
else
hasdata = rs.next();
}
else // it's for a CallableStatement
{
hasdata = stmt.hasOutputParams();
numCols = stmt.getDrdaParamCount();
}
do {
if (!hasdata)
{
doneData(stmt, rs);
moreData = false;
return moreData;
}
// Send ResultSet warnings if there are any
SQLWarning sqlw = (rs != null)? rs.getWarnings(): null;
if (rs != null) {
rs.clearWarnings();
}
// for updatable, insensitive result sets we signal the
// row updated condition to the client via a warning to be
// popped by client onto its rowUpdated state, i.e. this
// warning should not reach API level.
if (rs != null && rs.rowUpdated()) {
SQLWarning w = new SQLWarning("", SQLState.ROW_UPDATED,
ExceptionSeverity.WARNING_SEVERITY);
if (sqlw != null) {
sqlw.setNextWarning(w);
} else {
sqlw = w;
}
}
// Delete holes are manifest as a row consisting of a non-null
// SQLCARD and a null data group. The SQLCARD has a warning
// SQLSTATE of 02502
if (rs != null && rs.rowDeleted()) {
SQLWarning w = new SQLWarning("", SQLState.ROW_DELETED,
ExceptionSeverity.WARNING_SEVERITY);
if (sqlw != null) {
sqlw.setNextWarning(w);
} else {
sqlw = w;
}
}
// Save the position where we start writing the warnings in case
// we need to add more warnings later.
final int sqlcagrpStart = writer.getBufferPosition();
if (sqlw == null)
writeSQLCAGRP(nullSQLState, 0, -1, -1);
else
writeSQLCAGRP(sqlw, 1, -1);
// Save the position right after the warnings so we know where to
// insert more warnings later.
final int sqlcagrpEnd = writer.getBufferPosition();
// if we were asked not to return data, mark QRYDTA null; do not
// return yet, need to make rowCount right
// if the row has been deleted return QRYDTA null (delete hole)
boolean noRetrieveRS = (rs != null &&
(!stmt.getQryrtndta() || rs.rowDeleted()));
if (noRetrieveRS)
writer.writeByte(0xFF); //QRYDTA null indicator: IS NULL
else
writer.writeByte(0); //QRYDTA null indicator: not null
for (int i = 1; i <= numCols; i++)
{
if (noRetrieveRS)
break;
int drdaType;
int ndrdaType;
int precision;
int scale;
Object val = null;
boolean valNull;
if (rs != null)
{
drdaType = stmt.getRsDRDAType(i) & 0xff;
precision = stmt.getRsPrecision(i);
scale = stmt.getRsScale(i);
ndrdaType = drdaType | 1;
if (SanityManager.DEBUG)
trace("!!drdaType = " + java.lang.Integer.toHexString(drdaType) +
" precision=" + precision +" scale = " + scale);
switch (ndrdaType)
{
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
EXTDTAInputStream extdtaStream=
EXTDTAInputStream.getEXTDTAStream(rs, i, drdaType);
writeFdocaVal(i, extdtaStream, drdaType, precision,
scale, extdtaStream.isNull(), stmt, false);
break;
case DRDAConstants.DRDA_TYPE_NINTEGER:
int ival = rs.getInt(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing int: "+ ival + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeInt(ival);
break;
case DRDAConstants.DRDA_TYPE_NSMALL:
short sval = rs.getShort(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing small: "+ sval + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeShort(sval);
break;
case DRDAConstants.DRDA_TYPE_NINTEGER8:
long lval = rs.getLong(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing long: "+ lval + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeLong(lval);
break;
case DRDAConstants.DRDA_TYPE_NFLOAT4:
float fval = rs.getFloat(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing float: "+ fval + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeFloat(fval);
break;
case DRDAConstants.DRDA_TYPE_NFLOAT8:
double dval = rs.getDouble(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing double: "+ dval + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeDouble(dval);
break;
case DRDAConstants.DRDA_TYPE_NCHAR:
case DRDAConstants.DRDA_TYPE_NVARCHAR:
case DRDAConstants.DRDA_TYPE_NVARMIX:
case DRDAConstants.DRDA_TYPE_NLONG:
case DRDAConstants.DRDA_TYPE_NLONGMIX:
String valStr = rs.getString(i);
if (SanityManager.DEBUG)
trace("====== writing char/varchar/mix :"+ valStr + ":");
writeFdocaVal(i, valStr, drdaType,
precision, scale, rs.wasNull(),
stmt, false);
break;
default:
val = getObjectForWriteFdoca(rs, i, drdaType);
writeFdocaVal(i, val, drdaType,
precision, scale, rs.wasNull(),
stmt, false);
}
}
else
{
drdaType = stmt.getParamDRDAType(i) & 0xff;
precision = stmt.getParamPrecision(i);
scale = stmt.getParamScale(i);
if (stmt.isOutputParam(i)) {
int[] outlen = new int[1];
drdaType = FdocaConstants.mapJdbcTypeToDrdaType( stmt.getOutputParamType(i), true, appRequester, outlen );
precision = stmt.getOutputParamPrecision(i);
scale = stmt.getOutputParamScale(i);
if (SanityManager.DEBUG)
trace("***getting Object "+i);
val = getObjectForWriteFdoca(
(CallableStatement) stmt.ps, i, drdaType);
valNull = (val == null);
writeFdocaVal(i, val, drdaType, precision, scale,
valNull, stmt, true);
}
else
writeFdocaVal(i, null, drdaType, precision, scale,
true, stmt, true);
}
}
DataTruncation truncated = stmt.getTruncationWarnings();
if (truncated != null) {
// Some of the data was truncated, so we need to add a
// truncation warning. Save a copy of the row data, then move
// back to the SQLCAGRP section and overwrite it with the new
// warnings, and finally re-insert the row data after the new
// SQLCAGRP section.
byte[] data = writer.getBufferContents(sqlcagrpEnd);
writer.setBufferPosition(sqlcagrpStart);
if (sqlw != null) {
truncated.setNextWarning(sqlw);
}
writeSQLCAGRP(truncated, 1, -1);
writer.writeBytes(data);
stmt.clearTruncationWarnings();
}
// does all this fit in one QRYDTA
if (writer.getDSSLength() > blksize)
{
splitQRYDTA(stmt, blksize);
return false;
}
if (rs == null)
return moreData;
//get the next row
rowCount++;
if (rowCount < stmt.getQryrowset())
{
hasdata = rs.next();
}
/*(1) scrollable we return at most a row set; OR (2) no retrieve data
*/
else if (stmt.isScrollable() || noRetrieveRS)
moreData=false;
} while (hasdata && rowCount < stmt.getQryrowset());
// add rowCount to statement row count
// for non scrollable cursors
if (!stmt.isScrollable())
stmt.rowCount += rowCount;
if (!hasdata)
{
doneData(stmt, rs);
moreData=false;
}
if (!stmt.isScrollable())
stmt.setHasdata(hasdata);
return moreData;
}
/**
* <p>
* Get a column value of the specified type from a {@code ResultSet}, in
* a form suitable for being writted by {@link #writeFdocaVal}. For most
* types, this means just calling {@code ResultSet.getObject(int)}.
* </p>
*
* <p>
* The only exception currently is the data types representing dates and
* times, as they need to be fetched using the same
* {@code java.util.Calendar} as {@link #writeFdocaVal} uses when writing
* them (DERBY-4582).
* </p>
*
* <p>
* <b>Note:</b> Changes made in this method should also be made in the
* corresponding method for {@code CallableStatement}:
* {@link #getObjectForWriteFdoca(java.sql.CallableStatement, int, int)}.
* </p>
*
* @param rs the result set to fetch the object from
* @param index the column index
* @param drdaType the DRDA type of the object to fetch
* @return an object with the value of the column
* @throws if a database error occurs while fetching the column value
* @see #getObjectForWriteFdoca(java.sql.CallableStatement, int, int)
*/
private Object getObjectForWriteFdoca(ResultSet rs, int index, int drdaType)
throws SQLException {
// convert to corresponding nullable type to reduce number of cases
int ndrdaType = drdaType | 1;
switch (ndrdaType) {
case DRDAConstants.DRDA_TYPE_NDATE:
return rs.getDate(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NTIME:
return rs.getTime(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NTIMESTAMP:
return rs.getTimestamp(index, getGMTCalendar());
default:
return rs.getObject(index);
}
}
/**
* <p>
* Get the value of an output parameter of the specified type from a
* {@code CallableStatement}, in a form suitable for being writted by
* {@link #writeFdocaVal}. For most types, this means just calling
* {@code CallableStatement.getObject(int)}.
* </p>
*
* <p>
* This method should behave like the corresponding method for
* {@code ResultSet}, and changes made to one of these methods, must be
* reflected in the other method. See
* {@link #getObjectForWriteFdoca(java.sql.ResultSet, int, int)}
* for details.
* </p>
*
* @param cs the callable statement to fetch the object from
* @param index the parameter index
* @param drdaType the DRDA type of the object to fetch
* @return an object with the value of the output parameter
* @throws if a database error occurs while fetching the parameter value
* @see #getObjectForWriteFdoca(java.sql.ResultSet, int, int)
*/
private Object getObjectForWriteFdoca(CallableStatement cs,
int index, int drdaType)
throws SQLException {
// convert to corresponding nullable type to reduce number of cases
int ndrdaType = drdaType | 1;
switch (ndrdaType) {
case DRDAConstants.DRDA_TYPE_NDATE:
return cs.getDate(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NTIME:
return cs.getTime(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NTIMESTAMP:
return cs.getTimestamp(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
return EXTDTAInputStream.getEXTDTAStream(cs, index, drdaType);
default:
return cs.getObject(index);
}
}
/**
* Split QRYDTA into blksize chunks
*
* This routine is called if the QRYDTA data will not fit. It writes
* as much data as it can, then stores the remainder in the result
* set. At some later point, when the client returns with a CNTQRY,
* we will call processLeftoverQRYDTA to handle that data.
*
* The interaction between DRDAConnThread and DDMWriter is rather
* complicated here. This routine gets called because DRDAConnThread
* realizes that it has constructed a QRYDTA message which is too
* large. At that point, we need to reclaim the "extra" data and
* hold on to it. To aid us in that processing, DDMWriter provides
* the routines getDSSLength, copyDSSDataToEnd, and truncateDSS.
* For some additional detail on this complex sub-protocol, the
* interested reader should study bug DERBY-491 and 492 at:
* http://issues.apache.org/jira/browse/DERBY-491 and
* http://issues.apache.org/jira/browse/DERBY-492
*
* @param stmt DRDA statment
* @param blksize size of query block
*
* @throws SQLException
* @throws DRDAProtocolException
*/
private void splitQRYDTA(DRDAStatement stmt, int blksize) throws SQLException,
DRDAProtocolException
{
// make copy of extra data
byte [] temp = writer.copyDSSDataToEnd(blksize);
// truncate to end of blocksize
writer.truncateDSS(blksize);
if (temp.length == 0)
agentError("LMTBLKPRC violation: splitQRYDTA was " +
"called to split a QRYDTA block, but the " +
"entire row fit successfully into the " +
"current block. Server rowsize computation " +
"was probably incorrect (perhaps an off-by-" +
"one bug?). QRYDTA blocksize: " + blksize);
stmt.setSplitQRYDTA(temp);
}
/**
* Process remainder data resulting from a split.
*
* This routine is called at the start of building each QRYDTA block.
* Normally, it observes that there is no remainder data from the
* previous QRYDTA block, and returns FALSE, indicating that there
* was nothing to do.
*
* However, if it discovers that the previous QRYDTA block was split,
* then it retrieves the remainder data from the result set, writes
* as much of it as will fit into the QRYDTA block (hopefully all of
* it will fit, but the row may be very long), and returns TRUE,
* indicating that this QRYDTA block has been filled with remainder
* data and should now be sent immediately.
*/
private boolean processLeftoverQRYDTA(DRDAStatement stmt)
throws SQLException,DRDAProtocolException
{
byte []leftovers = stmt.getSplitQRYDTA();
if (leftovers == null)
return false;
int blksize = stmt.getBlksize() > 0 ? stmt.getBlksize() : CodePoint.QRYBLKSZ_MAX;
blksize = blksize - 10; //DSS header + QRYDTA and length
if (leftovers.length < blksize)
{
writer.writeBytes(leftovers, 0, leftovers.length);
stmt.setSplitQRYDTA(null);
}
else
{
writer.writeBytes(leftovers, 0, blksize);
byte []newLeftovers = new byte[leftovers.length-blksize];
for (int i = 0; i < newLeftovers.length; i++)
newLeftovers[i] = leftovers[blksize+i];
stmt.setSplitQRYDTA(newLeftovers);
}
// finish off query block and send
writer.endDdmAndDss();
return true;
}
/**
* Done data
* Send SQLCARD for the end of the data
*
* @param stmt DRDA statement
* @param rs Result set
* @throws DRDAProtocolException
* @throws SQLException
*/
private void doneData(DRDAStatement stmt, ResultSet rs)
throws DRDAProtocolException, SQLException
{
if (SanityManager.DEBUG)
trace("*****NO MORE DATA!!");
int blksize = stmt.getBlksize() > 0 ? stmt.getBlksize() : CodePoint.QRYBLKSZ_MAX;
if (rs != null)
{
if (stmt.isScrollable())
{
//keep isAfterLast and isBeforeFirst to be able
//to reposition after counting rows
boolean isAfterLast = rs.isAfterLast();
boolean isBeforeFirst = rs.isBeforeFirst();
// for scrollable cursors - calculate the row count
// since we may not have gone through each row
rs.last();
stmt.rowCount = rs.getRow();
// reposition after last or before first
if (isAfterLast) {
rs.afterLast();
}
if (isBeforeFirst) {
rs.beforeFirst();
}
}
else // non-scrollable cursor
{
final boolean qryclsOnLmtblkprc =
appRequester.supportsQryclsimpForLmtblkprc();
if (stmt.isRSCloseImplicit(qryclsOnLmtblkprc)) {
stmt.rsClose();
stmt.rsSuspend();
}
}
}
// For scrollable cursor's QRYSCRAFT, when we reach here, DRDA spec says sqlstate
// is 00000, sqlcode is not mentioned. But DB2 CLI code expects sqlcode to be 0.
// We return sqlcode 0 in this case, as the DB2 server does.
boolean isQRYSCRAFT = (stmt.getQryscrorn() == CodePoint.QRYSCRAFT);
// Using sqlstate 00000 or 02000 for end of data.
writeSQLCAGRP((isQRYSCRAFT ? eod00000 : eod02000),
(isQRYSCRAFT ? 0 : 100), 0, stmt.rowCount);
writer.writeByte(CodePoint.NULLDATA);
// does all this fit in one QRYDTA
if (writer.getDSSLength() > blksize)
{
splitQRYDTA(stmt, blksize);
}
}
/**
* Position cursor for insensitive scrollable cursors
*
* @param stmt DRDA statement
* @param rs Result set
*/
private boolean positionCursor(DRDAStatement stmt, ResultSet rs)
throws SQLException, DRDAProtocolException
{
boolean retval = false;
switch (stmt.getQryscrorn())
{
case CodePoint.QRYSCRREL:
int rows = (int)stmt.getQryrownbr();
if ((rs.isAfterLast() && rows > 0) || (rs.isBeforeFirst() && rows < 0)) {
retval = false;
} else {
retval = rs.relative(rows);
}
break;
case CodePoint.QRYSCRABS:
// JCC uses an absolute value of 0 which is not allowed in JDBC
// We translate it into beforeFirst which seems to work.
if (stmt.getQryrownbr() == 0)
{
rs.beforeFirst();
retval = false;
}
else
{
retval = rs.absolute((int)stmt.getQryrownbr());
}
break;
case CodePoint.QRYSCRAFT:
rs.afterLast();
retval = false;
break;
case CodePoint.QRYSCRBEF:
rs.beforeFirst();
retval = false;
break;
default:
agentError("Invalid value for cursor orientation "+ stmt.getQryscrorn());
}
return retval;
}
/**
* Write SQLDAGRP
* SQLDAGRP : EARLY FDOCA GROUP
* SQL Data Area Group Description
*
* FORMAT FOR SQLAM <= 6
* SQLPRECISION; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLSCALE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLLENGTH; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLTYPE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLCCSID; DRDA TYPE FB; ENVLID 0x26; Length Override 2
* SQLNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 30
* SQLNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 30
* SQLLABEL_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 30
* SQLLABEL_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 30
* SQLCOMMENTS_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 254
* SQLCOMMENTS_m; DRDA TYPE VCS; ENVLID 0x32; Length Override 254
*
* FORMAT FOR SQLAM == 6
* SQLPRECISION; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLSCALE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLLENGTH; DRDA TYPE I8; ENVLID 0x16; Length Override 8
* SQLTYPE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLCCSID; DRDA TYPE FB; ENVLID 0x26; Length Override 2
* SQLNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 30
* SQLNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 30
* SQLLABEL_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 30
* SQLLABEL_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 30
* SQLCOMMENTS_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 254
* SQLCOMMENTS_m; DRDA TYPE VCS; ENVLID 0x32; Length Override 254
* SQLUDTGRP; DRDA TYPE N-GDA; ENVLID 0x51; Length Override 0
*
* FORMAT FOR SQLAM >= 7
* SQLPRECISION; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLSCALE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLLENGTH; DRDA TYPE I8; ENVLID 0x16; Length Override 8
* SQLTYPE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLCCSID; DRDA TYPE FB; ENVLID 0x26; Length Override 2
* SQLDOPTGRP; DRDA TYPE N-GDA; ENVLID 0xD2; Length Override 0
*
* @param rsmeta resultset meta data
* @param pmeta parameter meta data
* @param elemNum column number we are returning (in case of result set), or,
* parameter number (in case of parameter)
* @param rtnOutput whether this is for a result set
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLDAGRP(ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int elemNum, boolean rtnOutput)
throws DRDAProtocolException, SQLException
{
//jdbc uses offset of 1
int jdbcElemNum = elemNum +1;
// length to be retreived as output parameter
int[] outlen = {-1};
int elemType = rtnOutput ? rsmeta.getColumnType(jdbcElemNum) : pmeta.getParameterType(jdbcElemNum);
int precision = rtnOutput ? rsmeta.getPrecision(jdbcElemNum) : pmeta.getPrecision(jdbcElemNum);
if (precision > FdocaConstants.NUMERIC_MAX_PRECISION)
precision = FdocaConstants.NUMERIC_MAX_PRECISION;
// 2-byte precision
writer.writeShort(precision);
// 2-byte scale
int scale = (rtnOutput ? rsmeta.getScale(jdbcElemNum) : pmeta.getScale(jdbcElemNum));
writer.writeShort(scale);
boolean nullable = rtnOutput ? (rsmeta.isNullable(jdbcElemNum) ==
ResultSetMetaData.columnNullable) :
(pmeta.isNullable(jdbcElemNum) == JDBC30Translation.PARAMETER_NULLABLE);
int sqlType = SQLTypes.mapJdbcTypeToDB2SqlType(elemType,
nullable, appRequester,
outlen);
if (outlen[0] == -1) //some types not set
{
switch (elemType)
{
case Types.DECIMAL:
case Types.NUMERIC:
scale = rtnOutput ? rsmeta.getScale(jdbcElemNum) : pmeta.getScale(jdbcElemNum);
outlen[0] = ((precision <<8) | (scale <<0));
if (SanityManager.DEBUG)
trace("\n\nprecision =" +precision +
" scale =" + scale);
break;
default:
outlen[0] = Math.min(FdocaConstants.LONGVARCHAR_MAX_LEN,
(rtnOutput ? rsmeta.getColumnDisplaySize(jdbcElemNum) :
pmeta.getPrecision(jdbcElemNum)));
}
}
switch (elemType)
{
case Types.BINARY:
case Types.VARBINARY:
case Types.LONGVARBINARY:
case Types.BLOB: //for CLI describe to be correct
case Types.CLOB:
outlen[0] = (rtnOutput ? rsmeta.getPrecision(jdbcElemNum) :
pmeta.getPrecision(jdbcElemNum));
}
if (SanityManager.DEBUG)
trace("SQLDAGRP len =" + java.lang.Integer.toHexString(outlen[0]) + "for type:" + elemType);
// 8 or 4 byte sqllength
if (sqlamLevel >= MGRLVL_6)
writer.writeLong(outlen[0]);
else
writer.writeInt(outlen[0]);
String typeName = rtnOutput ? rsmeta.getColumnTypeName(jdbcElemNum) :
pmeta.getParameterTypeName(jdbcElemNum);
if (SanityManager.DEBUG)
trace("jdbcType =" + typeName + " sqlType =" + sqlType + "len =" +outlen[0]);
writer.writeShort(sqlType);
// CCSID
// CCSID should be 0 for Binary Types.
if (elemType == java.sql.Types.CHAR ||
elemType == java.sql.Types.VARCHAR
|| elemType == java.sql.Types.LONGVARCHAR
|| elemType == java.sql.Types.CLOB)
writer.writeScalar2Bytes(1208);
else
writer.writeScalar2Bytes(0);
if (sqlamLevel < MGRLVL_7)
{
//SQLName
writeVCMorVCS(rtnOutput ? rsmeta.getColumnName(jdbcElemNum) : null);
//SQLLabel
writeVCMorVCS(null);
//SQLComments
writeVCMorVCS(null);
if (sqlamLevel == MGRLVL_6)
writeSQLUDTGRP(rsmeta, pmeta, jdbcElemNum, rtnOutput);
}
else
{
writeSQLDOPTGRP(rsmeta, pmeta, jdbcElemNum, rtnOutput);
}
}
/**
* Write variable character mixed byte or single byte
* The preference is to write mixed byte if it is defined for the server,
* since that is our default and we don't allow it to be changed, we always
* write mixed byte.
*
* @param s string to write
* @exception DRDAProtocolException
*/
private void writeVCMorVCS(String s)
throws DRDAProtocolException
{
//Write only VCM and 0 length for VCS
if (s == null)
{
writer.writeShort(0);
writer.writeShort(0);
return;
}
// VCM
writer.writeLDString(s);
// VCS
writer.writeShort(0);
}
/**
* Write SQLUDTGRP (SQL Descriptor User-Defined Type Group Descriptor)
*
* This is the format from the DRDA spec, Volume 1, section 5.6.4.10.
* However, this format is not rich enough to carry the information needed
* by JDBC. This format does not have a subtype code for JAVA_OBJECT and
* this format does not convey the Java class name needed
* by ResultSetMetaData.getColumnClassName().
*
* SQLUDXTYPE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* Constants which map to java.sql.Types constants DISTINCT, STRUCT, and REF.
* But DRDA does not define a constant which maps to java.sql.Types.JAVA_OBJECT.
* SQLUDTRDB; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* Database name.
* SQLUDTSCHEMA_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
* SQLUDTSCHEMA_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* Schema name. One of the above.
* SQLUDTNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
* SQLUDTNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* Unqualified UDT name. One of the above.
*
* Instead, we use the following format and only for communication between
* Derby servers and Derby clients which are both at version 10.6 or higher.
* For all other client/server combinations, we send null.
*
* SQLUDTNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
* SQLUDTNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* Fully qualified UDT name. One of the above.
* SQLUDTCLASSNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override FdocaConstants.LONGVARCHAR_MAX_LEN
* SQLUDTCLASSNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override FdocaConstants.LONGVARCHAR_MAX_LEN
* Name of the Java class bound to the UDT. One of the above.
*
* @param rsmeta resultset meta data
* @param pmeta parameter meta data
* @param jdbcElemNum column number we are returning (in case of result set), or,
* parameter number (in case of parameter)
* @param rtnOutput whether this is for a result set
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLUDTGRP(ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int jdbcElemNum, boolean rtnOutput)
throws DRDAProtocolException,SQLException
{
int jdbcType = rtnOutput ?
rsmeta.getColumnType( jdbcElemNum) : pmeta.getParameterType( jdbcElemNum );
if ( !(jdbcType == Types.JAVA_OBJECT) || !appRequester.supportsUDTs() )
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
String typeName = rtnOutput ?
rsmeta.getColumnTypeName( jdbcElemNum ) : pmeta.getParameterTypeName( jdbcElemNum );
String className = rtnOutput ?
rsmeta.getColumnClassName( jdbcElemNum ) : pmeta.getParameterClassName( jdbcElemNum );
writeVCMorVCS( typeName );
writeVCMorVCS( className );
}
private void writeSQLDOPTGRP(ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int jdbcElemNum, boolean rtnOutput)
throws DRDAProtocolException,SQLException
{
writer.writeByte(0);
//SQLUNAMED
writer.writeShort(0);
//SQLName
writeVCMorVCS(rtnOutput ? rsmeta.getColumnName(jdbcElemNum) : null);
//SQLLabel
writeVCMorVCS(null);
//SQLComments
writeVCMorVCS(null);
//SQLDUDTGRP
writeSQLUDTGRP(rsmeta, pmeta, jdbcElemNum, rtnOutput);
//SQLDXGRP
writeSQLDXGRP(rsmeta, pmeta, jdbcElemNum, rtnOutput);
}
private void writeSQLDXGRP(ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int jdbcElemNum, boolean rtnOutput)
throws DRDAProtocolException,SQLException
{
// Null indicator indicates we have data
writer.writeByte(0);
// SQLXKEYMEM; DRDA TYPE I2; ENVLID 0x04; Length Override 2
// Hard to get primary key info. Send 0 for now
writer.writeShort(0);
// SQLXUPDATEABLE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
writer.writeShort(rtnOutput ? rsmeta.isWritable(jdbcElemNum) : false);
// SQLXGENERATED; DRDA TYPE I2; ENVLID 0x04; Length Override 2
if (rtnOutput && rsmeta.isAutoIncrement(jdbcElemNum))
writer.writeShort(2);
else
writer.writeShort(0);
// SQLXPARMMODE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
if (pmeta != null && !rtnOutput)
{
int mode = pmeta.getParameterMode(jdbcElemNum);
if (mode == JDBC30Translation.PARAMETER_MODE_UNKNOWN)
{
// For old style callable statements. We assume in/out if it
// is an output parameter.
int type = DRDAStatement.getOutputParameterTypeFromClassName(
pmeta.getParameterClassName(jdbcElemNum));
if (type != DRDAStatement.NOT_OUTPUT_PARAM)
mode = JDBC30Translation.PARAMETER_MODE_IN_OUT;
}
writer.writeShort(mode);
}
else
{
writer.writeShort(0);
}
// SQLXRDBNAM; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
// JCC uses this as the catalog name so we will send null.
writer.writeShort(0);
// SQLXCORNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
// SQLXCORNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
writeVCMorVCS(null);
// SQLXBASENAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
// SQLXBASENAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
writeVCMorVCS(rtnOutput ? rsmeta.getTableName(jdbcElemNum) : null);
// SQLXSCHEMA_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
// SQLXSCHEMA_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
writeVCMorVCS(rtnOutput ? rsmeta.getSchemaName(jdbcElemNum): null);
// SQLXNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
// SQLXNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
writeVCMorVCS(rtnOutput ? rsmeta.getColumnName(jdbcElemNum): null);
}
/**
* Write Fdoca Value to client
* @param index Index of column being returned
* @param val Value to write to client
* @param drdaType FD:OCA DRDA Type from FdocaConstants
* @param precision Precision
* @param stmt Statement being processed
* @param isParam True when writing a value for a procedure parameter
*
* @exception DRDAProtocolException
*
* @exception SQLException
*
* @see FdocaConstants
*/
protected void writeFdocaVal(int index, Object val, int drdaType,
int precision, int scale, boolean valNull,
DRDAStatement stmt, boolean isParam)
throws DRDAProtocolException, SQLException
{
writeNullability(drdaType,valNull);
if (! valNull)
{
int ndrdaType = drdaType | 1;
long valLength = 0;
switch (ndrdaType)
{
case DRDAConstants.DRDA_TYPE_NBOOLEAN:
writer.writeBoolean( ((Boolean) val).booleanValue() );
break;
case DRDAConstants.DRDA_TYPE_NSMALL:
// DB2 does not have a BOOLEAN java.sql.bit type,
// so we need to send it as a small
if (val instanceof Boolean)
{
writer.writeShort(((Boolean) val).booleanValue());
}
else if (val instanceof Short)
writer.writeShort(((Short) val).shortValue());
else if (val instanceof Byte)
writer.writeShort(((Byte) val).byteValue());
else
writer.writeShort(((Integer) val).shortValue());
break;
case DRDAConstants.DRDA_TYPE_NINTEGER:
writer.writeInt(((Integer) val).intValue());
break;
case DRDAConstants.DRDA_TYPE_NINTEGER8:
writer.writeLong(((Long) val).longValue());
break;
case DRDAConstants.DRDA_TYPE_NFLOAT4:
writer.writeFloat(((Float) val).floatValue());
break;
case DRDAConstants.DRDA_TYPE_NFLOAT8:
writer.writeDouble(((Double) val).doubleValue());
break;
case DRDAConstants.DRDA_TYPE_NDECIMAL:
if (precision == 0)
precision = FdocaConstants.NUMERIC_DEFAULT_PRECISION;
BigDecimal bd = (java.math.BigDecimal) val;
writer.writeBigDecimal(bd,precision,scale);
break;
case DRDAConstants.DRDA_TYPE_NDATE:
writer.writeString(formatDate((java.sql.Date) val));
break;
case DRDAConstants.DRDA_TYPE_NTIME:
writer.writeString(formatTime((Time) val));
break;
case DRDAConstants.DRDA_TYPE_NTIMESTAMP:
writer.writeString(formatTimestamp((Timestamp) val));
break;
case DRDAConstants.DRDA_TYPE_NCHAR:
writer.writeString(((String) val).toString());
break;
case DRDAConstants.DRDA_TYPE_NVARCHAR:
case DRDAConstants.DRDA_TYPE_NVARMIX:
case DRDAConstants.DRDA_TYPE_NLONG:
case DRDAConstants.DRDA_TYPE_NLONGMIX:
//WriteLDString and generate warning if truncated
// which will be picked up by checkWarning()
writer.writeLDString(val.toString(), index, stmt, isParam);
break;
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
// do not send EXTDTA for lob of length 0, beetle 5967
if( ! ((EXTDTAInputStream) val).isEmptyStream() ){
stmt.addExtDtaObject(val, index);
//indicate externalized and size is unknown.
writer.writeExtendedLength(0x8000);
}else{
writer.writeExtendedLength(0);
}
break;
case DRDAConstants.DRDA_TYPE_NFIXBYTE:
writer.writeBytes((byte[]) val);
break;
case DRDAConstants.DRDA_TYPE_NVARBYTE:
case DRDAConstants.DRDA_TYPE_NLONGVARBYTE:
writer.writeLDBytes((byte[]) val, index);
break;
case DRDAConstants.DRDA_TYPE_NLOBLOC:
case DRDAConstants.DRDA_TYPE_NCLOBLOC:
writer.writeInt(((EngineLOB)val).getLocator());
break;
case DRDAConstants.DRDA_TYPE_NUDT:
writer.writeUDT( val, index );
break;
case DRDAConstants.DRDA_TYPE_NROWID:
writer.writeRowId(val, index);
break;
default:
if (SanityManager.DEBUG)
trace("ndrdaType is: "+ndrdaType);
writer.writeLDString(val.toString(), index, stmt, isParam);
}
}
}
/**
* write nullability if this is a nullable drdatype and FDOCA null
* value if appropriate
* @param drdaType FDOCA type
* @param valNull true if this is a null value. False otherwise
*
**/
private void writeNullability(int drdaType, boolean valNull)
{
if(FdocaConstants.isNullable(drdaType))
{
if (valNull)
writer.writeByte(FdocaConstants.NULL_DATA);
else
{
writer.writeByte(FdocaConstants.INDICATOR_NULLABLE);
}
}
}
/**
* Convert a {@code java.sql.Date} to a string with the format expected
* by the client.
*
* @param date the date to format
* @return a string on the format YYYY-MM-DD representing the date
* @see com.splicemachine.db.client.am.DateTime#dateBytesToDate
*/
private String formatDate(java.sql.Date date) {
Calendar cal = getGMTCalendar();
cal.clear();
cal.setTime(date);
char[] buf = "YYYY-MM-DD".toCharArray();
padInt(buf, 0, 4, cal.get(Calendar.YEAR));
padInt(buf, 5, 2, cal.get(Calendar.MONTH) + 1);
padInt(buf, 8, 2, cal.get(Calendar.DAY_OF_MONTH));
return new String(buf);
}
/**
* Convert a {@code java.sql.Time} to a string with the format expected
* by the client.
*
* @param time the time to format
* @return a string on the format HH:MM:SS representing the time
* @see com.splicemachine.db.client.am.DateTime#timeBytesToTime
*/
private String formatTime(Time time) {
Calendar cal = getGMTCalendar();
cal.clear();
cal.setTime(time);
char[] buf = "HH:MM:SS".toCharArray();
padInt(buf, 0, 2, cal.get(Calendar.HOUR_OF_DAY));
padInt(buf, 3, 2, cal.get(Calendar.MINUTE));
padInt(buf, 6, 2, cal.get(Calendar.SECOND));
return new String(buf);
}
/**
* Convert a {@code java.sql.Timestamp} to a string with the format
* expected by the client.
*
* @param ts the timestamp to format
* @return a string on the format YYYY-MM-DD-HH.MM.SS.ffffff[fff]
* @see com.splicemachine.db.client.am.DateTime#timestampBytesToTimestamp
*/
private String formatTimestamp(Timestamp ts) {
Calendar cal = getGMTCalendar();
cal.clear();
cal.setTime(ts);
char[] buf = new char[appRequester.getTimestampLength()];
padInt(buf, 0, 4, cal.get(Calendar.YEAR));
buf[4] = '-';
padInt(buf, 5, 2, cal.get(Calendar.MONTH) + 1);
buf[7] = '-';
padInt(buf, 8, 2, cal.get(Calendar.DAY_OF_MONTH));
buf[10] = '-';
padInt(buf, 11, 2, cal.get(Calendar.HOUR_OF_DAY));
buf[13] = '.';
padInt(buf, 14, 2, cal.get(Calendar.MINUTE));
buf[16] = '.';
padInt(buf, 17, 2, cal.get(Calendar.SECOND));
buf[19] = '.';
int nanos = ts.getNanos();
if (appRequester.supportsTimestampNanoseconds()) {
padInt(buf, 20, 9, nanos);
} else {
padInt(buf, 20, 6, nanos / 1000);
}
return new String(buf);
}
/**
* Insert an integer into a char array and pad it with leading zeros if
* its string representation is shorter than {@code length} characters.
*
* @param buf the char array
* @param offset where in the array to start inserting the value
* @param length the desired length of the inserted string
* @param value the integer value to insert
*/
private void padInt(char[] buf, int offset, int length, int value) {
final int radix = 10;
for (int i = offset + length - 1; i >= offset; i--) {
buf[i] = Character.forDigit(value % radix, radix);
value /= radix;
}
}
/**
* Methods to keep track of required codepoints
*/
/**
* Copy a list of required code points to template for checking
*
* @param req list of required codepoints
*/
private void copyToRequired(int [] req)
{
currentRequiredLength = req.length;
if (currentRequiredLength > required.length)
required = new int[currentRequiredLength];
for (int i = 0; i < req.length; i++)
required[i] = req[i];
}
/**
* Remove codepoint from required list
*
* @param codePoint - code point to be removed
*/
private void removeFromRequired(int codePoint)
{
for (int i = 0; i < currentRequiredLength; i++)
if (required[i] == codePoint)
required[i] = 0;
}
/**
* Check whether we have seen all the required code points
*
* @param codePoint code point for which list of code points is required
*/
private void checkRequired(int codePoint) throws DRDAProtocolException
{
int firstMissing = 0;
for (int i = 0; i < currentRequiredLength; i++)
{
if (required[i] != 0)
{
firstMissing = required[i];
break;
}
}
if (firstMissing != 0)
missingCodePoint(firstMissing);
}
/**
* Error routines
*/
/**
* Seen too many of this code point
*
* @param codePoint code point which has been duplicated
*
* @exception DRDAProtocolException
*/
private void tooMany(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_TOO_MANY, codePoint);
}
/**
* Object too big
*
* @param codePoint code point with too big object
* @exception DRDAProtocolException
*/
private void tooBig(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_TOO_BIG, codePoint);
}
/**
* Invalid non-db client tried to connect.
* thrown a required Value not found error and log a message to db.log
*
* @param prdid product id that does not match DNC
* @throws DRDAProtocolException
*/
private void invalidClient(String prdid) throws DRDAProtocolException {
Monitor.logMessage(new Date()
+ " : "
+ server.localizeMessage("DRDA_InvalidClient.S",
new String[] { prdid }));
requiredValueNotFound(CodePoint.PRDID);
}
/*** Required value not found.
*
* @param codePoint code point with invalid value
*
*/
private void requiredValueNotFound(int codePoint) throws DRDAProtocolException {
throwSyntaxrm(CodePoint.SYNERRCD_REQ_VAL_NOT_FOUND, codePoint);
}
/**
* Object length not allowed
*
* @param codePoint code point with bad object length
* @exception DRDAProtocolException
*/
private void badObjectLength(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_OBJ_LEN_NOT_ALLOWED, codePoint);
}
/**
* RDB not found
*
* @param rdbnam name of database
* @exception DRDAProtocolException
*/
private void rdbNotFound(String rdbnam) throws DRDAProtocolException
{
Object[] oa = {rdbnam};
throw new
DRDAProtocolException(DRDAProtocolException.DRDA_Proto_RDBNFNRM,
this,0,
DRDAProtocolException.NO_ASSOC_ERRCD, oa);
}
/**
* Invalid value for this code point
*
* @param codePoint code point value
* @exception DRDAProtocolException
*/
private void invalidValue(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_REQ_VAL_NOT_FOUND, codePoint);
}
/**
* Invalid codepoint for this command
*
* @param codePoint code point value
*
* @exception DRDAProtocolException
*/
protected void invalidCodePoint(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_INVALID_CP_FOR_CMD, codePoint);
}
/**
* Don't support this code point
*
* @param codePoint code point value
* @exception DRDAProtocolException
*/
protected void codePointNotSupported(int codePoint) throws DRDAProtocolException
{
throw new
DRDAProtocolException(DRDAProtocolException.DRDA_Proto_CMDNSPRM,
this,codePoint,
DRDAProtocolException.NO_ASSOC_ERRCD);
}
/**
* Don't support this value
*
* @param codePoint code point value
* @exception DRDAProtocolException
*/
private void valueNotSupported(int codePoint) throws DRDAProtocolException
{
throw new
DRDAProtocolException(DRDAProtocolException.DRDA_Proto_VALNSPRM,
this,codePoint,
DRDAProtocolException.NO_ASSOC_ERRCD);
}
/**
* Verify that the code point is the required code point
*
* @param codePoint code point we have
* @param reqCodePoint code point required at this time
*
* @exception DRDAProtocolException
*/
private void verifyRequiredObject(int codePoint, int reqCodePoint)
throws DRDAProtocolException
{
if (codePoint != reqCodePoint )
{
throwSyntaxrm(CodePoint.SYNERRCD_REQ_OBJ_NOT_FOUND,codePoint);
}
}
/**
* Verify that the code point is in the right order
*
* @param codePoint code point we have
* @param reqCodePoint code point required at this time
*
* @exception DRDAProtocolException
*/
private void verifyInOrderACCSEC_SECCHK(int codePoint, int reqCodePoint)
throws DRDAProtocolException
{
if (codePoint != reqCodePoint )
{
throw
new DRDAProtocolException(DRDAProtocolException.DRDA_Proto_PRCCNVRM,
this, codePoint,
CodePoint.PRCCNVCD_ACCSEC_SECCHK_WRONG_STATE);
}
}
/**
* Database name given under code point doesn't match previous database names
*
* @param codePoint codepoint where the mismatch occurred
*
* @exception DRDAProtocolException
*/
private void rdbnamMismatch(int codePoint)
throws DRDAProtocolException
{
throw new DRDAProtocolException(DRDAProtocolException.DRDA_Proto_PRCCNVRM,
this, codePoint,
CodePoint.PRCCNVCD_RDBNAM_MISMATCH);
}
/**
* Close the current session
*/
private void closeSession()
{
if (session == null)
return;
/* DERBY-2220: Rollback the current XA transaction if it is
still associated with the connection. */
if (xaProto != null)
xaProto.rollbackCurrentTransaction();
server.removeFromSessionTable(session.connNum);
try {
session.close();
} catch (SQLException se)
{
// If something went wrong closing down the session.
// Print an error to the console and close this
//thread. (6013)
sendUnexpectedException(se);
close();
}
finally {
session = null;
database = null;
appRequester=null;
sockis = null;
sockos=null;
databaseAccessException=null;
}
}
/**
* Handle Exceptions - write error protocol if appropriate and close session
* or thread as appropriate
*/
private void handleException(Exception e)
{
try {
if (e instanceof DRDAProtocolException) {
// protocol error - write error message
sendProtocolException((DRDAProtocolException) e);
} else {
// something unexpected happened
sendUnexpectedException(e);
server.consoleExceptionPrintTrace(e);
}
} finally {
// always close the session and stop the thread after handling
// these exceptions
closeSession();
close();
}
}
/**
* Notice the client about a protocol error.
*
* @param de <code>DRDAProtocolException</code> to be sent
*/
private void sendProtocolException(DRDAProtocolException de) {
String dbname = null;
if (database != null) {
dbname = database.getDatabaseName();
}
try {
println2Log(dbname, session.drdaID, de.getMessage());
server.consoleExceptionPrintTrace(de);
reader.clearBuffer();
de.write(writer);
finalizeChain();
} catch (DRDAProtocolException ioe) {
// There may be an IO exception in the write.
println2Log(dbname, session.drdaID, de.getMessage());
server.consoleExceptionPrintTrace(ioe);
}
}
/**
* Send unpexpected error to the client
* @param e Exception to be sent
*/
private void sendUnexpectedException(Exception e)
{
DRDAProtocolException unExpDe;
String dbname = null;
try {
if (database != null)
dbname = database.getDatabaseName();
println2Log(dbname,session.drdaID, e.getMessage());
server.consoleExceptionPrintTrace(e);
unExpDe = DRDAProtocolException.newAgentError(this,
CodePoint.SVRCOD_PRMDMG,
dbname, e.getMessage());
reader.clearBuffer();
unExpDe.write(writer);
finalizeChain();
}
catch (DRDAProtocolException nde)
{
// we can't tell the client, but we tried.
}
}
/**
* Test if DRDA connection thread is closed
*
* @return true if close; false otherwise
*/
private boolean closed()
{
synchronized (closeSync)
{
return close;
}
}
/**
* Get whether connections are logged
*
* @return true if connections are being logged; false otherwise
*/
private boolean getLogConnections()
{
synchronized(logConnectionsSync) {
return logConnections;
}
}
/**
* Get time slice value for length of time to work on a session
*
* @return time slice
*/
private long getTimeSlice()
{
synchronized(timeSliceSync) {
return timeSlice;
}
}
/**
* Send string to console
*
* @param value - value to print on console
*/
protected void trace(String value)
{
if (SanityManager.DEBUG && server.debugOutput == true)
server.consoleMessage(value, true);
}
/**
* Sends a trace string to the console when reading an EXTDTA value (if
* tracing is enabled).
*
* @param drdaType the DRDA type of the EXTDTA value
* @param index the one-based parameter index
* @param stream the stream being read
* @param streamLOB whether or not the value is being streamed as the last
* parameter value in the DRDA protocol flow
* @param encoding the encoding of the data, if any
*/
private void traceEXTDTARead(int drdaType, int index,
EXTDTAReaderInputStream stream,
boolean streamLOB, String encoding) {
if (SanityManager.DEBUG && server.debugOutput == true) {
StringBuffer sb = new StringBuffer("Reading/setting EXTDTA: ");
// Data: t<type>/i<ob_index>/<streamLOB>/<encoding>/
// <statusByteExpected>/b<byteLength>
sb.append("t").append(drdaType).append("/i").append(index).
append("/").append(streamLOB).
append("/").append(encoding).append("/").
append(stream.readStatusByte). append("/b");
if (stream == null) {
sb.append("NULL");
} else if (stream.isLayerBStream()) {
sb.append("UNKNOWN_LENGTH");
} else {
sb.append(
((StandardEXTDTAReaderInputStream)stream).getLength());
}
trace(sb.toString());
}
}
/***
* Show runtime memory
*
***/
public static void showmem() {
Runtime rt = null;
Date d = null;
rt = Runtime.getRuntime();
rt.gc();
d = new Date();
System.out.println("total memory: "
+ rt.totalMemory()
+ " free: "
+ rt.freeMemory()
+ " " + d.toString());
}
/**
* convert byte array to a Hex string
*
* @param buf buffer to convert
* @return hex string representation of byte array
*/
private String convertToHexString(byte [] buf)
{
StringBuffer str = new StringBuffer();
str.append("0x");
String val;
int byteVal;
for (int i = 0; i < buf.length; i++)
{
byteVal = buf[i] & 0xff;
val = Integer.toHexString(byteVal);
if (val.length() < 2)
str.append("0");
str.append(val);
}
return str.toString();
}
/**
* check that the given typdefnam is acceptable
*
* @param typdefnam
*
* @exception DRDAProtocolException
*/
private void checkValidTypDefNam(String typdefnam)
throws DRDAProtocolException
{
if (typdefnam.equals("QTDSQL370"))
return;
if (typdefnam.equals("QTDSQL400"))
return;
if (typdefnam.equals("QTDSQLX86"))
return;
if (typdefnam.equals("QTDSQLASC"))
return;
if (typdefnam.equals("QTDSQLVAX"))
return;
if (typdefnam.equals("QTDSQLJVM"))
return;
invalidValue(CodePoint.TYPDEFNAM);
}
/**
* Check that the length is equal to the required length for this codepoint
*
* @param codepoint codepoint we are checking
* @param reqlen required length
*
* @exception DRDAProtocolException
*/
private void checkLength(int codepoint, int reqlen)
throws DRDAProtocolException
{
long len = reader.getDdmLength();
if (len < reqlen)
badObjectLength(codepoint);
else if (len > reqlen)
tooBig(codepoint);
}
/**
* Read and check a boolean value
*
* @param codepoint codePoint to be used in error reporting
* @return true or false depending on boolean value read
*
* @exception DRDAProtocolException
*/
private boolean readBoolean(int codepoint) throws DRDAProtocolException
{
checkLength(codepoint, 1);
byte val = reader.readByte();
if (val == CodePoint.TRUE)
return true;
else if (val == CodePoint.FALSE)
return false;
else
invalidValue(codepoint);
return false; //to shut the compiler up
}
/**
* Create a new database and intialize the
* DRDAConnThread database.
*
* @param dbname database name to initialize. If
* dbnam is non null, add database to the current session
*
*/
private void initializeDatabase(String dbname)
{
Database db;
if (appRequester.isXARequester())
{
db = new XADatabase(dbname);
}
else
db = new Database(dbname);
if (dbname != null) {
session.addDatabase(db);
session.database = db;
}
database = db;
}
/**
* Set the current database
*
* @param codePoint codepoint we are processing
*
* @exception DRDAProtocolException
*/
private void setDatabase(int codePoint) throws DRDAProtocolException
{
String rdbnam = parseRDBNAM();
// using same database so we are done
if (database != null && database.getDatabaseName().equals(rdbnam))
return;
Database d = session.getDatabase(rdbnam);
if (d == null)
rdbnamMismatch(codePoint);
else
database = d;
session.database = d;
}
/**
* Write ENDUOWRM
* Instance Variables
* SVCOD - severity code - WARNING - required
* UOWDSP - Unit of Work Disposition - required
* RDBNAM - Relational Database name - optional
* SRVDGN - Server Diagnostics information - optional
*
* @param opType - operation type 1 - commit, 2 -rollback
*/
private void writeENDUOWRM(int opType)
{
writer.createDssReply();
writer.startDdm(CodePoint.ENDUOWRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_WARNING);
writer.writeScalar1Byte(CodePoint.UOWDSP, opType);
writer.endDdmAndDss();
}
void writeEXTDTA (DRDAStatement stmt) throws SQLException, DRDAProtocolException
{
ArrayList extdtaValues = stmt.getExtDtaObjects();
// build the EXTDTA data, if necessary
if (extdtaValues == null)
return;
boolean chainFlag, chainedWithSameCorrelator;
boolean writeNullByte = false;
for (int i = 0; i < extdtaValues.size(); i++) {
// is this the last EXTDTA to be built?
if (i != extdtaValues.size() - 1) { // no
chainFlag = true;
chainedWithSameCorrelator = true;
}
else { // yes
chainFlag = false; //last blob DSS stream itself is NOT chained with the NEXT DSS
chainedWithSameCorrelator = false;
}
if (sqlamLevel >= MGRLVL_7)
if (stmt.isExtDtaValueNullable(i))
writeNullByte = true;
Object o = extdtaValues.get(i);
if (o instanceof EXTDTAInputStream) {
EXTDTAInputStream stream = (EXTDTAInputStream) o;
try{
stream.initInputStream();
writer.writeScalarStream (chainedWithSameCorrelator,
CodePoint.EXTDTA,
stream,
writeNullByte);
}finally{
// close the stream when done
closeStream(stream);
}
}
}
// reset extdtaValues after sending
stmt.clearExtDtaObjects();
}
/**
* Check SQLWarning and write SQLCARD as needed.
*
* @param conn connection to check
* @param stmt statement to check
* @param rs result set to check
* @param updateCount update count to include in SQLCARD
* @param alwaysSend whether always send SQLCARD regardless of
* the existance of warnings
* @param sendWarn whether to send any warnings or not.
*
* @exception DRDAProtocolException
*/
private void checkWarning(Connection conn, Statement stmt, ResultSet rs,
int updateCount, boolean alwaysSend, boolean sendWarn)
throws DRDAProtocolException, SQLException
{
// instead of writing a chain of sql warning, we send the first one, this is
// jcc/db2 limitation, see beetle 4629
SQLWarning warning = null;
SQLWarning reportWarning = null;
try
{
if (stmt != null)
{
warning = stmt.getWarnings();
if (warning != null)
{
stmt.clearWarnings();
reportWarning = warning;
}
}
if (rs != null)
{
warning = rs.getWarnings();
if (warning != null)
{
rs.clearWarnings();
if (reportWarning == null)
reportWarning = warning;
}
}
if (conn != null)
{
warning = conn.getWarnings();
if (warning != null)
{
conn.clearWarnings();
if (reportWarning == null)
reportWarning = warning;
}
}
}
catch (SQLException se)
{
if (SanityManager.DEBUG)
trace("got SQLException while trying to get warnings.");
}
if ((alwaysSend || reportWarning != null) && sendWarn)
writeSQLCARDs(reportWarning, updateCount);
}
boolean hasSession() {
return session != null;
}
long getBytesRead() {
return reader.totalByteCount;
}
long getBytesWritten() {
return writer.totalByteCount;
}
protected String buildRuntimeInfo(String indent, LocalizedResource localLangUtil )
{
String s ="";
if (!hasSession())
return s;
else
s += session.buildRuntimeInfo("", localLangUtil);
s += "\n";
return s;
}
/**
* Finalize the current DSS chain and send it if
* needed.
*/
private void finalizeChain() throws DRDAProtocolException {
writer.finalizeChain(reader.getCurrChainState(), getOutputStream());
return;
}
/**
* Validate SECMEC_USRSSBPWD (Strong Password Substitute) can be used as
* DRDA security mechanism.
*
* Here we check that the target server can support SECMEC_USRSSBPWD
* security mechanism based on the environment, application
* requester's identity (PRDID) and connection URL.
*
* IMPORTANT NOTE:
* --------------
* SECMEC_USRSSBPWD is ONLY supported by the target server if:
* - current authentication provider is Derby BUILTIN or
* NONE. (database / system level) (Phase I)
* - database-level password must have been encrypted with the
* SHA-1 based authentication scheme
* - Application requester is 'DNC' (Derby Network Client)
* (Phase I)
*
* @return security check code - 0 if everything O.K.
*/
private int validateSecMecUSRSSBPWD() throws DRDAProtocolException
{
String dbName = null;
AuthenticationService authenticationService = null;
com.splicemachine.db.iapi.db.Database databaseObj = null;
String srvrlslv = appRequester.srvrlslv;
// Check if application requester is the Derby Network Client (DNC)
//
// We use a trick here - as the product ID is not yet available
// since ACCRDB message is only coming later, we check the server
// release level field sent as part of the initial EXCSAT message;
// indeed, the product ID (PRDID) is prefixed to in the field.
// Derby always sets it as part of the EXCSAT message so if it is
// not available, we stop here and inform the requester that
// SECMEC_USRSSBPWD cannot be supported for this connection.
if ((srvrlslv == null) || (srvrlslv.length() == 0) ||
(srvrlslv.length() < CodePoint.PRDID_MAX) ||
(srvrlslv.indexOf(DRDAConstants.DERBY_DRDA_CLIENT_ID)
== -1))
return CodePoint.SECCHKCD_NOTSUPPORTED; // Not Supported
// Client product version is extracted from the srvrlslv field.
// srvrlslv has the format <PRDID>/<ALTERNATE VERSION FORMAT>
// typically, a known Derby client has a four part version number
// with a pattern such as DNC10020/10.2.0.3 alpha. If the alternate
// version format is not specified, clientProductVersion_ will just
// be set to the srvrlslvl. Final fallback will be the product id.
//
// SECMEC_USRSSBPWD is only supported by the Derby engine and network
// server code starting at version major '10' and minor '02'. Hence,
// as this is the same for the db client driver, we need to ensure
// our DNC client is at version and release level of 10.2 at least.
// We set the client version in the application requester and check
// if it is at the level we require at a minimum.
appRequester.setClientVersion(
srvrlslv.substring(0, (int) CodePoint.PRDID_MAX));
if (appRequester.supportsSecMecUSRSSBPWD() == false)
return CodePoint.SECCHKCD_NOTSUPPORTED; // Not Supported
dbName = database.getShortDbName();
// Check if the database is available (booted)
//
// First we need to have the database name available and it should
// have been set as part of the ACCSEC request (in the case of a Derby
// 'DNC' client)
if ((dbName == null) || (dbName.length() == 0))
{
// No database specified in the connection URL attributes
//
// In this case, we get the authentication service handle from the
// local driver, as the requester may simply be trying to shutdown
// the engine.
authenticationService = ((InternalDriver)
NetworkServerControlImpl.getDriver()).getAuthenticationService();
}
else
{
// We get the authentication service from the database as this
// last one might have specified its own auth provider (at the
// database level).
//
// if monitor is never setup by any ModuleControl, getMonitor
// returns null and no Derby database has been booted.
if (Monitor.getMonitor() != null)
databaseObj = (com.splicemachine.db.iapi.db.Database)
Monitor.findService(Property.DATABASE_MODULE, dbName);
if (databaseObj == null)
{
// If database is not found, try connecting to it.
database.makeDummyConnection();
// now try to find it again
databaseObj = (com.splicemachine.db.iapi.db.Database)
Monitor.findService(Property.DATABASE_MODULE, dbName);
}
// If database still could not be found, it means the database
// does not exist - we just return security mechanism not
// supported down below as we could not verify we can handle
// it.
try {
if (databaseObj != null)
authenticationService =
databaseObj.getAuthenticationService();
} catch (StandardException se) {
println2Log(null, session.drdaID, se.getMessage());
// Local security service non-retryable error.
return CodePoint.SECCHKCD_0A;
}
}
// Now we check if the authentication provider is NONE or BUILTIN
if (authenticationService != null)
{
String authClassName = authenticationService.getClass().getName();
if (!authClassName.equals(AUTHENTICATION_PROVIDER_BUILTIN_CLASS) &&
!authClassName.equals(AUTHENTICATION_PROVIDER_NONE_CLASS))
return CodePoint.SECCHKCD_NOTSUPPORTED; // Not Supported
}
// SECMEC_USRSSBPWD target initialization
try {
myTargetSeed = DecryptionManager.generateSeed();
database.secTokenOut = myTargetSeed;
} catch (SQLException se) {
println2Log(null, session.drdaID, se.getMessage());
// Local security service non-retryable error.
return CodePoint.SECCHKCD_0A;
}
return 0; // SECMEC_USRSSBPWD is supported
}
/**
* Close a stream.
*
* @param stream the stream to close (possibly {@code null})
* @throws SQLException wrapped around an {@code IOException} if closing
* the stream failed
*/
private static void closeStream(InputStream stream) throws SQLException {
try {
if (stream != null) {
stream.close();
}
} catch (IOException e) {
throw Util.javaException(e);
}
}
private static InputStream
convertAsByteArrayInputStream( EXTDTAReaderInputStream stream )
throws IOException {
// Suppress the exception that may be thrown when reading the status
// byte here, we want the embedded statement to fail while executing.
stream.setSuppressException(true);
final int byteArrayLength =
stream instanceof StandardEXTDTAReaderInputStream ?
(int) ( ( StandardEXTDTAReaderInputStream ) stream ).getLength() :
1 + stream.available(); // +1 to avoid infinite loop
// TODO: We will run into OOMEs for large values here.
// Could avoid this by saving value temporarily to disk, for
// instance by using the existing LOB code.
PublicBufferOutputStream pbos =
new PublicBufferOutputStream( byteArrayLength );
byte[] buffer = new byte[Math.min(byteArrayLength, 32*1024)];
int c = 0;
while( ( c = stream.read( buffer,
0,
buffer.length ) ) > -1 ) {
pbos.write( buffer, 0, c );
}
// Check if the client driver encountered any errors when reading the
// source on the client side.
if (stream.isStatusSet() &&
stream.getStatus() != DRDAConstants.STREAM_OK) {
// Create a stream that will just fail when accessed.
return new FailingEXTDTAInputStream(stream.getStatus());
} else {
return new ByteArrayInputStream( pbos.getBuffer(),
0,
pbos.getCount() );
}
}
private static class PublicBufferOutputStream extends ByteArrayOutputStream{
PublicBufferOutputStream(int size){
super(size);
}
public byte[] getBuffer(){
return buf;
}
public int getCount(){
return count;
}
}
/**
* Sets the specified character EXTDTA parameter of the embedded statement.
*
* @param stmt the DRDA statement to use
* @param i the one-based index of the parameter
* @param extdtaStream the EXTDTA stream to read data from
* @param streamLOB whether or not the stream content is streamed as the
* last value in the DRDA protocol flow
* @param encoding the encoding of the EXTDTA stream
* @throws IOException if reading from the stream fails
* @throws SQLException if setting the stream fails
*/
private static void setAsCharacterStream(
DRDAStatement stmt,
int i,
EXTDTAReaderInputStream extdtaStream,
boolean streamLOB,
String encoding)
throws IOException, SQLException {
PreparedStatement ps = stmt.getPreparedStatement();
EnginePreparedStatement engnps =
( EnginePreparedStatement ) ps;
// DERBY-3085. Save the stream so it can be drained later
// if not used.
if (streamLOB)
stmt.setStreamedParameter(extdtaStream);
final InputStream is =
streamLOB ?
(InputStream) extdtaStream :
convertAsByteArrayInputStream( extdtaStream );
final InputStreamReader streamReader =
new InputStreamReader( is,
encoding ) ;
engnps.setCharacterStream(i, streamReader);
}
/**
* Sets the specified binary EXTDTA parameter of the embedded statement.
*
* @param stmt the DRDA statement to use
* @param index the one-based index of the parameter
* @param stream the EXTDTA stream to read data from
* @param streamLOB whether or not the stream content is streamed as the
* last value in the DRDA protocol flow
* @throws IOException if reading from the stream fails
* @throws SQLException if setting the stream fails
*/
private static void setAsBinaryStream(DRDAStatement stmt,
int index,
EXTDTAReaderInputStream stream,
boolean streamLOB)
throws IOException, SQLException {
int type = stmt.getParameterMetaData().getParameterType(index);
boolean useSetBinaryStream = (type == Types.BLOB);
PreparedStatement ps = stmt.getPreparedStatement();
if (streamLOB && useSetBinaryStream) {
// Save the streamed parameter so we can drain it if it does not
// get used by embedded when the statement is executed. DERBY-3085
stmt.setStreamedParameter(stream);
if (stream == null) {
ps.setBytes(index, null);
} else if (!stream.isLayerBStream()) {
int length = (int)((StandardEXTDTAReaderInputStream)
stream).getLength();
ps.setBinaryStream(index, stream, length);
} else {
((EnginePreparedStatement)ps).setBinaryStream(index, stream);
}
} else {
if (stream == null) {
ps.setBytes(index, null);
} else {
InputStream bais = convertAsByteArrayInputStream(stream);
ps.setBinaryStream(index, bais, bais.available());
}
}
}
}
| java/drda/com/splicemachine/db/impl/drda/DRDAConnThread.java | /*
Derby - Class com.splicemachine.db.impl.drda.DRDAConnThread
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.splicemachine.db.impl.drda;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.OutputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DataTruncation;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Properties;
import java.util.TimeZone;
import java.util.Vector;
import com.splicemachine.db.catalog.SystemProcedures;
import com.splicemachine.db.iapi.error.StandardException;
import com.splicemachine.db.iapi.error.ExceptionSeverity;
import com.splicemachine.db.iapi.reference.Attribute;
import com.splicemachine.db.iapi.reference.DRDAConstants;
import com.splicemachine.db.iapi.reference.JDBC30Translation;
import com.splicemachine.db.iapi.reference.Property;
import com.splicemachine.db.iapi.reference.SQLState;
import com.splicemachine.db.iapi.services.info.JVMInfo;
import com.splicemachine.db.iapi.services.monitor.Monitor;
import com.splicemachine.db.iapi.services.sanity.SanityManager;
import com.splicemachine.db.iapi.services.stream.HeaderPrintWriter;
import com.splicemachine.db.iapi.types.SQLRowId;
import com.splicemachine.db.iapi.tools.i18n.LocalizedResource;
import com.splicemachine.db.iapi.jdbc.AuthenticationService;
import com.splicemachine.db.iapi.jdbc.EngineLOB;
import com.splicemachine.db.iapi.jdbc.EngineResultSet;
import com.splicemachine.db.impl.jdbc.EmbedSQLException;
import com.splicemachine.db.impl.jdbc.Util;
import com.splicemachine.db.jdbc.InternalDriver;
import com.splicemachine.db.iapi.jdbc.EnginePreparedStatement;
/**
* This class translates DRDA protocol from an application requester to JDBC
* for Derby and then translates the results from Derby to DRDA
* for return to the application requester.
*/
class DRDAConnThread extends Thread {
private static final String leftBrace = "{";
private static final String rightBrace = "}";
private static final byte NULL_VALUE = (byte)0xff;
private static final String SYNTAX_ERR = "42X01";
// Manager Level 3 constant.
private static final int MGRLVL_3 = 0x03;
// Manager Level 4 constant.
private static final int MGRLVL_4 = 0x04;
// Manager Level 5 constant.
private static final int MGRLVL_5 = 0x05;
// Manager level 6 constant.
private static final int MGRLVL_6 = 0x06;
// Manager Level 7 constant.
private static final int MGRLVL_7 = 0x07;
// Commit or rollback UOWDSP values
private static final int COMMIT = 1;
private static final int ROLLBACK = 2;
private int correlationID;
private InputStream sockis;
private OutputStream sockos;
private DDMReader reader;
private DDMWriter writer;
private DRDAXAProtocol xaProto;
private static int [] ACCRDB_REQUIRED = {CodePoint.RDBACCCL,
CodePoint.CRRTKN,
CodePoint.PRDID,
CodePoint.TYPDEFNAM,
CodePoint.TYPDEFOVR};
private static int MAX_REQUIRED_LEN = 5;
private int currentRequiredLength = 0;
private int [] required = new int[MAX_REQUIRED_LEN];
private NetworkServerControlImpl server; // server who created me
private Session session; // information about the session
private long timeSlice; // time slice for this thread
private Object timeSliceSync = new Object(); // sync object for updating time slice
private boolean logConnections; // log connections to databases
private boolean sendWarningsOnCNTQRY = false; // Send Warnings for SELECT if true
private Object logConnectionsSync = new Object(); // sync object for log connect
private boolean close; // end this thread
private Object closeSync = new Object(); // sync object for parent to close us down
private static HeaderPrintWriter logStream;
private AppRequester appRequester; // pointer to the application requester
// for the session being serviced
private Database database; // pointer to the current database
private int sqlamLevel; // SQLAM Level - determines protocol
// DRDA diagnostic level, DIAGLVL0 by default
private byte diagnosticLevel = (byte)0xF0;
// manager processing
private Vector unknownManagers;
private Vector knownManagers;
private Vector errorManagers;
private Vector errorManagersLevel;
// database accessed failed
private SQLException databaseAccessException;
// these fields are needed to feed back to jcc about a statement/procedure's PKGNAMCSN
/** The value returned by the previous call to
* <code>parsePKGNAMCSN()</code>. */
private Pkgnamcsn prevPkgnamcsn = null;
/** Current RDB Package Name. */
private DRDAString rdbnam = null;
/** Current RDB Collection Identifier. */
private DRDAString rdbcolid = null;
/** Current RDB Package Identifier. */
private DRDAString pkgid = null;
/** Current RDB Package Consistency Token. */
private DRDAString pkgcnstkn = null;
/** Current RDB Package Section Number. */
private int pkgsn;
private final static String TIMEOUT_STATEMENT = "SET STATEMENT_TIMEOUT ";
private int pendingStatementTimeout; // < 0 means no pending timeout to set
// this flag is for an execute statement/procedure which actually returns a result set;
// do not commit the statement, otherwise result set is closed
// for decryption
private static DecryptionManager decryptionManager;
// public key generated by Deffie-Hellman algorithm, to be passed to the encrypter,
// as well as used to initialize the cipher
private byte[] myPublicKey;
// generated target seed to be used to generate the password substitute
// as part of SECMEC_USRSSBPWD security mechanism
private byte[] myTargetSeed;
// Some byte[] constants that are frequently written into messages. It is more efficient to
// use these constants than to convert from a String each time
// (This replaces the qryscraft_ and notQryscraft_ static exception objects.)
private static final byte[] eod00000 = { '0', '0', '0', '0', '0' };
private static final byte[] eod02000 = { '0', '2', '0', '0', '0' };
private static final byte[] nullSQLState = { ' ', ' ', ' ', ' ', ' ' };
private static final byte[] errD4_D6 = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; // 12x0
private static final byte[] warn0_warnA = { ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ' }; // 11x ' '
private final static String AUTHENTICATION_PROVIDER_BUILTIN_CLASS =
"com.splicemachine.db.impl.jdbc.authentication.BasicAuthenticationServiceImpl";
private final static String AUTHENTICATION_PROVIDER_NONE_CLASS =
"com.splicemachine.db.impl.jdbc.authentication.NoneAuthenticationServiceImpl";
// Work around a classloader bug involving interrupt handling during
// class loading. If the first request to load the
// DRDAProtocolExceptionInfo class occurs during shutdown, the
// loading of the class may be aborted when the Network Server calls
// Thread.interrupt() on the DRDAConnThread. By including a static
// reference to the DRDAProtocolExceptionInfo class here, we ensure
// that it is loaded as soon as the DRDAConnThread class is loaded,
// and therefore we know we won't be trying to load the class during
// shutdown. See DERBY-1338 for more background, including pointers
// to the apparent classloader bug in the JVM.
private static final DRDAProtocolExceptionInfo dummy =
new DRDAProtocolExceptionInfo(0,0,0,false);
/**
* Tells if the reset / connect request is a deferred request.
* This information is used to work around a bug (DERBY-3596) in a
* compatible manner, which also avoids any changes in the client driver.
* <p>
* The bug manifests itself when a connection pool data source is used and
* logical connections are obtained from the physical connection associated
* with the data source. Each new logical connection causes a new physical
* connection on the server, including a new transaction. These connections
* and transactions are not closed / cleaned up.
*/
private boolean deferredReset = false;
// constructor
/**
* Create a new Thread for processing session requests
*
* @param session Session requesting processing
* @param server Server starting thread
* @param timeSlice timeSlice for thread
* @param logConnections
**/
DRDAConnThread(Session session, NetworkServerControlImpl server,
long timeSlice,
boolean logConnections) {
super();
// Create a more meaningful name for this thread (but preserve its
// thread id from the default name).
NetworkServerControlImpl.setUniqueThreadName(this, "DRDAConnThread");
this.session = session;
this.server = server;
this.timeSlice = timeSlice;
this.logConnections = logConnections;
this.pendingStatementTimeout = -1;
initialize();
}
/**
* Main routine for thread, loops until the thread is closed
* Gets a session, does work for the session
*/
public void run() {
if (SanityManager.DEBUG)
trace("Starting new connection thread");
Session prevSession;
while(!closed())
{
// get a new session
prevSession = session;
session = server.getNextSession(session);
if (session == null)
close();
if (closed())
break;
if (session != prevSession)
{
initializeForSession();
}
try {
long timeStart = System.currentTimeMillis();
switch (session.state)
{
case Session.INIT:
sessionInitialState();
if (session == null)
break;
// else fallthrough
case Session.ATTEXC:
case Session.SECACC:
case Session.CHKSEC:
long currentTimeSlice;
do {
try {
processCommands();
} catch (DRDASocketTimeoutException ste) {
// Just ignore the exception. This was
// a timeout on the read call in
// DDMReader.fill(), which will happen
// only when timeSlice is set.
}
currentTimeSlice = getTimeSlice();
} while ((currentTimeSlice <= 0) ||
(System.currentTimeMillis() - timeStart < currentTimeSlice));
break;
default:
// this is an error
agentError("Session in invalid state:" + session.state);
}
} catch (Exception e) {
if (e instanceof DRDAProtocolException &&
((DRDAProtocolException)e).isDisconnectException())
{
// client went away - this is O.K. here
closeSession();
}
else
{
handleException(e);
}
} catch (Error error) {
// Do as little as possible, but try to cut loose the client
// to avoid that it hangs in a socket read-call.
// TODO: Could make use of Throwable.addSuppressed here when
// compiled as Java 7 (or newer).
try {
closeSession();
} catch (Throwable t) {
// One last attempt...
try {
session.clientSocket.close();
} catch (IOException ioe) {
// Ignore, we're in deeper trouble already.
}
} finally {
// Rethrow the original error, ignore errors that happened
// when trying to close the socket to the client.
throw error;
}
}
}
if (SanityManager.DEBUG)
trace("Ending connection thread");
server.removeThread(this);
}
/**
* Get input stream
*
* @return input stream
*/
protected InputStream getInputStream()
{
return sockis;
}
/**
* Get output stream
*
* @return output stream
*/
protected OutputStream getOutputStream()
{
return sockos;
}
/**
* get DDMReader
* @return DDMReader for this thread
*/
protected DDMReader getReader()
{
return reader;
}
/**
* get DDMWriter
* @return DDMWriter for this thread
*/
protected DDMWriter getWriter()
{
return writer;
}
/**
* Get correlation id
*
* @return correlation id
*/
protected int getCorrelationID ()
{
return correlationID;
}
/**
* Get session we are working on
*
* @return session
*/
protected Session getSession()
{
return session;
}
/**
* Get Database we are working on
*
* @return database
*/
protected Database getDatabase()
{
return database;
}
/**
* Get server
*
* @return server
*/
protected NetworkServerControlImpl getServer()
{
return server;
}
/**
* Get correlation token
*
* @return crrtkn
*/
protected byte[] getCrrtkn()
{
if (database != null)
return database.crrtkn;
return null;
}
/**
* Get database name
*
* @return database name
*/
protected String getDbName()
{
if (database != null)
return database.getDatabaseName();
return null;
}
/**
* Close DRDA connection thread
*/
protected void close()
{
synchronized (closeSync)
{
close = true;
}
}
/**
* Set logging of connections
*
* @param value value to set for logging connections
*/
protected void setLogConnections(boolean value)
{
synchronized(logConnectionsSync) {
logConnections = value;
}
}
/**
* Set time slice value
*
* @param value new value for time slice
*/
protected void setTimeSlice(long value)
{
synchronized(timeSliceSync) {
timeSlice = value;
}
}
/**
* Indicate a communications failure
*
* @param arg1 - info about the communications failure
* @param arg2 - info about the communications failure
* @param arg3 - info about the communications failure
* @param arg4 - info about the communications failure
*
* @exception DRDAProtocolException disconnect exception always thrown
*/
protected void markCommunicationsFailure(String arg1, String arg2, String arg3,
String arg4) throws DRDAProtocolException
{
markCommunicationsFailure(null,arg1,arg2,arg3, arg4);
}
/**
* Indicate a communications failure. Log to db.log
*
* @param e - Source exception that was thrown
* @param arg1 - info about the communications failure
* @param arg2 - info about the communications failure
* @param arg3 - info about the communications failure
* @param arg4 - info about the communications failure
*
* @exception DRDAProtocolException disconnect exception always thrown
*/
protected void markCommunicationsFailure(Exception e, String arg1, String arg2, String arg3,
String arg4) throws DRDAProtocolException
{
String dbname = null;
if (database != null)
{
dbname = database.getDatabaseName();
}
if (e != null) {
println2Log(dbname,session.drdaID, e.getMessage());
server.consoleExceptionPrintTrace(e);
}
Object[] oa = {arg1,arg2,arg3,arg4};
throw DRDAProtocolException.newDisconnectException(this,oa);
}
/**
* Syntax error
*
* @param errcd Error code
* @param cpArg code point value
* @exception DRDAProtocolException
*/
protected void throwSyntaxrm(int errcd, int cpArg)
throws DRDAProtocolException
{
throw new
DRDAProtocolException(DRDAProtocolException.DRDA_Proto_SYNTAXRM,
this,
cpArg,
errcd);
}
/**
* Agent error - something very bad happened
*
* @param msg Message describing error
*
* @exception DRDAProtocolException newAgentError always thrown
*/
protected void agentError(String msg) throws DRDAProtocolException
{
String dbname = null;
if (database != null)
dbname = database.getDatabaseName();
throw DRDAProtocolException.newAgentError(this, CodePoint.SVRCOD_PRMDMG,
dbname, msg);
}
/**
* Missing code point
*
* @param codePoint code point value
* @exception DRDAProtocolException
*/
protected void missingCodePoint(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_REQ_OBJ_NOT_FOUND, codePoint);
}
/**
* Print a line to the DB2j log
*
* @param dbname database name
* @param drdaID DRDA identifier
* @param msg message
*/
protected static void println2Log(String dbname, String drdaID, String msg)
{
if (logStream == null)
logStream = Monitor.getStream();
if (dbname != null)
{
int endOfName = dbname.indexOf(';');
if (endOfName != -1)
dbname = dbname.substring(0, endOfName);
}
logStream.printlnWithHeader("(DATABASE = " + dbname + "), (DRDAID = " + drdaID + "), " + msg);
}
/**
* Write RDBNAM
*
* @param rdbnam database name
* @exception DRDAProtocolException
*/
protected void writeRDBNAM(String rdbnam)
throws DRDAProtocolException
{
CcsidManager currentManager = writer.getCurrentCcsidManager();
int len = currentManager.getByteLength(rdbnam);
if (len < CodePoint.RDBNAM_LEN)
len = CodePoint.RDBNAM_LEN;
/* Write the string padded */
writer.writeScalarPaddedString(CodePoint.RDBNAM, rdbnam, len);
}
/***************************************************************************
* Private methods
***************************************************************************/
/**
* Initialize class
*/
private void initialize()
{
// set input and output sockets
// this needs to be done before creating reader
sockis = session.sessionInput;
sockos = session.sessionOutput;
reader = new DDMReader(this, session.dssTrace);
writer = new DDMWriter(this, session.dssTrace);
/* At this stage we can initialize the strings as we have
* the CcsidManager for the DDMWriter. */
rdbnam = new DRDAString(writer);
rdbcolid = new DRDAString(writer);
pkgid = new DRDAString(writer);
pkgcnstkn = new DRDAString(writer);
}
/**
* Initialize for a new session
*/
private void initializeForSession()
{
// set input and output sockets
sockis = session.sessionInput;
sockos = session.sessionOutput;
// intialize reader and writer
reader.initialize(this, session.dssTrace);
writer.reset(session.dssTrace);
// initialize local pointers to session info
database = session.database;
appRequester = session.appRequester;
// set sqlamLevel
if (session.state == Session.ATTEXC)
sqlamLevel = appRequester.getManagerLevel(CodePoint.SQLAM);
/* All sessions MUST start as EBCDIC */
reader.setEbcdicCcsid();
writer.setEbcdicCcsid();
}
/**
* In initial state for a session,
* determine whether this is a command
* session or a DRDA protocol session. A command session is for changing
* the configuration of the Net server, e.g., turning tracing on
* If it is a command session, process the command and close the session.
* If it is a DRDA session, exchange server attributes and change session
* state.
*/
private void sessionInitialState()
throws Exception
{
// process NetworkServerControl commands - if it is not either valid protocol let the
// DRDA error handling handle it
if (reader.isCmd())
{
try {
server.processCommands(reader, writer, session);
// reset reader and writer
reader.initialize(this, null);
writer.reset(null);
closeSession();
} catch (Throwable t) {
if (t instanceof InterruptedException)
throw (InterruptedException)t;
else
{
server.consoleExceptionPrintTrace(t);
}
}
}
else
{
// exchange attributes with application requester
exchangeServerAttributes();
}
}
/**
* Cleans up and closes a result set if an exception is thrown
* when collecting QRYDTA in response to OPNQRY or CNTQRY.
*
* @param stmt the DRDA statement to clean up
* @param sqle the exception that was thrown
* @param writerMark start index for the first DSS to clear from
* the output buffer
* @exception DRDAProtocolException if a DRDA protocol error is
* detected
*/
private void cleanUpAndCloseResultSet(DRDAStatement stmt,
SQLException sqle,
int writerMark)
throws DRDAProtocolException
{
if (stmt != null) {
writer.clearDSSesBackToMark(writerMark);
if (!stmt.rsIsClosed()) {
try {
stmt.rsClose();
} catch (SQLException ec) {
if (SanityManager.DEBUG) {
trace("Warning: Error closing result set");
}
}
writeABNUOWRM();
writeSQLCARD(sqle, CodePoint.SVRCOD_ERROR, 0, 0);
}
} else {
writeSQLCARDs(sqle, 0);
}
errorInChain(sqle);
}
/**
* Process DRDA commands we can receive once server attributes have been
* exchanged.
*
* @exception DRDAProtocolException
*/
private void processCommands() throws DRDAProtocolException
{
DRDAStatement stmt = null;
int updateCount = 0;
boolean PRPSQLSTTfailed = false;
boolean checkSecurityCodepoint = session.requiresSecurityCodepoint();
do
{
correlationID = reader.readDssHeader();
int codePoint = reader.readLengthAndCodePoint( false );
int writerMark = writer.markDSSClearPoint();
if (checkSecurityCodepoint)
verifyInOrderACCSEC_SECCHK(codePoint,session.getRequiredSecurityCodepoint());
switch(codePoint)
{
case CodePoint.CNTQRY:
try{
stmt = parseCNTQRY();
if (stmt != null)
{
writeQRYDTA(stmt);
if (stmt.rsIsClosed())
{
writeENDQRYRM(CodePoint.SVRCOD_WARNING);
writeNullSQLCARDobject();
}
// Send any warnings if JCC can handle them
checkWarning(null, null, stmt.getResultSet(), 0, false, sendWarningsOnCNTQRY);
writePBSD();
}
}
catch(SQLException e)
{
// if we got a SQLException we need to clean up and
// close the result set Beetle 4758
cleanUpAndCloseResultSet(stmt, e, writerMark);
}
break;
case CodePoint.EXCSQLIMM:
try {
updateCount = parseEXCSQLIMM();
// RESOLVE: checking updateCount is not sufficient
// since it will be 0 for creates, we need to know when
// any logged changes are made to the database
// Not getting this right for JCC is probably O.K., this
// will probably be a problem for ODBC and XA
// The problem is that JDBC doesn't provide this information
// so we would have to expand the JDBC API or call a
// builtin method to check(expensive)
// For now we will assume that every execute immediate
// does an update (that is the most conservative thing)
if (database.RDBUPDRM_sent == false)
{
writeRDBUPDRM();
}
// we need to set update count in SQLCARD
checkWarning(null, database.getDefaultStatement().getStatement(),
null, updateCount, true, true);
writePBSD();
} catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.EXCSQLSET:
try {
if (parseEXCSQLSET())
// all went well.
writeSQLCARDs(null,0);
}
catch (SQLWarning w)
{
writeSQLCARD(w, CodePoint.SVRCOD_WARNING, 0, 0);
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.PRPSQLSTT:
int sqldaType;
PRPSQLSTTfailed = false;
try {
database.getConnection().clearWarnings();
sqldaType = parsePRPSQLSTT();
database.getCurrentStatement().sqldaType = sqldaType;
if (sqldaType > 0) // do write SQLDARD
writeSQLDARD(database.getCurrentStatement(),
(sqldaType == CodePoint.TYPSQLDA_LIGHT_OUTPUT),
database.getConnection().getWarnings());
else
checkWarning(database.getConnection(), null, null, 0, true, true);
} catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
writeSQLCARDs(e, 0, true);
PRPSQLSTTfailed = true;
errorInChain(e);
}
break;
case CodePoint.OPNQRY:
PreparedStatement ps = null;
try {
if (PRPSQLSTTfailed) {
// read the command objects
// for ps with parameter
// Skip objects/parameters
skipRemainder(true);
// If we failed to prepare, then we fail
// to open, which means OPNQFLRM.
writeOPNQFLRM(null);
break;
}
Pkgnamcsn pkgnamcsn = parseOPNQRY();
if (pkgnamcsn != null)
{
stmt = database.getDRDAStatement(pkgnamcsn);
ps = stmt.getPreparedStatement();
ps.clearWarnings();
if (pendingStatementTimeout >= 0) {
ps.setQueryTimeout(pendingStatementTimeout);
pendingStatementTimeout = -1;
}
stmt.execute();
writeOPNQRYRM(false, stmt);
checkWarning(null, ps, null, 0, false, true);
long sentVersion = stmt.versionCounter;
long currentVersion =
((EnginePreparedStatement)stmt.ps).
getVersionCounter();
if (stmt.sqldaType ==
CodePoint.TYPSQLDA_LIGHT_OUTPUT &&
currentVersion != sentVersion) {
// DERBY-5459. The prepared statement has a
// result set and has changed on the server
// since we last informed the client about its
// shape, so re-send metadata.
//
// NOTE: This is an extension of the standard
// DRDA protocol since we send the SQLDARD
// even if it isn't requested in this case.
// This is OK because there is already code on the
// client to handle an unrequested SQLDARD at
// this point in the protocol.
writeSQLDARD(stmt, true, null);
}
writeQRYDSC(stmt, false);
stmt.rsSuspend();
if (stmt.getQryprctyp() == CodePoint.LMTBLKPRC &&
stmt.getQryrowset() != 0) {
// The DRDA spec allows us to send
// QRYDTA here if there are no LOB
// columns.
DRDAResultSet drdars =
stmt.getCurrentDrdaResultSet();
try {
if (drdars != null &&
!drdars.hasLobColumns()) {
writeQRYDTA(stmt);
}
} catch (SQLException sqle) {
cleanUpAndCloseResultSet(stmt, sqle,
writerMark);
}
}
}
writePBSD();
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
// The fix for DERBY-1196 removed code
// here to close the prepared statement
// if OPNQRY failed.
writeOPNQFLRM(e);
}
break;
case CodePoint.RDBCMM:
try
{
if (SanityManager.DEBUG)
trace("Received commit");
if (!database.getConnection().getAutoCommit())
{
database.getConnection().clearWarnings();
database.commit();
writeENDUOWRM(COMMIT);
checkWarning(database.getConnection(), null, null, 0, true, true);
}
// we only want to write one of these per transaction
// so set to false in preparation for next command
database.RDBUPDRM_sent = false;
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
// Even in case of error, we have to write the ENDUOWRM.
writeENDUOWRM(COMMIT);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.RDBRLLBCK:
try
{
if (SanityManager.DEBUG)
trace("Received rollback");
database.getConnection().clearWarnings();
database.rollback();
writeENDUOWRM(ROLLBACK);
checkWarning(database.getConnection(), null, null, 0, true, true);
// we only want to write one of these per transaction
// so set to false in preparation for next command
database.RDBUPDRM_sent = false;
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
// Even in case of error, we have to write the ENDUOWRM.
writeENDUOWRM(ROLLBACK);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.CLSQRY:
try{
stmt = parseCLSQRY();
stmt.rsClose();
writeSQLCARDs(null, 0);
}
catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.EXCSAT:
parseEXCSAT();
writeEXCSATRD();
break;
case CodePoint.ACCSEC:
int securityCheckCode = parseACCSEC();
writeACCSECRD(securityCheckCode);
/* ACCSECRD is the last reply that is mandatorily in EBCDIC */
if (appRequester.supportsUtf8Ccsid()) {
switchToUtf8();
} else {
/* This thread might serve several requests.
* Revert if not supported by current client. */
switchToEbcdic();
}
checkSecurityCodepoint = true;
break;
case CodePoint.SECCHK:
if(parseDRDAConnection())
// security all checked and connection ok
checkSecurityCodepoint = false;
break;
/* since we don't support sqlj, we won't get bind commands from jcc, we
* might get it from ccc; just skip them.
*/
case CodePoint.BGNBND:
reader.skipBytes();
writeSQLCARDs(null, 0);
break;
case CodePoint.BNDSQLSTT:
reader.skipBytes();
parseSQLSTTDss();
writeSQLCARDs(null, 0);
break;
case CodePoint.SQLSTTVRB:
// optional
reader.skipBytes();
break;
case CodePoint.ENDBND:
reader.skipBytes();
writeSQLCARDs(null, 0);
break;
case CodePoint.DSCSQLSTT:
if (PRPSQLSTTfailed) {
reader.skipBytes();
writeSQLCARDs(null, 0);
break;
}
try {
boolean rtnOutput = parseDSCSQLSTT();
writeSQLDARD(database.getCurrentStatement(), rtnOutput,
null);
} catch (SQLException e)
{
writer.clearDSSesBackToMark(writerMark);
server.consoleExceptionPrint(e);
try {
writeSQLDARD(database.getCurrentStatement(), true, e);
} catch (SQLException e2) { // should not get here since doing nothing with ps
agentError("Why am I getting another SQLException?");
}
errorInChain(e);
}
break;
case CodePoint.EXCSQLSTT:
if (PRPSQLSTTfailed) {
// Skip parameters too if they are chained Beetle 4867
skipRemainder(true);
writeSQLCARDs(null, 0);
break;
}
try {
parseEXCSQLSTT();
DRDAStatement curStmt = database.getCurrentStatement();
if (curStmt != null)
curStmt.rsSuspend();
writePBSD();
} catch (SQLException e)
{
skipRemainder(true);
writer.clearDSSesBackToMark(writerMark);
if (SanityManager.DEBUG)
{
server.consoleExceptionPrint(e);
}
writeSQLCARDs(e, 0);
errorInChain(e);
}
break;
case CodePoint.SYNCCTL:
if (xaProto == null)
xaProto = new DRDAXAProtocol(this);
xaProto.parseSYNCCTL();
try {
writePBSD();
} catch (SQLException se) {
server.consoleExceptionPrint(se);
errorInChain(se);
}
break;
default:
codePointNotSupported(codePoint);
}
if (SanityManager.DEBUG) {
String cpStr = CodePointNameTable.lookup(codePoint);
try {
PiggyBackedSessionData pbsd =
database.getPiggyBackedSessionData(false);
// DERBY-3596
// Don't perform this assert if a deferred reset is
// happening or has recently taken place, because the
// connection state has been changed under the feet of the
// piggy-backing mechanism.
if (!this.deferredReset && pbsd != null) {
// Session data has already been piggy-backed. Refresh
// the data from the connection, to make sure it has
// not changed behind our back.
pbsd.refresh();
SanityManager.ASSERT(!pbsd.isModified(),
"Unexpected PBSD modification: " + pbsd +
" after codePoint " + cpStr);
}
// Not having a pbsd here is ok. No data has been
// piggy-backed and the client has no cached values.
// If needed it will send an explicit request to get
// session data
} catch (SQLException sqle) {
server.consoleExceptionPrint(sqle);
SanityManager.THROWASSERT("Unexpected exception after " +
"codePoint "+cpStr, sqle);
}
}
// Set the correct chaining bits for whatever
// reply DSS(es) we just wrote. If we've reached
// the end of the chain, this method will send
// the DSS(es) across.
finalizeChain();
}
while (reader.isChainedWithSameID() || reader.isChainedWithDiffID());
}
/**
* If there's a severe error in the DDM chain, and if the header indicates
* "terminate chain on error", we stop processing further commands in the chain
* nor do we send any reply for them. In accordance to this, a SQLERRRM message
* indicating the severe error must have been sent! (otherwise application requestor,
* such as JCC, would not terminate the receiving of chain replies.)
*
* Each DRDA command is processed independently. DRDA defines no interdependencies
* across chained commands. A command is processed the same when received within
* a set of chained commands or received separately. The chaining was originally
* defined as a way to save network costs.
*
* @param e the SQLException raised
* @exception DRDAProtocolException
*/
private void errorInChain(SQLException e) throws DRDAProtocolException
{
if (reader.terminateChainOnErr() && (getExceptionSeverity(e) > CodePoint.SVRCOD_ERROR))
{
if (SanityManager.DEBUG) trace("terminating the chain on error...");
skipRemainder(false);
}
}
/**
* Exchange server attributes with application requester
*
* @exception DRDAProtocolException
*/
private void exchangeServerAttributes()
throws DRDAProtocolException
{
int codePoint;
correlationID = reader.readDssHeader();
if (SanityManager.DEBUG) {
if (correlationID == 0)
{
SanityManager.THROWASSERT(
"Unexpected value for correlationId = " + correlationID);
}
}
codePoint = reader.readLengthAndCodePoint( false );
// The first code point in the exchange of attributes must be EXCSAT
if (codePoint != CodePoint.EXCSAT)
{
//Throw PRCCNVRM
throw
new DRDAProtocolException(DRDAProtocolException.DRDA_Proto_PRCCNVRM,
this, codePoint,
CodePoint.PRCCNVCD_EXCSAT_FIRST_AFTER_CONN);
}
parseEXCSAT();
writeEXCSATRD();
finalizeChain();
session.setState(session.ATTEXC);
}
private boolean parseDRDAConnection() throws DRDAProtocolException
{
int codePoint;
boolean sessionOK = true;
int securityCheckCode = parseSECCHK();
if (SanityManager.DEBUG)
trace("*** SECCHKRM securityCheckCode is: "+securityCheckCode);
writeSECCHKRM(securityCheckCode);
//at this point if the security check failed, we're done, the session failed
if (securityCheckCode != 0)
{
return false;
}
correlationID = reader.readDssHeader();
codePoint = reader.readLengthAndCodePoint( false );
verifyRequiredObject(codePoint,CodePoint.ACCRDB);
int svrcod = parseACCRDB();
//If network server gets a null connection form InternalDriver, reply with
//RDBAFLRM and SQLCARD with null SQLException
if(database.getConnection() == null && databaseAccessException == null){
writeRDBfailure(CodePoint.RDBAFLRM);
return false;
}
//if earlier we couldn't access the database
if (databaseAccessException != null)
{
//if the Database was not found we will try DS
int failureType = getRdbAccessErrorCodePoint();
if (failureType == CodePoint.RDBNFNRM
|| failureType == CodePoint.RDBATHRM)
{
writeRDBfailure(failureType);
}
else
{
writeRDBfailure(CodePoint.RDBAFLRM);
}
return false;
}
else if (database.accessCount > 1 ) // already in conversation with database
{
writeRDBfailure(CodePoint.RDBACCRM);
return false;
}
else // everything is fine
writeACCRDBRM(svrcod);
// compare this application requester with previously stored
// application requesters and if we have already seen this one
// use stored application requester
session.appRequester = server.getAppRequester(appRequester);
return sessionOK;
}
/**
* Switch the DDMWriter and DDMReader to UTF8 IF supported
*/
private void switchToUtf8() {
writer.setUtf8Ccsid();
reader.setUtf8Ccsid();
}
/**
* Switch the DDMWriter and DDMReader to EBCDIC
*/
private void switchToEbcdic() {
writer.setEbcdicCcsid();
reader.setEbcdicCcsid();
}
/**
* Write RDB Failure
*
* Instance Variables
* SVRCOD - Severity Code - required
* RDBNAM - Relational Database name - required
* SRVDGN - Server Diagnostics - optional (not sent for now)
*
* @param codePoint codepoint of failure
*/
private void writeRDBfailure(int codePoint) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(codePoint);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_ERROR);
writeRDBNAM(database.getDatabaseName());
writer.endDdmAndDss();
switch(codePoint){
case CodePoint.RDBAFLRM:
//RDBAFLRM requires TYPDEFNAM and TYPDEFOVR
writer.createDssObject();
writer.writeScalarString(CodePoint.TYPDEFNAM,
CodePoint.TYPDEFNAM_QTDSQLASC);
writeTYPDEFOVR();
writer.endDss();
case CodePoint.RDBNFNRM:
case CodePoint.RDBATHRM:
writeSQLCARD(databaseAccessException,CodePoint.SVRCOD_ERROR,0,0);
case CodePoint.RDBACCRM:
//Ignore anything that was chained to the ACCRDB.
skipRemainder(false);
// Finalize chain state for whatever we wrote in
// response to ACCRDB.
finalizeChain();
break;
}
}
/* Check the database access exception and return the appropriate
error codepoint.
RDBNFNRM - Database not found
RDBATHRM - Not Authorized
RDBAFLRM - Access failure
@return RDB Access codepoint
*/
private int getRdbAccessErrorCodePoint()
{
String sqlState = databaseAccessException.getSQLState();
// These tests are ok since DATABASE_NOT_FOUND and
// AUTH_INVALID_USER_NAME are not ambigious error codes (on the first
// five characters) in SQLState. If they were, we would have to
// perform a similar check as done in method isAuthenticationException
if (sqlState.regionMatches(0,SQLState.DATABASE_NOT_FOUND,0,5)) {
// RDB not found codepoint
return CodePoint.RDBNFNRM;
} else {
if (isAuthenticationException(databaseAccessException) ||
sqlState.regionMatches(0,SQLState.AUTH_INVALID_USER_NAME,0,5)) {
// Not Authorized To RDB reply message codepoint
return CodePoint.RDBATHRM;
} else {
// RDB Access Failed Reply Message codepoint
return CodePoint.RDBAFLRM;
}
}
}
/**
* There are multiple reasons for not getting a connection, and
* all these should throw SQLExceptions with SQL state 08004
* according to the SQL standard. Since only one of these SQL
* states indicate that an authentication error has occurred, it
* is not enough to check that the SQL state is 08004 and conclude
* that authentication caused the exception to be thrown.
*
* This method tries to cast the exception to an EmbedSQLException
* and use getMessageId on that object to check for authentication
* error instead of the SQL state we get from
* SQLExceptions#getSQLState. getMessageId returns the entire id
* as defined in SQLState (e.g. 08004.C.1), while getSQLState only
* return the 5 first characters (i.e. 08004 instead of 08004.C.1)
*
* If the cast to EmbedSQLException is not successful, the
* assumption that SQL State 08004 is caused by an authentication
* failure is followed even though this is not correct. This was
* the pre DERBY-3060 way of solving the issue.
*
* @param sqlException The exception that is checked to see if
* this is really caused by an authentication failure
* @return true if sqlException is (or has to be assumed to be)
* caused by an authentication failure, false otherwise.
* @see SQLState
*/
private boolean isAuthenticationException (SQLException sqlException) {
boolean authFail = false;
// get exception which carries Derby messageID and args
SQLException se = Util.getExceptionFactory().
getArgumentFerry(sqlException);
if (se instanceof EmbedSQLException) {
// DERBY-3060: if this is an EmbedSQLException, we can
// check the messageId to find out what caused the
// exception.
String msgId = ((EmbedSQLException)se).getMessageId();
// Of the 08004.C.x messages, only
// SQLState.NET_CONNECT_AUTH_FAILED is an authentication
// exception
if (msgId.equals(SQLState.NET_CONNECT_AUTH_FAILED)) {
authFail = true;
}
} else {
String sqlState = se.getSQLState();
if (sqlState.regionMatches(0,SQLState.LOGIN_FAILED,0,5)) {
// Unchanged by DERBY-3060: This is not an
// EmbedSQLException, so we cannot check the
// messageId. As before DERBY-3060, we assume that all
// 08004 error codes are due to an authentication
// failure, even though this ambigious
authFail = true;
}
}
return authFail;
}
/**
* Verify userId and password
*
* Username and password is verified by making a connection to the
* database
*
* @return security check code, 0 is O.K.
* @exception DRDAProtocolException
*/
private int verifyUserIdPassword() throws DRDAProtocolException
{
databaseAccessException = null;
int retSecChkCode = 0;
String realName = database.getDatabaseName(); //first strip off properties
int endOfName = realName.indexOf(';');
if (endOfName != -1)
realName = realName.substring(0, endOfName);
retSecChkCode = getConnFromDatabaseName();
return retSecChkCode;
}
/**
* Get connection from a database name
*
* Username and password is verified by making a connection to the
* database
*
* @return security check code, 0 is O.K.
* @exception DRDAProtocolException
*/
private int getConnFromDatabaseName() throws DRDAProtocolException
{
Properties p = new Properties();
databaseAccessException = null;
//if we haven't got the correlation token yet, use session number for drdaID
if (session.drdaID == null)
session.drdaID = leftBrace + session.connNum + rightBrace;
p.put(Attribute.DRDAID_ATTR, session.drdaID);
// We pass extra property information for the authentication provider
// to successfully re-compute the substitute (hashed) password and
// compare it with what we've got from the requester (source).
//
// If a password attribute appears as part of the connection URL
// attributes, we then don't use the substitute hashed password
// to authenticate with the engine _as_ the one (if any) as part
// of the connection URL attributes, will be used to authenticate
// against Derby's BUILT-IN authentication provider - As a reminder,
// Derby allows password to be mentioned as part of the connection
// URL attributes, as this extra capability could be useful to pass
// passwords to external authentication providers for Derby; hence
// a password defined as part of the connection URL attributes cannot
// be substituted (single-hashed) as it is not recoverable.
if ((database.securityMechanism == CodePoint.SECMEC_USRSSBPWD) &&
(database.getDatabaseName().indexOf(Attribute.PASSWORD_ATTR) == -1))
{
p.put(Attribute.DRDA_SECMEC,
String.valueOf(database.securityMechanism));
p.put(Attribute.DRDA_SECTKN_IN,
DecryptionManager.toHexString(database.secTokenIn, 0,
database.secTokenIn.length));
p.put(Attribute.DRDA_SECTKN_OUT,
DecryptionManager.toHexString(database.secTokenOut, 0,
database.secTokenOut.length));
}
try {
database.makeConnection(p);
} catch (SQLException se) {
String sqlState = se.getSQLState();
databaseAccessException = se;
for (; se != null; se = se.getNextException())
{
if (SanityManager.DEBUG)
trace(se.getMessage());
println2Log(database.getDatabaseName(), session.drdaID, se.getMessage());
}
if (isAuthenticationException(databaseAccessException)) {
// need to set the security check code based on the
// reason the connection was denied, Derby doesn't say
// whether the userid or password caused the problem,
// so we will just return userid invalid
return CodePoint.SECCHKCD_USERIDINVALID;
} else {
return 0;
}
}
catch (Exception e)
{
// If Derby has shut down for some reason,
// we will send an agent error and then try to
// get the driver loaded again. We have to get
// rid of the client first in case they are holding
// the DriverManager lock.
println2Log(database.getDatabaseName(), session.drdaID,
"Driver not loaded"
+ e.getMessage());
try {
agentError("Driver not loaded");
}
catch (DRDAProtocolException dpe)
{
// Retry starting the server before rethrowing
// the protocol exception. Then hopfully all
// will be well when they try again.
try {
server.startNetworkServer();
} catch (Exception re) {
println2Log(database.getDatabaseName(), session.drdaID, "Failed attempt to reload driver " +re.getMessage() );
}
throw dpe;
}
}
// Everything worked so log connection to the database.
if (getLogConnections())
println2Log(database.getDatabaseName(), session.drdaID,
"Apache Derby Network Server connected to database " +
database.getDatabaseName());
return 0;
}
/**
* Parses EXCSAT (Exchange Server Attributes)
* Instance variables
* EXTNAM(External Name) - optional
* MGRLVLLS(Manager Levels) - optional
* SPVNAM(Supervisor Name) - optional
* SRVCLSNM(Server Class Name) - optional
* SRVNAM(Server Name) - optional, ignorable
* SRVRLSLV(Server Product Release Level) - optional, ignorable
*
* @exception DRDAProtocolException
*/
private void parseEXCSAT() throws DRDAProtocolException
{
int codePoint;
String strVal;
// There are three kinds of EXCSAT's we might get.
// 1) Initial Exchange attributes.
// For this we need to initialize the apprequester.
// Session state is set to ATTEXC and then the AR must
// follow up with ACCSEC and SECCHK to get the connection.
// 2) Send of EXCSAT as ping or mangager level adjustment.
// (see parseEXCSAT2())
// For this we just ignore the EXCSAT objects that
// are already set.
// 3) Send of EXCSAT for connection reset. (see parseEXCSAT2())
// This is treated just like ping and will be followed up
// by an ACCSEC request if in fact it is a connection reset.
// If we have already exchanged attributes once just
// process any new manager levels and return (case 2 and 3 above)
this.deferredReset = false; // Always reset, only set to true below.
if (appRequester != null)
{
// DERBY-3596
// Don't mess with XA requests, as the logic for these are handled
// by the server side (embedded) objects. Note that XA requests
// results in a different database object implementation, and it
// does not have the bug we are working around.
if (!appRequester.isXARequester()) {
this.deferredReset = true; // Non-XA deferred reset detected.
}
parseEXCSAT2();
return;
}
// set up a new Application Requester to store information about the
// application requester for this session
appRequester = new AppRequester();
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.EXTNAM:
appRequester.extnam = reader.readString();
if (SanityManager.DEBUG)
trace("extName = " + appRequester.extnam);
if (appRequester.extnam.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.EXTNAM);
break;
// optional
case CodePoint.MGRLVLLS:
parseMGRLVLLS(1);
break;
// optional
case CodePoint.SPVNAM:
appRequester.spvnam = reader.readString();
// This is specified as a null parameter so length should
// be zero
if (appRequester.spvnam != null)
badObjectLength(CodePoint.SPVNAM);
break;
// optional
case CodePoint.SRVNAM:
appRequester.srvnam = reader.readString();
if (SanityManager.DEBUG)
trace("serverName = " + appRequester.srvnam);
if (appRequester.srvnam.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.SRVNAM);
break;
// optional
case CodePoint.SRVRLSLV:
appRequester.srvrlslv = reader.readString();
if (SanityManager.DEBUG)
trace("serverlslv = " + appRequester.srvrlslv);
if (appRequester.srvrlslv.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.SRVRLSLV);
break;
// optional
case CodePoint.SRVCLSNM:
appRequester.srvclsnm = reader.readString();
if (SanityManager.DEBUG)
trace("serverClassName = " + appRequester.srvclsnm);
if (appRequester.srvclsnm.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.SRVCLSNM);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
}
/**
* Parses EXCSAT2 (Exchange Server Attributes)
* Instance variables
* EXTNAM(External Name) - optional
* MGRLVLLS(Manager Levels) - optional
* SPVNAM(Supervisor Name) - optional
* SRVCLSNM(Server Class Name) - optional
* SRVNAM(Server Name) - optional, ignorable
* SRVRLSLV(Server Product Release Level) - optional, ignorable
*
* @exception DRDAProtocolException
*
* This parses a second occurrence of an EXCSAT command
* The target must ignore the values for extnam, srvclsnm, srvnam and srvrlslv.
* I am also going to ignore spvnam since it should be null anyway.
* Only new managers can be added.
*/
private void parseEXCSAT2() throws DRDAProtocolException
{
int codePoint;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.EXTNAM:
case CodePoint.SRVNAM:
case CodePoint.SRVRLSLV:
case CodePoint.SRVCLSNM:
case CodePoint.SPVNAM:
reader.skipBytes();
break;
// optional
case CodePoint.MGRLVLLS:
parseMGRLVLLS(2);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
}
/**
* Parse manager levels
* Instance variables
* MGRLVL - repeatable, required
* CODEPOINT
* CCSIDMGR - CCSID Manager
* CMNAPPC - LU 6.2 Conversational Communications Manager
* CMNSYNCPT - SNA LU 6.2 SyncPoint Conversational Communications Manager
* CMNTCPIP - TCP/IP Communication Manager
* DICTIONARY - Dictionary
* RDB - Relational Database
* RSYNCMGR - Resynchronization Manager
* SECMGR - Security Manager
* SQLAM - SQL Application Manager
* SUPERVISOR - Supervisor
* SYNCPTMGR - Sync Point Manager
* VALUE
*
* On the second appearance of this codepoint, it can only add managers
*
* @param time 1 for first time this is seen, 2 for subsequent ones
* @exception DRDAProtocolException
*
*/
private void parseMGRLVLLS(int time) throws DRDAProtocolException
{
int manager, managerLevel;
int currentLevel;
// set up vectors to keep track of manager information
unknownManagers = new Vector();
knownManagers = new Vector();
errorManagers = new Vector();
errorManagersLevel = new Vector();
if (SanityManager.DEBUG)
trace("Manager Levels");
while (reader.moreDdmData())
{
manager = reader.readNetworkShort();
managerLevel = reader.readNetworkShort();
if (CodePoint.isKnownManager(manager))
{
knownManagers.add(new Integer(manager));
//if the manager level hasn't been set, set it
currentLevel = appRequester.getManagerLevel(manager);
if (currentLevel == appRequester.MGR_LEVEL_UNKNOWN)
appRequester.setManagerLevel(manager, managerLevel);
else
{
//if the level is still the same we'll ignore it
if (currentLevel != managerLevel)
{
//keep a list of conflicting managers
errorManagers.add(new Integer(manager));
errorManagersLevel.add(new Integer (managerLevel));
}
}
}
else
unknownManagers.add(new Integer(manager));
if (SanityManager.DEBUG)
trace("Manager = " + java.lang.Integer.toHexString(manager) +
" ManagerLevel " + managerLevel);
}
sqlamLevel = appRequester.getManagerLevel(CodePoint.SQLAM);
// did we have any errors
if (errorManagers.size() > 0)
{
Object [] oa = new Object[errorManagers.size()*2];
int j = 0;
for (int i = 0; i < errorManagers.size(); i++)
{
oa[j++] = errorManagers.get(i);
oa[j++] = errorManagersLevel.get(i);
}
throw new DRDAProtocolException(DRDAProtocolException.DRDA_Proto_MGRLVLRM,
this, 0,
0, oa);
}
}
/**
* Write reply to EXCSAT command
* Instance Variables
* EXTNAM - External Name (optional)
* MGRLVLLS - Manager Level List (optional)
* SRVCLSNM - Server Class Name (optional) - used by JCC
* SRVNAM - Server Name (optional)
* SRVRLSLV - Server Product Release Level (optional)
*
* @exception DRDAProtocolException
*/
private void writeEXCSATRD() throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.EXCSATRD);
writer.writeScalarString(CodePoint.EXTNAM, server.att_extnam);
//only reply with manager levels if we got sent some
if (knownManagers != null && knownManagers.size() > 0)
writeMGRLEVELS();
writer.writeScalarString(CodePoint.SRVCLSNM, server.att_srvclsnm);
writer.writeScalarString(CodePoint.SRVNAM, server.ATT_SRVNAM);
writer.writeScalarString(CodePoint.SRVRLSLV, server.att_srvrlslv);
writer.endDdmAndDss();
}
/**
* Write manager levels
* The target server must not provide information for any target
* managers unless the source explicitly requests it.
* For each manager class, if the target server's support level
* is greater than or equal to the source server's level, then the source
* server's level is returned for that class if the target server can operate
* at the source's level; otherwise a level 0 is returned. If the target
* server's support level is less than the source server's level, the
* target server's level is returned for that class. If the target server
* does not recognize the code point of a manager class or does not support
* that class, it returns a level of 0. The target server then waits
* for the next command or for the source server to terminate communications.
* When the source server receives EXCSATRD, it must compare each of the entries
* in the mgrlvlls parameter it received to the corresponding entries in the mgrlvlls
* parameter it sent. If any level mismatches, the source server must decide
* whether it can use or adjust to the lower level of target support for that manager
* class. There are no architectural criteria for making this decision.
* The source server can terminate communications or continue at the target
* servers level of support. It can also attempt to use whatever
* commands its user requests while receiving error reply messages for real
* functional mismatches.
* The manager levels the source server specifies or the target server
* returns must be compatible with the manager-level dependencies of the specified
* manangers. Incompatible manager levels cannot be specified.
* Instance variables
* MGRLVL - repeatable, required
* CODEPOINT
* CCSIDMGR - CCSID Manager
* CMNAPPC - LU 6.2 Conversational Communications Manager
* CMNSYNCPT - SNA LU 6.2 SyncPoint Conversational Communications Manager
* CMNTCPIP - TCP/IP Communication Manager
* DICTIONARY - Dictionary
* RDB - Relational Database
* RSYNCMGR - Resynchronization Manager
* SECMGR - Security Manager
* SQLAM - SQL Application Manager
* SUPERVISOR - Supervisor
* SYNCPTMGR - Sync Point Manager
* XAMGR - XA manager
* VALUE
*/
private void writeMGRLEVELS() throws DRDAProtocolException
{
int manager;
int appLevel;
int serverLevel;
writer.startDdm(CodePoint.MGRLVLLS);
for (int i = 0; i < knownManagers.size(); i++)
{
manager = ((Integer)knownManagers.get(i)).intValue();
appLevel = appRequester.getManagerLevel(manager);
serverLevel = server.getManagerLevel(manager);
if (serverLevel >= appLevel)
{
//Note appLevel has already been set to 0 if we can't support
//the original app Level
writer.writeCodePoint4Bytes(manager, appLevel);
}
else
{
writer.writeCodePoint4Bytes(manager, serverLevel);
// reset application manager level to server level
appRequester.setManagerLevel(manager, serverLevel);
}
}
// write 0 for all unknown managers
for (int i = 0; i < unknownManagers.size(); i++)
{
manager = ((Integer)unknownManagers.get(i)).intValue();
writer.writeCodePoint4Bytes(manager, 0);
}
writer.endDdm();
}
/**
* Parse Access Security
*
* If the target server supports the SECMEC requested by the application requester
* then a single value is returned and it is identical to the SECMEC value
* in the ACCSEC command. If the target server does not support the SECMEC
* requested, then one or more values are returned and the application requester
* must choose one of these values for the security mechanism.
* We currently support
* - user id and password (default for JCC)
* - encrypted user id and password
* - strong password substitute (USRSSBPWD w/
* Derby network client only)
*
* Instance variables
* SECMGRNM - security manager name - optional
* SECMEC - security mechanism - required
* RDBNAM - relational database name - optional
* SECTKN - security token - optional, (required if sec mech. needs it)
*
* @return security check code - 0 if everything O.K.
*/
private int parseACCSEC() throws DRDAProtocolException
{
int securityCheckCode = 0;
int securityMechanism = 0;
byte [] secTokenIn = null;
reader.markCollection();
int codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch(codePoint)
{
//optional
case CodePoint.SECMGRNM:
// this is defined to be 0 length
if (reader.getDdmLength() != 0)
badObjectLength(CodePoint.SECMGRNM);
break;
//required
case CodePoint.SECMEC:
checkLength(CodePoint.SECMEC, 2);
securityMechanism = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("parseACCSEC - Security mechanism = " + securityMechanism);
// if Property.DRDA_PROP_SECURITYMECHANISM has been set, then
// network server only accepts connections which use that
// security mechanism. No other types of connections
// are accepted.
// Make check to see if this property has been set.
// if set, and if the client requested security mechanism
// is not the same, then return a security check code
// that the server does not support/allow this security
// mechanism
if ( (server.getSecurityMechanism() !=
NetworkServerControlImpl.INVALID_OR_NOTSET_SECURITYMECHANISM)
&& securityMechanism != server.getSecurityMechanism())
{
securityCheckCode = CodePoint.SECCHKCD_NOTSUPPORTED;
if (SanityManager.DEBUG) {
trace("parseACCSEC - SECCHKCD_NOTSUPPORTED [1] - " +
securityMechanism + " <> " +
server.getSecurityMechanism() + "\n");
}
}
else
{
// for plain text userid,password USRIDPWD, and USRIDONL
// no need of decryptionManager
if (securityMechanism != CodePoint.SECMEC_USRIDPWD &&
securityMechanism != CodePoint.SECMEC_USRIDONL)
{
// These are the only other mechanisms we understand
if (((securityMechanism != CodePoint.SECMEC_EUSRIDPWD) ||
(securityMechanism == CodePoint.SECMEC_EUSRIDPWD &&
!server.supportsEUSRIDPWD())
) &&
(securityMechanism !=
CodePoint.SECMEC_USRSSBPWD))
//securityCheckCode = CodePoint.SECCHKCD_NOTSUPPORTED;
{
securityCheckCode = CodePoint.SECCHKCD_NOTSUPPORTED;
if (SanityManager.DEBUG) {
trace("parseACCSEC - SECCHKCD_NOTSUPPORTED [2]\n");
}
}
else
{
// We delay the initialization and required
// processing for SECMEC_USRSSBPWD as we need
// to ensure the database is booted so that
// we can verify that the current auth scheme
// is set to BUILT-IN or NONE. For this we need
// to have the RDBNAM codepoint available.
//
// See validateSecMecUSRSSBPWD() call below
if (securityMechanism ==
CodePoint.SECMEC_USRSSBPWD)
break;
// SECMEC_EUSRIDPWD initialization
try {
if (decryptionManager == null)
decryptionManager = new DecryptionManager();
myPublicKey = decryptionManager.obtainPublicKey();
} catch (SQLException e) {
println2Log(null, session.drdaID, e.getMessage());
// Local security service non-retryable error.
securityCheckCode = CodePoint.SECCHKCD_0A;
}
}
}
}
break;
//optional (currently required for Derby - needed for
// DERBY-528 as well)
case CodePoint.RDBNAM:
String dbname = parseRDBNAM();
Database d = session.getDatabase(dbname);
if (d == null)
initializeDatabase(dbname);
else
{
// reset database for connection re-use
// DERBY-3596
// If we are reusing resources for a new physical
// connection, reset the database object. If the client
// is in the process of creating a new logical
// connection only, don't reset the database object.
if (!deferredReset) {
d.reset();
}
database = d;
}
break;
//optional - depending on security Mechanism
case CodePoint.SECTKN:
secTokenIn = reader.readBytes();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for required CodePoint's
if (securityMechanism == 0)
missingCodePoint(CodePoint.SECMEC);
if (database == null)
initializeDatabase(null);
database.securityMechanism = securityMechanism;
database.secTokenIn = secTokenIn;
// If security mechanism is SECMEC_USRSSBPWD, then ensure it can be
// used for the database or system based on the client's connection
// URL and its identity.
if (securityCheckCode == 0 &&
(database.securityMechanism == CodePoint.SECMEC_USRSSBPWD))
{
if (SanityManager.DEBUG)
SanityManager.ASSERT((securityCheckCode == 0),
"SECMEC_USRSSBPWD: securityCheckCode should not " +
"already be set, found it initialized with " +
"a value of '" + securityCheckCode + "'.");
securityCheckCode = validateSecMecUSRSSBPWD();
}
// need security token
if (securityCheckCode == 0 &&
(database.securityMechanism == CodePoint.SECMEC_EUSRIDPWD ||
database.securityMechanism == CodePoint.SECMEC_USRSSBPWD) &&
database.secTokenIn == null)
securityCheckCode = CodePoint.SECCHKCD_SECTKNMISSING_OR_INVALID;
// shouldn't have security token
if (securityCheckCode == 0 &&
(database.securityMechanism == CodePoint.SECMEC_USRIDPWD ||
database.securityMechanism == CodePoint.SECMEC_USRIDONL) &&
database.secTokenIn != null)
securityCheckCode = CodePoint.SECCHKCD_SECTKNMISSING_OR_INVALID;
if (SanityManager.DEBUG)
trace("** ACCSECRD securityCheckCode is: " + securityCheckCode);
// If the security check was successful set the session state to
// security accesseed. Otherwise go back to attributes exchanged so we
// require another ACCSEC
if (securityCheckCode == 0)
session.setState(session.SECACC);
else
session.setState(session.ATTEXC);
return securityCheckCode;
}
/**
* Parse OPNQRY
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* QRYBLKSZ - Query Block Size - required
* QRYBLKCTL - Query Block Protocol Control - optional
* MAXBLKEXT - Maximum Number of Extra Blocks - optional - default value 0
* OUTOVROPT - Output Override Option
* QRYROWSET - Query Rowset Size - optional - level 7
* MONITOR - Monitor events - optional.
*
* @return RDB Package Name, Consistency Token, and Section Number
* @exception DRDAProtocolException
*/
private Pkgnamcsn parseOPNQRY() throws DRDAProtocolException, SQLException
{
Pkgnamcsn pkgnamcsn = null;
boolean gotQryblksz = false;
int blksize = 0;
int qryblkctl = CodePoint.QRYBLKCTL_DEFAULT;
int maxblkext = CodePoint.MAXBLKEXT_DEFAULT;
int qryrowset = CodePoint.QRYROWSET_DEFAULT;
int qryclsimp = DRDAResultSet.QRYCLSIMP_DEFAULT;
int outovropt = CodePoint.OUTOVRFRS;
reader.markCollection();
int codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch(codePoint)
{
//optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.OPNQRY);
break;
//required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
//required
case CodePoint.QRYBLKSZ:
blksize = parseQRYBLKSZ();
gotQryblksz = true;
break;
//optional
case CodePoint.QRYBLKCTL:
qryblkctl = reader.readNetworkShort();
//The only type of query block control we can specify here
//is forced fixed row
if (qryblkctl != CodePoint.FRCFIXROW)
invalidCodePoint(qryblkctl);
if (SanityManager.DEBUG)
trace("!!qryblkctl = "+Integer.toHexString(qryblkctl));
gotQryblksz = true;
break;
//optional
case CodePoint.MAXBLKEXT:
maxblkext = reader.readSignedNetworkShort();
if (SanityManager.DEBUG)
trace("maxblkext = "+maxblkext);
break;
// optional
case CodePoint.OUTOVROPT:
outovropt = parseOUTOVROPT();
break;
//optional
case CodePoint.QRYROWSET:
//Note minimum for OPNQRY is 0
qryrowset = parseQRYROWSET(0);
break;
case CodePoint.QRYCLSIMP:
// Implicitly close non-scrollable cursor
qryclsimp = parseQRYCLSIMP();
break;
case CodePoint.QRYCLSRLS:
// Ignore release of read locks. Nothing we can do here
parseQRYCLSRLS();
break;
// optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for required variables
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
if (!gotQryblksz)
missingCodePoint(CodePoint.QRYBLKSZ);
// get the statement we are opening
DRDAStatement stmt = database.getDRDAStatement(pkgnamcsn);
if (stmt == null)
{
//XXX should really throw a SQL Exception here
invalidValue(CodePoint.PKGNAMCSN);
}
// check that this statement is not already open
// commenting this check out for now
// it turns out that JCC doesn't send a close if executeQuery is
// done again without closing the previous result set
// this check can't be done since the second executeQuery should work
//if (stmt.state != DRDAStatement.NOT_OPENED)
//{
// writeQRYPOPRM();
// pkgnamcsn = null;
//}
//else
//{
stmt.setOPNQRYOptions(blksize,qryblkctl,maxblkext,outovropt,
qryrowset, qryclsimp);
//}
// read the command objects
// for ps with parameter
if (reader.isChainedWithSameID())
{
if (SanityManager.DEBUG)
trace("&&&&&& parsing SQLDTA");
parseOPNQRYobjects(stmt);
}
return pkgnamcsn;
}
/**
* Parse OPNQRY objects
* Objects
* TYPDEFNAM - Data type definition name - optional
* TYPDEFOVR - Type defintion overrides - optional
* SQLDTA- SQL Program Variable Data - optional
*
* If TYPDEFNAM and TYPDEFOVR are supplied, they apply to the objects
* sent with the statement. Once the statement is over, the default values
* sent in the ACCRDB are once again in effect. If no values are supplied,
* the values sent in the ACCRDB are used.
* Objects may follow in one DSS or in several DSS chained together.
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void parseOPNQRYobjects(DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
int codePoint;
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, stmt, parseTYPDEFNAM());
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(stmt);
break;
// optional
case CodePoint.SQLDTA:
parseSQLDTA(stmt);
break;
// optional
case CodePoint.EXTDTA:
readAndSetAllExtParams(stmt, false);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
}
/**
* Parse OUTOVROPT - this indicates whether output description can be
* overridden on just the first CNTQRY or on any CNTQRY
*
* @return output override option
* @exception DRDAProtocolException
*/
private int parseOUTOVROPT() throws DRDAProtocolException
{
checkLength(CodePoint.OUTOVROPT, 1);
int outovropt = reader.readUnsignedByte();
if (SanityManager.DEBUG)
trace("output override option: "+outovropt);
if (outovropt != CodePoint.OUTOVRFRS && outovropt != CodePoint.OUTOVRANY)
invalidValue(CodePoint.OUTOVROPT);
return outovropt;
}
/**
* Parse QRYBLSZ - this gives the maximum size of the query blocks that
* can be returned to the requester
*
* @return query block size
* @exception DRDAProtocolException
*/
private int parseQRYBLKSZ() throws DRDAProtocolException
{
checkLength(CodePoint.QRYBLKSZ, 4);
int blksize = reader.readNetworkInt();
if (SanityManager.DEBUG)
trace("qryblksz = "+blksize);
if (blksize < CodePoint.QRYBLKSZ_MIN || blksize > CodePoint.QRYBLKSZ_MAX)
invalidValue(CodePoint.QRYBLKSZ);
return blksize;
}
/**
* Parse QRYROWSET - this is the number of rows to return
*
* @param minVal - minimum value
* @return query row set size
* @exception DRDAProtocolException
*/
private int parseQRYROWSET(int minVal) throws DRDAProtocolException
{
checkLength(CodePoint.QRYROWSET, 4);
int qryrowset = reader.readNetworkInt();
if (SanityManager.DEBUG)
trace("qryrowset = " + qryrowset);
if (qryrowset < minVal || qryrowset > CodePoint.QRYROWSET_MAX)
invalidValue(CodePoint.QRYROWSET);
return qryrowset;
}
/** Parse a QRYCLSIMP - Implicitly close non-scrollable cursor
* after end of data.
* @return true to close on end of data
*/
private int parseQRYCLSIMP() throws DRDAProtocolException
{
checkLength(CodePoint.QRYCLSIMP, 1);
int qryclsimp = reader.readUnsignedByte();
if (SanityManager.DEBUG)
trace ("qryclsimp = " + qryclsimp);
if (qryclsimp != CodePoint.QRYCLSIMP_SERVER_CHOICE &&
qryclsimp != CodePoint.QRYCLSIMP_YES &&
qryclsimp != CodePoint.QRYCLSIMP_NO )
invalidValue(CodePoint.QRYCLSIMP);
return qryclsimp;
}
private int parseQRYCLSRLS() throws DRDAProtocolException
{
reader.skipBytes();
return 0;
}
/**
* Write a QRYPOPRM - Query Previously opened
* Instance Variables
* SVRCOD - Severity Code - required - 8 ERROR
* RDBNAM - Relational Database Name - required
* PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number - required
*
* @exception DRDAProtocolException
*/
private void writeQRYPOPRM() throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.QRYPOPRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_ERROR);
writeRDBNAM(database.getDatabaseName());
writePKGNAMCSN();
writer.endDdmAndDss();
}
/**
* Write a QRYNOPRM - Query Not Opened
* Instance Variables
* SVRCOD - Severity Code - required - 4 Warning 8 ERROR
* RDBNAM - Relational Database Name - required
* PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number - required
*
* @param svrCod Severity Code
* @exception DRDAProtocolException
*/
private void writeQRYNOPRM(int svrCod) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.QRYNOPRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, svrCod);
writeRDBNAM(database.getDatabaseName());
writePKGNAMCSN();
writer.endDdmAndDss();
}
/**
* Write a OPNQFLRM - Open Query Failure
* Instance Variables
* SVRCOD - Severity Code - required - 8 ERROR
* RDBNAM - Relational Database Name - required
*
* @param e Exception describing failure
*
* @exception DRDAProtocolException
*/
private void writeOPNQFLRM(SQLException e) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.OPNQFLRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_ERROR);
writeRDBNAM(database.getDatabaseName());
writer.endDdm();
writer.startDdm(CodePoint.SQLCARD);
writeSQLCAGRP(e, 0, 0);
writer.endDdmAndDss();
}
/**
* Write PKGNAMCSN
* Instance Variables
* NAMESYMDR - database name - not validated
* RDBCOLID - RDB Collection Identifier
* PKGID - RDB Package Identifier
* PKGCNSTKN - RDB Package Consistency Token
* PKGSN - RDB Package Section Number
*
* There are two possible formats, fixed and extended which includes length
* information for the strings
*
* @throws DRDAProtocolException
*/
private void writePKGNAMCSN(byte[] pkgcnstkn) throws DRDAProtocolException
{
writer.startDdm(CodePoint.PKGNAMCSN);
if (rdbnam.length() <= CodePoint.RDBNAM_LEN &&
rdbcolid.length() <= CodePoint.RDBCOLID_LEN &&
pkgid.length() <= CodePoint.PKGID_LEN)
{ // if none of RDBNAM, RDBCOLID and PKGID have a length of
// more than 18, use fixed format
writer.writeScalarPaddedString(rdbnam, CodePoint.RDBNAM_LEN);
writer.writeScalarPaddedString(rdbcolid, CodePoint.RDBCOLID_LEN);
writer.writeScalarPaddedString(pkgid, CodePoint.PKGID_LEN);
writer.writeScalarPaddedBytes(pkgcnstkn,
CodePoint.PKGCNSTKN_LEN, (byte) 0);
writer.writeShort(pkgsn);
}
else // extended format
{
int len = Math.max(CodePoint.RDBNAM_LEN, rdbnam.length());
writer.writeShort(len);
writer.writeScalarPaddedString(rdbnam, len);
len = Math.max(CodePoint.RDBCOLID_LEN, rdbcolid.length());
writer.writeShort(len);
writer.writeScalarPaddedString(rdbcolid, len);
len = Math.max(CodePoint.PKGID_LEN, pkgid.length());
writer.writeShort(len);
writer.writeScalarPaddedString(pkgid, len);
writer.writeScalarPaddedBytes(pkgcnstkn,
CodePoint.PKGCNSTKN_LEN, (byte) 0);
writer.writeShort(pkgsn);
}
writer.endDdm();
}
private void writePKGNAMCSN() throws DRDAProtocolException
{
writePKGNAMCSN(pkgcnstkn.getBytes());
}
/**
* Parse CNTQRY - Continue Query
* Instance Variables
* RDBNAM - Relational Database Name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number - required
* QRYBLKSZ - Query Block Size - required
* QRYRELSCR - Query Relative Scrolling Action - optional
* QRYSCRORN - Query Scroll Orientation - optional - level 7
* QRYROWNBR - Query Row Number - optional
* QRYROWSNS - Query Row Sensitivity - optional - level 7
* QRYBLKRST - Query Block Reset - optional - level 7
* QRYRTNDTA - Query Returns Data - optional - level 7
* QRYROWSET - Query Rowset Size - optional - level 7
* QRYRFRTBL - Query Refresh Answer Set Table - optional
* NBRROW - Number of Fetch or Insert Rows - optional
* MAXBLKEXT - Maximum number of extra blocks - optional
* RTNEXTDTA - Return of EXTDTA Option - optional
* MONITOR - Monitor events - optional.
*
* @return DRDAStatement we are continuing
* @throws DRDAProtocolException
* @throws SQLException
*/
private DRDAStatement parseCNTQRY() throws DRDAProtocolException, SQLException
{
byte val;
Pkgnamcsn pkgnamcsn = null;
boolean gotQryblksz = false;
boolean qryrelscr = true;
long qryrownbr = 1;
boolean qryrfrtbl = false;
int nbrrow = 1;
int blksize = 0;
int maxblkext = -1;
long qryinsid;
boolean gotQryinsid = false;
int qryscrorn = CodePoint.QRYSCRREL;
boolean qryrowsns = false;
boolean gotQryrowsns = false;
boolean qryblkrst = false;
boolean qryrtndta = true;
int qryrowset = CodePoint.QRYROWSET_DEFAULT;
int rtnextdta = CodePoint.RTNEXTROW;
reader.markCollection();
int codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch(codePoint)
{
//optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.CNTQRY);
break;
//required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
//required
case CodePoint.QRYBLKSZ:
blksize = parseQRYBLKSZ();
gotQryblksz = true;
break;
//optional
case CodePoint.QRYRELSCR:
qryrelscr = readBoolean(CodePoint.QRYRELSCR);
if (SanityManager.DEBUG)
trace("qryrelscr = "+qryrelscr);
break;
//optional
case CodePoint.QRYSCRORN:
checkLength(CodePoint.QRYSCRORN, 1);
qryscrorn = reader.readUnsignedByte();
if (SanityManager.DEBUG)
trace("qryscrorn = "+qryscrorn);
switch (qryscrorn)
{
case CodePoint.QRYSCRREL:
case CodePoint.QRYSCRABS:
case CodePoint.QRYSCRAFT:
case CodePoint.QRYSCRBEF:
break;
default:
invalidValue(CodePoint.QRYSCRORN);
}
break;
//optional
case CodePoint.QRYROWNBR:
checkLength(CodePoint.QRYROWNBR, 8);
qryrownbr = reader.readNetworkLong();
if (SanityManager.DEBUG)
trace("qryrownbr = "+qryrownbr);
break;
//optional
case CodePoint.QRYROWSNS:
checkLength(CodePoint.QRYROWSNS, 1);
qryrowsns = readBoolean(CodePoint.QRYROWSNS);
if (SanityManager.DEBUG)
trace("qryrowsns = "+qryrowsns);
gotQryrowsns = true;
break;
//optional
case CodePoint.QRYBLKRST:
checkLength(CodePoint.QRYBLKRST, 1);
qryblkrst = readBoolean(CodePoint.QRYBLKRST);
if (SanityManager.DEBUG)
trace("qryblkrst = "+qryblkrst);
break;
//optional
case CodePoint.QRYRTNDTA:
qryrtndta = readBoolean(CodePoint.QRYRTNDTA);
if (SanityManager.DEBUG)
trace("qryrtndta = "+qryrtndta);
break;
//optional
case CodePoint.QRYROWSET:
//Note minimum for CNTQRY is 1
qryrowset = parseQRYROWSET(1);
if (SanityManager.DEBUG)
trace("qryrowset = "+qryrowset);
break;
//optional
case CodePoint.QRYRFRTBL:
qryrfrtbl = readBoolean(CodePoint.QRYRFRTBL);
if (SanityManager.DEBUG)
trace("qryrfrtbl = "+qryrfrtbl);
break;
//optional
case CodePoint.NBRROW:
checkLength(CodePoint.NBRROW, 4);
nbrrow = reader.readNetworkInt();
if (SanityManager.DEBUG)
trace("nbrrow = "+nbrrow);
break;
//optional
case CodePoint.MAXBLKEXT:
checkLength(CodePoint.MAXBLKEXT, 2);
maxblkext = reader.readSignedNetworkShort();
if (SanityManager.DEBUG)
trace("maxblkext = "+maxblkext);
break;
//optional
case CodePoint.RTNEXTDTA:
checkLength(CodePoint.RTNEXTDTA, 1);
rtnextdta = reader.readUnsignedByte();
if (rtnextdta != CodePoint.RTNEXTROW &&
rtnextdta != CodePoint.RTNEXTALL)
invalidValue(CodePoint.RTNEXTDTA);
if (SanityManager.DEBUG)
trace("rtnextdta = "+rtnextdta);
break;
// required for SQLAM >= 7
case CodePoint.QRYINSID:
checkLength(CodePoint.QRYINSID, 8);
qryinsid = reader.readNetworkLong();
gotQryinsid = true;
if (SanityManager.DEBUG)
trace("qryinsid = "+qryinsid);
break;
// optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for required variables
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
if (!gotQryblksz)
missingCodePoint(CodePoint.QRYBLKSZ);
if (sqlamLevel >= MGRLVL_7 && !gotQryinsid)
missingCodePoint(CodePoint.QRYINSID);
// get the statement we are continuing
DRDAStatement stmt = database.getDRDAStatement(pkgnamcsn);
if (stmt == null)
{
//XXX should really throw a SQL Exception here
invalidValue(CodePoint.CNTQRY);
}
if (stmt.rsIsClosed())
{
writeQRYNOPRM(CodePoint.SVRCOD_ERROR);
skipRemainder(true);
return null;
}
stmt.setQueryOptions(blksize,qryrelscr,qryrownbr,qryrfrtbl,nbrrow,maxblkext,
qryscrorn,qryrowsns,qryblkrst,qryrtndta,qryrowset,
rtnextdta);
if (reader.isChainedWithSameID())
parseCNTQRYobjects(stmt);
return stmt;
}
/**
* Skip remainder of current DSS and all chained DSS'es
*
* @param onlySkipSameIds True if we _only_ want to skip DSS'es
* that are chained with the SAME id as the current DSS.
* False means skip ALL chained DSSes, whether they're
* chained with same or different ids.
* @exception DRDAProtocolException
*/
private void skipRemainder(boolean onlySkipSameIds) throws DRDAProtocolException
{
reader.skipDss();
while (reader.isChainedWithSameID() ||
(!onlySkipSameIds && reader.isChainedWithDiffID()))
{
reader.readDssHeader();
reader.skipDss();
}
}
/**
* Parse CNTQRY objects
* Instance Variables
* OUTOVR - Output Override Descriptor - optional
*
* @param stmt DRDA statement we are working on
* @exception DRDAProtocolException
*/
private void parseCNTQRYobjects(DRDAStatement stmt) throws DRDAProtocolException, SQLException
{
int codePoint;
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// optional
case CodePoint.OUTOVR:
parseOUTOVR(stmt);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
}
/**
* Parse OUTOVR - Output Override Descriptor
* This specifies the output format for data to be returned as output to a SQL
* statement or as output from a query.
*
* @param stmt DRDA statement this applies to
* @exception DRDAProtocolException
*/
private void parseOUTOVR(DRDAStatement stmt) throws DRDAProtocolException, SQLException
{
boolean first = true;
int numVars;
int dtaGrpLen;
int tripType;
int tripId;
int precision;
int start = 0;
while (true)
{
dtaGrpLen = reader.readUnsignedByte();
tripType = reader.readUnsignedByte();
tripId = reader.readUnsignedByte();
// check if we have reached the end of the data
if (tripType == FdocaConstants.RLO_TRIPLET_TYPE)
{
//read last part of footer
reader.skipBytes();
break;
}
numVars = (dtaGrpLen - 3) / 3;
if (SanityManager.DEBUG)
trace("num of vars is: "+numVars);
int[] outovr_drdaType = null;
if (first)
{
outovr_drdaType = new int[numVars];
first = false;
}
else
{
int[] oldoutovr_drdaType = stmt.getOutovr_drdaType();
int oldlen = oldoutovr_drdaType.length;
// create new array and copy over already read stuff
outovr_drdaType = new int[oldlen + numVars];
System.arraycopy(oldoutovr_drdaType, 0,
outovr_drdaType,0,
oldlen);
start = oldlen;
}
for (int i = start; i < numVars + start; i++)
{
int drdaType = reader.readUnsignedByte();
if (!database.supportsLocator()) {
// ignore requests for locator when it is not supported
if ((drdaType >= DRDAConstants.DRDA_TYPE_LOBLOC)
&& (drdaType <= DRDAConstants.DRDA_TYPE_NCLOBLOC)) {
if (SanityManager.DEBUG) {
trace("ignoring drdaType: " + drdaType);
}
reader.readNetworkShort(); // Skip rest
continue;
}
}
outovr_drdaType[i] = drdaType;
if (SanityManager.DEBUG)
trace("drdaType is: "+ outovr_drdaType[i]);
precision = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("drdaLength is: "+precision);
outovr_drdaType[i] |= (precision << 8);
}
stmt.setOutovr_drdaType(outovr_drdaType);
}
}
/**
* Piggy-back any modified session attributes on the current message. Writes
* a PBSD conataining one or both of PBSD_ISO and PBSD_SCHEMA. PBSD_ISO is
* followed by the jdbc isolation level as an unsigned byte. PBSD_SCHEMA is
* followed by the name of the current schema as an UTF-8 String.
* @throws java.sql.SQLException
* @throws com.splicemachine.db.impl.drda.DRDAProtocolException
*/
private void writePBSD() throws SQLException, DRDAProtocolException
{
if (!appRequester.supportsSessionDataCaching()) {
return;
}
PiggyBackedSessionData pbsd = database.getPiggyBackedSessionData(true);
if (SanityManager.DEBUG) {
SanityManager.ASSERT(pbsd != null, "pbsd is not expected to be null");
}
// DERBY-3596
// Reset the flag. In sane builds it is used to avoid an assert, but
// we want to reset it as soon as possible to avoid masking real bugs.
// We have to do this because we are changing the connection state
// at an unexpected time (deferred reset, see parseSECCHK). This was
// done to avoid having to change the client code.
this.deferredReset = false;
pbsd.refresh();
if (pbsd.isModified()) {
writer.createDssReply();
writer.startDdm(CodePoint.PBSD);
if (pbsd.isIsoModified()) {
writer.writeScalar1Byte(CodePoint.PBSD_ISO, pbsd.getIso());
}
if (pbsd.isSchemaModified()) {
writer.startDdm(CodePoint.PBSD_SCHEMA);
writer.writeString(pbsd.getSchema());
writer.endDdm();
}
writer.endDdmAndDss();
}
pbsd.setUnmodified();
if (SanityManager.DEBUG) {
PiggyBackedSessionData pbsdNew =
database.getPiggyBackedSessionData(true);
SanityManager.ASSERT(pbsdNew == pbsd,
"pbsdNew and pbsd are expected to reference " +
"the same object");
pbsd.refresh();
SanityManager.ASSERT
(!pbsd.isModified(),
"pbsd=("+pbsd+") is not expected to be modified");
}
}
/**
* Write OPNQRYRM - Open Query Complete
* Instance Variables
* SVRCOD - Severity Code - required
* QRYPRCTYP - Query Protocol Type - required
* SQLCSRHLD - Hold Cursor Position - optional
* QRYATTSCR - Query Attribute for Scrollability - optional - level 7
* QRYATTSNS - Query Attribute for Sensitivity - optional - level 7
* QRYATTUPD - Query Attribute for Updatability -optional - level 7
* QRYINSID - Query Instance Identifier - required - level 7
* SRVDGN - Server Diagnostic Information - optional
*
* @param isDssObject - return as a DSS object (part of a reply)
* @param stmt - DRDA statement we are processing
*
* @exception DRDAProtocolException
*/
private void writeOPNQRYRM(boolean isDssObject, DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
if (SanityManager.DEBUG)
trace("WriteOPNQRYRM");
if (isDssObject)
writer.createDssObject();
else
writer.createDssReply();
writer.startDdm(CodePoint.OPNQRYRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD,CodePoint.SVRCOD_INFO);
// There is currently a problem specifying LMTBLKPRC for LOBs with JCC
// JCC will throw an ArrayOutOfBounds exception. Once this is fixed, we
// don't need to pass the two arguments for getQryprctyp.
int prcType = stmt.getQryprctyp();
if (SanityManager.DEBUG)
trace("sending QRYPRCTYP: " + prcType);
writer.writeScalar2Bytes(CodePoint.QRYPRCTYP, prcType);
//pass the SQLCSRHLD codepoint only if statement producing the ResultSet has
//hold cursors over commit set. In case of stored procedures which use server-side
//JDBC, the holdability of the ResultSet will be the holdability of the statement
//in the stored procedure, not the holdability of the calling statement.
if (stmt.getCurrentDrdaResultSet().withHoldCursor == ResultSet.HOLD_CURSORS_OVER_COMMIT)
writer.writeScalar1Byte(CodePoint.SQLCSRHLD, CodePoint.TRUE);
if (sqlamLevel >= MGRLVL_7)
{
writer.writeScalarHeader(CodePoint.QRYINSID, 8);
//This is implementer defined. DB2 uses this for the nesting level
//of the query. A query from an application would be nesting level 0,
//from a stored procedure, nesting level 1, from a recursive call of
//a stored procedure, nesting level 2, etc.
writer.writeInt(0);
//This is a unique sequence number per session
writer.writeInt(session.qryinsid++);
//Write the scroll attributes if they are set
if (stmt.isScrollable())
{
writer.writeScalar1Byte(CodePoint.QRYATTSCR, CodePoint.TRUE);
if ((stmt.getConcurType() == ResultSet.CONCUR_UPDATABLE) &&
(stmt.getResultSet().getType() ==
ResultSet.TYPE_SCROLL_INSENSITIVE)) {
writer.writeScalar1Byte(CodePoint.QRYATTSNS,
CodePoint.QRYSNSSTC);
} else {
writer.writeScalar1Byte(CodePoint.QRYATTSNS,
CodePoint.QRYINS);
}
}
if (stmt.getConcurType() == ResultSet.CONCUR_UPDATABLE) {
if (stmt.getResultSet() != null) {
// Resultset concurrency can be less than statement
// concurreny if the underlying language resultset
// is not updatable.
if (stmt.getResultSet().getConcurrency() ==
ResultSet.CONCUR_UPDATABLE) {
writer.writeScalar1Byte(CodePoint.QRYATTUPD,
CodePoint.QRYUPD);
} else {
writer.writeScalar1Byte(CodePoint.QRYATTUPD,
CodePoint.QRYRDO);
}
} else {
writer.writeScalar1Byte(CodePoint.QRYATTUPD,
CodePoint.QRYUPD);
}
} else {
writer.writeScalar1Byte(CodePoint.QRYATTUPD, CodePoint.QRYRDO);
}
}
writer.endDdmAndDss ();
}
/**
* Write ENDQRYRM - query process has terminated in such a manner that the
* query or result set is now closed. It cannot be resumed with the CNTQRY
* command or closed with the CLSQRY command
* @param svrCod Severity code - WARNING or ERROR
* @exception DRDAProtocolException
*/
private void writeENDQRYRM(int svrCod) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.ENDQRYRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD,svrCod);
writer.endDdmAndDss();
}
/**
* Write ABNUOWRM - query process has terminated in an error condition
* such as deadlock or lock timeout.
* Severity code is always error
* * @exception DRDAProtocolException
*/
private void writeABNUOWRM() throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.ABNUOWRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD,CodePoint.SVRCOD_ERROR);
writeRDBNAM(database.getDatabaseName());
writer.endDdmAndDss();
}
/**
* Parse database name
*
* @return database name
*
* @exception DRDAProtocolException
*/
private String parseRDBNAM() throws DRDAProtocolException
{
String name;
byte [] rdbName = reader.readBytes();
if (rdbName.length == 0)
{
// throw RDBNFNRM
rdbNotFound(null);
}
//SQLAM level 7 allows db name up to 255, level 6 fixed len 18
if (rdbName.length < CodePoint.RDBNAM_LEN || rdbName.length > CodePoint.MAX_NAME)
badObjectLength(CodePoint.RDBNAM);
name = reader.convertBytes(rdbName);
// trim trailing blanks from the database name
name = name.trim();
if (SanityManager.DEBUG)
trace("RdbName " + name);
return name;
}
/**
* Write ACCSECRD
* If the security mechanism is known, we just send it back along with
* the security token if encryption is going to be used.
* If the security mechanism is not known, we send a list of the ones
* we know.
* Instance Variables
* SECMEC - security mechanism - required
* SECTKN - security token - optional (required if security mechanism
* uses encryption)
* SECCHKCD - security check code - error occurred in processing ACCSEC
*
* @param securityCheckCode
*
* @exception DRDAProtocolException
*/
private void writeACCSECRD(int securityCheckCode)
throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.ACCSECRD);
if (securityCheckCode != CodePoint.SECCHKCD_NOTSUPPORTED)
writer.writeScalar2Bytes(CodePoint.SECMEC, database.securityMechanism);
else
{
// if server doesnt recognize or allow the client requested security mechanism,
// then need to return the list of security mechanisms supported/allowed by the server
// check if server is set to accept connections from client at a certain
// security mechanism, if so send only the security mechanism that the
// server will accept, to the client
if ( server.getSecurityMechanism() != NetworkServerControlImpl.INVALID_OR_NOTSET_SECURITYMECHANISM )
writer.writeScalar2Bytes(CodePoint.SECMEC, server.getSecurityMechanism());
else
{
// note: per the DDM manual , ACCSECRD response is of
// form SECMEC (value{value..})
// Need to fix the below to send a list of supported security
// mechanisms for value of one SECMEC codepoint (JIRA 926)
// these are the ones we know about
writer.writeScalar2Bytes(CodePoint.SECMEC, CodePoint.SECMEC_USRIDPWD);
// include EUSRIDPWD in the list of supported secmec only if
// server can truely support it in the jvm that is running in
if ( server.supportsEUSRIDPWD())
writer.writeScalar2Bytes(CodePoint.SECMEC, CodePoint.SECMEC_EUSRIDPWD);
writer.writeScalar2Bytes(CodePoint.SECMEC, CodePoint.SECMEC_USRIDONL);
writer.writeScalar2Bytes(CodePoint.SECMEC, CodePoint.SECMEC_USRSSBPWD);
}
}
if (securityCheckCode != 0)
{
writer.writeScalar1Byte(CodePoint.SECCHKCD, securityCheckCode);
}
else
{
// we need to send back the key if encryption is being used
if (database.securityMechanism == CodePoint.SECMEC_EUSRIDPWD)
writer.writeScalarBytes(CodePoint.SECTKN, myPublicKey);
else if (database.securityMechanism == CodePoint.SECMEC_USRSSBPWD)
writer.writeScalarBytes(CodePoint.SECTKN, myTargetSeed);
}
writer.endDdmAndDss ();
if (securityCheckCode != 0) {
// then we have an error and so can ignore the rest of the
// DSS request chain.
skipRemainder(false);
}
finalizeChain();
}
/**
* Parse security check
* Instance Variables
* SECMGRNM - security manager name - optional, ignorable
* SECMEC - security mechanism - required
* SECTKN - security token - optional, (required if encryption used)
* PASSWORD - password - optional, (required if security mechanism uses it)
* NEWPASSWORD - new password - optional, (required if sec mech. uses it)
* USRID - user id - optional, (required if sec mec. uses it)
* RDBNAM - database name - optional (required if databases can have own sec.)
*
*
* @return security check code
* @exception DRDAProtocolException
*/
private int parseSECCHK() throws DRDAProtocolException
{
int codePoint, securityCheckCode = 0;
int securityMechanism = 0;
databaseAccessException = null;
reader.markCollection();
codePoint = reader.getCodePoint();
if (this.deferredReset) {
// Skip the SECCHK, but assure a minimal degree of correctness.
while (codePoint != -1) {
switch (codePoint) {
// Note the fall-through.
// Minimal level of checking to detect protocol errors.
// NOTE: SECMGR level 8 code points are not handled.
case CodePoint.SECMGRNM:
case CodePoint.SECMEC:
case CodePoint.SECTKN:
case CodePoint.PASSWORD:
case CodePoint.NEWPASSWORD:
case CodePoint.USRID:
case CodePoint.RDBNAM:
reader.skipBytes();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
} else {
while (codePoint != -1)
{
switch (codePoint)
{
//optional, ignorable
case CodePoint.SECMGRNM:
reader.skipBytes();
break;
//required
case CodePoint.SECMEC:
checkLength(CodePoint.SECMEC, 2);
securityMechanism = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("parseSECCHK - Security mechanism = " + securityMechanism);
//RESOLVE - spec is not clear on what should happen
//in this case
if (securityMechanism != database.securityMechanism)
invalidValue(CodePoint.SECMEC);
break;
//optional - depending on security Mechanism
case CodePoint.SECTKN:
if ((database.securityMechanism !=
CodePoint.SECMEC_EUSRIDPWD) &&
(database.securityMechanism !=
CodePoint.SECMEC_USRSSBPWD))
{
securityCheckCode = CodePoint.SECCHKCD_SECTKNMISSING_OR_INVALID;
reader.skipBytes();
}
else if (database.securityMechanism ==
CodePoint.SECMEC_EUSRIDPWD)
{
if (database.decryptedUserId == null)
{
try {
database.decryptedUserId =
reader.readEncryptedString(
decryptionManager,
database.securityMechanism,
myPublicKey,
database.secTokenIn);
} catch (SQLException se) {
println2Log(database.getDatabaseName(), session.drdaID,
se.getMessage());
if (securityCheckCode == 0)
//userid invalid
securityCheckCode = CodePoint.SECCHKCD_13;
}
database.userId = database.decryptedUserId;
if (SanityManager.DEBUG)
trace("**decrypted userid is: "+database.userId);
}
else if (database.decryptedPassword == null)
{
try {
database.decryptedPassword =
reader.readEncryptedString(
decryptionManager,
database.securityMechanism,
myPublicKey,
database.secTokenIn);
} catch (SQLException se) {
println2Log(database.getDatabaseName(), session.drdaID,
se.getMessage());
if (securityCheckCode == 0)
//password invalid
securityCheckCode = CodePoint.SECCHKCD_0F;
}
database.password = database.decryptedPassword;
if (SanityManager.DEBUG)
trace("**decrypted password is: " +
database.password);
}
}
else if (database.securityMechanism ==
CodePoint.SECMEC_USRSSBPWD)
{
if (database.passwordSubstitute == null)
{
database.passwordSubstitute = reader.readBytes();
if (SanityManager.DEBUG)
trace("** Substitute Password is:" +
DecryptionManager.toHexString(
database.passwordSubstitute, 0,
database.passwordSubstitute.length));
database.password =
DecryptionManager.toHexString(
database.passwordSubstitute, 0,
database.passwordSubstitute.length);
}
}
else
{
tooMany(CodePoint.SECTKN);
}
break;
//optional - depending on security Mechanism
case CodePoint.PASSWORD:
database.password = reader.readString();
if (SanityManager.DEBUG) trace("PASSWORD " + database.password);
break;
//optional - depending on security Mechanism
//we are not supporting this method so we'll skip bytes
case CodePoint.NEWPASSWORD:
reader.skipBytes();
break;
//optional - depending on security Mechanism
case CodePoint.USRID:
database.userId = reader.readString();
if (SanityManager.DEBUG) trace("USERID " + database.userId);
break;
//optional - depending on security Mechanism
case CodePoint.RDBNAM:
String dbname = parseRDBNAM();
if (database != null)
{
if (database.getDatabaseName() == null) {
// we didn't get the RDBNAM on ACCSEC. Set it here
database.setDatabaseName(dbname);
session.addDatabase(database);
session.database = database;
}
else if (!database.getDatabaseName().equals(dbname))
rdbnamMismatch(CodePoint.SECCHK);
}
else
{
// we should already have added the database in ACCSEC
// added code here in case we make the SECMEC session rather
// than database wide
initializeDatabase(dbname);
}
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for SECMEC which is required
if (securityMechanism == 0)
missingCodePoint(CodePoint.SECMEC);
// Check that we have a database name.
if (database == null || database.getDatabaseName() == null)
missingCodePoint(CodePoint.RDBNAM);
//check if we have a userid and password when we need it
if (securityCheckCode == 0 &&
(database.securityMechanism == CodePoint.SECMEC_USRIDPWD||
database.securityMechanism == CodePoint.SECMEC_USRIDONL ))
{
if (database.userId == null)
securityCheckCode = CodePoint.SECCHKCD_USERIDMISSING;
else if (database.securityMechanism == CodePoint.SECMEC_USRIDPWD)
{
if (database.password == null)
securityCheckCode = CodePoint.SECCHKCD_PASSWORDMISSING;
}
//Note, we'll ignore encryptedUserId and encryptedPassword if they
//are also set
}
if (securityCheckCode == 0 &&
database.securityMechanism == CodePoint.SECMEC_USRSSBPWD)
{
if (database.userId == null)
securityCheckCode = CodePoint.SECCHKCD_USERIDMISSING;
else if (database.passwordSubstitute == null)
securityCheckCode = CodePoint.SECCHKCD_PASSWORDMISSING;
}
if (securityCheckCode == 0 &&
database.securityMechanism == CodePoint.SECMEC_EUSRIDPWD)
{
if (database.decryptedUserId == null)
securityCheckCode = CodePoint.SECCHKCD_USERIDMISSING;
else if (database.decryptedPassword == null)
securityCheckCode = CodePoint.SECCHKCD_PASSWORDMISSING;
}
// RESOLVE - when we do security we need to decrypt encrypted userid & password
// before proceeding
} // End "if (deferredReset) ... else ..." block
// verify userid and password, if we haven't had any errors thus far.
if ((securityCheckCode == 0) && (databaseAccessException == null))
{
// DERBY-3596: Reset server side (embedded) physical connection for
// use with a new logical connection on the client.
if (this.deferredReset) {
// Reset the existing connection here.
try {
database.getConnection().resetFromPool();
database.getConnection().setHoldability(
ResultSet.HOLD_CURSORS_OVER_COMMIT);
// Reset isolation level to default, as the client is in
// the process of creating a new logical connection.
database.getConnection().setTransactionIsolation(
Connection.TRANSACTION_READ_COMMITTED);
} catch (SQLException sqle) {
handleException(sqle);
}
} else {
securityCheckCode = verifyUserIdPassword();
}
}
// Security all checked
if (securityCheckCode == 0)
session.setState(session.CHKSEC);
return securityCheckCode;
}
/**
* Write security check reply
* Instance variables
* SVRCOD - serverity code - required
* SECCHKCD - security check code - required
* SECTKN - security token - optional, ignorable
* SVCERRNO - security service error number
* SRVDGN - Server Diagnostic Information
*
* @exception DRDAProtocolException
*/
private void writeSECCHKRM(int securityCheckCode) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.SECCHKRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, svrcodFromSecchkcd(securityCheckCode));
writer.writeScalar1Byte(CodePoint.SECCHKCD, securityCheckCode);
writer.endDdmAndDss ();
if (securityCheckCode != 0) {
// then we have an error and are going to end up ignoring the rest
// of the DSS request chain.
skipRemainder(false);
}
finalizeChain();
}
/**
* Calculate SVRCOD value from SECCHKCD
*
* @param securityCheckCode
* @return SVRCOD value
*/
private int svrcodFromSecchkcd(int securityCheckCode)
{
if (securityCheckCode == 0 || securityCheckCode == 2 ||
securityCheckCode == 5 || securityCheckCode == 8)
return CodePoint.SVRCOD_INFO;
else
return CodePoint.SVRCOD_ERROR;
}
/**
* Parse access RDB
* Instance variables
* RDBACCCL - RDB Access Manager Class - required must be SQLAM
* CRRTKN - Correlation Token - required
* RDBNAM - Relational database name -required
* PRDID - Product specific identifier - required
* TYPDEFNAM - Data Type Definition Name -required
* TYPDEFOVR - Type definition overrides -required
* RDBALWUPD - RDB Allow Updates optional
* PRDDTA - Product Specific Data - optional - ignorable
* STTDECDEL - Statement Decimal Delimiter - optional
* STTSTRDEL - Statement String Delimiter - optional
* TRGDFTRT - Target Default Value Return - optional
*
* @return severity code
*
* @exception DRDAProtocolException
*/
private int parseACCRDB() throws DRDAProtocolException
{
int codePoint;
int svrcod = 0;
copyToRequired(ACCRDB_REQUIRED);
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
//required
case CodePoint.RDBACCCL:
checkLength(CodePoint.RDBACCCL, 2);
int sqlam = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("RDBACCCL = " + sqlam);
// required to be SQLAM
if (sqlam != CodePoint.SQLAM)
invalidValue(CodePoint.RDBACCCL);
removeFromRequired(CodePoint.RDBACCCL);
break;
//required
case CodePoint.CRRTKN:
database.crrtkn = reader.readBytes();
if (SanityManager.DEBUG)
trace("crrtkn " + convertToHexString(database.crrtkn));
removeFromRequired(CodePoint.CRRTKN);
int l = database.crrtkn.length;
if (l > CodePoint.MAX_NAME)
tooBig(CodePoint.CRRTKN);
// the format of the CRRTKN is defined in the DRDA reference
// x.yz where x is 1 to 8 bytes (variable)
// y is 1 to 8 bytes (variable)
// x is 6 bytes fixed
// size is variable between 9 and 23
if (l < 9 || l > 23)
invalidValue(CodePoint.CRRTKN);
byte[] part1 = new byte[l - 6];
for (int i = 0; i < part1.length; i++)
part1[i] = database.crrtkn[i];
long time = SignedBinary.getLong(database.crrtkn,
l-8, SignedBinary.BIG_ENDIAN); // as "long" as unique
session.drdaID = reader.convertBytes(part1) +
time + leftBrace + session.connNum + rightBrace;
if (SanityManager.DEBUG)
trace("******************************************drdaID is: " + session.drdaID);
database.setDrdaID(session.drdaID);
break;
//required
case CodePoint.RDBNAM:
String dbname = parseRDBNAM();
if (database != null)
{
if (!database.getDatabaseName().equals(dbname))
rdbnamMismatch(CodePoint.ACCRDB);
}
else
{
//first time we have seen a database name
Database d = session.getDatabase(dbname);
if (d == null)
initializeDatabase(dbname);
else
{
database = d;
database.accessCount++;
}
}
removeFromRequired(CodePoint.RDBNAM);
break;
//required
case CodePoint.PRDID:
appRequester.setClientVersion(reader.readString());
if (SanityManager.DEBUG)
trace("prdId " + appRequester.prdid);
if (appRequester.prdid.length() > CodePoint.PRDID_MAX)
tooBig(CodePoint.PRDID);
if (appRequester.getClientType() != appRequester.DNC_CLIENT) {
invalidClient(appRequester.prdid);
}
// All versions of DNC,the only client supported, handle
// warnings on CNTQRY
sendWarningsOnCNTQRY = true;
// The client can not request DIAGLVL because when run with
// an older server it will cause an exception. Older version
// of the server do not recognize requests for DIAGLVL.
if ((appRequester.getClientType() == appRequester.DNC_CLIENT) &&
appRequester.greaterThanOrEqualTo(10, 2, 0)) {
diagnosticLevel = CodePoint.DIAGLVL1;
}
removeFromRequired(CodePoint.PRDID);
break;
//required
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(true, null, parseTYPDEFNAM());
removeFromRequired(CodePoint.TYPDEFNAM);
break;
//required
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(null);
removeFromRequired(CodePoint.TYPDEFOVR);
break;
//optional
case CodePoint.RDBALWUPD:
checkLength(CodePoint.RDBALWUPD, 1);
database.rdbAllowUpdates = readBoolean(CodePoint.RDBALWUPD);
if (SanityManager.DEBUG)
trace("rdbAllowUpdates = "+database.rdbAllowUpdates);
break;
//optional, ignorable
case CodePoint.PRDDTA:
// check that it fits in maximum but otherwise ignore for now
if (reader.getDdmLength() > CodePoint.MAX_NAME)
tooBig(CodePoint.PRDDTA);
reader.skipBytes();
break;
case CodePoint.TRGDFTRT:
byte b = reader.readByte();
if (b == (byte)0xF1)
database.sendTRGDFTRT = true;
break;
//optional - not used in JCC so skip for now
case CodePoint.STTDECDEL:
case CodePoint.STTSTRDEL:
codePointNotSupported(codePoint);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
checkRequired(CodePoint.ACCRDB);
// check that we can support the double-byte and mixed-byte CCSIDS
// set svrcod to warning if they are not supported
if ((database.ccsidDBC != 0 && !server.supportsCCSID(database.ccsidDBC)) ||
(database.ccsidMBC != 0 && !server.supportsCCSID(database.ccsidMBC)))
svrcod = CodePoint.SVRCOD_WARNING;
return svrcod;
}
/**
* Parse TYPDEFNAM
*
* @return typdefnam
* @exception DRDAProtocolException
*/
private String parseTYPDEFNAM() throws DRDAProtocolException
{
String typDefNam = reader.readString();
if (SanityManager.DEBUG) trace("typeDefName " + typDefNam);
if (typDefNam.length() > CodePoint.MAX_NAME)
tooBig(CodePoint.TYPDEFNAM);
checkValidTypDefNam(typDefNam);
// check if the typedef is one we support
if (!typDefNam.equals(CodePoint.TYPDEFNAM_QTDSQLASC) &&
!typDefNam.equals(CodePoint.TYPDEFNAM_QTDSQLJVM) &&
!typDefNam.equals(CodePoint.TYPDEFNAM_QTDSQLX86))
valueNotSupported(CodePoint.TYPDEFNAM);
return typDefNam;
}
/**
* Set a statement or the database' byte order, depending on the arguments
*
* @param setDatabase if true, set database' byte order, otherwise set statement's
* @param stmt DRDAStatement, used when setDatabase is false
* @param typDefNam TYPDEFNAM value
*/
private void setStmtOrDbByteOrder(boolean setDatabase, DRDAStatement stmt, String typDefNam)
{
int byteOrder = (typDefNam.equals(CodePoint.TYPDEFNAM_QTDSQLX86) ?
SignedBinary.LITTLE_ENDIAN : SignedBinary.BIG_ENDIAN);
if (setDatabase)
{
database.typDefNam = typDefNam;
database.byteOrder = byteOrder;
}
else
{
stmt.typDefNam = typDefNam;
stmt.byteOrder = byteOrder;
}
}
/**
* Write Access to RDB Completed
* Instance Variables
* SVRCOD - severity code - 0 info, 4 warning -required
* PRDID - product specific identifier -required
* TYPDEFNAM - type definition name -required
* TYPDEFOVR - type definition overrides - required
* RDBINTTKN - token which can be used to interrupt DDM commands - optional
* CRRTKN - correlation token - only returned if we didn't get one from requester
* SRVDGN - server diagnostic information - optional
* PKGDFTCST - package default character subtype - optional
* USRID - User ID at the target system - optional
* SRVLST - Server List
*
* @exception DRDAProtocolException
*/
private void writeACCRDBRM(int svrcod) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.ACCRDBRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, svrcod);
writer.writeScalarString(CodePoint.PRDID, server.prdId);
//TYPDEFNAM -required - JCC doesn't support QTDSQLJVM so for now we
// just use ASCII, though we should eventually be able to use QTDSQLJVM
// at level 7
writer.writeScalarString(CodePoint.TYPDEFNAM,
CodePoint.TYPDEFNAM_QTDSQLASC);
writeTYPDEFOVR();
writer.endDdmAndDss ();
// Write the initial piggy-backed data, currently the isolation level
// and the schema name. Only write it if the client supports session
// data caching.
// Sending the session data on connection initialization was introduced
// in Derby 10.7.
if ((appRequester.getClientType() == appRequester.DNC_CLIENT) &&
appRequester.greaterThanOrEqualTo(10, 7, 0)) {
try {
writePBSD();
} catch (SQLException se) {
server.consoleExceptionPrint(se);
errorInChain(se);
}
}
finalizeChain();
}
private void writeTYPDEFOVR() throws DRDAProtocolException
{
//TYPDEFOVR - required - only single byte and mixed byte are specified
writer.startDdm(CodePoint.TYPDEFOVR);
writer.writeScalar2Bytes(CodePoint.CCSIDSBC, server.CCSIDSBC);
writer.writeScalar2Bytes(CodePoint.CCSIDMBC, server.CCSIDMBC);
// PKGDFTCST - Send character subtype and userid if requested
if (database.sendTRGDFTRT)
{
// default to multibyte character
writer.startDdm(CodePoint.PKGDFTCST);
writer.writeShort(CodePoint.CSTMBCS);
writer.endDdm();
// userid
writer.startDdm(CodePoint.USRID);
writer.writeString(database.userId);
writer.endDdm();
}
writer.endDdm();
}
/**
* Parse Type Defintion Overrides
* TYPDEF Overrides specifies the Coded Character SET Identifiers (CCSIDs)
* that are in a named TYPDEF.
* Instance Variables
* CCSIDSBC - CCSID for Single-Byte - optional
* CCSIDDBC - CCSID for Double-Byte - optional
* CCSIDMBC - CCSID for Mixed-byte characters -optional
*
* @param st Statement this TYPDEFOVR applies to
*
* @exception DRDAProtocolException
*/
private void parseTYPDEFOVR(DRDAStatement st) throws DRDAProtocolException
{
int codePoint;
int ccsidSBC = 0;
int ccsidDBC = 0;
int ccsidMBC = 0;
String ccsidSBCEncoding = null;
String ccsidDBCEncoding = null;
String ccsidMBCEncoding = null;
reader.markCollection();
codePoint = reader.getCodePoint();
// at least one of the following instance variable is required
// if the TYPDEFOVR is specified in a command object
if (codePoint == -1 && st != null)
missingCodePoint(CodePoint.CCSIDSBC);
while (codePoint != -1)
{
switch (codePoint)
{
case CodePoint.CCSIDSBC:
checkLength(CodePoint.CCSIDSBC, 2);
ccsidSBC = reader.readNetworkShort();
try {
ccsidSBCEncoding =
CharacterEncodings.getJavaEncoding(ccsidSBC);
} catch (Exception e) {
valueNotSupported(CodePoint.CCSIDSBC);
}
if (SanityManager.DEBUG)
trace("ccsidsbc = " + ccsidSBC + " encoding = " + ccsidSBCEncoding);
break;
case CodePoint.CCSIDDBC:
checkLength(CodePoint.CCSIDDBC, 2);
ccsidDBC = reader.readNetworkShort();
try {
ccsidDBCEncoding =
CharacterEncodings.getJavaEncoding(ccsidDBC);
} catch (Exception e) {
// we write a warning later for this so no error
// unless for a statement
ccsidDBCEncoding = null;
if (st != null)
valueNotSupported(CodePoint.CCSIDSBC);
}
if (SanityManager.DEBUG)
trace("ccsiddbc = " + ccsidDBC + " encoding = " + ccsidDBCEncoding);
break;
case CodePoint.CCSIDMBC:
checkLength(CodePoint.CCSIDMBC, 2);
ccsidMBC = reader.readNetworkShort();
try {
ccsidMBCEncoding =
CharacterEncodings.getJavaEncoding(ccsidMBC);
} catch (Exception e) {
// we write a warning later for this so no error
ccsidMBCEncoding = null;
if (st != null)
valueNotSupported(CodePoint.CCSIDMBC);
}
if (SanityManager.DEBUG)
trace("ccsidmbc = " + ccsidMBC + " encoding = " + ccsidMBCEncoding);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
if (st == null)
{
if (ccsidSBC != 0)
{
database.ccsidSBC = ccsidSBC;
database.ccsidSBCEncoding = ccsidSBCEncoding;
}
if (ccsidDBC != 0)
{
database.ccsidDBC = ccsidDBC;
database.ccsidDBCEncoding = ccsidDBCEncoding;
}
if (ccsidMBC != 0)
{
database.ccsidMBC = ccsidMBC;
database.ccsidMBCEncoding = ccsidMBCEncoding;
}
}
else
{
if (ccsidSBC != 0)
{
st.ccsidSBC = ccsidSBC;
st.ccsidSBCEncoding = ccsidSBCEncoding;
}
if (ccsidDBC != 0)
{
st.ccsidDBC = ccsidDBC;
st.ccsidDBCEncoding = ccsidDBCEncoding;
}
if (ccsidMBC != 0)
{
st.ccsidMBC = ccsidMBC;
st.ccsidMBCEncoding = ccsidMBCEncoding;
}
}
}
/**
* Parse PRPSQLSTT - Prepare SQL Statement
* Instance Variables
* RDBNAM - Relational Database Name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number - required
* RTNSQLDA - Return SQL Descriptor Area - optional
* MONITOR - Monitor events - optional.
*
* @return return 0 - don't return sqlda, 1 - return input sqlda,
* 2 - return output sqlda
* @throws DRDAProtocolException
* @throws SQLException
*/
private int parsePRPSQLSTT() throws DRDAProtocolException,SQLException
{
int codePoint;
boolean rtnsqlda = false;
boolean rtnOutput = true; // Return output SQLDA is default
String typdefnam;
Pkgnamcsn pkgnamcsn = null;
DRDAStatement stmt = null;
Database databaseToSet = null;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.PRPSQLSTT);
databaseToSet = database;
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
//optional
case CodePoint.RTNSQLDA:
// Return SQLDA with description of statement
rtnsqlda = readBoolean(CodePoint.RTNSQLDA);
break;
//optional
case CodePoint.TYPSQLDA:
rtnOutput = parseTYPSQLDA();
break;
//optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
stmt = database.newDRDAStatement(pkgnamcsn);
String sqlStmt = parsePRPSQLSTTobjects(stmt);
if (databaseToSet != null)
stmt.setDatabase(database);
stmt.explicitPrepare(sqlStmt);
// set the statement as the current statement
database.setCurrentStatement(stmt);
if (!rtnsqlda)
return 0;
else if (rtnOutput)
return 2;
else
return 1;
}
/**
* Parse PRPSQLSTT objects
* Objects
* TYPDEFNAM - Data type definition name - optional
* TYPDEFOVR - Type defintion overrides - optional
* SQLSTT - SQL Statement required
* SQLATTR - Cursor attributes on prepare - optional - level 7
*
* If TYPDEFNAM and TYPDEFOVR are supplied, they apply to the objects
* sent with the statement. Once the statement is over, the default values
* sent in the ACCRDB are once again in effect. If no values are supplied,
* the values sent in the ACCRDB are used.
* Objects may follow in one DSS or in several DSS chained together.
*
* @return SQL statement
* @throws DRDAProtocolException
* @throws SQLException
*/
private String parsePRPSQLSTTobjects(DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
String sqlStmt = null;
int codePoint;
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// required
case CodePoint.SQLSTT:
sqlStmt = parseEncodedString();
if (SanityManager.DEBUG)
trace("sqlStmt = " + sqlStmt);
break;
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, stmt, parseTYPDEFNAM());
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(stmt);
break;
// optional
case CodePoint.SQLATTR:
parseSQLATTR(stmt);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
if (sqlStmt == null)
missingCodePoint(CodePoint.SQLSTT);
return sqlStmt;
}
/**
* Parse TYPSQLDA - Type of the SQL Descriptor Area
*
* @return true if for output; false otherwise
* @exception DRDAProtocolException
*/
private boolean parseTYPSQLDA() throws DRDAProtocolException
{
checkLength(CodePoint.TYPSQLDA, 1);
byte sqldaType = reader.readByte();
if (SanityManager.DEBUG)
trace("typSQLDa " + sqldaType);
if (sqldaType == CodePoint.TYPSQLDA_STD_OUTPUT ||
sqldaType == CodePoint.TYPSQLDA_LIGHT_OUTPUT ||
sqldaType == CodePoint.TYPSQLDA_X_OUTPUT)
return true;
else if (sqldaType == CodePoint.TYPSQLDA_STD_INPUT ||
sqldaType == CodePoint.TYPSQLDA_LIGHT_INPUT ||
sqldaType == CodePoint.TYPSQLDA_X_INPUT)
return false;
else
invalidValue(CodePoint.TYPSQLDA);
// shouldn't get here but have to shut up compiler
return false;
}
/**
* Parse SQLATTR - Cursor attributes on prepare
* This is an encoded string. Can have combination of following, eg INSENSITIVE SCROLL WITH HOLD
* Possible strings are
* SENSITIVE DYNAMIC SCROLL [FOR UPDATE]
* SENSITIVE STATIC SCROLL [FOR UPDATE]
* INSENSITIVE SCROLL
* FOR UPDATE
* WITH HOLD
*
* @param stmt DRDAStatement
* @exception DRDAProtocolException
*/
protected void parseSQLATTR(DRDAStatement stmt) throws DRDAProtocolException
{
String attrs = parseEncodedString();
if (SanityManager.DEBUG)
trace("sqlattr = '" + attrs+"'");
//let Derby handle any errors in the types it doesn't support
//just set the attributes
boolean validAttribute = false;
if (attrs.indexOf("INSENSITIVE SCROLL") != -1 || attrs.indexOf("SCROLL INSENSITIVE") != -1) //CLI
{
stmt.scrollType = ResultSet.TYPE_SCROLL_INSENSITIVE;
stmt.concurType = ResultSet.CONCUR_READ_ONLY;
validAttribute = true;
}
if ((attrs.indexOf("SENSITIVE DYNAMIC SCROLL") != -1) || (attrs.indexOf("SENSITIVE STATIC SCROLL") != -1))
{
stmt.scrollType = ResultSet.TYPE_SCROLL_SENSITIVE;
validAttribute = true;
}
if ((attrs.indexOf("FOR UPDATE") != -1))
{
validAttribute = true;
stmt.concurType = ResultSet.CONCUR_UPDATABLE;
}
if (attrs.indexOf("WITH HOLD") != -1)
{
stmt.withHoldCursor = ResultSet.HOLD_CURSORS_OVER_COMMIT;
validAttribute = true;
}
if (!validAttribute)
{
invalidValue(CodePoint.SQLATTR);
}
}
/**
* Parse DSCSQLSTT - Describe SQL Statement previously prepared
* Instance Variables
* TYPSQLDA - sqlda type expected (output or input)
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* MONITOR - Monitor events - optional.
*
* @return expect "output sqlda" or not
* @throws DRDAProtocolException
* @throws SQLException
*/
private boolean parseDSCSQLSTT() throws DRDAProtocolException,SQLException
{
int codePoint;
boolean rtnOutput = true; // default
Pkgnamcsn pkgnamcsn = null;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.TYPSQLDA:
rtnOutput = parseTYPSQLDA();
break;
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.DSCSQLSTT);
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
DRDAStatement stmt = database.getDRDAStatement(pkgnamcsn);
if (stmt == null)
{
invalidValue(CodePoint.PKGNAMCSN);
}
break;
//optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
return rtnOutput;
}
/**
* Parse EXCSQLSTT - Execute non-cursor SQL Statement previously prepared
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* OUTEXP - Output expected
* NBRROW - Number of rows to be inserted if it's an insert
* PRCNAM - procedure name if specified by host variable, not needed for Derby
* QRYBLKSZ - query block size
* MAXRSLCNT - max resultset count
* MAXBLKEXT - Max number of extra blocks
* RSLSETFLG - resultset flag
* RDBCMTOK - RDB Commit Allowed - optional
* OUTOVROPT - output override option
* QRYROWSET - Query Rowset Size - Level 7
* MONITOR - Monitor events - optional.
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void parseEXCSQLSTT() throws DRDAProtocolException,SQLException
{
int codePoint;
String strVal;
reader.markCollection();
codePoint = reader.getCodePoint();
boolean outputExpected = false;
Pkgnamcsn pkgnamcsn = null;
int numRows = 1; // default value
int blkSize = 0;
int maxrslcnt = 0; // default value
int maxblkext = CodePoint.MAXBLKEXT_DEFAULT;
int qryrowset = CodePoint.QRYROWSET_DEFAULT;
int outovropt = CodePoint.OUTOVRFRS;
byte [] rslsetflg = null;
String procName = null;
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.EXCSQLSTT);
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
// optional
case CodePoint.OUTEXP:
outputExpected = readBoolean(CodePoint.OUTEXP);
if (SanityManager.DEBUG)
trace("outexp = "+ outputExpected);
break;
// optional
case CodePoint.NBRROW:
checkLength(CodePoint.NBRROW, 4);
numRows = reader.readNetworkInt();
if (SanityManager.DEBUG)
trace("# of rows: "+numRows);
break;
// optional
case CodePoint.PRCNAM:
procName = reader.readString();
if (SanityManager.DEBUG)
trace("Procedure Name = " + procName);
break;
// optional
case CodePoint.QRYBLKSZ:
blkSize = parseQRYBLKSZ();
break;
// optional
case CodePoint.MAXRSLCNT:
// this is the maximum result set count
// values are 0 - requester is not capabable of receiving result
// sets as reply data in the response to EXCSQLSTT
// -1 - requester is able to receive all result sets
checkLength(CodePoint.MAXRSLCNT, 2);
maxrslcnt = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("max rs count: "+maxrslcnt);
break;
// optional
case CodePoint.MAXBLKEXT:
// number of extra qury blocks of answer set data per result set
// 0 - no extra query blocks
// -1 - can receive entire result set
checkLength(CodePoint.MAXBLKEXT, 2);
maxblkext = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("max extra blocks: "+maxblkext);
break;
// optional
case CodePoint.RSLSETFLG:
//Result set flags
rslsetflg = reader.readBytes();
for (int i=0;i<rslsetflg.length;i++)
if (SanityManager.DEBUG)
trace("rslsetflg: "+rslsetflg[i]);
break;
// optional
case CodePoint.RDBCMTOK:
parseRDBCMTOK();
break;
// optional
case CodePoint.OUTOVROPT:
outovropt = parseOUTOVROPT();
break;
// optional
case CodePoint.QRYROWSET:
//Note minimum for OPNQRY is 0, we'll assume it is the same
//for EXCSQLSTT though the standard doesn't say
qryrowset = parseQRYROWSET(0);
break;
//optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
DRDAStatement stmt;
boolean needPrepareCall = false;
stmt = database.getDRDAStatement(pkgnamcsn);
boolean isProcedure = (procName !=null ||
(stmt != null &&
stmt.wasExplicitlyPrepared() &&
stmt.isCall));
if (isProcedure) // stored procedure call
{
if ( stmt == null || !(stmt.wasExplicitlyPrepared()))
{
stmt = database.newDRDAStatement(pkgnamcsn);
stmt.setQryprctyp(CodePoint.QRYBLKCTL_DEFAULT);
needPrepareCall = true;
}
stmt.procName = procName;
stmt.outputExpected = outputExpected;
}
else
{
// we can't find the statement
if (stmt == null)
{
invalidValue(CodePoint.PKGNAMCSN);
}
stmt.setQryprctyp(CodePoint.QRYBLKCTL_DEFAULT);
}
stmt.nbrrow = numRows;
stmt.qryrowset = qryrowset;
stmt.blksize = blkSize;
stmt.maxblkext = maxblkext;
stmt.maxrslcnt = maxrslcnt;
stmt.outovropt = outovropt;
stmt.rslsetflg = rslsetflg;
if (pendingStatementTimeout >= 0) {
stmt.getPreparedStatement().setQueryTimeout(pendingStatementTimeout);
pendingStatementTimeout = -1;
}
// set the statement as the current statement
database.setCurrentStatement(stmt);
boolean hasResultSet;
if (reader.isChainedWithSameID())
{
hasResultSet = parseEXCSQLSTTobjects(stmt);
} else
{
if (isProcedure && (needPrepareCall))
{
// if we had parameters the callable statement would
// be prepared with parseEXCQLSTTobjects, otherwise we
// have to do it here
String prepareString = "call " + stmt.procName +"()";
if (SanityManager.DEBUG)
trace ("$$$prepareCall is: "+prepareString);
database.getConnection().clearWarnings();
CallableStatement cs = (CallableStatement) stmt.prepare(prepareString);
}
stmt.ps.clearWarnings();
hasResultSet = stmt.execute();
}
ResultSet rs = null;
if (hasResultSet)
{
rs = stmt.getResultSet();
}
// temp until ps.execute() return value fixed
hasResultSet = (rs != null);
int numResults = 0;
if (hasResultSet)
{
numResults = stmt.getNumResultSets();
writeRSLSETRM(stmt);
}
// First of all, we send if there really are output params. Otherwise
// CLI (.Net driver) fails. DRDA spec (page 151,152) says send SQLDTARD
// if server has output param data to send.
boolean sendSQLDTARD = stmt.hasOutputParams() && outputExpected;
if (isProcedure)
{
if (sendSQLDTARD) {
writer.createDssObject();
writer.startDdm(CodePoint.SQLDTARD);
writer.startDdm(CodePoint.FDODSC);
writeQRYDSC(stmt, true);
writer.endDdm();
writer.startDdm(CodePoint.FDODTA);
writeFDODTA(stmt);
writer.endDdm();
writer.endDdmAndDss();
if (stmt.getExtDtaObjects() != null)
{
// writeScalarStream() ends the dss
writeEXTDTA(stmt);
}
}
else if (hasResultSet)
// DRDA spec says that we MUST return either an
// SQLDTARD or an SQLCARD--the former when we have
// output parameters, the latter when we don't.
// If we have a result set, then we have to write
// the SQLCARD _now_, since it is expected before
// we send the result set info below; if we don't
// have a result set and we don't send SQLDTARD,
// then we can wait until we reach the call to
// checkWarning() below, which will write an
// SQLCARD for us.
writeNullSQLCARDobject();
}
//We need to marke that params are finished so that we know we
// are ready to send resultset info.
stmt.finishParams();
PreparedStatement ps = stmt.getPreparedStatement();
int rsNum = 0;
do {
if (hasResultSet)
{
stmt.setCurrentDrdaResultSet(rsNum);
//indicate that we are going to return data
stmt.setQryrtndta(true);
if (! isProcedure)
checkWarning(null, ps, null, -1, true, true);
if (rsNum == 0)
writeSQLRSLRD(stmt);
writeOPNQRYRM(true, stmt);
writeSQLCINRD(stmt);
writeQRYDSC(stmt, false);
stmt.rsSuspend();
/* Currently, if LMTBLKPRC is used, a pre-condition is that no lob columns.
* But in the future, when we do support LOB in LMTBLKPRC, the drda spec still
* does not allow LOB to be sent with OPNQRYRM. So this "if" here will have
* to add "no lob columns".
*/
if (stmt.getQryprctyp() == CodePoint.LMTBLKPRC)
writeQRYDTA(stmt);
}
else if (! sendSQLDTARD)
{
int updateCount = ps.getUpdateCount();
if (false && (database.RDBUPDRM_sent == false) &&
! isProcedure)
{
writeRDBUPDRM();
}
checkWarning(database.getConnection(), stmt.ps, null, updateCount, true, true);
}
} while(hasResultSet && (++rsNum < numResults));
return; // we are done
}
/**
* Parse RDBCMTOK - tells the database whether to allow commits or rollbacks
* to be executed as part of the command
* Since we don't have a SQL commit or rollback command, we will just ignore
* this for now
*
* @exception DRDAProtocolException
*/
private void parseRDBCMTOK() throws DRDAProtocolException
{
boolean rdbcmtok = readBoolean(CodePoint.RDBCMTOK);
if (SanityManager.DEBUG)
trace("rdbcmtok = " + rdbcmtok);
}
/**
* Parse EXCSQLSTT command objects
* Command Objects
* TYPDEFNAM - Data Type Definition Name - optional
* TYPDEFOVR - TYPDEF Overrides -optional
* SQLDTA - optional, variable data, specified if prpared statement has input parameters
* EXTDTA - optional, externalized FD:OCA data
* OUTOVR - output override descriptor, not allowed for stored procedure calls
*
* If TYPDEFNAM and TYPDEFOVR are supplied, they apply to the objects
* sent with the statement. Once the statement is over, the default values
* sent in the ACCRDB are once again in effect. If no values are supplied,
* the values sent in the ACCRDB are used.
* Objects may follow in one DSS or in several DSS chained together.
*
* @param stmt the DRDAStatement to execute
* @throws DRDAProtocolException
* @throws SQLException
*/
private boolean parseEXCSQLSTTobjects(DRDAStatement stmt) throws DRDAProtocolException, SQLException
{
int codePoint;
boolean gotSQLDTA = false, gotEXTDTA = false;
boolean result = false;
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( true );
switch(codePoint)
{
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, stmt, parseTYPDEFNAM());
stmt.setTypDefValues();
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(stmt);
stmt.setTypDefValues();
break;
// required
case CodePoint.SQLDTA:
parseSQLDTA(stmt);
gotSQLDTA = true;
break;
// optional
case CodePoint.EXTDTA:
readAndSetAllExtParams(stmt, true);
stmt.ps.clearWarnings();
result = stmt.execute();
gotEXTDTA = true;
break;
// optional
case CodePoint.OUTOVR:
parseOUTOVR(stmt);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
// SQLDTA is required
if (! gotSQLDTA)
missingCodePoint(CodePoint.SQLDTA);
if (! gotEXTDTA) {
stmt.ps.clearWarnings();
result = stmt.execute();
}
return result;
}
/**
* Write SQLCINRD - result set column information
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLCINRD(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
ResultSet rs = stmt.getResultSet();
writer.createDssObject();
writer.startDdm(CodePoint.SQLCINRD);
if (sqlamLevel >= MGRLVL_7)
writeSQLDHROW(((EngineResultSet) rs).getHoldability());
ResultSetMetaData rsmeta = rs.getMetaData();
int ncols = rsmeta.getColumnCount();
writer.writeShort(ncols); // num of columns
if (sqlamLevel >= MGRLVL_7)
{
for (int i = 0; i < ncols; i++)
writeSQLDAGRP (rsmeta, null, i, true);
}
else
{
for (int i = 0; i < ncols; i++)
{
writeVCMorVCS(rsmeta.getColumnName(i+1));
writeVCMorVCS(rsmeta.getColumnLabel(i+1));
writeVCMorVCS(null);
}
}
writer.endDdmAndDss();
}
/**
* Write SQLRSLRD - result set reply data
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLRSLRD(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
int numResults = stmt.getNumResultSets();
writer.createDssObject();
writer.startDdm(CodePoint.SQLRSLRD);
writer.writeShort(numResults); // num of result sets
for (int i = 0; i < numResults; i ++)
{
writer.writeInt(i); // rsLocator
writeVCMorVCS(stmt.getResultSetCursorName(i));
writer.writeInt(1); // num of rows XXX resolve, it doesn't matter for now
}
writer.endDdmAndDss();
}
/**
* Write RSLSETRM
* Instance variables
* SVRCOD - Severity code - Information only - required
* PKGSNLST - list of PKGNAMCSN -required
* SRVDGN - Server Diagnostic Information -optional
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeRSLSETRM(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
int numResults = stmt.getNumResultSets();
writer.createDssReply();
writer.startDdm(CodePoint.RSLSETRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, 0);
writer.startDdm(CodePoint.PKGSNLST);
for (int i = 0; i < numResults; i++)
writePKGNAMCSN(stmt.getResultSetPkgcnstkn(i).getBytes());
writer.endDdm();
writer.endDdmAndDss();
}
/**
* Parse SQLDTA - SQL program variable data
* and handle exception.
* @see #parseSQLDTA_work
*/
private void parseSQLDTA(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
try {
parseSQLDTA_work(stmt);
}
catch (SQLException se)
{
skipRemainder(true);
throw se;
}
}
/**
* Parse SQLDTA - SQL program variable data
* Instance Variables
* FDODSC - FD:OCA data descriptor - required
* FDODTA - FD:OCA data - optional
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void parseSQLDTA_work(DRDAStatement stmt) throws DRDAProtocolException,SQLException
{
String strVal;
PreparedStatement ps = stmt.getPreparedStatement();
int codePoint;
ParameterMetaData pmeta = null;
// Clear params without releasing storage
stmt.clearDrdaParams();
int numVars = 0;
boolean rtnParam = false;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// required
case CodePoint.FDODSC:
while (reader.getDdmLength() > 6) //we get parameter info til last 6 byte
{
int dtaGrpLen = reader.readUnsignedByte();
int numVarsInGrp = (dtaGrpLen - 3) / 3;
if (SanityManager.DEBUG)
trace("num of vars in this group is: "+numVarsInGrp);
reader.readByte(); // tripletType
reader.readByte(); // id
for (int j = 0; j < numVarsInGrp; j++)
{
final byte t = reader.readByte();
if (SanityManager.DEBUG)
trace("drdaType is: "+ "0x" +
Integer.toHexString(t));
int drdaLength = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("drdaLength is: "+drdaLength);
stmt.addDrdaParam(t, drdaLength);
}
}
numVars = stmt.getDrdaParamCount();
if (SanityManager.DEBUG)
trace("numVars = " + numVars);
if (ps == null) // it is a CallableStatement under construction
{
StringBuffer marks = new StringBuffer(); // construct parameter marks
marks.append("(?");
for (int i = 1; i < numVars; i++)
marks.append(", ?");
String prepareString = "call " + stmt.procName + marks.toString() + ")";
if (SanityManager.DEBUG)
trace ("$$ prepareCall is: "+prepareString);
CallableStatement cs = null;
try {
cs = (CallableStatement)
stmt.prepare(prepareString);
stmt.registerAllOutParams();
} catch (SQLException se) {
if (! stmt.outputExpected ||
(!se.getSQLState().equals(SQLState.LANG_NO_METHOD_FOUND)))
throw se;
if (SanityManager.DEBUG)
trace("****** second try with return parameter...");
// Save first SQLException most likely suspect
if (numVars == 1)
prepareString = "? = call " + stmt.procName +"()";
else
prepareString = "? = call " + stmt.procName +"("+marks.substring(3) + ")";
if (SanityManager.DEBUG)
trace ("$$ prepareCall is: "+prepareString);
try {
cs = (CallableStatement) stmt.prepare(prepareString);
} catch (SQLException se2)
{
// The first exception is the most likely suspect
throw se;
}
rtnParam = true;
}
ps = cs;
stmt.ps = ps;
}
pmeta = stmt.getParameterMetaData();
reader.readBytes(6); // descriptor footer
break;
// optional
case CodePoint.FDODTA:
reader.readByte(); // row indicator
for (int i = 0; i < numVars; i++)
{
if ((stmt.getParamDRDAType(i+1) & 0x1) == 0x1) // nullable
{
int nullData = reader.readUnsignedByte();
if ((nullData & 0xFF) == FdocaConstants.NULL_DATA)
{
if (SanityManager.DEBUG)
trace("******param null");
if (pmeta.getParameterMode(i + 1)
!= JDBC30Translation.PARAMETER_MODE_OUT )
ps.setNull(i+1, pmeta.getParameterType(i+1));
if (stmt.isOutputParam(i+1))
stmt.registerOutParam(i+1);
continue;
}
}
// not null, read and set it
readAndSetParams(i, stmt, pmeta);
}
break;
case CodePoint.EXTDTA:
readAndSetAllExtParams(stmt, false);
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
}
private int getByteOrder()
{
DRDAStatement stmt = database.getCurrentStatement();
return ((stmt != null && stmt.typDefNam != null) ? stmt.byteOrder : database.byteOrder);
}
/** A cached {@code Calendar} instance using the GMT time zone. */
private Calendar gmtCalendar;
/**
* Get a {@code Calendar} instance with time zone set to GMT. The instance
* is cached for reuse by this thread. This calendar can be used to
* consistently read and write date and time values using the same
* calendar. Since the local default calendar may not be able to represent
* all times (for instance because the time would fall into a non-existing
* hour of the day when switching to daylight saving time, see DERBY-4582),
* we use the GMT time zone which doesn't observe daylight saving time.
*
* @return a calendar in the GMT time zone
*/
private Calendar getGMTCalendar() {
if (gmtCalendar == null) {
TimeZone gmt = TimeZone.getTimeZone("GMT");
gmtCalendar = Calendar.getInstance(gmt);
}
return gmtCalendar;
}
/**
* Read different types of input parameters and set them in
* PreparedStatement
* @param i index of the parameter
* @param stmt drda statement
* @param pmeta parameter meta data
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void readAndSetParams(int i,
DRDAStatement stmt,
ParameterMetaData pmeta)
throws DRDAProtocolException, SQLException
{
PreparedStatement ps = stmt.getPreparedStatement();
// mask out null indicator
final int drdaType = ((stmt.getParamDRDAType(i+1) | 0x01) & 0xff);
final int paramLenNumBytes = stmt.getParamLen(i+1);
if (ps instanceof CallableStatement)
{
if (stmt.isOutputParam(i+1))
{
CallableStatement cs = (CallableStatement) ps;
cs.registerOutParameter(i+1, stmt.getOutputParamType(i+1));
}
}
switch (drdaType)
{
case DRDAConstants.DRDA_TYPE_NBOOLEAN:
{
boolean paramVal = (reader.readByte() == 1);
if (SanityManager.DEBUG)
trace("boolean parameter value is: " + paramVal);
ps.setBoolean(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NSMALL:
{
short paramVal = (short) reader.readShort(getByteOrder());
if (SanityManager.DEBUG)
trace("short parameter value is: "+paramVal);
ps.setShort(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NINTEGER:
{
int paramVal = reader.readInt(getByteOrder());
if (SanityManager.DEBUG)
trace("integer parameter value is: "+paramVal);
ps.setInt(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NINTEGER8:
{
long paramVal = reader.readLong(getByteOrder());
if (SanityManager.DEBUG)
trace("parameter value is: "+paramVal);
ps.setLong(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NFLOAT4:
{
float paramVal = reader.readFloat(getByteOrder());
if (SanityManager.DEBUG)
trace("parameter value is: "+paramVal);
ps.setFloat(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NFLOAT8:
{
double paramVal = reader.readDouble(getByteOrder());
if (SanityManager.DEBUG)
trace("nfloat8 parameter value is: "+paramVal);
ps.setDouble(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NDECIMAL:
{
int precision = (paramLenNumBytes >> 8) & 0xff;
int scale = paramLenNumBytes & 0xff;
BigDecimal paramVal = reader.readBigDecimal(precision, scale);
if (SanityManager.DEBUG)
trace("ndecimal parameter value is: "+paramVal);
ps.setBigDecimal(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NDATE:
{
String paramVal = reader.readStringData(10).trim(); //parameter may be char value
if (SanityManager.DEBUG)
trace("ndate parameter value is: \""+paramVal+"\"");
try {
Calendar cal = getGMTCalendar();
ps.setDate(i+1, parseDate(paramVal, cal), cal);
} catch (java.lang.IllegalArgumentException e) {
// Just use SQLSTATE as message since, if user wants to
// retrieve it, the message will be looked up by the
// sqlcamessage() proc, which will get the localized
// message based on SQLSTATE, and will ignore the
// the message we use here...
throw new SQLException(SQLState.LANG_DATE_SYNTAX_EXCEPTION,
SQLState.LANG_DATE_SYNTAX_EXCEPTION.substring(0,5));
}
break;
}
case DRDAConstants.DRDA_TYPE_NTIME:
{
String paramVal = reader.readStringData(8).trim(); //parameter may be char value
if (SanityManager.DEBUG)
trace("ntime parameter value is: "+paramVal);
try {
Calendar cal = getGMTCalendar();
ps.setTime(i+1, parseTime(paramVal, cal), cal);
} catch (java.lang.IllegalArgumentException e) {
throw new SQLException(SQLState.LANG_DATE_SYNTAX_EXCEPTION,
SQLState.LANG_DATE_SYNTAX_EXCEPTION.substring(0,5));
}
break;
}
case DRDAConstants.DRDA_TYPE_NTIMESTAMP:
{
// JCC represents ts in a slightly different format than Java standard, so
// we do the conversion to Java standard here.
int timestampLength = appRequester.getTimestampLength();
String paramVal = reader.readStringData( timestampLength ).trim(); //parameter may be char value
if (SanityManager.DEBUG)
trace("ntimestamp parameter value is: "+paramVal);
try {
Calendar cal = getGMTCalendar();
ps.setTimestamp(i+1, parseTimestamp(paramVal, cal), cal);
} catch (java.lang.IllegalArgumentException e1) {
// thrown by parseTimestamp(...) for bad syntax...
throw new SQLException(SQLState.LANG_DATE_SYNTAX_EXCEPTION,
SQLState.LANG_DATE_SYNTAX_EXCEPTION.substring(0,5));
}
break;
}
case DRDAConstants.DRDA_TYPE_NCHAR:
case DRDAConstants.DRDA_TYPE_NVARCHAR:
case DRDAConstants.DRDA_TYPE_NLONG:
case DRDAConstants.DRDA_TYPE_NVARMIX:
case DRDAConstants.DRDA_TYPE_NLONGMIX:
{
String paramVal = reader.readLDStringData(stmt.ccsidMBCEncoding);
if (SanityManager.DEBUG)
trace("char/varchar parameter value is: "+paramVal);
ps.setString(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NFIXBYTE:
{
byte[] paramVal = reader.readBytes();
if (SanityManager.DEBUG)
trace("fix bytes parameter value is: "+ convertToHexString(paramVal));
ps.setBytes(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NVARBYTE:
case DRDAConstants.DRDA_TYPE_NLONGVARBYTE:
{
int length = reader.readNetworkShort(); //protocol control data always follows big endian
if (SanityManager.DEBUG)
trace("===== binary param length is: " + length);
byte[] paramVal = reader.readBytes(length);
ps.setBytes(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NUDT:
{
Object paramVal = readUDT();
ps.setObject(i+1, paramVal);
break;
}
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
case DRDAConstants.DRDA_TYPE_NLOBCSBCS:
case DRDAConstants.DRDA_TYPE_NLOBCDBCS:
{
long length = readLobLength(paramLenNumBytes);
if (length != 0) //can be -1 for CLI if "data at exec" mode, see clifp/exec test
{
stmt.addExtPosition(i);
}
else /* empty */
{
if (drdaType == DRDAConstants.DRDA_TYPE_NLOBBYTES)
ps.setBytes(i+1, new byte[0]);
else
ps.setString(i+1, "");
}
break;
}
case DRDAConstants.DRDA_TYPE_NLOBLOC:
{
//read the locator value
int paramVal = reader.readInt(getByteOrder());
if (SanityManager.DEBUG)
trace("locator value is: "+paramVal);
//Map the locator value to the Blob object in the
//Hash map.
java.sql.Blob blobFromLocator = (java.sql.Blob)
database.getConnection().getLOBMapping(paramVal);
//set the PreparedStatement parameter to the mapped
//Blob object.
ps.setBlob(i+1, blobFromLocator);
break;
}
case DRDAConstants.DRDA_TYPE_NCLOBLOC:
{
//read the locator value.
int paramVal = reader.readInt(getByteOrder());
if (SanityManager.DEBUG)
trace("locator value is: "+paramVal);
//Map the locator value to the Clob object in the
//Hash Map.
java.sql.Clob clobFromLocator = (java.sql.Clob)
database.getConnection().getLOBMapping(paramVal);
//set the PreparedStatement parameter to the mapped
//Clob object.
ps.setClob(i+1, clobFromLocator);
break;
}
case DRDAConstants.DRDA_TYPE_NROWID:
{
byte[] b = reader.readBytes();
SQLRowId paramVal = new SQLRowId(b);
ps.setRowId(i + 1, paramVal);
break;
}
default:
{
String paramVal = reader.readLDStringData(stmt.ccsidMBCEncoding);
if (SanityManager.DEBUG)
trace("default type parameter value is: "+paramVal);
ps.setObject(i+1, paramVal);
}
}
}
/** Read a UDT from the stream */
private Object readUDT() throws DRDAProtocolException
{
int length = reader.readNetworkShort(); //protocol control data always follows big endian
if (SanityManager.DEBUG) { trace("===== udt param length is: " + length); }
byte[] bytes = reader.readBytes(length);
try {
ByteArrayInputStream bais = new ByteArrayInputStream( bytes );
ObjectInputStream ois = new ObjectInputStream( bais );
return ois.readObject();
}
catch (Exception e)
{
markCommunicationsFailure
( e,"DRDAConnThread.readUDT()", "", e.getMessage(), "*" );
return null;
}
}
private long readLobLength(int extLenIndicator)
throws DRDAProtocolException
{
switch (extLenIndicator)
{
case 0x8002:
return (long) reader.readNetworkShort();
case 0x8004:
return (long) reader.readNetworkInt();
case 0x8006:
return (long) reader.readNetworkSixByteLong();
case 0x8008:
return (long) reader.readNetworkLong();
default:
throwSyntaxrm(CodePoint.SYNERRCD_INCORRECT_EXTENDED_LEN, extLenIndicator);
return 0L;
}
}
/**
* Parse a date string as it is received from the client.
*
* @param dateString the date string to parse
* @param cal the calendar in which the date is parsed
* @return a Date object representing the date in the specified calendar
* @see com.splicemachine.db.client.am.DateTime#dateToDateBytes
* @throws IllegalArgumentException if the date is not correctly formatted
*/
private java.sql.Date parseDate(String dateString, Calendar cal) {
// Get each component out of YYYY-MM-DD
String[] components = dateString.split("-");
if (components.length != 3) {
throw new IllegalArgumentException();
}
cal.clear();
// Set date components
cal.set(Calendar.YEAR, Integer.parseInt(components[0]));
cal.set(Calendar.MONTH, Integer.parseInt(components[1]) - 1);
cal.set(Calendar.DAY_OF_MONTH, Integer.parseInt(components[2]));
// Normalize time components as specified by java.sql.Date
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return new java.sql.Date(cal.getTimeInMillis());
}
/**
* Parse a time string as it is received from the client.
*
* @param timeString the time string to parse
* @param cal the calendar in which the time is parsed
* @return a Date object representing the time in the specified calendar
* @see com.splicemachine.db.client.am.DateTime#timeToTimeBytes
* @throws IllegalArgumentException if the time is not correctly formatted
*/
private Time parseTime(String timeString, Calendar cal) {
// Get each component out of HH:MM:SS
String[] components = timeString.split(":");
if (components.length != 3) {
throw new IllegalArgumentException();
}
cal.clear();
// Normalize date components as specified by java.sql.Time
cal.set(Calendar.YEAR, 1970);
cal.set(Calendar.MONTH, Calendar.JANUARY);
cal.set(Calendar.DAY_OF_MONTH, 1);
// Set time components
cal.set(Calendar.HOUR_OF_DAY, Integer.parseInt(components[0]));
cal.set(Calendar.MINUTE, Integer.parseInt(components[1]));
cal.set(Calendar.SECOND, Integer.parseInt(components[2]));
// No millisecond resolution for Time
cal.set(Calendar.MILLISECOND, 0);
return new Time(cal.getTimeInMillis());
}
/**
* Parse a timestamp string as it is received from the client.
*
* @param timeString the time string to parse
* @param cal the calendar in which the timestamp is parsed
* @return a Date object representing the timestamp in the specified
* calendar
* @see com.splicemachine.db.client.am.DateTime#timestampToTimestampBytes
* @throws IllegalArgumentException if the timestamp is not correctly
* formatted
*/
private Timestamp parseTimestamp(String timeString, Calendar cal) {
// Get each component out of YYYY-MM-DD-HH.MM.SS.fffffffff
String[] components = timeString.split("[-.]");
if (components.length != 7) {
throw new IllegalArgumentException();
}
cal.clear();
cal.set(Calendar.YEAR, Integer.parseInt(components[0]));
cal.set(Calendar.MONTH, Integer.parseInt(components[1]) - 1);
cal.set(Calendar.DAY_OF_MONTH, Integer.parseInt(components[2]));
cal.set(Calendar.HOUR_OF_DAY, Integer.parseInt(components[3]));
cal.set(Calendar.MINUTE, Integer.parseInt(components[4]));
cal.set(Calendar.SECOND, Integer.parseInt(components[5]));
int nanos = 0;
final int radix = 10;
String nanoString = components[6];
// Get up to nine digits from the nano second component
for (int i = 0; i < 9; i++) {
// Scale up the intermediate result
nanos *= radix;
// Add the next digit, if there is one. Continue the loop even if
// there are no more digits, since we still need to scale up the
// intermediate result as if the fraction part were padded with
// zeros.
if (i < nanoString.length()) {
int digit = Character.digit(nanoString.charAt(i), radix);
if (digit == -1) {
// not a digit
throw new IllegalArgumentException();
}
nanos += digit;
}
}
Timestamp ts = new Timestamp(cal.getTimeInMillis());
ts.setNanos(nanos);
return ts;
}
private void readAndSetAllExtParams(final DRDAStatement stmt, final boolean streamLOB)
throws SQLException, DRDAProtocolException
{
final int numExt = stmt.getExtPositionCount();
for (int i = 0; i < numExt; i++)
{
int paramPos = stmt.getExtPosition(i);
// Only the last EXTDTA is streamed. This is because all of
// the parameters have to be set before execution and are
// consecutive in the network server stream, so only the last
// one can be streamed.
final boolean doStreamLOB = (streamLOB && i == numExt -1);
readAndSetExtParam(paramPos,
stmt,
stmt.getParamDRDAType(paramPos+1),
stmt.getParamLen(paramPos+1),
doStreamLOB);
// Each extdta in it's own dss
if (i < numExt -1)
{
correlationID = reader.readDssHeader();
int codePoint = reader.readLengthAndCodePoint( true );
}
}
}
/**
* Read different types of input parameters and set them in PreparedStatement
* @param i zero-based index of the parameter
* @param stmt associated ps
* @param drdaType drda type of the parameter
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void readAndSetExtParam( int i, DRDAStatement stmt,
int drdaType, int extLen, boolean streamLOB)
throws DRDAProtocolException, SQLException
{
// Note the switch from zero-based to one-based index below.
drdaType = (drdaType & 0x000000ff); // need unsigned value
boolean checkNullability = false;
if (sqlamLevel >= MGRLVL_7 &&
FdocaConstants.isNullable(drdaType))
checkNullability = true;
final EXTDTAReaderInputStream stream =
reader.getEXTDTAReaderInputStream(checkNullability);
// Determine encoding first, mostly for debug/tracing purposes
String encoding = "na";
switch (drdaType) {
case DRDAConstants.DRDA_TYPE_LOBCSBCS:
case DRDAConstants.DRDA_TYPE_NLOBCSBCS:
encoding = stmt.ccsidSBCEncoding;
break;
case DRDAConstants.DRDA_TYPE_LOBCDBCS:
case DRDAConstants.DRDA_TYPE_NLOBCDBCS:
encoding = stmt.ccsidDBCEncoding;
break;
case DRDAConstants.DRDA_TYPE_LOBCMIXED:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
encoding = stmt.ccsidMBCEncoding;
break;
}
traceEXTDTARead(drdaType, i+1, stream, streamLOB, encoding);
try {
switch (drdaType)
{
case DRDAConstants.DRDA_TYPE_LOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
setAsBinaryStream(stmt, i+1, stream, streamLOB);
break;
case DRDAConstants.DRDA_TYPE_LOBCSBCS:
case DRDAConstants.DRDA_TYPE_NLOBCSBCS:
case DRDAConstants.DRDA_TYPE_LOBCDBCS:
case DRDAConstants.DRDA_TYPE_NLOBCDBCS:
case DRDAConstants.DRDA_TYPE_LOBCMIXED:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
setAsCharacterStream(stmt, i+1, stream, streamLOB,
encoding);
break;
default:
invalidValue(drdaType);
}
}
catch (java.io.UnsupportedEncodingException e) {
throw new SQLException (e.getMessage());
} catch( IOException e ){
throw new SQLException ( e.getMessage() );
}
}
/**
* Parse EXCSQLIMM - Execute Immediate Statement
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* RDBCMTOK - RDB Commit Allowed - optional
* MONITOR - Monitor Events - optional
*
* Command Objects
* TYPDEFNAM - Data Type Definition Name - optional
* TYPDEFOVR - TYPDEF Overrides -optional
* SQLSTT - SQL Statement -required
*
* @return update count
* @throws DRDAProtocolException
* @throws SQLException
*/
private int parseEXCSQLIMM() throws DRDAProtocolException,SQLException
{
int codePoint;
reader.markCollection();
Pkgnamcsn pkgnamcsn = null;
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.EXCSQLIMM);
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
case CodePoint.RDBCMTOK:
parseRDBCMTOK();
break;
//optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
DRDAStatement drdaStmt = database.getDefaultStatement(pkgnamcsn);
// initialize statement for reuse
drdaStmt.initialize();
String sqlStmt = parseEXECSQLIMMobjects();
Statement statement = drdaStmt.getStatement();
statement.clearWarnings();
if (pendingStatementTimeout >= 0) {
statement.setQueryTimeout(pendingStatementTimeout);
pendingStatementTimeout = -1;
}
int updCount = statement.executeUpdate(sqlStmt);
return updCount;
}
/**
* Parse EXCSQLSET - Execute Set SQL Environment
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCT - RDB Package Name, Consistency Token - optional
* MONITOR - Monitor Events - optional
*
* Command Objects
* TYPDEFNAM - Data Type Definition Name - required
* TYPDEFOVR - TYPDEF Overrides - required
* SQLSTT - SQL Statement - required (at least one; may be more)
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private boolean parseEXCSQLSET() throws DRDAProtocolException,SQLException
{
int codePoint;
reader.markCollection();
codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.EXCSQLSET);
break;
// optional
case CodePoint.PKGNAMCT:
// we are going to ignore this for EXCSQLSET
// since we are just going to reuse an existing statement
String pkgnamct = parsePKGNAMCT();
break;
// optional
case CodePoint.MONITOR:
parseMONITOR();
break;
// required
case CodePoint.PKGNAMCSN:
// we are going to ignore this for EXCSQLSET.
// since we are just going to reuse an existing statement.
// NOTE: This codepoint is not in the DDM spec for 'EXCSQLSET',
// but since it DOES get sent by jcc1.2, we have to have
// a case for it...
Pkgnamcsn pkgnamcsn = parsePKGNAMCSN();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
parseEXCSQLSETobjects();
return true;
}
/**
* Parse EXCSQLIMM objects
* Objects
* TYPDEFNAM - Data type definition name - optional
* TYPDEFOVR - Type defintion overrides
* SQLSTT - SQL Statement required
*
* If TYPDEFNAM and TYPDEFOVR are supplied, they apply to the objects
* sent with the statement. Once the statement is over, the default values
* sent in the ACCRDB are once again in effect. If no values are supplied,
* the values sent in the ACCRDB are used.
* Objects may follow in one DSS or in several DSS chained together.
*
* @return SQL Statement
* @throws DRDAProtocolException
* @throws SQLException
*/
private String parseEXECSQLIMMobjects() throws DRDAProtocolException, SQLException
{
String sqlStmt = null;
int codePoint;
DRDAStatement stmt = database.getDefaultStatement();
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, stmt, parseTYPDEFNAM());
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(stmt);
break;
// required
case CodePoint.SQLSTT:
sqlStmt = parseEncodedString();
if (SanityManager.DEBUG)
trace("sqlStmt = " + sqlStmt);
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
// SQLSTT is required
if (sqlStmt == null)
missingCodePoint(CodePoint.SQLSTT);
return sqlStmt;
}
/**
* Parse EXCSQLSET objects
* Objects
* TYPDEFNAM - Data type definition name - optional
* TYPDEFOVR - Type defintion overrides - optional
* SQLSTT - SQL Statement - required (a list of at least one)
*
* Objects may follow in one DSS or in several DSS chained together.
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void parseEXCSQLSETobjects()
throws DRDAProtocolException, SQLException
{
boolean gotSqlStt = false;
boolean hadUnrecognizedStmt = false;
String sqlStmt = null;
int codePoint;
DRDAStatement drdaStmt = database.getDefaultStatement();
drdaStmt.initialize();
do
{
correlationID = reader.readDssHeader();
while (reader.moreDssData())
{
codePoint = reader.readLengthAndCodePoint( false );
switch(codePoint)
{
// optional
case CodePoint.TYPDEFNAM:
setStmtOrDbByteOrder(false, drdaStmt, parseTYPDEFNAM());
break;
// optional
case CodePoint.TYPDEFOVR:
parseTYPDEFOVR(drdaStmt);
break;
// required
case CodePoint.SQLSTT:
sqlStmt = parseEncodedString();
if (sqlStmt != null)
// then we have at least one SQL Statement.
gotSqlStt = true;
if (sqlStmt.startsWith(TIMEOUT_STATEMENT)) {
String timeoutString = sqlStmt.substring(TIMEOUT_STATEMENT.length());
pendingStatementTimeout = Integer.parseInt(timeoutString);
break;
}
if (canIgnoreStmt(sqlStmt)) {
// We _know_ Derby doesn't recognize this
// statement; don't bother trying to execute it.
// NOTE: at time of writing, this only applies
// to "SET CLIENT" commands, and it was decided
// that throwing a Warning for these commands
// would confuse people, so even though the DDM
// spec says to do so, we choose not to (but
// only for SET CLIENT cases). If this changes
// at some point in the future, simply remove
// the follwing line; we will then throw a
// warning.
// hadUnrecognizedStmt = true;
break;
}
if (SanityManager.DEBUG)
trace("sqlStmt = " + sqlStmt);
// initialize statement for reuse
drdaStmt.initialize();
drdaStmt.getStatement().clearWarnings();
try {
drdaStmt.getStatement().executeUpdate(sqlStmt);
} catch (SQLException e) {
// if this is a syntax error, then we take it
// to mean that the given SET statement is not
// recognized; take note (so we can throw a
// warning later), but don't interfere otherwise.
if (e.getSQLState().equals(SYNTAX_ERR))
hadUnrecognizedStmt = true;
else
// something else; assume it's serious.
throw e;
}
break;
default:
invalidCodePoint(codePoint);
}
}
} while (reader.isChainedWithSameID());
// SQLSTT is required.
if (!gotSqlStt)
missingCodePoint(CodePoint.SQLSTT);
// Now that we've processed all SET statements (assuming no
// severe exceptions), check for warnings and, if we had any,
// note this in the SQLCARD reply object (but DON'T cause the
// EXCSQLSET statement to fail).
if (hadUnrecognizedStmt) {
SQLWarning warn = new SQLWarning("One or more SET statements " +
"not recognized.", "01000");
throw warn;
} // end if.
return;
}
private boolean canIgnoreStmt(String stmt)
{
if (stmt.indexOf("SET CLIENT") != -1)
return true;
return false;
}
/**
* Write RDBUPDRM
* Instance variables
* SVRCOD - Severity code - Information only - required
* RDBNAM - Relational database name -required
* SRVDGN - Server Diagnostic Information -optional
*
* @exception DRDAProtocolException
*/
private void writeRDBUPDRM() throws DRDAProtocolException
{
database.RDBUPDRM_sent = true;
writer.createDssReply();
writer.startDdm(CodePoint.RDBUPDRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_INFO);
writeRDBNAM(database.getDatabaseName());
writer.endDdmAndDss();
}
private String parsePKGNAMCT() throws DRDAProtocolException
{
reader.skipBytes();
return null;
}
/**
* Parse PKGNAMCSN - RDB Package Name, Consistency Token, and Section Number
* Instance Variables
* NAMESYMDR - database name - not validated
* RDBCOLID - RDB Collection Identifier
* PKGID - RDB Package Identifier
* PKGCNSTKN - RDB Package Consistency Token
* PKGSN - RDB Package Section Number
*
* @return <code>Pkgnamcsn</code> value
* @throws DRDAProtocolException
*/
private Pkgnamcsn parsePKGNAMCSN() throws DRDAProtocolException
{
if (reader.getDdmLength() == CodePoint.PKGNAMCSN_LEN)
{
// This is a scalar object with the following fields
reader.readString(rdbnam, CodePoint.RDBNAM_LEN, true);
if (SanityManager.DEBUG)
trace("rdbnam = " + rdbnam);
// A check that the rdbnam field corresponds to a database
// specified in a ACCRDB term.
// The check is not performed if the client is DNC_CLIENT
// with version before 10.3.0 because these clients
// are broken and send incorrect database name
// if multiple connections to different databases
// are created
// This check was added because of DERBY-1434
// check the client version first
if (appRequester.greaterThanOrEqualTo(10,3,0) ) {
// check the database name
if (!rdbnam.toString().equals(database.getDatabaseName()))
rdbnamMismatch(CodePoint.PKGNAMCSN);
}
reader.readString(rdbcolid, CodePoint.RDBCOLID_LEN, true);
if (SanityManager.DEBUG)
trace("rdbcolid = " + rdbcolid);
reader.readString(pkgid, CodePoint.PKGID_LEN, true);
if (SanityManager.DEBUG)
trace("pkgid = " + pkgid);
// we need to use the same UCS2 encoding, as this can be
// bounced back to jcc (or keep the byte array)
reader.readString(pkgcnstkn, CodePoint.PKGCNSTKN_LEN, false);
if (SanityManager.DEBUG)
trace("pkgcnstkn = " + pkgcnstkn);
pkgsn = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("pkgsn = " + pkgsn);
}
else // extended format
{
int length = reader.readNetworkShort();
if (length < CodePoint.RDBNAM_LEN || length > CodePoint.MAX_NAME)
badObjectLength(CodePoint.RDBNAM);
reader.readString(rdbnam, length, true);
if (SanityManager.DEBUG)
trace("rdbnam = " + rdbnam);
// A check that the rdbnam field corresponds to a database
// specified in a ACCRDB term.
// The check is not performed if the client is DNC_CLIENT
// with version before 10.3.0 because these clients
// are broken and send incorrect database name
// if multiple connections to different databases
// are created
// This check was added because of DERBY-1434
// check the client version first
if ( appRequester.getClientType() != AppRequester.DNC_CLIENT
|| appRequester.greaterThanOrEqualTo(10,3,0) ) {
// check the database name
if (!rdbnam.toString().equals(database.getDatabaseName()))
rdbnamMismatch(CodePoint.PKGNAMCSN);
}
//RDBCOLID can be variable length in this format
length = reader.readNetworkShort();
reader.readString(rdbcolid, length, true);
if (SanityManager.DEBUG)
trace("rdbcolid = " + rdbcolid);
length = reader.readNetworkShort();
if (length != CodePoint.PKGID_LEN)
badObjectLength(CodePoint.PKGID);
reader.readString(pkgid, CodePoint.PKGID_LEN, true);
if (SanityManager.DEBUG)
trace("pkgid = " + pkgid);
reader.readString(pkgcnstkn, CodePoint.PKGCNSTKN_LEN, false);
if (SanityManager.DEBUG)
trace("pkgcnstkn = " + pkgcnstkn);
pkgsn = reader.readNetworkShort();
if (SanityManager.DEBUG)
trace("pkgsn = " + pkgsn);
}
// In most cases, the pkgnamcsn object is equal to the
// previously returned object. To avoid allocation of a new
// object in these cases, we first check to see if the old
// object can be reused.
if ((prevPkgnamcsn == null) ||
rdbnam.wasModified() ||
rdbcolid.wasModified() ||
pkgid.wasModified() ||
pkgcnstkn.wasModified() ||
(prevPkgnamcsn.getPkgsn() != pkgsn))
{
// The byte array returned by pkgcnstkn.getBytes() might
// be modified by DDMReader.readString() later, so we have
// to create a copy of the array.
byte[] token = new byte[pkgcnstkn.length()];
System.arraycopy(pkgcnstkn.getBytes(), 0, token, 0, token.length);
prevPkgnamcsn =
new Pkgnamcsn(rdbnam.toString(), rdbcolid.toString(),
pkgid.toString(), pkgsn,
new ConsistencyToken(token));
}
return prevPkgnamcsn;
}
/**
* Parse SQLSTT Dss
* @exception DRDAProtocolException
*/
private String parseSQLSTTDss() throws DRDAProtocolException
{
correlationID = reader.readDssHeader();
int codePoint = reader.readLengthAndCodePoint( false );
String strVal = parseEncodedString();
if (SanityManager.DEBUG)
trace("SQL Statement = " + strVal);
return strVal;
}
/**
* Parse an encoded data string from the Application Requester
*
* @return string value
* @exception DRDAProtocolException
*/
private String parseEncodedString() throws DRDAProtocolException
{
if (sqlamLevel < 7)
return parseVCMorVCS();
else
return parseNOCMorNOCS();
}
/**
* Parse variable character mixed byte or variable character single byte
* Format
* I2 - VCM Length
* N bytes - VCM value
* I2 - VCS Length
* N bytes - VCS value
* Only 1 of VCM length or VCS length can be non-zero
*
* @return string value
*/
private String parseVCMorVCS() throws DRDAProtocolException
{
String strVal = null;
int vcm_length = reader.readNetworkShort();
if (vcm_length > 0)
strVal = parseCcsidMBC(vcm_length);
int vcs_length = reader.readNetworkShort();
if (vcs_length > 0)
{
if (strVal != null)
agentError ("Both VCM and VCS have lengths > 0");
strVal = parseCcsidSBC(vcs_length);
}
return strVal;
}
/**
* Parse nullable character mixed byte or nullable character single byte
* Format
* 1 byte - null indicator
* I4 - mixed character length
* N bytes - mixed character string
* 1 byte - null indicator
* I4 - single character length
* N bytes - single character length string
*
* @return string value
* @exception DRDAProtocolException
*/
private String parseNOCMorNOCS() throws DRDAProtocolException
{
byte nocm_nullByte = reader.readByte();
String strVal = null;
int length;
if (nocm_nullByte != NULL_VALUE)
{
length = reader.readNetworkInt();
strVal = parseCcsidMBC(length);
}
byte nocs_nullByte = reader.readByte();
if (nocs_nullByte != NULL_VALUE)
{
if (strVal != null)
agentError("Both CM and CS are non null");
length = reader.readNetworkInt();
strVal = parseCcsidSBC(length);
}
return strVal;
}
/**
* Parse mixed character string
*
* @return string value
* @exception DRDAProtocolException
*/
private String parseCcsidMBC(int length) throws DRDAProtocolException
{
String strVal = null;
DRDAStatement currentStatement;
currentStatement = database.getCurrentStatement();
if (currentStatement == null)
{
currentStatement = database.getDefaultStatement();
currentStatement.initialize();
}
String ccsidMBCEncoding = currentStatement.ccsidMBCEncoding;
if (length == 0)
return null;
byte [] byteStr = reader.readBytes(length);
if (ccsidMBCEncoding != null)
{
try {
strVal = new String(byteStr, 0, length, ccsidMBCEncoding);
} catch (UnsupportedEncodingException e) {
agentError("Unsupported encoding " + ccsidMBCEncoding +
"in parseCcsidMBC");
}
}
else
agentError("Attempt to decode mixed byte string without CCSID being set");
return strVal;
}
/**
* Parse single byte character string
*
* @return string value
* @exception DRDAProtocolException
*/
private String parseCcsidSBC(int length) throws DRDAProtocolException
{
String strVal = null;
DRDAStatement currentStatement;
currentStatement = database.getCurrentStatement();
if (currentStatement == null)
{
currentStatement = database.getDefaultStatement();
currentStatement.initialize();
}
String ccsidSBCEncoding = currentStatement.ccsidSBCEncoding;
System.out.println("ccsidSBCEncoding - " + ccsidSBCEncoding);
if (length == 0)
return null;
byte [] byteStr = reader.readBytes(length);
if (ccsidSBCEncoding != null)
{
try {
strVal = new String(byteStr, 0, length, ccsidSBCEncoding);
} catch (UnsupportedEncodingException e) {
agentError("Unsupported encoding " + ccsidSBCEncoding +
"in parseCcsidSBC");
}
}
else
agentError("Attempt to decode single byte string without CCSID being set");
return strVal;
}
/**
* Parse CLSQRY
* Instance Variables
* RDBNAM - relational database name - optional
* PKGNAMCSN - RDB Package Name, Consistency Token and Section Number - required
* QRYINSID - Query Instance Identifier - required - level 7
* MONITOR - Monitor events - optional.
*
* @return DRDAstatement being closed
* @throws DRDAProtocolException
* @throws SQLException
*/
private DRDAStatement parseCLSQRY() throws DRDAProtocolException, SQLException
{
Pkgnamcsn pkgnamcsn = null;
reader.markCollection();
long qryinsid = 0;
boolean gotQryinsid = false;
int codePoint = reader.getCodePoint();
while (codePoint != -1)
{
switch (codePoint)
{
// optional
case CodePoint.RDBNAM:
setDatabase(CodePoint.CLSQRY);
break;
// required
case CodePoint.PKGNAMCSN:
pkgnamcsn = parsePKGNAMCSN();
break;
case CodePoint.QRYINSID:
qryinsid = reader.readNetworkLong();
gotQryinsid = true;
break;
// optional
case CodePoint.MONITOR:
parseMONITOR();
break;
default:
invalidCodePoint(codePoint);
}
codePoint = reader.getCodePoint();
}
// check for required variables
if (pkgnamcsn == null)
missingCodePoint(CodePoint.PKGNAMCSN);
if (sqlamLevel >= MGRLVL_7 && !gotQryinsid)
missingCodePoint(CodePoint.QRYINSID);
DRDAStatement stmt = database.getDRDAStatement(pkgnamcsn);
if (stmt == null)
{
//XXX should really throw a SQL Exception here
invalidValue(CodePoint.PKGNAMCSN);
}
if (stmt.wasExplicitlyClosed())
{
// JCC still sends a CLSQRY even though we have
// implicitly closed the resultSet.
// Then complains if we send the writeQRYNOPRM
// So for now don't send it
// Also metadata calls seem to get bound to the same
// PGKNAMCSN, so even for explicit closes we have
// to ignore.
//writeQRYNOPRM(CodePoint.SVRCOD_ERROR);
pkgnamcsn = null;
}
stmt.CLSQRY();
return stmt;
}
/**
* Parse MONITOR
* DRDA spec says this is optional. Since we
* don't currently support it, we just ignore.
*/
private void parseMONITOR()
throws DRDAProtocolException
{
// Just ignore it.
reader.skipBytes();
return;
}
private void writeSQLCARDs(SQLException e, int updateCount)
throws DRDAProtocolException
{
writeSQLCARDs(e, updateCount, false);
}
private void writeSQLCARDs(SQLException e, int updateCount, boolean sendSQLERRRM)
throws DRDAProtocolException
{
int severity = CodePoint.SVRCOD_INFO;
if (e == null)
{
writeSQLCARD(e,severity, updateCount, 0);
return;
}
// instead of writing a chain of sql error or warning, we send the first one, this is
// jcc/db2 limitation, see beetle 4629
// If it is a real SQL Error write a SQLERRRM first
severity = getExceptionSeverity(e);
if (severity > CodePoint.SVRCOD_ERROR)
{
// For a session ending error > CodePoint.SRVCOD_ERROR you cannot
// send a SQLERRRM. A CMDCHKRM is required. In XA if there is a
// lock timeout it ends the whole session. I am not sure this
// is the correct behaviour but if it occurs we have to send
// a CMDCHKRM instead of SQLERRM
writeCMDCHKRM(severity);
}
else if (sendSQLERRRM)
{
writeSQLERRRM(severity);
}
writeSQLCARD(e,severity, updateCount, 0);
}
private int getSqlCode(int severity)
{
if (severity == CodePoint.SVRCOD_WARNING) // warning
return 100; //CLI likes it
else if (severity == CodePoint.SVRCOD_INFO)
return 0;
else
return -1;
}
private void writeSQLCARD(SQLException e,int severity,
int updateCount, long rowCount ) throws DRDAProtocolException
{
writer.createDssObject();
writer.startDdm(CodePoint.SQLCARD);
writeSQLCAGRP(e, updateCount, rowCount);
writer.endDdmAndDss();
// If we have a shutdown exception, restart the server.
if (e != null) {
String sqlState = e.getSQLState();
if (sqlState.regionMatches(0,
SQLState.CLOUDSCAPE_SYSTEM_SHUTDOWN, 0, 5)) {
// then we're here because of a shutdown exception;
// "clean up" by restarting the server.
try {
server.startNetworkServer();
} catch (Exception restart)
// any error messages should have already been printed,
// so we ignore this exception here.
{}
}
}
}
/**
* Write a null SQLCARD as an object
*
* @exception DRDAProtocolException
*/
private void writeNullSQLCARDobject()
throws DRDAProtocolException
{
writer.createDssObject();
writer.startDdm(CodePoint.SQLCARD);
writeSQLCAGRP(nullSQLState, 0, 0, 0);
writer.endDdmAndDss();
}
/**
* Write SQLERRRM
*
* Instance Variables
* SVRCOD - Severity Code - required
*
* @param severity severity of error
*
* @exception DRDAProtocolException
*/
private void writeSQLERRRM(int severity) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.SQLERRRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, severity);
writer.endDdmAndDss ();
}
/**
* Write CMDCHKRM
*
* Instance Variables
* SVRCOD - Severity Code - required
*
* @param severity severity of error
*
* @exception DRDAProtocolException
*/
private void writeCMDCHKRM(int severity) throws DRDAProtocolException
{
writer.createDssReply();
writer.startDdm(CodePoint.CMDCHKRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, severity);
writer.endDdmAndDss ();
}
/**
* Translate from Derby exception severity to SVRCOD
*
* @param e SQLException
*/
private int getExceptionSeverity (SQLException e)
{
int severity= CodePoint.SVRCOD_INFO;
if (e == null)
return severity;
int ec = e.getErrorCode();
switch (ec)
{
case ExceptionSeverity.STATEMENT_SEVERITY:
case ExceptionSeverity.TRANSACTION_SEVERITY:
severity = CodePoint.SVRCOD_ERROR;
break;
case ExceptionSeverity.WARNING_SEVERITY:
severity = CodePoint.SVRCOD_WARNING;
break;
case ExceptionSeverity.SESSION_SEVERITY:
case ExceptionSeverity.DATABASE_SEVERITY:
case ExceptionSeverity.SYSTEM_SEVERITY:
severity = CodePoint.SVRCOD_SESDMG;
break;
default:
String sqlState = e.getSQLState();
if (sqlState != null && sqlState.startsWith("01")) // warning
severity = CodePoint.SVRCOD_WARNING;
else
severity = CodePoint.SVRCOD_ERROR;
}
return severity;
}
/**
* Write SQLCAGRP
*
* SQLCAGRP : FDOCA EARLY GROUP
* SQL Communcations Area Group Description
*
* FORMAT FOR SQLAM <= 6
* SQLCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLSTATE; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLERRPROC; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLCAXGRP; DRDA TYPE N-GDA; ENVLID 0x52; Length Override 0
*
* FORMAT FOR SQLAM >= 7
* SQLCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLSTATE; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLERRPROC; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLCAXGRP; DRDA TYPE N-GDA; ENVLID 0x52; Length Override 0
* SQLDIAGGRP; DRDA TYPE N-GDA; ENVLID 0x56; Length Override 0
*
* @param e SQLException encountered
*
* @exception DRDAProtocolException
*/
private void writeSQLCAGRP(SQLException e, int updateCount, long rowCount)
throws DRDAProtocolException
{
int sqlcode = 0;
if (e == null) {
// Forwarding to the optimized version when there is no
// exception object
writeSQLCAGRP(nullSQLState, sqlcode, updateCount, rowCount);
return;
}
// SQLWarnings should have warning severity, except if it's a
// DataTruncation warning for write operations (with SQLState 22001),
// which is supposed to be used as an exception even though it's a
// sub-class of SQLWarning.
if (e instanceof SQLWarning &&
!SQLState.LANG_STRING_TRUNCATION.equals(e.getSQLState())) {
sqlcode = ExceptionSeverity.WARNING_SEVERITY;
} else {
// Get the SQLCODE for exceptions. Note that this call will always
// return -1, so the real error code will be lost.
sqlcode = getSqlCode(getExceptionSeverity(e));
}
if (rowCount < 0 && updateCount < 0)
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
if (SanityManager.DEBUG && server.debugOutput && sqlcode < 0) {
trace("handle SQLException here");
trace("reason is: "+e.getMessage());
trace("SQLState is: "+e.getSQLState());
trace("vendorCode is: "+e.getErrorCode());
trace("nextException is: "+e.getNextException());
server.consoleExceptionPrint(e);
trace("wrapping SQLException into SQLCARD...");
}
//null indicator
writer.writeByte(0);
// SQLCODE
writer.writeInt(sqlcode);
// SQLSTATE
writer.writeString(e.getSQLState());
// SQLERRPROC
// Write the byte[] constant rather than the string, for efficiency
writer.writeBytes(server.prdIdBytes_);
// SQLCAXGRP
writeSQLCAXGRP(updateCount, rowCount, buildSqlerrmc(e), e.getNextException());
}
/**
* Same as writeSQLCAGRP, but optimized for the case
* when there is no real exception, i.e. the exception is null, or "End
* of data"
*
* SQLCAGRP : FDOCA EARLY GROUP
* SQL Communcations Area Group Description
*
* FORMAT FOR SQLAM <= 6
* SQLCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLSTATE; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLERRPROC; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLCAXGRP; DRDA TYPE N-GDA; ENVLID 0x52; Length Override 0
*
* FORMAT FOR SQLAM >= 7
* SQLCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLSTATE; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLERRPROC; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLCAXGRP; DRDA TYPE N-GDA; ENVLID 0x52; Length Override 0
* SQLDIAGGRP; DRDA TYPE N-GDA; ENVLID 0x56; Length Override 0
*
* @param sqlState SQLState (already converted to UTF8)
* @param sqlcode sqlcode
* @param updateCount
* @param rowCount
*
* @exception DRDAProtocolException
*/
private void writeSQLCAGRP(byte[] sqlState, int sqlcode,
int updateCount, long rowCount) throws DRDAProtocolException
{
if (rowCount < 0 && updateCount < 0) {
writer.writeByte(CodePoint.NULLDATA);
return;
}
//null indicator
writer.writeByte(0);
// SQLCODE
writer.writeInt(sqlcode);
// SQLSTATE
writer.writeBytes(sqlState);
// SQLERRPROC
writer.writeBytes(server.prdIdBytes_);
// SQLCAXGRP (Uses null as sqlerrmc since there is no error)
writeSQLCAXGRP(updateCount, rowCount, null, null);
}
// Delimiters for SQLERRMC values.
// The token delimiter value will be used to parse the MessageId from the
// SQLERRMC in MessageService.getLocalizedMessage and the MessageId will be
// used to retrive the localized message. If this delimiter value is changed
// please make sure to make appropriate changes in
// MessageService.getLocalizedMessage that gets called from
// SystemProcedures.SQLCAMESSAGE
/**
* <code>SQLERRMC_TOKEN_DELIMITER</code> separates message argument tokens
*/
private static String SQLERRMC_TOKEN_DELIMITER = new String(new char[] {(char)20});
/**
* <code>SQLERRMC_PREFORMATTED_MESSAGE_DELIMITER</code>, When full message text is
* sent for severe errors. This value separates the messages.
*/
private static String SQLERRMC_PREFORMATTED_MESSAGE_DELIMITER = "::";
/**
* Create error message or message argements to return to client.
* The SQLERRMC will normally be passed back to the server in a call
* to the SYSIBM.SQLCAMESSAGE but for severe exceptions the stored procedure
* call cannot be made. So for Severe messages we will just send the message text.
*
* This method will also truncate the value according the client capacity.
* CCC can only handle 70 characters.
*
* Server sends the sqlerrmc using UTF8 encoding to the client.
* To get the message, client sends back information to the server
* calling SYSIBM.SQLCAMESSAGE (see Sqlca.getMessage). Several parameters
* are sent to this procedure including the locale, the sqlerrmc that the
* client received from the server.
* On server side, the procedure SQLCAMESSAGE in SystemProcedures then calls
* the MessageService.getLocalizedMessage to retrieve the localized error message.
* In MessageService.getLocalizedMessage the sqlerrmc that is passed in,
* is parsed to retrieve the message id. The value it uses to parse the MessageId
* is char value of 20, otherwise it uses the entire sqlerrmc as the message id.
* This messageId is then used to retrieve the localized message if present, to
* the client.
*
* @param se SQLException to build SQLERRMC
*
* @return String which is either the message arguments to be passed to
* SYSIBM.SQLCAMESSAGE or just message text for severe errors.
*/
private String buildSqlerrmc (SQLException se)
{
boolean severe = (se.getErrorCode() >= ExceptionSeverity.SESSION_SEVERITY);
String sqlerrmc = null;
// get exception which carries Derby messageID and args, per DERBY-1178
se = Util.getExceptionFactory().getArgumentFerry( se );
if (se instanceof EmbedSQLException && ! severe)
sqlerrmc = buildTokenizedSqlerrmc(se);
else if (se instanceof DataTruncation)
sqlerrmc = buildDataTruncationSqlerrmc((DataTruncation) se);
else {
// If this is not an EmbedSQLException or is a severe excecption where
// we have no hope of succussfully calling the SYSIBM.SQLCAMESSAGE send
// preformatted message using the server locale
sqlerrmc = buildPreformattedSqlerrmc(se);
}
// Truncate the sqlerrmc to a length that the client can support.
int maxlen = (sqlerrmc == null) ? -1 : Math.min(sqlerrmc.length(),
appRequester.supportedMessageParamLength());
if ((maxlen >= 0) && (sqlerrmc.length() > maxlen))
// have to truncate so the client can handle it.
sqlerrmc = sqlerrmc.substring(0, maxlen);
return sqlerrmc;
}
/**
* Build preformatted SQLException text
* for severe exceptions or SQLExceptions that are not EmbedSQLExceptions.
* Just send the message text localized to the server locale.
*
* @param se SQLException for which to build SQLERRMC
* @return preformated message text
* with messages separted by SQLERRMC_PREFORMATED_MESSAGE_DELIMITER
*
*/
private String buildPreformattedSqlerrmc(SQLException se) {
if (se == null)
return "";
StringBuffer sb = new StringBuffer();
// String buffer to build up message
do {
sb.append(se.getLocalizedMessage());
se = se.getNextException();
if (se != null)
sb.append(SQLERRMC_PREFORMATTED_MESSAGE_DELIMITER +
"SQLSTATE: " + se.getSQLState());
} while (se != null);
return sb.toString();
}
/**
* Build Tokenized SQLERRMC to just send the tokenized arguments to the client.
* for a Derby SQLException or an SQLException thrown by user code.
* Message argument tokens are separated by SQLERRMC_TOKEN_DELIMITER
* Multiple messages are separated by SystemProcedures.SQLERRMC_MESSAGE_DELIMITER
*
* ...
* @param se SQLException to print
*
*/
private String buildTokenizedSqlerrmc(SQLException se) {
String sqlerrmc = "";
do {
if ( se instanceof EmbedSQLException)
{
String messageId = ((EmbedSQLException)se).getMessageId();
// arguments are variable part of a message
Object[] args = ((EmbedSQLException)se).getArguments();
for (int i = 0; args != null && i < args.length; i++)
sqlerrmc += args[i] + SQLERRMC_TOKEN_DELIMITER;
sqlerrmc += messageId;
se = se.getNextException();
}
else
{
// this could happen for instance if an SQLException was thrown
// from a stored procedure.
StringBuffer sb = new StringBuffer();
sb.append(se.getLocalizedMessage());
se = se.getNextException();
if (se != null)
sb.append(SQLERRMC_TOKEN_DELIMITER +
"SQLSTATE: " + se.getSQLState());
sqlerrmc += sb.toString();
}
if (se != null)
{
sqlerrmc += SystemProcedures.SQLERRMC_MESSAGE_DELIMITER + se.getSQLState() + ":";
}
} while (se != null);
return sqlerrmc;
}
/**
* Build the SQLERRMC for a {@code java.sql.DataTruncation} warning.
* Serialize all the fields of the {@code DataTruncation} instance in the
* order in which they appear in the parameter list of the constructor.
*
* @param dt the {@code DataTruncation} instance to serialize
* @return the SQLERRMC string with all fields of the warning
*/
private String buildDataTruncationSqlerrmc(DataTruncation dt) {
return dt.getIndex() + SQLERRMC_TOKEN_DELIMITER +
dt.getParameter() + SQLERRMC_TOKEN_DELIMITER +
dt.getRead() + SQLERRMC_TOKEN_DELIMITER +
dt.getDataSize() + SQLERRMC_TOKEN_DELIMITER +
dt.getTransferSize();
}
/**
* Write SQLCAXGRP
*
* SQLCAXGRP : EARLY FDOCA GROUP
* SQL Communications Area Exceptions Group Description
*
* FORMAT FOR SQLAM <= 6
* SQLRDBNME; DRDA TYPE FCS; ENVLID 0x30; Length Override 18
* SQLERRD1; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD2; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD3; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD4; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD5; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD6; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLWARN0; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN1; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN2; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN3; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN4; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN5; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN6; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN7; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN8; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN9; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARNA; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLERRMSG_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 70
* SQLERRMSG_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 70
*
* FORMAT FOR SQLAM >= 7
* SQLERRD1; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD2; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD3; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD4; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD5; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLERRD6; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLWARN0; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN1; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN2; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN3; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN4; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN5; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN6; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN7; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN8; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARN9; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLWARNA; DRDA TYPE FCS; ENVLID 0x30; Length Override 1
* SQLRDBNAME; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* SQLERRMSG_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 70
* SQLERRMSG_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 70
* @param nextException SQLException encountered
* @param sqlerrmc sqlcode
*
* @exception DRDAProtocolException
*/
private void writeSQLCAXGRP(int updateCount, long rowCount, String sqlerrmc,
SQLException nextException) throws DRDAProtocolException
{
writer.writeByte(0); // SQLCAXGRP INDICATOR
if (sqlamLevel < 7)
{
writeRDBNAM(database.getDatabaseName());
writeSQLCAERRWARN(updateCount, rowCount);
}
else
{
// SQL ERRD1 - D6, WARN0-WARNA (35 bytes)
writeSQLCAERRWARN(updateCount, rowCount);
writer.writeShort(0); //CCC on Win does not take RDBNAME
}
writeVCMorVCS(sqlerrmc);
if (sqlamLevel >=7)
writeSQLDIAGGRP(nextException);
}
/**
* Write the ERR and WARN part of the SQLCA
*
* @param updateCount
* @param rowCount
*/
private void writeSQLCAERRWARN(int updateCount, long rowCount)
{
// SQL ERRD1 - ERRD2 - row Count
writer.writeInt((int)((rowCount>>>32)));
writer.writeInt((int)(rowCount & 0x0000000ffffffffL));
// SQL ERRD3 - updateCount
writer.writeInt(updateCount);
// SQL ERRD4 - D6 (12 bytes)
writer.writeBytes(errD4_D6); // byte[] constant
// WARN0-WARNA (11 bytes)
writer.writeBytes(warn0_warnA); // byte[] constant
}
/**
* Write SQLDIAGGRP: SQL Diagnostics Group Description - Identity 0xD1
* Nullable Group
* SQLDIAGSTT; DRDA TYPE N-GDA; ENVLID 0xD3; Length Override 0
* SQLDIAGCN; DRFA TYPE N-RLO; ENVLID 0xF6; Length Override 0
* SQLDIAGCI; DRDA TYPE N-RLO; ENVLID 0xF5; Length Override 0
*/
private void writeSQLDIAGGRP(SQLException nextException)
throws DRDAProtocolException
{
// for now we only want to send ROW_DELETED and ROW_UPDATED warnings
// as extended diagnostics
// move to first ROW_DELETED or ROW_UPDATED exception. These have been
// added to the end of the warning chain.
while (
nextException != null &&
nextException.getSQLState() != SQLState.ROW_UPDATED &&
nextException.getSQLState() != SQLState.ROW_DELETED) {
nextException = nextException.getNextException();
}
if ((nextException == null) ||
(diagnosticLevel == CodePoint.DIAGLVL0)) {
writer.writeByte(CodePoint.NULLDATA);
return;
}
writer.writeByte(0); // SQLDIAGGRP indicator
writeSQLDIAGSTT();
writeSQLDIAGCI(nextException);
writeSQLDIAGCN();
}
/*
* writeSQLDIAGSTT: Write NULLDATA for now
*/
private void writeSQLDIAGSTT()
throws DRDAProtocolException
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
/**
* writeSQLDIAGCI: SQL Diagnostics Condition Information Array - Identity 0xF5
* SQLNUMROW; ROW LID 0x68; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLDCIROW; ROW LID 0xE5; ELEMENT TAKEN 0(all); REP FACTOR 0(all)
*/
private void writeSQLDIAGCI(SQLException nextException)
throws DRDAProtocolException
{
SQLException se = nextException;
long rowNum = 1;
/* Write the number of next exceptions to expect */
writeSQLNUMROW(se);
while (se != null)
{
String sqlState = se.getSQLState();
// SQLCode > 0 -> Warning
// SQLCode = 0 -> Info
// SQLCode < 0 -> Error
int severity = getExceptionSeverity(se);
int sqlCode = -1;
if (severity == CodePoint.SVRCOD_WARNING)
sqlCode = 1;
else if (severity == CodePoint.SVRCOD_INFO)
sqlCode = 0;
String sqlerrmc = "";
if (diagnosticLevel == CodePoint.DIAGLVL1) {
sqlerrmc = se.getLocalizedMessage();
}
// arguments are variable part of a message
// only send arguments for diagnostic level 0
if (diagnosticLevel == CodePoint.DIAGLVL0) {
// we are only able to get arguments of EmbedSQLException
if (se instanceof EmbedSQLException) {
Object[] args = ((EmbedSQLException)se).getArguments();
for (int i = 0; args != null && i < args.length; i++)
sqlerrmc += args[i].toString() + SQLERRMC_TOKEN_DELIMITER;
}
}
String dbname = null;
if (database != null)
dbname = database.getDatabaseName();
writeSQLDCROW(rowNum++, sqlCode, sqlState, dbname, sqlerrmc);
se = se.getNextException();
}
return;
}
/**
* writeSQLNUMROW: Writes SQLNUMROW : FDOCA EARLY ROW
* SQL Number of Elements Row Description
* FORMAT FOR SQLAM LEVELS
* SQLNUMGRP; GROUP LID 0x58; ELEMENT TAKEN 0(all); REP FACTOR 1
*/
private void writeSQLNUMROW(SQLException nextException)
throws DRDAProtocolException
{
writeSQLNUMGRP(nextException);
}
/**
* writeSQLNUMGRP: Writes SQLNUMGRP : FDOCA EARLY GROUP
* SQL Number of Elements Group Description
* FORMAT FOR ALL SQLAM LEVELS
* SQLNUM; DRDA TYPE I2; ENVLID 0x04; Length Override 2
*/
private void writeSQLNUMGRP(SQLException nextException)
throws DRDAProtocolException
{
int i=0;
SQLException se;
/* Count the number of chained exceptions to be sent */
for (se = nextException; se != null; se = se.getNextException()) i++;
writer.writeShort(i);
}
/**
* writeSQLDCROW: SQL Diagnostics Condition Row - Identity 0xE5
* SQLDCGRP; GROUP LID 0xD5; ELEMENT TAKEN 0(all); REP FACTOR 1
*/
private void writeSQLDCROW(long rowNum, int sqlCode, String sqlState, String dbname,
String sqlerrmc) throws DRDAProtocolException
{
writeSQLDCGRP(rowNum, sqlCode, sqlState, dbname, sqlerrmc);
}
/**
* writeSQLDCGRP: SQL Diagnostics Condition Group Description
*
* SQLDCCODE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCSTATE; DRDA TYPE FCS; ENVLID Ox30; Lengeh Override 5
* SQLDCREASON; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCLINEN; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCROWN; DRDA TYPE FD; ENVLID 0x0E; Lengeh Override 31
* SQLDCER01; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCER02; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCER03; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCER04; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCPART; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCPPOP; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLDCMSGID; DRDA TYPE FCS; ENVLID 0x30; Length Override 10
* SQLDCMDE; DRDA TYPE FCS; ENVLID 0x30; Length Override 8
* SQLDCPMOD; DRDA TYPE FCS; ENVLID 0x30; Length Override 5
* SQLDCRDB; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* SQLDCTOKS; DRDA TYPE N-RLO; ENVLID 0xF7; Length Override 0
* SQLDCMSG_m; DRDA TYPE NVMC; ENVLID 0x3F; Length Override 32672
* SQLDCMSG_S; DRDA TYPE NVCS; ENVLID 0x33; Length Override 32672
* SQLDCCOLN_m; DRDA TYPE NVCM ; ENVLID 0x3F; Length Override 255
* SQLDCCOLN_s; DRDA TYPE NVCS; ENVLID 0x33; Length Override 255
* SQLDCCURN_m; DRDA TYPE NVCM; ENVLID 0x3F; Length Override 255
* SQLDCCURN_s; DRDA TYPE NVCS; ENVLID 0x33; Length Override 255
* SQLDCPNAM_m; DRDA TYPE NVCM; ENVLID 0x3F; Length Override 255
* SQLDCPNAM_s; DRDA TYPE NVCS; ENVLID 0x33; Length Override 255
* SQLDCXGRP; DRDA TYPE N-GDA; ENVLID 0xD3; Length Override 1
*/
private void writeSQLDCGRP(long rowNum, int sqlCode, String sqlState, String dbname,
String sqlerrmc) throws DRDAProtocolException
{
// SQLDCCODE
writer.writeInt(sqlCode);
// SQLDCSTATE
writer.writeString(sqlState);
writer.writeInt(0); // REASON_CODE
writer.writeInt(0); // LINE_NUMBER
writer.writeLong(rowNum); // ROW_NUMBER
byte[] byteArray = new byte[1];
writer.writeScalarPaddedBytes(byteArray, 47, (byte) 0);
writer.writeShort(0); // CCC on Win does not take RDBNAME
writer.writeByte(CodePoint.NULLDATA); // MESSAGE_TOKENS
writer.writeLDString(sqlerrmc); // MESSAGE_TEXT
writeVCMorVCS(null); // COLUMN_NAME
writeVCMorVCS(null); // PARAMETER_NAME
writeVCMorVCS(null); // EXTENDED_NAME
writer.writeByte(CodePoint.NULLDATA); // SQLDCXGRP
}
/*
* writeSQLDIAGCN: Write NULLDATA for now
*/
private void writeSQLDIAGCN()
throws DRDAProtocolException
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
/**
* Write SQLDARD
*
* SQLDARD : FDOCA EARLY ARRAY
* SQL Descriptor Area Row Description with SQL Communications Area
*
* FORMAT FOR SQLAM <= 6
* SQLCARD; ROW LID 0x64; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLNUMROW; ROW LID 0x68; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLDAROW; ROW LID 0x60; ELEMENT TAKEN 0(all); REP FACTOR 0(all)
*
* FORMAT FOR SQLAM >= 7
* SQLCARD; ROW LID 0x64; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLDHROW; ROW LID 0xE0; ELEMENT TAKEN 0(all); REP FACTOR 1
* SQLNUMROW; ROW LID 0x68; ELEMENT TAKEN 0(all); REP FACTOR 1
*
* @param stmt prepared statement
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLDARD(DRDAStatement stmt, boolean rtnOutput, SQLException e) throws DRDAProtocolException, SQLException
{
PreparedStatement ps = stmt.getPreparedStatement();
ResultSetMetaData rsmeta = ps.getMetaData();
ParameterMetaData pmeta = stmt.getParameterMetaData();
int numElems = 0;
if (e == null || e instanceof SQLWarning)
{
if (rtnOutput && (rsmeta != null))
numElems = rsmeta.getColumnCount();
else if ((! rtnOutput) && (pmeta != null))
numElems = pmeta.getParameterCount();
}
writer.createDssObject();
// all went well we will just write a null SQLCA
writer.startDdm(CodePoint.SQLDARD);
writeSQLCAGRP(e, 0, 0);
if (sqlamLevel >= MGRLVL_7)
writeSQLDHROW(ps.getResultSetHoldability());
//SQLNUMROW
if (SanityManager.DEBUG)
trace("num Elements = " + numElems);
writer.writeShort(numElems);
for (int i=0; i < numElems; i++)
writeSQLDAGRP (rsmeta, pmeta, i, rtnOutput);
writer.endDdmAndDss();
}
/**
* Write QRYDSC - Query Answer Set Description
*
* @param stmt DRDAStatement we are working on
* @param FDODSConly simply the FDODSC, without the wrap
*
* Instance Variables
* SQLDTAGRP - required
*
* Only 84 columns can be sent in a single QRYDSC. If there are more columns
* they must be sent in subsequent QRYDSC.
* If the QRYDSC will not fit into the current block, as many columns as can
* fit are sent and then the remaining are sent in the following blocks.
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeQRYDSC(DRDAStatement stmt, boolean FDODSConly)
throws DRDAProtocolException, SQLException
{
ResultSet rs = null;
ResultSetMetaData rsmeta = null;
ParameterMetaData pmeta = null;
if (!stmt.needsToSendParamData)
rs = stmt.getResultSet();
if (rs == null) // this is a CallableStatement, use parameter meta data
pmeta = stmt.getParameterMetaData();
else
rsmeta = rs.getMetaData();
int numCols = (rsmeta != null ? rsmeta.getColumnCount() : pmeta.getParameterCount());
int numGroups = 1;
int colStart = 1;
int colEnd = numCols;
int blksize = stmt.getBlksize() > 0 ? stmt.getBlksize() : CodePoint.QRYBLKSZ_MAX;
// check for remaining space in current query block
// Need to mod with blksize so remaining doesn't go negative. 4868
int remaining = blksize - (writer.getDSSLength() % blksize) - (3 +
FdocaConstants.SQLCADTA_SQLDTARD_RLO_SIZE);
// calcuate how may columns can be sent in the current query block
int firstcols = remaining/FdocaConstants.SQLDTAGRP_COL_DSC_SIZE;
// check if it doesn't all fit into the first block and
// under FdocaConstants.MAX_VARS_IN_NGDA
if (firstcols < numCols || numCols > FdocaConstants.MAX_VARS_IN_NGDA)
{
// we are limited to FdocaConstants.MAX_VARS_IN_NGDA
if (firstcols > FdocaConstants.MAX_VARS_IN_NGDA)
{
if (SanityManager.DEBUG)
SanityManager.ASSERT(numCols > FdocaConstants.MAX_VARS_IN_NGDA,
"Number of columns " + numCols +
" is less than MAX_VARS_IN_NGDA");
numGroups = numCols/FdocaConstants.MAX_VARS_IN_NGDA;
// some left over
if (FdocaConstants.MAX_VARS_IN_NGDA * numGroups < numCols)
numGroups++;
colEnd = FdocaConstants.MAX_VARS_IN_NGDA;
}
else
{
colEnd = firstcols;
numGroups += (numCols-firstcols)/FdocaConstants.MAX_VARS_IN_NGDA;
if (FdocaConstants.MAX_VARS_IN_NGDA * numGroups < numCols)
numGroups++;
}
}
if (! FDODSConly)
{
writer.createDssObject();
writer.startDdm(CodePoint.QRYDSC);
}
for (int i = 0; i < numGroups; i++)
{
writeSQLDTAGRP(stmt, rsmeta, pmeta, colStart, colEnd,
(i == 0 ? true : false));
colStart = colEnd + 1;
// 4868 - Limit range to MAX_VARS_IN_NGDA (used to have extra col)
colEnd = colEnd + FdocaConstants.MAX_VARS_IN_NGDA;
if (colEnd > numCols)
colEnd = numCols;
}
writer.writeBytes(FdocaConstants.SQLCADTA_SQLDTARD_RLO);
if (! FDODSConly)
writer.endDdmAndDss();
}
/**
* Write SQLDTAGRP
* SQLDAGRP : Late FDOCA GROUP
* SQL Data Value Group Descriptor
* LENGTH - length of the SQLDTAGRP
* TRIPLET_TYPE - NGDA for first, CPT for following
* ID - SQLDTAGRP_LID for first, NULL_LID for following
* For each column
* DRDA TYPE
* LENGTH OVERRIDE
* For numeric/decimal types
* PRECISON
* SCALE
* otherwise
* LENGTH or DISPLAY_WIDTH
*
* @param stmt drda statement
* @param rsmeta resultset meta data
* @param pmeta parameter meta data for CallableStatement
* @param colStart starting column for group to send
* @param colEnd end column to send
* @param first is this the first group
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLDTAGRP(DRDAStatement stmt, ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int colStart, int colEnd, boolean first)
throws DRDAProtocolException, SQLException
{
int length = (FdocaConstants.SQLDTAGRP_COL_DSC_SIZE *
((colEnd+1) - colStart)) + 3;
writer.writeByte(length);
if (first)
{
writer.writeByte(FdocaConstants.NGDA_TRIPLET_TYPE);
writer.writeByte(FdocaConstants.SQLDTAGRP_LID);
}
else
{
//continued
writer.writeByte(FdocaConstants.CPT_TRIPLET_TYPE);
writer.writeByte(FdocaConstants.NULL_LID);
}
boolean hasRs = (rsmeta != null); // if don't have result, then we look at parameter meta
for (int i = colStart; i <= colEnd; i++)
{
boolean nullable = (hasRs ? (rsmeta.isNullable(i) == rsmeta.columnNullable) :
(pmeta.isNullable(i) == JDBC30Translation.PARAMETER_NULLABLE));
int colType = (hasRs ? rsmeta.getColumnType(i) : pmeta.getParameterType(i));
int[] outlen = {-1};
int drdaType = FdocaConstants.mapJdbcTypeToDrdaType( colType, nullable, appRequester, outlen );
boolean isDecimal = ((drdaType | 1) == DRDAConstants.DRDA_TYPE_NDECIMAL);
int precision = 0, scale = 0;
if (hasRs)
{
precision = rsmeta.getPrecision(i);
scale = rsmeta.getScale(i);
stmt.setRsDRDAType(i,drdaType);
stmt.setRsPrecision(i, precision);
stmt.setRsScale(i,scale);
}
else if (isDecimal)
{
if (stmt.isOutputParam(i))
{
precision = pmeta.getPrecision(i);
scale = pmeta.getScale(i);
((CallableStatement) stmt.ps).registerOutParameter(i,Types.DECIMAL,scale);
}
}
if (SanityManager.DEBUG)
trace("jdbcType=" + colType + " \tdrdaType=" + Integer.toHexString(drdaType));
// Length or precision and scale for decimal values.
writer.writeByte(drdaType);
if (isDecimal)
{
writer.writeByte(precision);
writer.writeByte(scale);
}
else if (outlen[0] != -1)
writer.writeShort(outlen[0]);
else if (hasRs)
writer.writeShort(rsmeta.getColumnDisplaySize(i));
else
writer.writeShort(stmt.getParamLen(i));
}
}
/**
* Holdability passed in as it can represent the holdability of
* the statement or a specific result set.
* @param holdability HOLD_CURSORS_OVER_COMMIT or CLOSE_CURSORS_AT_COMMIT
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLDHROW(int holdability) throws DRDAProtocolException,SQLException
{
if (JVMInfo.JDK_ID < 2) //write null indicator for SQLDHROW because there is no holdability support prior to jdk1.3
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
writer.writeByte(0); // SQLDHROW INDICATOR
//SQLDHOLD
writer.writeShort(holdability);
//SQLDRETURN
writer.writeShort(0);
//SQLDSCROLL
writer.writeShort(0);
//SQLDSENSITIVE
writer.writeShort(0);
//SQLDFCODE
writer.writeShort(0);
//SQLDKEYTYPE
writer.writeShort(0);
//SQLRDBNAME
writer.writeShort(0); //CCC on Windows somehow does not take any dbname
//SQLDSCHEMA
writeVCMorVCS(null);
}
/**
* Write QRYDTA - Query Answer Set Data
* Contains some or all of the answer set data resulting from a query
* If the client is not using rowset processing, this routine attempts
* to pack as much data into the QRYDTA as it can. This may result in
* splitting the last row across the block, in which case when the
* client calls CNTQRY we will return the remainder of the row.
*
* Splitting a QRYDTA block is expensive, for several reasons:
* - extra logic must be run, on both client and server side
* - more network round-trips are involved
* - the QRYDTA block which contains the continuation of the split
* row is generally wasteful, since it contains the remainder of
* the split row but no additional rows.
* Since splitting is expensive, the server makes some attempt to
* avoid it. Currently, the server's algorithm for this is to
* compute the length of the current row, and to stop trying to pack
* more rows into this buffer if another row of that length would
* not fit. However, since rows can vary substantially in length,
* this algorithm is often ineffective at preventing splits. For
* example, if a short row near the end of the buffer is then
* followed by a long row, that long row will be split. It is possible
* to improve this algorithm substantially:
* - instead of just using the length of the previous row as a guide
* for whether to attempt packing another row in, use some sort of
* overall average row size computed over multiple rows (e.g., all
* the rows we've placed into this QRYDTA block, or all the rows
* we've process for this result set)
* - when we discover that the next row will not fit, rather than
* splitting the row across QRYDTA blocks, if it is relatively
* small, we could just hold the entire row in a buffer to place
* it entirely into the next QRYDTA block, or reset the result
* set cursor back one row to "unread" this row.
* - when splitting a row across QRYDTA blocks, we tend to copy
* data around multiple times. Careful coding could remove some
* of these copies.
* However, it is important not to over-complicate this code: it is
* better to be correct than to be efficient, and there have been
* several bugs in the split logic already.
*
* Instance Variables
* Byte string
*
* @param stmt DRDA statement we are processing
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeQRYDTA (DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
boolean getMoreData = true;
boolean sentExtData = false;
int startLength = 0;
writer.createDssObject();
if (SanityManager.DEBUG)
trace("Write QRYDTA");
writer.startDdm(CodePoint.QRYDTA);
// Check to see if there was leftover data from splitting
// the previous QRYDTA for this result set. If there was, and
// if we have now sent all of it, send any EXTDTA for that row
// and increment the rowCount which we failed to increment in
// writeFDODTA when we realized the row needed to be split.
if (processLeftoverQRYDTA(stmt))
{
if (stmt.getSplitQRYDTA() == null)
{
stmt.rowCount += 1;
if (stmt.getExtDtaObjects() != null)
writeEXTDTA(stmt);
}
return;
}
while(getMoreData)
{
sentExtData = false;
getMoreData = writeFDODTA(stmt);
if (stmt.getExtDtaObjects() != null &&
stmt.getSplitQRYDTA() == null)
{
writer.endDdmAndDss();
writeEXTDTA(stmt);
getMoreData=false;
sentExtData = true;
}
// if we don't have enough room for a row of the
// last row's size, don't try to cram it in.
// It would get split up but it is not very efficient.
if (getMoreData == true)
{
int endLength = writer.getDSSLength();
int rowsize = endLength - startLength;
if ((stmt.getBlksize() - endLength ) < rowsize)
getMoreData = false;
startLength = endLength;
}
}
// If we sent extDta we will rely on
// writeScalarStream to end the dss with the proper chaining.
// otherwise end it here.
if (! sentExtData)
writer.endDdmAndDss();
if (!stmt.hasdata()) {
final boolean qryclsOnLmtblkprc =
appRequester.supportsQryclsimpForLmtblkprc();
if (stmt.isRSCloseImplicit(qryclsOnLmtblkprc)) {
stmt.rsClose();
}
}
}
/**
* This routine places some data into the current QRYDTA block using
* FDODTA (Formatted Data Object DaTA rules).
*
* There are 3 basic types of processing flow for this routine:
* - In normal non-rowset, non-scrollable cursor flow, this routine
* places a single row into the QRYDTA block and returns TRUE,
* indicating that the caller can call us back to place another
* row into the result set if he wishes. (The caller may need to
* send Externalized Data, which would be a reason for him NOT to
* place any more rows into the QRYDTA).
* - In ROWSET processing, this routine places an entire ROWSET of
* rows into the QRYDTA block and returns FALSE, indicating that
* the QRYDTA block is full and should now be sent.
* - In callable statement processing, this routine places the
* results from the output parameters of the called procedure into
* the QRYDTA block. This code path is really dramatically
* different from the other two paths and shares only a very small
* amount of common code in this routine.
*
* In all cases, it is possible that the data we wish to return may
* not fit into the QRYDTA block, in which case we call splitQRYDTA
* to split the data and remember the remainder data in the result set.
* Splitting the data is relatively rare in the normal cursor case,
* because our caller (writeQRYDTA) uses a coarse estimation
* technique to avoid calling us if he thinks a split is likely.
*
* The overall structure of this routine is implemented as two
* loops:
* - the outer "do ... while ... " loop processes a ROWSET, one row
* at a time. For non-ROWSET cursors, and for callable statements,
* this loop executes only once.
* - the inner "for ... i < numCols ..." loop processes each column
* in the current row, or each output parmeter in the procedure.
*
* Most column data is written directly inline in the QRYDTA block.
* Some data, however, is written as Externalized Data. This is
* commonly used for Large Objects. In that case, an Externalized
* Data Pointer is written into the QRYDTA block, and the actual
* data flows in separate EXTDTA blocks which are returned
* after this QRYDTA block.
*/
private boolean writeFDODTA (DRDAStatement stmt)
throws DRDAProtocolException, SQLException
{
boolean hasdata = false;
int blksize = stmt.getBlksize() > 0 ? stmt.getBlksize() : CodePoint.QRYBLKSZ_MAX;
long rowCount = 0;
ResultSet rs =null;
boolean moreData = (stmt.getQryprctyp()
== CodePoint.LMTBLKPRC);
int numCols;
if (!stmt.needsToSendParamData)
{
rs = stmt.getResultSet();
}
if (rs != null)
{
numCols = stmt.getNumRsCols();
if (stmt.isScrollable())
hasdata = positionCursor(stmt, rs);
else
hasdata = rs.next();
}
else // it's for a CallableStatement
{
hasdata = stmt.hasOutputParams();
numCols = stmt.getDrdaParamCount();
}
do {
if (!hasdata)
{
doneData(stmt, rs);
moreData = false;
return moreData;
}
// Send ResultSet warnings if there are any
SQLWarning sqlw = (rs != null)? rs.getWarnings(): null;
if (rs != null) {
rs.clearWarnings();
}
// for updatable, insensitive result sets we signal the
// row updated condition to the client via a warning to be
// popped by client onto its rowUpdated state, i.e. this
// warning should not reach API level.
if (rs != null && rs.rowUpdated()) {
SQLWarning w = new SQLWarning("", SQLState.ROW_UPDATED,
ExceptionSeverity.WARNING_SEVERITY);
if (sqlw != null) {
sqlw.setNextWarning(w);
} else {
sqlw = w;
}
}
// Delete holes are manifest as a row consisting of a non-null
// SQLCARD and a null data group. The SQLCARD has a warning
// SQLSTATE of 02502
if (rs != null && rs.rowDeleted()) {
SQLWarning w = new SQLWarning("", SQLState.ROW_DELETED,
ExceptionSeverity.WARNING_SEVERITY);
if (sqlw != null) {
sqlw.setNextWarning(w);
} else {
sqlw = w;
}
}
// Save the position where we start writing the warnings in case
// we need to add more warnings later.
final int sqlcagrpStart = writer.getBufferPosition();
if (sqlw == null)
writeSQLCAGRP(nullSQLState, 0, -1, -1);
else
writeSQLCAGRP(sqlw, 1, -1);
// Save the position right after the warnings so we know where to
// insert more warnings later.
final int sqlcagrpEnd = writer.getBufferPosition();
// if we were asked not to return data, mark QRYDTA null; do not
// return yet, need to make rowCount right
// if the row has been deleted return QRYDTA null (delete hole)
boolean noRetrieveRS = (rs != null &&
(!stmt.getQryrtndta() || rs.rowDeleted()));
if (noRetrieveRS)
writer.writeByte(0xFF); //QRYDTA null indicator: IS NULL
else
writer.writeByte(0); //QRYDTA null indicator: not null
for (int i = 1; i <= numCols; i++)
{
if (noRetrieveRS)
break;
int drdaType;
int ndrdaType;
int precision;
int scale;
Object val = null;
boolean valNull;
if (rs != null)
{
drdaType = stmt.getRsDRDAType(i) & 0xff;
precision = stmt.getRsPrecision(i);
scale = stmt.getRsScale(i);
ndrdaType = drdaType | 1;
if (SanityManager.DEBUG)
trace("!!drdaType = " + java.lang.Integer.toHexString(drdaType) +
" precision=" + precision +" scale = " + scale);
switch (ndrdaType)
{
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
EXTDTAInputStream extdtaStream=
EXTDTAInputStream.getEXTDTAStream(rs, i, drdaType);
writeFdocaVal(i, extdtaStream, drdaType, precision,
scale, extdtaStream.isNull(), stmt, false);
break;
case DRDAConstants.DRDA_TYPE_NINTEGER:
int ival = rs.getInt(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing int: "+ ival + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeInt(ival);
break;
case DRDAConstants.DRDA_TYPE_NSMALL:
short sval = rs.getShort(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing small: "+ sval + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeShort(sval);
break;
case DRDAConstants.DRDA_TYPE_NINTEGER8:
long lval = rs.getLong(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing long: "+ lval + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeLong(lval);
break;
case DRDAConstants.DRDA_TYPE_NFLOAT4:
float fval = rs.getFloat(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing float: "+ fval + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeFloat(fval);
break;
case DRDAConstants.DRDA_TYPE_NFLOAT8:
double dval = rs.getDouble(i);
valNull = rs.wasNull();
if (SanityManager.DEBUG)
trace("====== writing double: "+ dval + " is null: " + valNull);
writeNullability(drdaType,valNull);
if (! valNull)
writer.writeDouble(dval);
break;
case DRDAConstants.DRDA_TYPE_NCHAR:
case DRDAConstants.DRDA_TYPE_NVARCHAR:
case DRDAConstants.DRDA_TYPE_NVARMIX:
case DRDAConstants.DRDA_TYPE_NLONG:
case DRDAConstants.DRDA_TYPE_NLONGMIX:
String valStr = rs.getString(i);
if (SanityManager.DEBUG)
trace("====== writing char/varchar/mix :"+ valStr + ":");
writeFdocaVal(i, valStr, drdaType,
precision, scale, rs.wasNull(),
stmt, false);
break;
default:
val = getObjectForWriteFdoca(rs, i, drdaType);
writeFdocaVal(i, val, drdaType,
precision, scale, rs.wasNull(),
stmt, false);
}
}
else
{
drdaType = stmt.getParamDRDAType(i) & 0xff;
precision = stmt.getParamPrecision(i);
scale = stmt.getParamScale(i);
if (stmt.isOutputParam(i)) {
int[] outlen = new int[1];
drdaType = FdocaConstants.mapJdbcTypeToDrdaType( stmt.getOutputParamType(i), true, appRequester, outlen );
precision = stmt.getOutputParamPrecision(i);
scale = stmt.getOutputParamScale(i);
if (SanityManager.DEBUG)
trace("***getting Object "+i);
val = getObjectForWriteFdoca(
(CallableStatement) stmt.ps, i, drdaType);
valNull = (val == null);
writeFdocaVal(i, val, drdaType, precision, scale,
valNull, stmt, true);
}
else
writeFdocaVal(i, null, drdaType, precision, scale,
true, stmt, true);
}
}
DataTruncation truncated = stmt.getTruncationWarnings();
if (truncated != null) {
// Some of the data was truncated, so we need to add a
// truncation warning. Save a copy of the row data, then move
// back to the SQLCAGRP section and overwrite it with the new
// warnings, and finally re-insert the row data after the new
// SQLCAGRP section.
byte[] data = writer.getBufferContents(sqlcagrpEnd);
writer.setBufferPosition(sqlcagrpStart);
if (sqlw != null) {
truncated.setNextWarning(sqlw);
}
writeSQLCAGRP(truncated, 1, -1);
writer.writeBytes(data);
stmt.clearTruncationWarnings();
}
// does all this fit in one QRYDTA
if (writer.getDSSLength() > blksize)
{
splitQRYDTA(stmt, blksize);
return false;
}
if (rs == null)
return moreData;
//get the next row
rowCount++;
if (rowCount < stmt.getQryrowset())
{
hasdata = rs.next();
}
/*(1) scrollable we return at most a row set; OR (2) no retrieve data
*/
else if (stmt.isScrollable() || noRetrieveRS)
moreData=false;
} while (hasdata && rowCount < stmt.getQryrowset());
// add rowCount to statement row count
// for non scrollable cursors
if (!stmt.isScrollable())
stmt.rowCount += rowCount;
if (!hasdata)
{
doneData(stmt, rs);
moreData=false;
}
if (!stmt.isScrollable())
stmt.setHasdata(hasdata);
return moreData;
}
/**
* <p>
* Get a column value of the specified type from a {@code ResultSet}, in
* a form suitable for being writted by {@link #writeFdocaVal}. For most
* types, this means just calling {@code ResultSet.getObject(int)}.
* </p>
*
* <p>
* The only exception currently is the data types representing dates and
* times, as they need to be fetched using the same
* {@code java.util.Calendar} as {@link #writeFdocaVal} uses when writing
* them (DERBY-4582).
* </p>
*
* <p>
* <b>Note:</b> Changes made in this method should also be made in the
* corresponding method for {@code CallableStatement}:
* {@link #getObjectForWriteFdoca(java.sql.CallableStatement, int, int)}.
* </p>
*
* @param rs the result set to fetch the object from
* @param index the column index
* @param drdaType the DRDA type of the object to fetch
* @return an object with the value of the column
* @throws if a database error occurs while fetching the column value
* @see #getObjectForWriteFdoca(java.sql.CallableStatement, int, int)
*/
private Object getObjectForWriteFdoca(ResultSet rs, int index, int drdaType)
throws SQLException {
// convert to corresponding nullable type to reduce number of cases
int ndrdaType = drdaType | 1;
switch (ndrdaType) {
case DRDAConstants.DRDA_TYPE_NDATE:
return rs.getDate(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NTIME:
return rs.getTime(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NTIMESTAMP:
return rs.getTimestamp(index, getGMTCalendar());
default:
return rs.getObject(index);
}
}
/**
* <p>
* Get the value of an output parameter of the specified type from a
* {@code CallableStatement}, in a form suitable for being writted by
* {@link #writeFdocaVal}. For most types, this means just calling
* {@code CallableStatement.getObject(int)}.
* </p>
*
* <p>
* This method should behave like the corresponding method for
* {@code ResultSet}, and changes made to one of these methods, must be
* reflected in the other method. See
* {@link #getObjectForWriteFdoca(java.sql.ResultSet, int, int)}
* for details.
* </p>
*
* @param cs the callable statement to fetch the object from
* @param index the parameter index
* @param drdaType the DRDA type of the object to fetch
* @return an object with the value of the output parameter
* @throws if a database error occurs while fetching the parameter value
* @see #getObjectForWriteFdoca(java.sql.ResultSet, int, int)
*/
private Object getObjectForWriteFdoca(CallableStatement cs,
int index, int drdaType)
throws SQLException {
// convert to corresponding nullable type to reduce number of cases
int ndrdaType = drdaType | 1;
switch (ndrdaType) {
case DRDAConstants.DRDA_TYPE_NDATE:
return cs.getDate(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NTIME:
return cs.getTime(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NTIMESTAMP:
return cs.getTimestamp(index, getGMTCalendar());
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
return EXTDTAInputStream.getEXTDTAStream(cs, index, drdaType);
default:
return cs.getObject(index);
}
}
/**
* Split QRYDTA into blksize chunks
*
* This routine is called if the QRYDTA data will not fit. It writes
* as much data as it can, then stores the remainder in the result
* set. At some later point, when the client returns with a CNTQRY,
* we will call processLeftoverQRYDTA to handle that data.
*
* The interaction between DRDAConnThread and DDMWriter is rather
* complicated here. This routine gets called because DRDAConnThread
* realizes that it has constructed a QRYDTA message which is too
* large. At that point, we need to reclaim the "extra" data and
* hold on to it. To aid us in that processing, DDMWriter provides
* the routines getDSSLength, copyDSSDataToEnd, and truncateDSS.
* For some additional detail on this complex sub-protocol, the
* interested reader should study bug DERBY-491 and 492 at:
* http://issues.apache.org/jira/browse/DERBY-491 and
* http://issues.apache.org/jira/browse/DERBY-492
*
* @param stmt DRDA statment
* @param blksize size of query block
*
* @throws SQLException
* @throws DRDAProtocolException
*/
private void splitQRYDTA(DRDAStatement stmt, int blksize) throws SQLException,
DRDAProtocolException
{
// make copy of extra data
byte [] temp = writer.copyDSSDataToEnd(blksize);
// truncate to end of blocksize
writer.truncateDSS(blksize);
if (temp.length == 0)
agentError("LMTBLKPRC violation: splitQRYDTA was " +
"called to split a QRYDTA block, but the " +
"entire row fit successfully into the " +
"current block. Server rowsize computation " +
"was probably incorrect (perhaps an off-by-" +
"one bug?). QRYDTA blocksize: " + blksize);
stmt.setSplitQRYDTA(temp);
}
/**
* Process remainder data resulting from a split.
*
* This routine is called at the start of building each QRYDTA block.
* Normally, it observes that there is no remainder data from the
* previous QRYDTA block, and returns FALSE, indicating that there
* was nothing to do.
*
* However, if it discovers that the previous QRYDTA block was split,
* then it retrieves the remainder data from the result set, writes
* as much of it as will fit into the QRYDTA block (hopefully all of
* it will fit, but the row may be very long), and returns TRUE,
* indicating that this QRYDTA block has been filled with remainder
* data and should now be sent immediately.
*/
private boolean processLeftoverQRYDTA(DRDAStatement stmt)
throws SQLException,DRDAProtocolException
{
byte []leftovers = stmt.getSplitQRYDTA();
if (leftovers == null)
return false;
int blksize = stmt.getBlksize() > 0 ? stmt.getBlksize() : CodePoint.QRYBLKSZ_MAX;
blksize = blksize - 10; //DSS header + QRYDTA and length
if (leftovers.length < blksize)
{
writer.writeBytes(leftovers, 0, leftovers.length);
stmt.setSplitQRYDTA(null);
}
else
{
writer.writeBytes(leftovers, 0, blksize);
byte []newLeftovers = new byte[leftovers.length-blksize];
for (int i = 0; i < newLeftovers.length; i++)
newLeftovers[i] = leftovers[blksize+i];
stmt.setSplitQRYDTA(newLeftovers);
}
// finish off query block and send
writer.endDdmAndDss();
return true;
}
/**
* Done data
* Send SQLCARD for the end of the data
*
* @param stmt DRDA statement
* @param rs Result set
* @throws DRDAProtocolException
* @throws SQLException
*/
private void doneData(DRDAStatement stmt, ResultSet rs)
throws DRDAProtocolException, SQLException
{
if (SanityManager.DEBUG)
trace("*****NO MORE DATA!!");
int blksize = stmt.getBlksize() > 0 ? stmt.getBlksize() : CodePoint.QRYBLKSZ_MAX;
if (rs != null)
{
if (stmt.isScrollable())
{
//keep isAfterLast and isBeforeFirst to be able
//to reposition after counting rows
boolean isAfterLast = rs.isAfterLast();
boolean isBeforeFirst = rs.isBeforeFirst();
// for scrollable cursors - calculate the row count
// since we may not have gone through each row
rs.last();
stmt.rowCount = rs.getRow();
// reposition after last or before first
if (isAfterLast) {
rs.afterLast();
}
if (isBeforeFirst) {
rs.beforeFirst();
}
}
else // non-scrollable cursor
{
final boolean qryclsOnLmtblkprc =
appRequester.supportsQryclsimpForLmtblkprc();
if (stmt.isRSCloseImplicit(qryclsOnLmtblkprc)) {
stmt.rsClose();
stmt.rsSuspend();
}
}
}
// For scrollable cursor's QRYSCRAFT, when we reach here, DRDA spec says sqlstate
// is 00000, sqlcode is not mentioned. But DB2 CLI code expects sqlcode to be 0.
// We return sqlcode 0 in this case, as the DB2 server does.
boolean isQRYSCRAFT = (stmt.getQryscrorn() == CodePoint.QRYSCRAFT);
// Using sqlstate 00000 or 02000 for end of data.
writeSQLCAGRP((isQRYSCRAFT ? eod00000 : eod02000),
(isQRYSCRAFT ? 0 : 100), 0, stmt.rowCount);
writer.writeByte(CodePoint.NULLDATA);
// does all this fit in one QRYDTA
if (writer.getDSSLength() > blksize)
{
splitQRYDTA(stmt, blksize);
}
}
/**
* Position cursor for insensitive scrollable cursors
*
* @param stmt DRDA statement
* @param rs Result set
*/
private boolean positionCursor(DRDAStatement stmt, ResultSet rs)
throws SQLException, DRDAProtocolException
{
boolean retval = false;
switch (stmt.getQryscrorn())
{
case CodePoint.QRYSCRREL:
int rows = (int)stmt.getQryrownbr();
if ((rs.isAfterLast() && rows > 0) || (rs.isBeforeFirst() && rows < 0)) {
retval = false;
} else {
retval = rs.relative(rows);
}
break;
case CodePoint.QRYSCRABS:
// JCC uses an absolute value of 0 which is not allowed in JDBC
// We translate it into beforeFirst which seems to work.
if (stmt.getQryrownbr() == 0)
{
rs.beforeFirst();
retval = false;
}
else
{
retval = rs.absolute((int)stmt.getQryrownbr());
}
break;
case CodePoint.QRYSCRAFT:
rs.afterLast();
retval = false;
break;
case CodePoint.QRYSCRBEF:
rs.beforeFirst();
retval = false;
break;
default:
agentError("Invalid value for cursor orientation "+ stmt.getQryscrorn());
}
return retval;
}
/**
* Write SQLDAGRP
* SQLDAGRP : EARLY FDOCA GROUP
* SQL Data Area Group Description
*
* FORMAT FOR SQLAM <= 6
* SQLPRECISION; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLSCALE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLLENGTH; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* SQLTYPE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLCCSID; DRDA TYPE FB; ENVLID 0x26; Length Override 2
* SQLNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 30
* SQLNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 30
* SQLLABEL_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 30
* SQLLABEL_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 30
* SQLCOMMENTS_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 254
* SQLCOMMENTS_m; DRDA TYPE VCS; ENVLID 0x32; Length Override 254
*
* FORMAT FOR SQLAM == 6
* SQLPRECISION; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLSCALE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLLENGTH; DRDA TYPE I8; ENVLID 0x16; Length Override 8
* SQLTYPE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLCCSID; DRDA TYPE FB; ENVLID 0x26; Length Override 2
* SQLNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 30
* SQLNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 30
* SQLLABEL_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 30
* SQLLABEL_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 30
* SQLCOMMENTS_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 254
* SQLCOMMENTS_m; DRDA TYPE VCS; ENVLID 0x32; Length Override 254
* SQLUDTGRP; DRDA TYPE N-GDA; ENVLID 0x51; Length Override 0
*
* FORMAT FOR SQLAM >= 7
* SQLPRECISION; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLSCALE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLLENGTH; DRDA TYPE I8; ENVLID 0x16; Length Override 8
* SQLTYPE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
* SQLCCSID; DRDA TYPE FB; ENVLID 0x26; Length Override 2
* SQLDOPTGRP; DRDA TYPE N-GDA; ENVLID 0xD2; Length Override 0
*
* @param rsmeta resultset meta data
* @param pmeta parameter meta data
* @param elemNum column number we are returning (in case of result set), or,
* parameter number (in case of parameter)
* @param rtnOutput whether this is for a result set
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLDAGRP(ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int elemNum, boolean rtnOutput)
throws DRDAProtocolException, SQLException
{
//jdbc uses offset of 1
int jdbcElemNum = elemNum +1;
// length to be retreived as output parameter
int[] outlen = {-1};
int elemType = rtnOutput ? rsmeta.getColumnType(jdbcElemNum) : pmeta.getParameterType(jdbcElemNum);
int precision = rtnOutput ? rsmeta.getPrecision(jdbcElemNum) : pmeta.getPrecision(jdbcElemNum);
if (precision > FdocaConstants.NUMERIC_MAX_PRECISION)
precision = FdocaConstants.NUMERIC_MAX_PRECISION;
// 2-byte precision
writer.writeShort(precision);
// 2-byte scale
int scale = (rtnOutput ? rsmeta.getScale(jdbcElemNum) : pmeta.getScale(jdbcElemNum));
writer.writeShort(scale);
boolean nullable = rtnOutput ? (rsmeta.isNullable(jdbcElemNum) ==
ResultSetMetaData.columnNullable) :
(pmeta.isNullable(jdbcElemNum) == JDBC30Translation.PARAMETER_NULLABLE);
int sqlType = SQLTypes.mapJdbcTypeToDB2SqlType(elemType,
nullable, appRequester,
outlen);
if (outlen[0] == -1) //some types not set
{
switch (elemType)
{
case Types.DECIMAL:
case Types.NUMERIC:
scale = rtnOutput ? rsmeta.getScale(jdbcElemNum) : pmeta.getScale(jdbcElemNum);
outlen[0] = ((precision <<8) | (scale <<0));
if (SanityManager.DEBUG)
trace("\n\nprecision =" +precision +
" scale =" + scale);
break;
default:
outlen[0] = Math.min(FdocaConstants.LONGVARCHAR_MAX_LEN,
(rtnOutput ? rsmeta.getColumnDisplaySize(jdbcElemNum) :
pmeta.getPrecision(jdbcElemNum)));
}
}
switch (elemType)
{
case Types.BINARY:
case Types.VARBINARY:
case Types.LONGVARBINARY:
case Types.BLOB: //for CLI describe to be correct
case Types.CLOB:
outlen[0] = (rtnOutput ? rsmeta.getPrecision(jdbcElemNum) :
pmeta.getPrecision(jdbcElemNum));
}
if (SanityManager.DEBUG)
trace("SQLDAGRP len =" + java.lang.Integer.toHexString(outlen[0]) + "for type:" + elemType);
// 8 or 4 byte sqllength
if (sqlamLevel >= MGRLVL_6)
writer.writeLong(outlen[0]);
else
writer.writeInt(outlen[0]);
String typeName = rtnOutput ? rsmeta.getColumnTypeName(jdbcElemNum) :
pmeta.getParameterTypeName(jdbcElemNum);
if (SanityManager.DEBUG)
trace("jdbcType =" + typeName + " sqlType =" + sqlType + "len =" +outlen[0]);
writer.writeShort(sqlType);
// CCSID
// CCSID should be 0 for Binary Types.
if (elemType == java.sql.Types.CHAR ||
elemType == java.sql.Types.VARCHAR
|| elemType == java.sql.Types.LONGVARCHAR
|| elemType == java.sql.Types.CLOB)
writer.writeScalar2Bytes(1208);
else
writer.writeScalar2Bytes(0);
if (sqlamLevel < MGRLVL_7)
{
//SQLName
writeVCMorVCS(rtnOutput ? rsmeta.getColumnName(jdbcElemNum) : null);
//SQLLabel
writeVCMorVCS(null);
//SQLComments
writeVCMorVCS(null);
if (sqlamLevel == MGRLVL_6)
writeSQLUDTGRP(rsmeta, pmeta, jdbcElemNum, rtnOutput);
}
else
{
writeSQLDOPTGRP(rsmeta, pmeta, jdbcElemNum, rtnOutput);
}
}
/**
* Write variable character mixed byte or single byte
* The preference is to write mixed byte if it is defined for the server,
* since that is our default and we don't allow it to be changed, we always
* write mixed byte.
*
* @param s string to write
* @exception DRDAProtocolException
*/
private void writeVCMorVCS(String s)
throws DRDAProtocolException
{
//Write only VCM and 0 length for VCS
if (s == null)
{
writer.writeShort(0);
writer.writeShort(0);
return;
}
// VCM
writer.writeLDString(s);
// VCS
writer.writeShort(0);
}
/**
* Write SQLUDTGRP (SQL Descriptor User-Defined Type Group Descriptor)
*
* This is the format from the DRDA spec, Volume 1, section 5.6.4.10.
* However, this format is not rich enough to carry the information needed
* by JDBC. This format does not have a subtype code for JAVA_OBJECT and
* this format does not convey the Java class name needed
* by ResultSetMetaData.getColumnClassName().
*
* SQLUDXTYPE; DRDA TYPE I4; ENVLID 0x02; Length Override 4
* Constants which map to java.sql.Types constants DISTINCT, STRUCT, and REF.
* But DRDA does not define a constant which maps to java.sql.Types.JAVA_OBJECT.
* SQLUDTRDB; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* Database name.
* SQLUDTSCHEMA_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
* SQLUDTSCHEMA_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* Schema name. One of the above.
* SQLUDTNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
* SQLUDTNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* Unqualified UDT name. One of the above.
*
* Instead, we use the following format and only for communication between
* Derby servers and Derby clients which are both at version 10.6 or higher.
* For all other client/server combinations, we send null.
*
* SQLUDTNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
* SQLUDTNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
* Fully qualified UDT name. One of the above.
* SQLUDTCLASSNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override FdocaConstants.LONGVARCHAR_MAX_LEN
* SQLUDTCLASSNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override FdocaConstants.LONGVARCHAR_MAX_LEN
* Name of the Java class bound to the UDT. One of the above.
*
* @param rsmeta resultset meta data
* @param pmeta parameter meta data
* @param jdbcElemNum column number we are returning (in case of result set), or,
* parameter number (in case of parameter)
* @param rtnOutput whether this is for a result set
*
* @throws DRDAProtocolException
* @throws SQLException
*/
private void writeSQLUDTGRP(ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int jdbcElemNum, boolean rtnOutput)
throws DRDAProtocolException,SQLException
{
int jdbcType = rtnOutput ?
rsmeta.getColumnType( jdbcElemNum) : pmeta.getParameterType( jdbcElemNum );
if ( !(jdbcType == Types.JAVA_OBJECT) || !appRequester.supportsUDTs() )
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
String typeName = rtnOutput ?
rsmeta.getColumnTypeName( jdbcElemNum ) : pmeta.getParameterTypeName( jdbcElemNum );
String className = rtnOutput ?
rsmeta.getColumnClassName( jdbcElemNum ) : pmeta.getParameterClassName( jdbcElemNum );
writeVCMorVCS( typeName );
writeVCMorVCS( className );
}
private void writeSQLDOPTGRP(ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int jdbcElemNum, boolean rtnOutput)
throws DRDAProtocolException,SQLException
{
writer.writeByte(0);
//SQLUNAMED
writer.writeShort(0);
//SQLName
writeVCMorVCS(rtnOutput ? rsmeta.getColumnName(jdbcElemNum) : null);
//SQLLabel
writeVCMorVCS(null);
//SQLComments
writeVCMorVCS(null);
//SQLDUDTGRP
writeSQLUDTGRP(rsmeta, pmeta, jdbcElemNum, rtnOutput);
//SQLDXGRP
writeSQLDXGRP(rsmeta, pmeta, jdbcElemNum, rtnOutput);
}
private void writeSQLDXGRP(ResultSetMetaData rsmeta,
ParameterMetaData pmeta,
int jdbcElemNum, boolean rtnOutput)
throws DRDAProtocolException,SQLException
{
// Null indicator indicates we have data
writer.writeByte(0);
// SQLXKEYMEM; DRDA TYPE I2; ENVLID 0x04; Length Override 2
// Hard to get primary key info. Send 0 for now
writer.writeShort(0);
// SQLXUPDATEABLE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
writer.writeShort(rtnOutput ? rsmeta.isWritable(jdbcElemNum) : false);
// SQLXGENERATED; DRDA TYPE I2; ENVLID 0x04; Length Override 2
if (rtnOutput && rsmeta.isAutoIncrement(jdbcElemNum))
writer.writeShort(2);
else
writer.writeShort(0);
// SQLXPARMMODE; DRDA TYPE I2; ENVLID 0x04; Length Override 2
if (pmeta != null && !rtnOutput)
{
int mode = pmeta.getParameterMode(jdbcElemNum);
if (mode == JDBC30Translation.PARAMETER_MODE_UNKNOWN)
{
// For old style callable statements. We assume in/out if it
// is an output parameter.
int type = DRDAStatement.getOutputParameterTypeFromClassName(
pmeta.getParameterClassName(jdbcElemNum));
if (type != DRDAStatement.NOT_OUTPUT_PARAM)
mode = JDBC30Translation.PARAMETER_MODE_IN_OUT;
}
writer.writeShort(mode);
}
else
{
writer.writeShort(0);
}
// SQLXRDBNAM; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
// JCC uses this as the catalog name so we will send null.
writer.writeShort(0);
// SQLXCORNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
// SQLXCORNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
writeVCMorVCS(null);
// SQLXBASENAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
// SQLXBASENAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
writeVCMorVCS(rtnOutput ? rsmeta.getTableName(jdbcElemNum) : null);
// SQLXSCHEMA_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
// SQLXSCHEMA_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
writeVCMorVCS(rtnOutput ? rsmeta.getSchemaName(jdbcElemNum): null);
// SQLXNAME_m; DRDA TYPE VCM; ENVLID 0x3E; Length Override 255
// SQLXNAME_s; DRDA TYPE VCS; ENVLID 0x32; Length Override 255
writeVCMorVCS(rtnOutput ? rsmeta.getColumnName(jdbcElemNum): null);
}
/**
* Write Fdoca Value to client
* @param index Index of column being returned
* @param val Value to write to client
* @param drdaType FD:OCA DRDA Type from FdocaConstants
* @param precision Precision
* @param stmt Statement being processed
* @param isParam True when writing a value for a procedure parameter
*
* @exception DRDAProtocolException
*
* @exception SQLException
*
* @see FdocaConstants
*/
protected void writeFdocaVal(int index, Object val, int drdaType,
int precision, int scale, boolean valNull,
DRDAStatement stmt, boolean isParam)
throws DRDAProtocolException, SQLException
{
writeNullability(drdaType,valNull);
if (! valNull)
{
int ndrdaType = drdaType | 1;
long valLength = 0;
switch (ndrdaType)
{
case DRDAConstants.DRDA_TYPE_NBOOLEAN:
writer.writeBoolean( ((Boolean) val).booleanValue() );
break;
case DRDAConstants.DRDA_TYPE_NSMALL:
// DB2 does not have a BOOLEAN java.sql.bit type,
// so we need to send it as a small
if (val instanceof Boolean)
{
writer.writeShort(((Boolean) val).booleanValue());
}
else if (val instanceof Short)
writer.writeShort(((Short) val).shortValue());
else if (val instanceof Byte)
writer.writeShort(((Byte) val).byteValue());
else
writer.writeShort(((Integer) val).shortValue());
break;
case DRDAConstants.DRDA_TYPE_NINTEGER:
writer.writeInt(((Integer) val).intValue());
break;
case DRDAConstants.DRDA_TYPE_NINTEGER8:
writer.writeLong(((Long) val).longValue());
break;
case DRDAConstants.DRDA_TYPE_NFLOAT4:
writer.writeFloat(((Float) val).floatValue());
break;
case DRDAConstants.DRDA_TYPE_NFLOAT8:
writer.writeDouble(((Double) val).doubleValue());
break;
case DRDAConstants.DRDA_TYPE_NDECIMAL:
if (precision == 0)
precision = FdocaConstants.NUMERIC_DEFAULT_PRECISION;
BigDecimal bd = (java.math.BigDecimal) val;
writer.writeBigDecimal(bd,precision,scale);
break;
case DRDAConstants.DRDA_TYPE_NDATE:
writer.writeString(formatDate((java.sql.Date) val));
break;
case DRDAConstants.DRDA_TYPE_NTIME:
writer.writeString(formatTime((Time) val));
break;
case DRDAConstants.DRDA_TYPE_NTIMESTAMP:
writer.writeString(formatTimestamp((Timestamp) val));
break;
case DRDAConstants.DRDA_TYPE_NCHAR:
writer.writeString(((String) val).toString());
break;
case DRDAConstants.DRDA_TYPE_NVARCHAR:
case DRDAConstants.DRDA_TYPE_NVARMIX:
case DRDAConstants.DRDA_TYPE_NLONG:
case DRDAConstants.DRDA_TYPE_NLONGMIX:
//WriteLDString and generate warning if truncated
// which will be picked up by checkWarning()
writer.writeLDString(val.toString(), index, stmt, isParam);
break;
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
// do not send EXTDTA for lob of length 0, beetle 5967
if( ! ((EXTDTAInputStream) val).isEmptyStream() ){
stmt.addExtDtaObject(val, index);
//indicate externalized and size is unknown.
writer.writeExtendedLength(0x8000);
}else{
writer.writeExtendedLength(0);
}
break;
case DRDAConstants.DRDA_TYPE_NFIXBYTE:
writer.writeBytes((byte[]) val);
break;
case DRDAConstants.DRDA_TYPE_NVARBYTE:
case DRDAConstants.DRDA_TYPE_NLONGVARBYTE:
writer.writeLDBytes((byte[]) val, index);
break;
case DRDAConstants.DRDA_TYPE_NLOBLOC:
case DRDAConstants.DRDA_TYPE_NCLOBLOC:
writer.writeInt(((EngineLOB)val).getLocator());
break;
case DRDAConstants.DRDA_TYPE_NUDT:
writer.writeUDT( val, index );
break;
case DRDAConstants.DRDA_TYPE_NROWID:
writer.writeRowId(val, index);
break;
default:
if (SanityManager.DEBUG)
trace("ndrdaType is: "+ndrdaType);
writer.writeLDString(val.toString(), index, stmt, isParam);
}
}
}
/**
* write nullability if this is a nullable drdatype and FDOCA null
* value if appropriate
* @param drdaType FDOCA type
* @param valNull true if this is a null value. False otherwise
*
**/
private void writeNullability(int drdaType, boolean valNull)
{
if(FdocaConstants.isNullable(drdaType))
{
if (valNull)
writer.writeByte(FdocaConstants.NULL_DATA);
else
{
writer.writeByte(FdocaConstants.INDICATOR_NULLABLE);
}
}
}
/**
* Convert a {@code java.sql.Date} to a string with the format expected
* by the client.
*
* @param date the date to format
* @return a string on the format YYYY-MM-DD representing the date
* @see com.splicemachine.db.client.am.DateTime#dateBytesToDate
*/
private String formatDate(java.sql.Date date) {
Calendar cal = getGMTCalendar();
cal.clear();
cal.setTime(date);
char[] buf = "YYYY-MM-DD".toCharArray();
padInt(buf, 0, 4, cal.get(Calendar.YEAR));
padInt(buf, 5, 2, cal.get(Calendar.MONTH) + 1);
padInt(buf, 8, 2, cal.get(Calendar.DAY_OF_MONTH));
return new String(buf);
}
/**
* Convert a {@code java.sql.Time} to a string with the format expected
* by the client.
*
* @param time the time to format
* @return a string on the format HH:MM:SS representing the time
* @see com.splicemachine.db.client.am.DateTime#timeBytesToTime
*/
private String formatTime(Time time) {
Calendar cal = getGMTCalendar();
cal.clear();
cal.setTime(time);
char[] buf = "HH:MM:SS".toCharArray();
padInt(buf, 0, 2, cal.get(Calendar.HOUR_OF_DAY));
padInt(buf, 3, 2, cal.get(Calendar.MINUTE));
padInt(buf, 6, 2, cal.get(Calendar.SECOND));
return new String(buf);
}
/**
* Convert a {@code java.sql.Timestamp} to a string with the format
* expected by the client.
*
* @param ts the timestamp to format
* @return a string on the format YYYY-MM-DD-HH.MM.SS.ffffff[fff]
* @see com.splicemachine.db.client.am.DateTime#timestampBytesToTimestamp
*/
private String formatTimestamp(Timestamp ts) {
Calendar cal = getGMTCalendar();
cal.clear();
cal.setTime(ts);
char[] buf = new char[appRequester.getTimestampLength()];
padInt(buf, 0, 4, cal.get(Calendar.YEAR));
buf[4] = '-';
padInt(buf, 5, 2, cal.get(Calendar.MONTH) + 1);
buf[7] = '-';
padInt(buf, 8, 2, cal.get(Calendar.DAY_OF_MONTH));
buf[10] = '-';
padInt(buf, 11, 2, cal.get(Calendar.HOUR_OF_DAY));
buf[13] = '.';
padInt(buf, 14, 2, cal.get(Calendar.MINUTE));
buf[16] = '.';
padInt(buf, 17, 2, cal.get(Calendar.SECOND));
buf[19] = '.';
int nanos = ts.getNanos();
if (appRequester.supportsTimestampNanoseconds()) {
padInt(buf, 20, 9, nanos);
} else {
padInt(buf, 20, 6, nanos / 1000);
}
return new String(buf);
}
/**
* Insert an integer into a char array and pad it with leading zeros if
* its string representation is shorter than {@code length} characters.
*
* @param buf the char array
* @param offset where in the array to start inserting the value
* @param length the desired length of the inserted string
* @param value the integer value to insert
*/
private void padInt(char[] buf, int offset, int length, int value) {
final int radix = 10;
for (int i = offset + length - 1; i >= offset; i--) {
buf[i] = Character.forDigit(value % radix, radix);
value /= radix;
}
}
/**
* Methods to keep track of required codepoints
*/
/**
* Copy a list of required code points to template for checking
*
* @param req list of required codepoints
*/
private void copyToRequired(int [] req)
{
currentRequiredLength = req.length;
if (currentRequiredLength > required.length)
required = new int[currentRequiredLength];
for (int i = 0; i < req.length; i++)
required[i] = req[i];
}
/**
* Remove codepoint from required list
*
* @param codePoint - code point to be removed
*/
private void removeFromRequired(int codePoint)
{
for (int i = 0; i < currentRequiredLength; i++)
if (required[i] == codePoint)
required[i] = 0;
}
/**
* Check whether we have seen all the required code points
*
* @param codePoint code point for which list of code points is required
*/
private void checkRequired(int codePoint) throws DRDAProtocolException
{
int firstMissing = 0;
for (int i = 0; i < currentRequiredLength; i++)
{
if (required[i] != 0)
{
firstMissing = required[i];
break;
}
}
if (firstMissing != 0)
missingCodePoint(firstMissing);
}
/**
* Error routines
*/
/**
* Seen too many of this code point
*
* @param codePoint code point which has been duplicated
*
* @exception DRDAProtocolException
*/
private void tooMany(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_TOO_MANY, codePoint);
}
/**
* Object too big
*
* @param codePoint code point with too big object
* @exception DRDAProtocolException
*/
private void tooBig(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_TOO_BIG, codePoint);
}
/**
* Invalid non-db client tried to connect.
* thrown a required Value not found error and log a message to db.log
*
* @param prdid product id that does not match DNC
* @throws DRDAProtocolException
*/
private void invalidClient(String prdid) throws DRDAProtocolException {
Monitor.logMessage(new Date()
+ " : "
+ server.localizeMessage("DRDA_InvalidClient.S",
new String[] { prdid }));
requiredValueNotFound(CodePoint.PRDID);
}
/*** Required value not found.
*
* @param codePoint code point with invalid value
*
*/
private void requiredValueNotFound(int codePoint) throws DRDAProtocolException {
throwSyntaxrm(CodePoint.SYNERRCD_REQ_VAL_NOT_FOUND, codePoint);
}
/**
* Object length not allowed
*
* @param codePoint code point with bad object length
* @exception DRDAProtocolException
*/
private void badObjectLength(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_OBJ_LEN_NOT_ALLOWED, codePoint);
}
/**
* RDB not found
*
* @param rdbnam name of database
* @exception DRDAProtocolException
*/
private void rdbNotFound(String rdbnam) throws DRDAProtocolException
{
Object[] oa = {rdbnam};
throw new
DRDAProtocolException(DRDAProtocolException.DRDA_Proto_RDBNFNRM,
this,0,
DRDAProtocolException.NO_ASSOC_ERRCD, oa);
}
/**
* Invalid value for this code point
*
* @param codePoint code point value
* @exception DRDAProtocolException
*/
private void invalidValue(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_REQ_VAL_NOT_FOUND, codePoint);
}
/**
* Invalid codepoint for this command
*
* @param codePoint code point value
*
* @exception DRDAProtocolException
*/
protected void invalidCodePoint(int codePoint) throws DRDAProtocolException
{
throwSyntaxrm(CodePoint.SYNERRCD_INVALID_CP_FOR_CMD, codePoint);
}
/**
* Don't support this code point
*
* @param codePoint code point value
* @exception DRDAProtocolException
*/
protected void codePointNotSupported(int codePoint) throws DRDAProtocolException
{
throw new
DRDAProtocolException(DRDAProtocolException.DRDA_Proto_CMDNSPRM,
this,codePoint,
DRDAProtocolException.NO_ASSOC_ERRCD);
}
/**
* Don't support this value
*
* @param codePoint code point value
* @exception DRDAProtocolException
*/
private void valueNotSupported(int codePoint) throws DRDAProtocolException
{
throw new
DRDAProtocolException(DRDAProtocolException.DRDA_Proto_VALNSPRM,
this,codePoint,
DRDAProtocolException.NO_ASSOC_ERRCD);
}
/**
* Verify that the code point is the required code point
*
* @param codePoint code point we have
* @param reqCodePoint code point required at this time
*
* @exception DRDAProtocolException
*/
private void verifyRequiredObject(int codePoint, int reqCodePoint)
throws DRDAProtocolException
{
if (codePoint != reqCodePoint )
{
throwSyntaxrm(CodePoint.SYNERRCD_REQ_OBJ_NOT_FOUND,codePoint);
}
}
/**
* Verify that the code point is in the right order
*
* @param codePoint code point we have
* @param reqCodePoint code point required at this time
*
* @exception DRDAProtocolException
*/
private void verifyInOrderACCSEC_SECCHK(int codePoint, int reqCodePoint)
throws DRDAProtocolException
{
if (codePoint != reqCodePoint )
{
throw
new DRDAProtocolException(DRDAProtocolException.DRDA_Proto_PRCCNVRM,
this, codePoint,
CodePoint.PRCCNVCD_ACCSEC_SECCHK_WRONG_STATE);
}
}
/**
* Database name given under code point doesn't match previous database names
*
* @param codePoint codepoint where the mismatch occurred
*
* @exception DRDAProtocolException
*/
private void rdbnamMismatch(int codePoint)
throws DRDAProtocolException
{
throw new DRDAProtocolException(DRDAProtocolException.DRDA_Proto_PRCCNVRM,
this, codePoint,
CodePoint.PRCCNVCD_RDBNAM_MISMATCH);
}
/**
* Close the current session
*/
private void closeSession()
{
if (session == null)
return;
/* DERBY-2220: Rollback the current XA transaction if it is
still associated with the connection. */
if (xaProto != null)
xaProto.rollbackCurrentTransaction();
server.removeFromSessionTable(session.connNum);
try {
session.close();
} catch (SQLException se)
{
// If something went wrong closing down the session.
// Print an error to the console and close this
//thread. (6013)
sendUnexpectedException(se);
close();
}
finally {
session = null;
database = null;
appRequester=null;
sockis = null;
sockos=null;
databaseAccessException=null;
}
}
/**
* Handle Exceptions - write error protocol if appropriate and close session
* or thread as appropriate
*/
private void handleException(Exception e)
{
try {
if (e instanceof DRDAProtocolException) {
// protocol error - write error message
sendProtocolException((DRDAProtocolException) e);
} else {
// something unexpected happened
sendUnexpectedException(e);
server.consoleExceptionPrintTrace(e);
}
} finally {
// always close the session and stop the thread after handling
// these exceptions
closeSession();
close();
}
}
/**
* Notice the client about a protocol error.
*
* @param de <code>DRDAProtocolException</code> to be sent
*/
private void sendProtocolException(DRDAProtocolException de) {
String dbname = null;
if (database != null) {
dbname = database.getDatabaseName();
}
try {
println2Log(dbname, session.drdaID, de.getMessage());
server.consoleExceptionPrintTrace(de);
reader.clearBuffer();
de.write(writer);
finalizeChain();
} catch (DRDAProtocolException ioe) {
// There may be an IO exception in the write.
println2Log(dbname, session.drdaID, de.getMessage());
server.consoleExceptionPrintTrace(ioe);
}
}
/**
* Send unpexpected error to the client
* @param e Exception to be sent
*/
private void sendUnexpectedException(Exception e)
{
DRDAProtocolException unExpDe;
String dbname = null;
try {
if (database != null)
dbname = database.getDatabaseName();
println2Log(dbname,session.drdaID, e.getMessage());
server.consoleExceptionPrintTrace(e);
unExpDe = DRDAProtocolException.newAgentError(this,
CodePoint.SVRCOD_PRMDMG,
dbname, e.getMessage());
reader.clearBuffer();
unExpDe.write(writer);
finalizeChain();
}
catch (DRDAProtocolException nde)
{
// we can't tell the client, but we tried.
}
}
/**
* Test if DRDA connection thread is closed
*
* @return true if close; false otherwise
*/
private boolean closed()
{
synchronized (closeSync)
{
return close;
}
}
/**
* Get whether connections are logged
*
* @return true if connections are being logged; false otherwise
*/
private boolean getLogConnections()
{
synchronized(logConnectionsSync) {
return logConnections;
}
}
/**
* Get time slice value for length of time to work on a session
*
* @return time slice
*/
private long getTimeSlice()
{
synchronized(timeSliceSync) {
return timeSlice;
}
}
/**
* Send string to console
*
* @param value - value to print on console
*/
protected void trace(String value)
{
if (SanityManager.DEBUG && server.debugOutput == true)
server.consoleMessage(value, true);
}
/**
* Sends a trace string to the console when reading an EXTDTA value (if
* tracing is enabled).
*
* @param drdaType the DRDA type of the EXTDTA value
* @param index the one-based parameter index
* @param stream the stream being read
* @param streamLOB whether or not the value is being streamed as the last
* parameter value in the DRDA protocol flow
* @param encoding the encoding of the data, if any
*/
private void traceEXTDTARead(int drdaType, int index,
EXTDTAReaderInputStream stream,
boolean streamLOB, String encoding) {
if (SanityManager.DEBUG && server.debugOutput == true) {
StringBuffer sb = new StringBuffer("Reading/setting EXTDTA: ");
// Data: t<type>/i<ob_index>/<streamLOB>/<encoding>/
// <statusByteExpected>/b<byteLength>
sb.append("t").append(drdaType).append("/i").append(index).
append("/").append(streamLOB).
append("/").append(encoding).append("/").
append(stream.readStatusByte). append("/b");
if (stream == null) {
sb.append("NULL");
} else if (stream.isLayerBStream()) {
sb.append("UNKNOWN_LENGTH");
} else {
sb.append(
((StandardEXTDTAReaderInputStream)stream).getLength());
}
trace(sb.toString());
}
}
/***
* Show runtime memory
*
***/
public static void showmem() {
Runtime rt = null;
Date d = null;
rt = Runtime.getRuntime();
rt.gc();
d = new Date();
System.out.println("total memory: "
+ rt.totalMemory()
+ " free: "
+ rt.freeMemory()
+ " " + d.toString());
}
/**
* convert byte array to a Hex string
*
* @param buf buffer to convert
* @return hex string representation of byte array
*/
private String convertToHexString(byte [] buf)
{
StringBuffer str = new StringBuffer();
str.append("0x");
String val;
int byteVal;
for (int i = 0; i < buf.length; i++)
{
byteVal = buf[i] & 0xff;
val = Integer.toHexString(byteVal);
if (val.length() < 2)
str.append("0");
str.append(val);
}
return str.toString();
}
/**
* check that the given typdefnam is acceptable
*
* @param typdefnam
*
* @exception DRDAProtocolException
*/
private void checkValidTypDefNam(String typdefnam)
throws DRDAProtocolException
{
if (typdefnam.equals("QTDSQL370"))
return;
if (typdefnam.equals("QTDSQL400"))
return;
if (typdefnam.equals("QTDSQLX86"))
return;
if (typdefnam.equals("QTDSQLASC"))
return;
if (typdefnam.equals("QTDSQLVAX"))
return;
if (typdefnam.equals("QTDSQLJVM"))
return;
invalidValue(CodePoint.TYPDEFNAM);
}
/**
* Check that the length is equal to the required length for this codepoint
*
* @param codepoint codepoint we are checking
* @param reqlen required length
*
* @exception DRDAProtocolException
*/
private void checkLength(int codepoint, int reqlen)
throws DRDAProtocolException
{
long len = reader.getDdmLength();
if (len < reqlen)
badObjectLength(codepoint);
else if (len > reqlen)
tooBig(codepoint);
}
/**
* Read and check a boolean value
*
* @param codepoint codePoint to be used in error reporting
* @return true or false depending on boolean value read
*
* @exception DRDAProtocolException
*/
private boolean readBoolean(int codepoint) throws DRDAProtocolException
{
checkLength(codepoint, 1);
byte val = reader.readByte();
if (val == CodePoint.TRUE)
return true;
else if (val == CodePoint.FALSE)
return false;
else
invalidValue(codepoint);
return false; //to shut the compiler up
}
/**
* Create a new database and intialize the
* DRDAConnThread database.
*
* @param dbname database name to initialize. If
* dbnam is non null, add database to the current session
*
*/
private void initializeDatabase(String dbname)
{
Database db;
if (appRequester.isXARequester())
{
db = new XADatabase(dbname);
}
else
db = new Database(dbname);
if (dbname != null) {
session.addDatabase(db);
session.database = db;
}
database = db;
}
/**
* Set the current database
*
* @param codePoint codepoint we are processing
*
* @exception DRDAProtocolException
*/
private void setDatabase(int codePoint) throws DRDAProtocolException
{
String rdbnam = parseRDBNAM();
// using same database so we are done
if (database != null && database.getDatabaseName().equals(rdbnam))
return;
Database d = session.getDatabase(rdbnam);
if (d == null)
rdbnamMismatch(codePoint);
else
database = d;
session.database = d;
}
/**
* Write ENDUOWRM
* Instance Variables
* SVCOD - severity code - WARNING - required
* UOWDSP - Unit of Work Disposition - required
* RDBNAM - Relational Database name - optional
* SRVDGN - Server Diagnostics information - optional
*
* @param opType - operation type 1 - commit, 2 -rollback
*/
private void writeENDUOWRM(int opType)
{
writer.createDssReply();
writer.startDdm(CodePoint.ENDUOWRM);
writer.writeScalar2Bytes(CodePoint.SVRCOD, CodePoint.SVRCOD_WARNING);
writer.writeScalar1Byte(CodePoint.UOWDSP, opType);
writer.endDdmAndDss();
}
void writeEXTDTA (DRDAStatement stmt) throws SQLException, DRDAProtocolException
{
ArrayList extdtaValues = stmt.getExtDtaObjects();
// build the EXTDTA data, if necessary
if (extdtaValues == null)
return;
boolean chainFlag, chainedWithSameCorrelator;
boolean writeNullByte = false;
for (int i = 0; i < extdtaValues.size(); i++) {
// is this the last EXTDTA to be built?
if (i != extdtaValues.size() - 1) { // no
chainFlag = true;
chainedWithSameCorrelator = true;
}
else { // yes
chainFlag = false; //last blob DSS stream itself is NOT chained with the NEXT DSS
chainedWithSameCorrelator = false;
}
if (sqlamLevel >= MGRLVL_7)
if (stmt.isExtDtaValueNullable(i))
writeNullByte = true;
Object o = extdtaValues.get(i);
if (o instanceof EXTDTAInputStream) {
EXTDTAInputStream stream = (EXTDTAInputStream) o;
try{
stream.initInputStream();
writer.writeScalarStream (chainedWithSameCorrelator,
CodePoint.EXTDTA,
stream,
writeNullByte);
}finally{
// close the stream when done
closeStream(stream);
}
}
}
// reset extdtaValues after sending
stmt.clearExtDtaObjects();
}
/**
* Check SQLWarning and write SQLCARD as needed.
*
* @param conn connection to check
* @param stmt statement to check
* @param rs result set to check
* @param updateCount update count to include in SQLCARD
* @param alwaysSend whether always send SQLCARD regardless of
* the existance of warnings
* @param sendWarn whether to send any warnings or not.
*
* @exception DRDAProtocolException
*/
private void checkWarning(Connection conn, Statement stmt, ResultSet rs,
int updateCount, boolean alwaysSend, boolean sendWarn)
throws DRDAProtocolException, SQLException
{
// instead of writing a chain of sql warning, we send the first one, this is
// jcc/db2 limitation, see beetle 4629
SQLWarning warning = null;
SQLWarning reportWarning = null;
try
{
if (stmt != null)
{
warning = stmt.getWarnings();
if (warning != null)
{
stmt.clearWarnings();
reportWarning = warning;
}
}
if (rs != null)
{
warning = rs.getWarnings();
if (warning != null)
{
rs.clearWarnings();
if (reportWarning == null)
reportWarning = warning;
}
}
if (conn != null)
{
warning = conn.getWarnings();
if (warning != null)
{
conn.clearWarnings();
if (reportWarning == null)
reportWarning = warning;
}
}
}
catch (SQLException se)
{
if (SanityManager.DEBUG)
trace("got SQLException while trying to get warnings.");
}
if ((alwaysSend || reportWarning != null) && sendWarn)
writeSQLCARDs(reportWarning, updateCount);
}
boolean hasSession() {
return session != null;
}
long getBytesRead() {
return reader.totalByteCount;
}
long getBytesWritten() {
return writer.totalByteCount;
}
protected String buildRuntimeInfo(String indent, LocalizedResource localLangUtil )
{
String s ="";
if (!hasSession())
return s;
else
s += session.buildRuntimeInfo("", localLangUtil);
s += "\n";
return s;
}
/**
* Finalize the current DSS chain and send it if
* needed.
*/
private void finalizeChain() throws DRDAProtocolException {
writer.finalizeChain(reader.getCurrChainState(), getOutputStream());
return;
}
/**
* Validate SECMEC_USRSSBPWD (Strong Password Substitute) can be used as
* DRDA security mechanism.
*
* Here we check that the target server can support SECMEC_USRSSBPWD
* security mechanism based on the environment, application
* requester's identity (PRDID) and connection URL.
*
* IMPORTANT NOTE:
* --------------
* SECMEC_USRSSBPWD is ONLY supported by the target server if:
* - current authentication provider is Derby BUILTIN or
* NONE. (database / system level) (Phase I)
* - database-level password must have been encrypted with the
* SHA-1 based authentication scheme
* - Application requester is 'DNC' (Derby Network Client)
* (Phase I)
*
* @return security check code - 0 if everything O.K.
*/
private int validateSecMecUSRSSBPWD() throws DRDAProtocolException
{
String dbName = null;
AuthenticationService authenticationService = null;
com.splicemachine.db.iapi.db.Database databaseObj = null;
String srvrlslv = appRequester.srvrlslv;
// Check if application requester is the Derby Network Client (DNC)
//
// We use a trick here - as the product ID is not yet available
// since ACCRDB message is only coming later, we check the server
// release level field sent as part of the initial EXCSAT message;
// indeed, the product ID (PRDID) is prefixed to in the field.
// Derby always sets it as part of the EXCSAT message so if it is
// not available, we stop here and inform the requester that
// SECMEC_USRSSBPWD cannot be supported for this connection.
if ((srvrlslv == null) || (srvrlslv.length() == 0) ||
(srvrlslv.length() < CodePoint.PRDID_MAX) ||
(srvrlslv.indexOf(DRDAConstants.DERBY_DRDA_CLIENT_ID)
== -1))
return CodePoint.SECCHKCD_NOTSUPPORTED; // Not Supported
// Client product version is extracted from the srvrlslv field.
// srvrlslv has the format <PRDID>/<ALTERNATE VERSION FORMAT>
// typically, a known Derby client has a four part version number
// with a pattern such as DNC10020/10.2.0.3 alpha. If the alternate
// version format is not specified, clientProductVersion_ will just
// be set to the srvrlslvl. Final fallback will be the product id.
//
// SECMEC_USRSSBPWD is only supported by the Derby engine and network
// server code starting at version major '10' and minor '02'. Hence,
// as this is the same for the db client driver, we need to ensure
// our DNC client is at version and release level of 10.2 at least.
// We set the client version in the application requester and check
// if it is at the level we require at a minimum.
appRequester.setClientVersion(
srvrlslv.substring(0, (int) CodePoint.PRDID_MAX));
if (appRequester.supportsSecMecUSRSSBPWD() == false)
return CodePoint.SECCHKCD_NOTSUPPORTED; // Not Supported
dbName = database.getShortDbName();
// Check if the database is available (booted)
//
// First we need to have the database name available and it should
// have been set as part of the ACCSEC request (in the case of a Derby
// 'DNC' client)
if ((dbName == null) || (dbName.length() == 0))
{
// No database specified in the connection URL attributes
//
// In this case, we get the authentication service handle from the
// local driver, as the requester may simply be trying to shutdown
// the engine.
authenticationService = ((InternalDriver)
NetworkServerControlImpl.getDriver()).getAuthenticationService();
}
else
{
// We get the authentication service from the database as this
// last one might have specified its own auth provider (at the
// database level).
//
// if monitor is never setup by any ModuleControl, getMonitor
// returns null and no Derby database has been booted.
if (Monitor.getMonitor() != null)
databaseObj = (com.splicemachine.db.iapi.db.Database)
Monitor.findService(Property.DATABASE_MODULE, dbName);
if (databaseObj == null)
{
// If database is not found, try connecting to it.
database.makeDummyConnection();
// now try to find it again
databaseObj = (com.splicemachine.db.iapi.db.Database)
Monitor.findService(Property.DATABASE_MODULE, dbName);
}
// If database still could not be found, it means the database
// does not exist - we just return security mechanism not
// supported down below as we could not verify we can handle
// it.
try {
if (databaseObj != null)
authenticationService =
databaseObj.getAuthenticationService();
} catch (StandardException se) {
println2Log(null, session.drdaID, se.getMessage());
// Local security service non-retryable error.
return CodePoint.SECCHKCD_0A;
}
}
// Now we check if the authentication provider is NONE or BUILTIN
if (authenticationService != null)
{
String authClassName = authenticationService.getClass().getName();
if (!authClassName.equals(AUTHENTICATION_PROVIDER_BUILTIN_CLASS) &&
!authClassName.equals(AUTHENTICATION_PROVIDER_NONE_CLASS))
return CodePoint.SECCHKCD_NOTSUPPORTED; // Not Supported
}
// SECMEC_USRSSBPWD target initialization
try {
myTargetSeed = DecryptionManager.generateSeed();
database.secTokenOut = myTargetSeed;
} catch (SQLException se) {
println2Log(null, session.drdaID, se.getMessage());
// Local security service non-retryable error.
return CodePoint.SECCHKCD_0A;
}
return 0; // SECMEC_USRSSBPWD is supported
}
/**
* Close a stream.
*
* @param stream the stream to close (possibly {@code null})
* @throws SQLException wrapped around an {@code IOException} if closing
* the stream failed
*/
private static void closeStream(InputStream stream) throws SQLException {
try {
if (stream != null) {
stream.close();
}
} catch (IOException e) {
throw Util.javaException(e);
}
}
private static InputStream
convertAsByteArrayInputStream( EXTDTAReaderInputStream stream )
throws IOException {
// Suppress the exception that may be thrown when reading the status
// byte here, we want the embedded statement to fail while executing.
stream.setSuppressException(true);
final int byteArrayLength =
stream instanceof StandardEXTDTAReaderInputStream ?
(int) ( ( StandardEXTDTAReaderInputStream ) stream ).getLength() :
1 + stream.available(); // +1 to avoid infinite loop
// TODO: We will run into OOMEs for large values here.
// Could avoid this by saving value temporarily to disk, for
// instance by using the existing LOB code.
PublicBufferOutputStream pbos =
new PublicBufferOutputStream( byteArrayLength );
byte[] buffer = new byte[Math.min(byteArrayLength, 32*1024)];
int c = 0;
while( ( c = stream.read( buffer,
0,
buffer.length ) ) > -1 ) {
pbos.write( buffer, 0, c );
}
// Check if the client driver encountered any errors when reading the
// source on the client side.
if (stream.isStatusSet() &&
stream.getStatus() != DRDAConstants.STREAM_OK) {
// Create a stream that will just fail when accessed.
return new FailingEXTDTAInputStream(stream.getStatus());
} else {
return new ByteArrayInputStream( pbos.getBuffer(),
0,
pbos.getCount() );
}
}
private static class PublicBufferOutputStream extends ByteArrayOutputStream{
PublicBufferOutputStream(int size){
super(size);
}
public byte[] getBuffer(){
return buf;
}
public int getCount(){
return count;
}
}
/**
* Sets the specified character EXTDTA parameter of the embedded statement.
*
* @param stmt the DRDA statement to use
* @param i the one-based index of the parameter
* @param extdtaStream the EXTDTA stream to read data from
* @param streamLOB whether or not the stream content is streamed as the
* last value in the DRDA protocol flow
* @param encoding the encoding of the EXTDTA stream
* @throws IOException if reading from the stream fails
* @throws SQLException if setting the stream fails
*/
private static void setAsCharacterStream(
DRDAStatement stmt,
int i,
EXTDTAReaderInputStream extdtaStream,
boolean streamLOB,
String encoding)
throws IOException, SQLException {
PreparedStatement ps = stmt.getPreparedStatement();
EnginePreparedStatement engnps =
( EnginePreparedStatement ) ps;
// DERBY-3085. Save the stream so it can be drained later
// if not used.
if (streamLOB)
stmt.setStreamedParameter(extdtaStream);
final InputStream is =
streamLOB ?
(InputStream) extdtaStream :
convertAsByteArrayInputStream( extdtaStream );
final InputStreamReader streamReader =
new InputStreamReader( is,
encoding ) ;
engnps.setCharacterStream(i, streamReader);
}
/**
* Sets the specified binary EXTDTA parameter of the embedded statement.
*
* @param stmt the DRDA statement to use
* @param index the one-based index of the parameter
* @param stream the EXTDTA stream to read data from
* @param streamLOB whether or not the stream content is streamed as the
* last value in the DRDA protocol flow
* @throws IOException if reading from the stream fails
* @throws SQLException if setting the stream fails
*/
private static void setAsBinaryStream(DRDAStatement stmt,
int index,
EXTDTAReaderInputStream stream,
boolean streamLOB)
throws IOException, SQLException {
int type = stmt.getParameterMetaData().getParameterType(index);
boolean useSetBinaryStream = (type == Types.BLOB);
PreparedStatement ps = stmt.getPreparedStatement();
if (streamLOB && useSetBinaryStream) {
// Save the streamed parameter so we can drain it if it does not
// get used by embedded when the statement is executed. DERBY-3085
stmt.setStreamedParameter(stream);
if (stream == null) {
ps.setBytes(index, null);
} else if (!stream.isLayerBStream()) {
int length = (int)((StandardEXTDTAReaderInputStream)
stream).getLength();
ps.setBinaryStream(index, stream, length);
} else {
((EnginePreparedStatement)ps).setBinaryStream(index, stream);
}
} else {
if (stream == null) {
ps.setBytes(index, null);
} else {
InputStream bais = convertAsByteArrayInputStream(stream);
ps.setBinaryStream(index, bais, bais.available());
}
}
}
}
| TODO remove subtransactions from batch
Check whether this is correct
| java/drda/com/splicemachine/db/impl/drda/DRDAConnThread.java | TODO remove subtransactions from batch |
|
Java | agpl-3.0 | 86f9fb5eaae6c64e683926e7de746a100052e2e9 | 0 | aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms | /**
* *****************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2012-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <[email protected]>
* http://www.opennms.org/
* http://www.opennms.com/
******************************************************************************
*/
package org.opennms.web.rest.v1;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.opennms.core.config.api.JaxbListWrapper;
import org.opennms.netmgt.config.KSC_PerformanceReportFactory;
import org.opennms.netmgt.config.kscReports.Graph;
import org.opennms.netmgt.config.kscReports.Report;
import org.opennms.web.svclayer.api.KscReportService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
@Component("kscRestService")
@Path("ksc")
public class KscRestService extends OnmsRestService {
private static final Logger LOG = LoggerFactory.getLogger(KscRestService.class);
@Autowired
private KscReportService m_kscReportService;
@Autowired
private KSC_PerformanceReportFactory m_kscReportFactory;
@GET
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON, MediaType.APPLICATION_ATOM_XML})
@Transactional
public KscReportCollection getReports() throws ParseException {
final KscReportCollection reports = new KscReportCollection(m_kscReportService.getReportMap(), true);
reports.setTotalCount(reports.size());
return reports;
}
@GET
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON, MediaType.APPLICATION_ATOM_XML})
@Path("{reportId}")
@Transactional
public KscReport getReport(@PathParam("reportId") final Integer reportId) {
final Map<Integer, Report> reportList = m_kscReportService.getReportMap();
final Report report = reportList.get(reportId);
if (report == null) {
throw getException(Status.NOT_FOUND, "No such report id {}.", Integer.toString(reportId));
}
return new KscReport(report);
}
@GET
@Produces(MediaType.TEXT_PLAIN)
@Path("count")
@Transactional
public String getCount() {
return Integer.toString(m_kscReportService.getReportList().size());
}
@PUT
@Path("{kscReportId}")
@Transactional
public Response addGraph(@PathParam("kscReportId") final Integer kscReportId, @QueryParam("title") final String title, @QueryParam("reportName") final String reportName, @QueryParam("resourceId") final String resourceId, @QueryParam("timespan") String timespan) {
writeLock();
try {
if (kscReportId == null || reportName == null || reportName == "" || resourceId == null || resourceId == "") {
throw getException(Status.BAD_REQUEST, "Invalid request: reportName and resourceId cannot be empty!");
}
final Report report = m_kscReportFactory.getReportByIndex(kscReportId);
if (report == null) {
throw getException(Status.NOT_FOUND, "Invalid request: No KSC report found with ID: {}.", Integer.toString(kscReportId));
}
final Graph graph = new Graph();
if (title != null) {
graph.setTitle(title);
}
boolean found = false;
for (final String valid : KSC_PerformanceReportFactory.TIMESPAN_OPTIONS) {
if (valid.equals(timespan)) {
found = true;
break;
}
}
if (!found) {
LOG.debug("invalid timespan ('{}'), setting to '7_day' instead.", timespan);
timespan = "7_day";
}
graph.setGraphtype(reportName);
graph.setResourceId(resourceId);
graph.setTimespan(timespan);
report.addGraph(graph);
m_kscReportFactory.setReport(kscReportId, report);
try {
m_kscReportFactory.saveCurrent();
} catch (final Exception e) {
throw getException(Status.INTERNAL_SERVER_ERROR, "Cannot save report with Id {} : {} ", kscReportId.toString(), e.getMessage());
}
return Response.noContent().build();
} finally {
writeUnlock();
}
}
@POST
@Consumes(MediaType.APPLICATION_XML)
public Response addKscReport(@Context final UriInfo uriInfo, final KscReport kscReport) {
writeLock();
try {
LOG.debug("addKscReport: Adding KSC Report {}", kscReport);
Report report = m_kscReportFactory.getReportByIndex(kscReport.getId());
if (report != null) {
throw getException(Status.CONFLICT, "Invalid request: Existing KSC report found with ID: {}.", Integer.toString(kscReport.getId()));
}
report = new Report();
report.setId(kscReport.getId());
report.setTitle(kscReport.getLabel());
if (kscReport.getShowGraphtypeButton() != null) {
report.setShow_graphtype_button(kscReport.getShowGraphtypeButton());
}
if (kscReport.getShowTimespanButton() != null) {
report.setShow_timespan_button(kscReport.getShowTimespanButton());
}
if (kscReport.getGraphsPerLine() != null) {
report.setGraphs_per_line(kscReport.getGraphsPerLine());
}
if (kscReport.hasGraphs()) {
for (KscGraph kscGraph : kscReport.getGraphs()) {
final Graph graph = kscGraph.buildGraph();
report.addGraph(graph);
}
}
m_kscReportFactory.addReport(report);
try {
m_kscReportFactory.saveCurrent();
} catch (final Exception e) {
throw getException(Status.BAD_REQUEST, e.getMessage());
}
return Response.created(getRedirectUri(uriInfo, kscReport.getId())).build();
} finally {
writeUnlock();
}
}
@XmlRootElement(name = "kscReports")
public static final class KscReportCollection extends JaxbListWrapper<KscReport> {
private static final long serialVersionUID = 1L;
public KscReportCollection() {
super();
}
public KscReportCollection(Collection<? extends KscReport> reports) {
super(reports);
}
public KscReportCollection(final Map<Integer, Report> reportList, boolean terse) {
super();
for (final Report report : reportList.values()) {
if (terse) {
add(new KscReport(report.getId(), report.getTitle()));
} else {
add(new KscReport(report));
}
}
}
@XmlElement(name = "kscReport")
public List<KscReport> getObjects() {
return super.getObjects();
}
}
@XmlRootElement(name = "kscReport")
@XmlAccessorType(XmlAccessType.NONE)
public static final class KscReport {
@XmlAttribute(name = "id", required = true)
private Integer m_id;
@XmlAttribute(name = "label", required = true)
private String m_label;
@XmlAttribute(name = "show_timespan_button", required = false)
private Boolean m_show_timespan_button;
@XmlAttribute(name = "show_graphtype_button", required = false)
private Boolean m_show_graphtype_button;
@XmlAttribute(name = "graphs_per_line", required = false)
private Integer m_graphs_per_line;
@XmlElement(name = "kscGraph")
private List<KscGraph> m_graphs = new ArrayList<KscGraph>();
public KscReport() {
}
public KscReport(final Integer reportId, final String label) {
m_id = reportId;
m_label = label;
}
public KscReport(Report report) {
m_id = report.getId();
m_label = report.getTitle();
m_show_timespan_button = report.getShow_timespan_button();
m_show_graphtype_button = report.getShow_graphtype_button();
m_graphs_per_line = report.getGraphs_per_line();
m_graphs.clear();
for(Graph graph : report.getGraphCollection()) {
m_graphs.add(new KscGraph(graph));
}
}
public Integer getId() {
return m_id;
}
public void setId(final Integer id) {
m_id = id;
}
public String getLabel() {
return m_label;
}
public void setLabel(final String label) {
m_label = label;
}
public Boolean getShowTimespanButton() {
return m_show_timespan_button;
}
public void setShowTimespanButton(final Boolean show) {
m_show_timespan_button = show;
}
public Boolean getShowGraphtypeButton() {
return m_show_graphtype_button;
}
public void setShowGraphtypeButton(final Boolean show) {
m_show_graphtype_button = show;
}
public Integer getGraphsPerLine() {
return m_graphs_per_line;
}
public void setGraphsPerLine(final Integer graphs) {
m_graphs_per_line = graphs;
}
public boolean hasGraphs() {
return !m_graphs.isEmpty();
}
public List<KscGraph> getGraphs() {
return m_graphs;
}
}
@XmlRootElement(name = "kscGraph")
@XmlAccessorType(XmlAccessType.NONE)
public static final class KscGraph {
@XmlAttribute(name = "title", required = true)
private String m_title;
@XmlAttribute(name = "timespan", required = true)
private String m_timespan;
@XmlAttribute(name = "graphtype", required = true)
private String m_graphtype;
@XmlAttribute(name = "resourceId", required = false)
private String m_resourceId;
@XmlAttribute(name = "nodeId", required = false)
private String m_nodeId;
@XmlAttribute(name = "nodeSource", required = false)
private String m_nodeSource;
@XmlAttribute(name = "domain", required = false)
private String m_domain;
@XmlAttribute(name = "interfaceId", required = false)
private String m_interfaceId;
@XmlAttribute(name = "extlink", required = false)
private String m_extlink;
public KscGraph() {
}
public KscGraph(Graph graph) {
m_title = graph.getTitle();
m_timespan = graph.getTimespan();
m_graphtype = graph.getGraphtype();
m_resourceId = graph.getResourceId();
m_nodeId = graph.getNodeId();
m_nodeSource = graph.getNodeSource();
m_domain = graph.getDomain();
m_interfaceId = graph.getInterfaceId();
m_extlink = graph.getExtlink();
}
public Graph buildGraph() {
boolean found = false;
for (final String valid : KSC_PerformanceReportFactory.TIMESPAN_OPTIONS) {
if (valid.equals(m_timespan)) {
found = true;
break;
}
}
if (!found) {
LOG.debug("invalid timespan ('{}'), setting to '7_day' instead.", m_timespan);
m_timespan = "7_day";
}
final Graph graph = new Graph();
graph.setTitle(m_title);
graph.setTimespan(m_timespan);
graph.setGraphtype(m_graphtype);
graph.setResourceId(m_resourceId);
graph.setNodeId(m_nodeId);
graph.setNodeSource(m_nodeSource);
graph.setDomain(m_domain);
graph.setInterfaceId(m_interfaceId);
graph.setExtlink(m_extlink);
return graph;
}
}
}
| opennms-webapp-rest/src/main/java/org/opennms/web/rest/v1/KscRestService.java | /**
* *****************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2012-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <[email protected]>
* http://www.opennms.org/
* http://www.opennms.com/
******************************************************************************
*/
package org.opennms.web.rest.v1;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import javax.persistence.Entity;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.annotate.JsonRootName;
import org.opennms.core.config.api.JaxbListWrapper;
import org.opennms.netmgt.config.KSC_PerformanceReportFactory;
import org.opennms.netmgt.config.kscReports.Graph;
import org.opennms.netmgt.config.kscReports.Report;
import org.opennms.web.svclayer.api.KscReportService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
@Component("kscRestService")
@Path("ksc")
public class KscRestService extends OnmsRestService {
private static final Logger LOG = LoggerFactory.getLogger(KscRestService.class);
@Autowired
private KscReportService m_kscReportService;
@Autowired
private KSC_PerformanceReportFactory m_kscReportFactory;
@GET
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON, MediaType.APPLICATION_ATOM_XML})
@Transactional
public KscReportCollection getReports() throws ParseException {
final KscReportCollection reports = new KscReportCollection(m_kscReportService.getReportMap(), true);
reports.setTotalCount(reports.size());
return reports;
}
@GET
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON, MediaType.APPLICATION_ATOM_XML})
@Path("{reportId}")
@Transactional
public KscReport getReport(@PathParam("reportId") final Integer reportId) {
final Map<Integer, Report> reportList = m_kscReportService.getReportMap();
final Report report = reportList.get(reportId);
if (report == null) {
throw getException(Status.NOT_FOUND, "No such report id {}.", Integer.toString(reportId));
}
return new KscReport(report);
}
@GET
@Produces(MediaType.TEXT_PLAIN)
@Path("count")
@Transactional
public String getCount() {
return Integer.toString(m_kscReportService.getReportList().size());
}
@PUT
@Path("{kscReportId}")
@Transactional
public Response addGraph(@PathParam("kscReportId") final Integer kscReportId, @QueryParam("title") final String title, @QueryParam("reportName") final String reportName, @QueryParam("resourceId") final String resourceId, @QueryParam("timespan") String timespan) {
writeLock();
try {
if (kscReportId == null || reportName == null || reportName == "" || resourceId == null || resourceId == "") {
throw getException(Status.BAD_REQUEST, "Invalid request: reportName and resourceId cannot be empty!");
}
final Report report = m_kscReportFactory.getReportByIndex(kscReportId);
if (report == null) {
throw getException(Status.NOT_FOUND, "Invalid request: No KSC report found with ID: {}.", Integer.toString(kscReportId));
}
final Graph graph = new Graph();
if (title != null) {
graph.setTitle(title);
}
boolean found = false;
for (final String valid : KSC_PerformanceReportFactory.TIMESPAN_OPTIONS) {
if (valid.equals(timespan)) {
found = true;
break;
}
}
if (!found) {
LOG.debug("invalid timespan ('{}'), setting to '7_day' instead.", timespan);
timespan = "7_day";
}
graph.setGraphtype(reportName);
graph.setResourceId(resourceId);
graph.setTimespan(timespan);
report.addGraph(graph);
m_kscReportFactory.setReport(kscReportId, report);
try {
m_kscReportFactory.saveCurrent();
} catch (final Exception e) {
throw getException(Status.INTERNAL_SERVER_ERROR, "Cannot save report with Id {} : {} ", kscReportId.toString(), e.getMessage());
}
return Response.noContent().build();
} finally {
writeUnlock();
}
}
@POST
@Consumes(MediaType.APPLICATION_XML)
public Response addKscReport(@Context final UriInfo uriInfo, final KscReport kscReport) {
writeLock();
try {
LOG.debug("addKscReport: Adding KSC Report {}", kscReport);
Report report = m_kscReportFactory.getReportByIndex(kscReport.getId());
if (report != null) {
throw getException(Status.CONFLICT, "Invalid request: Existing KSC report found with ID: {}.", Integer.toString(kscReport.getId()));
}
report = new Report();
report.setId(kscReport.getId());
report.setTitle(kscReport.getLabel());
if (kscReport.getShowGraphtypeButton() != null) {
report.setShow_graphtype_button(kscReport.getShowGraphtypeButton());
}
if (kscReport.getShowTimespanButton() != null) {
report.setShow_timespan_button(kscReport.getShowTimespanButton());
}
if (kscReport.getGraphsPerLine() != null) {
report.setGraphs_per_line(kscReport.getGraphsPerLine());
}
if (kscReport.hasGraphs()) {
for (KscGraph kscGraph : kscReport.getGraphs()) {
final Graph graph = kscGraph.buildGraph();
report.addGraph(graph);
}
}
m_kscReportFactory.addReport(report);
try {
m_kscReportFactory.saveCurrent();
} catch (final Exception e) {
throw getException(Status.BAD_REQUEST, e.getMessage());
}
return Response.created(getRedirectUri(uriInfo, kscReport.getId())).build();
} finally {
writeUnlock();
}
}
@Entity
@XmlRootElement(name = "kscReports")
@JsonRootName("kscReports")
public static final class KscReportCollection extends JaxbListWrapper<KscReport> {
private static final long serialVersionUID = 1L;
public KscReportCollection() {
super();
}
public KscReportCollection(Collection<? extends KscReport> reports) {
super(reports);
}
public KscReportCollection(final Map<Integer, Report> reportList, boolean terse) {
super();
for (final Report report : reportList.values()) {
if (terse) {
add(new KscReport(report.getId(), report.getTitle()));
} else {
add(new KscReport(report));
}
}
}
@XmlElement(name = "kscReport")
@JsonProperty("kscReport")
public List<KscReport> getObjects() {
return super.getObjects();
}
}
@Entity
@XmlRootElement(name = "kscReport")
@XmlAccessorType(XmlAccessType.NONE)
public static final class KscReport {
@XmlAttribute(name = "id", required = true)
private Integer m_id;
@XmlAttribute(name = "label", required = true)
private String m_label;
@XmlAttribute(name = "show_timespan_button", required = false)
private Boolean m_show_timespan_button;
@XmlAttribute(name = "show_graphtype_button", required = false)
private Boolean m_show_graphtype_button;
@XmlAttribute(name = "graphs_per_line", required = false)
private Integer m_graphs_per_line;
@XmlElements(@XmlElement(name = "kscGraph"))
private List<KscGraph> m_graphs = new ArrayList<KscGraph>();
public KscReport() {
}
public KscReport(final Integer reportId, final String label) {
m_id = reportId;
m_label = label;
}
public KscReport(Report report) {
m_id = report.getId();
m_label = report.getTitle();
m_show_timespan_button = report.getShow_timespan_button();
m_show_graphtype_button = report.getShow_graphtype_button();
m_graphs_per_line = report.getGraphs_per_line();
m_graphs.clear();
for(Graph graph : report.getGraphCollection()) {
m_graphs.add(new KscGraph(graph));
}
}
public Integer getId() {
return m_id;
}
public void setId(final Integer id) {
m_id = id;
}
public String getLabel() {
return m_label;
}
public void setLabel(final String label) {
m_label = label;
}
public Boolean getShowTimespanButton() {
return m_show_timespan_button;
}
public void setShowTimespanButton(final Boolean show) {
m_show_timespan_button = show;
}
public Boolean getShowGraphtypeButton() {
return m_show_graphtype_button;
}
public void setShowGraphtypeButton(final Boolean show) {
m_show_graphtype_button = show;
}
public Integer getGraphsPerLine() {
return m_graphs_per_line;
}
public void setGraphsPerLine(final Integer graphs) {
m_graphs_per_line = graphs;
}
public boolean hasGraphs() {
return !m_graphs.isEmpty();
}
public List<KscGraph> getGraphs() {
return m_graphs;
}
}
@Entity
@XmlRootElement(name = "kscGraph")
@XmlAccessorType(XmlAccessType.NONE)
public static final class KscGraph {
@XmlAttribute(name = "title", required = true)
private String m_title;
@XmlAttribute(name = "timespan", required = true)
private String m_timespan;
@XmlAttribute(name = "graphtype", required = true)
private String m_graphtype;
@XmlAttribute(name = "resourceId", required = false)
private String m_resourceId;
@XmlAttribute(name = "nodeId", required = false)
private String m_nodeId;
@XmlAttribute(name = "nodeSource", required = false)
private String m_nodeSource;
@XmlAttribute(name = "domain", required = false)
private String m_domain;
@XmlAttribute(name = "interfaceId", required = false)
private String m_interfaceId;
@XmlAttribute(name = "extlink", required = false)
private String m_extlink;
public KscGraph() {
}
public KscGraph(Graph graph) {
m_title = graph.getTitle();
m_timespan = graph.getTimespan();
m_graphtype = graph.getGraphtype();
m_resourceId = graph.getResourceId();
m_nodeId = graph.getNodeId();
m_nodeSource = graph.getNodeSource();
m_domain = graph.getDomain();
m_interfaceId = graph.getInterfaceId();
m_extlink = graph.getExtlink();
}
public Graph buildGraph() {
boolean found = false;
for (final String valid : KSC_PerformanceReportFactory.TIMESPAN_OPTIONS) {
if (valid.equals(m_timespan)) {
found = true;
break;
}
}
if (!found) {
LOG.debug("invalid timespan ('{}'), setting to '7_day' instead.", m_timespan);
m_timespan = "7_day";
}
final Graph graph = new Graph();
graph.setTitle(m_title);
graph.setTimespan(m_timespan);
graph.setGraphtype(m_graphtype);
graph.setResourceId(m_resourceId);
graph.setNodeId(m_nodeId);
graph.setNodeSource(m_nodeSource);
graph.setDomain(m_domain);
graph.setInterfaceId(m_interfaceId);
graph.setExtlink(m_extlink);
return graph;
}
}
}
| Fix for Bug NMS-8648
Normalize the KSC ReST end point to show consistent content in XML an JSON
| opennms-webapp-rest/src/main/java/org/opennms/web/rest/v1/KscRestService.java | Fix for Bug NMS-8648 |
|
Java | lgpl-2.1 | 944b2fa5b29dd211db99fe10c8a95a67f63d49c7 | 0 | CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine | /*
* jETeL/CloverETL - Java based ETL application framework.
* Copyright (c) Javlin, a.s. ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jetel.graph.runtime;
import java.lang.management.ManagementFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.MDC;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.graph.ContextProvider;
import org.jetel.graph.GraphElement;
import org.jetel.graph.IGraphElement;
import org.jetel.graph.JobType;
import org.jetel.graph.Node;
import org.jetel.graph.Phase;
import org.jetel.graph.Result;
import org.jetel.graph.TransformationGraph;
import org.jetel.graph.runtime.jmx.CloverJMX;
import org.jetel.graph.runtime.tracker.TokenTracker;
import org.jetel.util.primitive.MultiValueMap;
import org.jetel.util.string.StringUtils;
/**
* Description of the Class
*
* @author dpavlis
* @since July 29, 2002
* @revision $Revision$
*/
public class WatchDog implements Callable<Result>, CloverPost {
/**
* This lock object guards currentPhase variable and watchDogStatus.
*/
private final Lock CURRENT_PHASE_LOCK = new ReentrantLock();
private final Object ABORT_MONITOR = new Object();
private boolean abortFinished = false;
public final static String MBEAN_NAME_PREFIX = "CLOVERJMX_";
public final static long WAITTIME_FOR_STOP_SIGNAL = 5000; //miliseconds
private static final long ABORT_TIMEOUT = 5000L;
private static final long ABORT_WAIT = 2400L;
public static final String WATCHDOG_THREAD_NAME_PREFIX = "WatchDog_";
private int[] _MSG_LOCK=new int[0];
private static Log logger = LogFactory.getLog(WatchDog.class);
/**
* Thread manager is used to run nodes as threads.
*/
private IThreadManager threadManager;
private volatile Result watchDogStatus;
private TransformationGraph graph;
private Phase currentPhase;
private BlockingQueue <Message<?>> inMsgQueue;
private MultiValueMap<IGraphElement, Message<?>> outMsgMap;
private volatile Throwable causeException;
private volatile IGraphElement causeGraphElement;
private CloverJMX cloverJMX;
// private volatile boolean runIt;
private boolean provideJMX = true;
private boolean finishJMX = true; //whether the JMX mbean should be unregistered on the graph finish
private final GraphRuntimeContext runtimeContext;
static private MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
private ObjectName jmxObjectName;
private TokenTracker tokenTracker;
/**
*Constructor for the WatchDog object
*
* @param graph Description of the Parameter
* @param phases Description of the Parameter
* @since September 02, 2003
*/
public WatchDog(TransformationGraph graph, GraphRuntimeContext runtimeContext) {
graph.setWatchDog(this);
this.graph = graph;
this.runtimeContext = runtimeContext;
currentPhase = null;
watchDogStatus = Result.N_A;
inMsgQueue = new LinkedBlockingQueue<Message<?>>();
outMsgMap = new MultiValueMap<IGraphElement, Message<?>>(Collections.synchronizedMap(new HashMap<IGraphElement, List<Message<?>>>()));
//is JMX turned on?
provideJMX = runtimeContext.useJMX();
//passes a password from context to the running graph
graph.setPassword(runtimeContext.getPassword());
}
/**
* WatchDog initialization.
*/
public void init() {
//at least simple thread manager will be used
if(threadManager == null) {
threadManager = new SimpleThreadManager();
}
//create token tracker if graph is jobflow type
if (graph.getJobType() == JobType.JOBFLOW) {
tokenTracker = new TokenTracker(graph);
}
//start up JMX
cloverJMX = new CloverJMX(this);
if(provideJMX) {
registerTrackingMBean(cloverJMX);
}
//watchdog is now ready to use
watchDogStatus = Result.READY;
}
private void finishJMX() {
if(provideJMX) {
try {
mbs.unregisterMBean(jmxObjectName);
} catch (Exception e) {
logger.error("JMX error - ObjectName cannot be unregistered.", e);
}
}
}
/** Main processing method for the WatchDog object */
@Override
public Result call() {
CURRENT_PHASE_LOCK.lock();
String originalThreadName = null;
try {
//thread context classloader is preset to a reasonable classloader
//this is just for sure, threads are recycled and no body can guarantee which context classloader remains preset
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
//we have to register current watchdog's thread to context provider - from now all
//ContextProvider.getGraph() invocations return proper transformation graph
ContextProvider.registerGraph(graph);
MDC.put("runId", runtimeContext.getRunId());
Thread t = Thread.currentThread();
originalThreadName = t.getName();
String newThreadName = WATCHDOG_THREAD_NAME_PREFIX + runtimeContext.getRunId();
if (logger.isTraceEnabled())
logger.trace("rename thread " + originalThreadName + " to " + newThreadName);
t.setName(newThreadName);
long startTimestamp = System.currentTimeMillis();
//print graph properties
graph.getGraphProperties().print(logger, "Graph parameters:");
//print out runtime context
logger.debug("Graph runtime context: " + graph.getRuntimeContext().getAllProperties());
//print initial dictionary content
graph.getDictionary().printContent(logger, "Initial dictionary content:");
if (runtimeContext.isVerboseMode()) {
// this can be called only after graph.init()
graph.dumpGraphConfiguration();
}
watchDogStatus = Result.RUNNING;
//creates tracking logger for cloverJMX mbean
TrackingLogger.track(cloverJMX);
cloverJMX.graphStarted();
//pre-execute initialization of graph
try {
graph.preExecute();
} catch (Exception e) {
causeException = e;
if (e instanceof ComponentNotReadyException) {
causeGraphElement = ((ComponentNotReadyException) e).getGraphElement();
}
watchDogStatus = Result.ERROR;
logger.error("Graph pre-execute initialization failed.", e);
}
//run all phases
if (watchDogStatus == Result.RUNNING) {
Phase[] phases = graph.getPhases();
Result phaseResult = Result.N_A;
for (int currentPhaseNum = 0; currentPhaseNum < phases.length; currentPhaseNum++) {
//if the graph runs in synchronized mode we need to wait for synchronization event to process next phase
if (runtimeContext.isSynchronizedRun()) {
logger.info("Waiting for phase " + phases[currentPhaseNum] + " approval...");
watchDogStatus = Result.WAITING;
CURRENT_PHASE_LOCK.unlock();
synchronized (cloverJMX) {
while (cloverJMX.getApprovedPhaseNumber() < phases[currentPhaseNum].getPhaseNum()
&& watchDogStatus == Result.WAITING) { //graph was maybe aborted
try {
cloverJMX.wait();
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for phase synchronization event.");
}
}
}
CURRENT_PHASE_LOCK.lock();
//watchdog was aborted while was waiting for next phase approval
if (watchDogStatus == Result.ABORTED) {
logger.warn("!!! Graph execution aborted !!!");
break;
} else {
watchDogStatus = Result.RUNNING;
}
}
cloverJMX.phaseStarted(phases[currentPhaseNum]);
//execute phase
phaseResult = executePhase(phases[currentPhaseNum]);
if(phaseResult == Result.ABORTED) {
cloverJMX.phaseAborted();
logger.warn("!!! Phase execution aborted !!!");
break;
} else if(phaseResult == Result.ERROR) {
cloverJMX.phaseError(getErrorMessage());
logger.error("!!! Phase finished with error - stopping graph run !!!");
break;
}
cloverJMX.phaseFinished();
}
//post-execution of graph
try {
graph.postExecute();
} catch (Exception e) {
causeException = e;
if (e instanceof ComponentNotReadyException) {
causeGraphElement = ((ComponentNotReadyException) e).getGraphElement();
}
watchDogStatus = Result.ERROR;
logger.error("Graph post-execute method failed.", e);
}
//aborted graph does not follow last phase status
if (watchDogStatus == Result.RUNNING) {
watchDogStatus = phaseResult;
}
}
//commit or rollback
if (watchDogStatus == Result.FINISHED_OK) {
try {
graph.commit();
} catch (Exception e) {
causeException = e;
watchDogStatus = Result.ERROR;
logger.fatal("Graph commit failed:" + e.getMessage(), e);
}
} else {
try {
graph.rollback();
} catch (Exception e) {
causeException = e;
watchDogStatus = Result.ERROR;
logger.fatal("Graph rollback failed:" + e.getMessage(), e);
}
}
//print initial dictionary content
graph.getDictionary().printContent(logger, "Final dictionary content:");
sendFinalJmxNotification();
if(finishJMX) {
finishJMX();
}
logger.info("WatchDog thread finished - total execution time: " + (System.currentTimeMillis() - startTimestamp) / 1000 + " (sec)");
} catch (RuntimeException e) {
causeException = e;
causeGraphElement = null;
watchDogStatus = Result.ERROR;
logger.error("Fatal error watchdog execution", e);
throw e;
} finally {
//we have to unregister current watchdog's thread from context provider
ContextProvider.unregister();
CURRENT_PHASE_LOCK.unlock();
if (originalThreadName != null)
Thread.currentThread().setName(originalThreadName);
MDC.remove("runId");
}
return watchDogStatus;
}
private void sendFinalJmxNotification() {
sendFinalJmxNotification0();
//is there anyone who is really interested in to be informed about the graph is really finished? - at least our clover designer runs graphs with this option
if (runtimeContext.isWaitForJMXClient()) {
//wait for a JMX client (GUI) to download all tracking information
long startWaitingTime = System.currentTimeMillis();
synchronized (cloverJMX) {
while (WAITTIME_FOR_STOP_SIGNAL > (System.currentTimeMillis() - startWaitingTime)
&& !cloverJMX.canCloseServer()) {
try {
cloverJMX.wait(10);
sendFinalJmxNotification0();
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for close signal.");
}
}
if (!cloverJMX.canCloseServer()) {
// give client one last chance to react to final notification and to send close signal before cloverJMX is unregistering
try {
cloverJMX.wait(100);
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for close signal.");
}
if (!cloverJMX.canCloseServer()) {
logger.debug("JMX server close signal timeout; client may have missed final notification");
}
}
}
}
//if the graph was aborted, now the aborting thread is waiting for final notification - this is the way how to send him word about the graph finished right now
synchronized (ABORT_MONITOR) {
abortFinished = true;
ABORT_MONITOR.notifyAll();
}
}
private void sendFinalJmxNotification0() {
switch (watchDogStatus) {
case FINISHED_OK:
cloverJMX.graphFinished();
break;
case ABORTED:
cloverJMX.graphAborted();
break;
case ERROR:
cloverJMX.graphError(getErrorMessage());
break;
default:
break;
}
}
/**
* Register given jmx mbean.
*/
private void registerTrackingMBean(CloverJMX cloverJMX) {
String mbeanId = graph.getId();
// Construct the ObjectName for the MBean we will register
try {
String name = createMBeanName(mbeanId != null ? mbeanId : graph.getName(), this.getGraphRuntimeContext().getRunId());
jmxObjectName = new ObjectName( name );
logger.debug("register MBean with name:"+name);
// Register the MBean
mbs.registerMBean(cloverJMX, jmxObjectName);
} catch (MalformedObjectNameException e) {
logger.error(e);
} catch (InstanceAlreadyExistsException e) {
logger.error(e);
} catch (MBeanRegistrationException e) {
logger.error(e);
} catch (NotCompliantMBeanException e) {
logger.error(e);
}
}
/**
* Creates identifier for shared JMX mbean.
* @param defaultMBeanName
* @return
*/
public static String createMBeanName(String mbeanIdentifier) {
return createMBeanName(mbeanIdentifier, 0);
}
/**
* Creates identifier for shared JMX mbean.
* @param mbeanIdentifier
* @param runId
* @return
*/
public static String createMBeanName(String mbeanIdentifier, long runId) {
return "org.jetel.graph.runtime:type=" + MBEAN_NAME_PREFIX + (mbeanIdentifier != null ? mbeanIdentifier : "") + "_" + runId;
}
/**
* Execute transformation - start-up all Nodes & watch them running
*
* @param phase Description of the Parameter
* @param leafNodes Description of the Parameter
* @return Description of the Return Value
* @since July 29, 2002
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings("UL")
private Result watch(Phase phase) throws InterruptedException {
Message<?> message;
Set<Node> phaseNodes;
// let's create a copy of leaf nodes - we will watch them
phaseNodes = new HashSet<Node>(phase.getNodes().values());
// is there any node running ? - this test is necessary for phases without nodes - empty phase
if (phaseNodes.isEmpty()) {
return watchDogStatus != Result.ABORTED ? Result.FINISHED_OK : Result.ABORTED;
}
// entering the loop awaiting completion of work by all leaf nodes
while (true) {
// wait on error message queue
CURRENT_PHASE_LOCK.unlock();
try {
message = inMsgQueue.poll(runtimeContext.getTrackingInterval(), TimeUnit.MILLISECONDS);
} finally {
CURRENT_PHASE_LOCK.lock();
}
if (message != null) {
switch(message.getType()){
case ERROR:
causeException = ((ErrorMsgBody) message.getBody()).getSourceException();
causeGraphElement = message.getSender();
logger.error("Graph execution finished with error");
logger.error("Node "
+ message.getSender().getId()
+ " finished with status: "
+ ((ErrorMsgBody) message.getBody())
.getErrorMessage() + (causeException != null ? " caused by: " + causeException.getMessage() : ""));
logger.error("Node " + message.getSender().getId() + " error details:", causeException);
return Result.ERROR;
case MESSAGE:
synchronized (_MSG_LOCK) {
if (message.getRecipient() != null) {
outMsgMap.putValue(message.getRecipient(), message);
}
}
break;
case NODE_FINISHED:
phaseNodes.remove(message.getSender());
break;
default:
// do nothing, just wake up
}
}
// is there any node running ?
if (phaseNodes.isEmpty()) {
return watchDogStatus != Result.ABORTED ? Result.FINISHED_OK : Result.ABORTED;
}
// gather graph tracking
//etl graphs are tracked only in regular intervals, jobflows are tracked more precise, whenever something happens
if (message == null || ContextProvider.getJobType() == JobType.JOBFLOW) {
cloverJMX.gatherTrackingDetails();
}
}
}
/**
* Gets the Status of the WatchDog
*
* @return Result of WatchDog run-time
* @since July 30, 2002
* @see org.jetel.graph.Result
*/
public Result getStatus() {
return watchDogStatus;
}
/**
* aborts execution of current phase
*
* @since July 29, 2002
*/
public void abort() {
CURRENT_PHASE_LOCK.lock();
//only running or waiting graph can be aborted
if (watchDogStatus != Result.RUNNING && watchDogStatus != Result.WAITING) {
//if the graph status is not final, so the graph was aborted
if (!watchDogStatus.isStop()) {
watchDogStatus = Result.ABORTED;
}
CURRENT_PHASE_LOCK.unlock();
return;
}
try {
//if the phase is running broadcast all nodes in the phase they should be aborted
if (watchDogStatus == Result.RUNNING) {
watchDogStatus = Result.ABORTED;
// iterate through all the nodes and stop them
for (Node node : currentPhase.getNodes().values()) {
node.abort();
logger.warn("Interrupted node: " + node.getId());
}
}
//if the graph is waiting on a phase synchronization point the watchdog is woken up with current status ABORTED
if (watchDogStatus == Result.WAITING) {
watchDogStatus = Result.ABORTED;
synchronized (cloverJMX) {
cloverJMX.notifyAll();
}
}
} finally {
synchronized (ABORT_MONITOR) {
CURRENT_PHASE_LOCK.unlock();
long startAbort = System.currentTimeMillis();
while (!abortFinished) {
long interval = System.currentTimeMillis() - startAbort;
if (interval > ABORT_TIMEOUT) {
throw new IllegalStateException("Graph aborting error! Timeout "+ABORT_TIMEOUT+"ms exceeded!");
}
try {
//the aborting thread try to wait for end of graph run
ABORT_MONITOR.wait(ABORT_WAIT);
} catch (InterruptedException ignore) { }// catch
}// while
}// synchronized
}// finally
}
/**
* Description of the Method
*
* @param nodesIterator Description of Parameter
* @param leafNodesList Description of Parameter
* @since July 31, 2002
*/
private void startUpNodes(Phase phase) {
synchronized(threadManager) {
while(threadManager.getFreeThreadsCount() < phase.getNodes().size()) { //it is sufficient, not necessary condition - so we have to time to time wake up and check it again
try {
threadManager.wait(); //from time to time thread is woken up to check the condition again
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for free workers for nodes in phase " + phase.getPhaseNum());
}
}
if (phase.getNodes().size() > 0) {
//this barrier can be broken only when all components and wathdog is waiting there
CyclicBarrier preExecuteBarrier = new CyclicBarrier(phase.getNodes().size() + 1);
//this barrier is used for synchronization of all components between pre-execute and execute
//it is necessary to finish all pre-execute's before execution
CyclicBarrier executeBarrier = new CyclicBarrier(phase.getNodes().size());
for (Node node: phase.getNodes().values()) {
node.setPreExecuteBarrier(preExecuteBarrier);
node.setExecuteBarrier(executeBarrier);
threadManager.executeNode(node);
logger.debug(node.getId()+ " ... starting");
}
try {
//now we will wait for all components are really alive - node.getNodeThread() return non-null value
preExecuteBarrier.await();
logger.debug("All components are ready to start.");
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for workers startup in phase " + phase.getPhaseNum());
} catch (BrokenBarrierException e) {
throw new RuntimeException("WatchDog or a worker was interrupted while was waiting for nodes tartup in phase " + phase.getPhaseNum());
}
}
}
}
/**
* Description of the Method
*
* @param phase Description of the Parameter
* @return Description of the Return Value
*/
private Result executePhase(Phase phase) {
currentPhase = phase;
//preExecute() invocation
try {
phase.preExecute();
} catch (ComponentNotReadyException e) {
logger.error("Phase pre-execute initialization failed with reason: " + e.getMessage(), e);
causeException = e;
causeGraphElement = e.getGraphElement();
return Result.ERROR;
}
logger.info("Starting up all nodes in phase [" + phase.getPhaseNum() + "]");
startUpNodes(phase);
logger.info("Successfully started all nodes in phase!");
// watch running nodes in phase
Result phaseStatus = Result.N_A;
try{
phaseStatus = watch(phase);
}catch(InterruptedException ex){
phaseStatus = Result.ABORTED;
} finally {
//now we can notify all waiting phases for free threads
synchronized(threadManager) {
threadManager.releaseNodeThreads(phase.getNodes().size());
threadManager.notifyAll();
}
/////////////////
//is this code really necessary? why?
for (Node node : phase.getNodes().values()) {
synchronized (node) { //this is the guard of Node.nodeThread variable
Thread t = node.getNodeThread();
long runId = this.getGraphRuntimeContext().getRunId();
if (t == null) {
continue;
}
String newThreadName = "exNode_"+runId+"_"+getGraph().getId()+"_"+node.getId();
if (logger.isTraceEnabled())
logger.trace("rename thread "+t.getName()+" to " + newThreadName);
t.setName(newThreadName);
// explicit interruption of threads of failed graph; (some nodes may be still running)
if (!node.getResultCode().isStop()) {
if (logger.isTraceEnabled())
logger.trace("try to abort node "+node);
node.abort();
}
}
}// for
/////////////////
//postExecute() invocation
try {
phase.postExecute();
} catch (ComponentNotReadyException e) {
logger.error("Phase post-execute finalization failed with reason: " + e.getMessage(), e);
causeException = e;
causeGraphElement = e.getGraphElement();
phaseStatus = Result.ERROR;
}
}
phase.setResult(phaseStatus);
return phaseStatus;
}
@Override
public void sendMessage(Message<?> msg) {
inMsgQueue.add(msg);
}
@Override
public Message<?>[] receiveMessage(GraphElement recipient, final long wait) {
Message<?>[] msg = null;
synchronized (_MSG_LOCK) {
msg=(Message[])outMsgMap.get(recipient).toArray(new Message<?>[0]);
if (msg!=null) {
outMsgMap.remove(recipient);
}
}
return msg;
}
@Override
public boolean hasMessage(GraphElement recipient) {
synchronized (_MSG_LOCK ){
return outMsgMap.containsKey(recipient);
}
}
/**
* Returns exception (reported by Node) which caused
* graph to stop processing.<br>
*
* @return the causeException
* @since 7.1.2007
*/
public Throwable getCauseException() {
return causeException;
}
/**
* Returns ID of Node which caused
* graph to stop processing.
*
* @return the causeNodeID
* @since 7.1.2007
*/
public IGraphElement getCauseGraphElement() {
return causeGraphElement;
}
public String getErrorMessage() {
StringBuilder message = new StringBuilder();
IGraphElement graphElement = getCauseGraphElement();
if (graphElement != null) {
message.append(graphElement.getId() + ": ");
}
Throwable throwable = getCauseException();
if (throwable != null && !StringUtils.isEmpty(throwable.getMessage())) {
message.append(throwable.getMessage());
}
return message.length() > 0 ? message.toString() : null;
}
/**
* @return the graph
* @since 26.2.2007
*/
public TransformationGraph getTransformationGraph() {
return graph;
}
public void setUseJMX(boolean useJMX) {
this.provideJMX = useJMX;
}
public GraphRuntimeContext getGraphRuntimeContext() {
return runtimeContext;
}
public CloverJMX getCloverJmx() {
return cloverJMX;
}
public boolean isFinishJMX() {
return finishJMX;
}
public void setFinishJMX(boolean finishJMX) {
this.finishJMX = finishJMX;
}
public IThreadManager getThreadManager() {
return threadManager;
}
public void setThreadManager(IThreadManager threadManager) {
this.threadManager = threadManager;
}
public TransformationGraph getGraph() {
return graph;
}
public IAuthorityProxy getAuthorityProxy() {
return getGraphRuntimeContext().getAuthorityProxy();
}
public TokenTracker getTokenTracker() {
return tokenTracker;
}
}
| cloveretl.engine/src/org/jetel/graph/runtime/WatchDog.java | /*
* jETeL/CloverETL - Java based ETL application framework.
* Copyright (c) Javlin, a.s. ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jetel.graph.runtime;
import java.lang.management.ManagementFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.MDC;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.graph.ContextProvider;
import org.jetel.graph.GraphElement;
import org.jetel.graph.IGraphElement;
import org.jetel.graph.JobType;
import org.jetel.graph.Node;
import org.jetel.graph.Phase;
import org.jetel.graph.Result;
import org.jetel.graph.TransformationGraph;
import org.jetel.graph.runtime.jmx.CloverJMX;
import org.jetel.graph.runtime.tracker.TokenTracker;
import org.jetel.util.primitive.MultiValueMap;
import org.jetel.util.string.StringUtils;
/**
* Description of the Class
*
* @author dpavlis
* @since July 29, 2002
* @revision $Revision$
*/
public class WatchDog implements Callable<Result>, CloverPost {
/**
* This lock object guards currentPhase variable and watchDogStatus.
*/
private final Lock CURRENT_PHASE_LOCK = new ReentrantLock();
private final Object ABORT_MONITOR = new Object();
private boolean abortFinished = false;
public final static String MBEAN_NAME_PREFIX = "CLOVERJMX_";
public final static long WAITTIME_FOR_STOP_SIGNAL = 5000; //miliseconds
private static final long ABORT_TIMEOUT = 5000L;
private static final long ABORT_WAIT = 2400L;
private int[] _MSG_LOCK=new int[0];
private static Log logger = LogFactory.getLog(WatchDog.class);
/**
* Thread manager is used to run nodes as threads.
*/
private IThreadManager threadManager;
private volatile Result watchDogStatus;
private TransformationGraph graph;
private Phase currentPhase;
private BlockingQueue <Message<?>> inMsgQueue;
private MultiValueMap<IGraphElement, Message<?>> outMsgMap;
private volatile Throwable causeException;
private volatile IGraphElement causeGraphElement;
private CloverJMX cloverJMX;
// private volatile boolean runIt;
private boolean provideJMX = true;
private boolean finishJMX = true; //whether the JMX mbean should be unregistered on the graph finish
private final GraphRuntimeContext runtimeContext;
static private MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
private ObjectName jmxObjectName;
private TokenTracker tokenTracker;
/**
*Constructor for the WatchDog object
*
* @param graph Description of the Parameter
* @param phases Description of the Parameter
* @since September 02, 2003
*/
public WatchDog(TransformationGraph graph, GraphRuntimeContext runtimeContext) {
graph.setWatchDog(this);
this.graph = graph;
this.runtimeContext = runtimeContext;
currentPhase = null;
watchDogStatus = Result.N_A;
inMsgQueue = new LinkedBlockingQueue<Message<?>>();
outMsgMap = new MultiValueMap<IGraphElement, Message<?>>(Collections.synchronizedMap(new HashMap<IGraphElement, List<Message<?>>>()));
//is JMX turned on?
provideJMX = runtimeContext.useJMX();
//passes a password from context to the running graph
graph.setPassword(runtimeContext.getPassword());
}
/**
* WatchDog initialization.
*/
public void init() {
//at least simple thread manager will be used
if(threadManager == null) {
threadManager = new SimpleThreadManager();
}
//create token tracker if graph is jobflow type
if (graph.getJobType() == JobType.JOBFLOW) {
tokenTracker = new TokenTracker(graph);
}
//start up JMX
cloverJMX = new CloverJMX(this);
if(provideJMX) {
registerTrackingMBean(cloverJMX);
}
//watchdog is now ready to use
watchDogStatus = Result.READY;
}
private void finishJMX() {
if(provideJMX) {
try {
mbs.unregisterMBean(jmxObjectName);
} catch (Exception e) {
logger.error("JMX error - ObjectName cannot be unregistered.", e);
}
}
}
/** Main processing method for the WatchDog object */
@Override
public Result call() {
CURRENT_PHASE_LOCK.lock();
try {
//thread context classloader is preset to a reasonable classloader
//this is just for sure, threads are recycled and no body can guarantee which context classloader remains preset
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
//we have to register current watchdog's thread to context provider - from now all
//ContextProvider.getGraph() invocations return proper transformation graph
ContextProvider.registerGraph(graph);
MDC.put("runId", runtimeContext.getRunId());
long startTimestamp = System.currentTimeMillis();
//print graph properties
graph.getGraphProperties().print(logger, "Graph parameters:");
//print out runtime context
logger.debug("Graph runtime context: " + graph.getRuntimeContext().getAllProperties());
//print initial dictionary content
graph.getDictionary().printContent(logger, "Initial dictionary content:");
if (runtimeContext.isVerboseMode()) {
// this can be called only after graph.init()
graph.dumpGraphConfiguration();
}
watchDogStatus = Result.RUNNING;
//creates tracking logger for cloverJMX mbean
TrackingLogger.track(cloverJMX);
cloverJMX.graphStarted();
//pre-execute initialization of graph
try {
graph.preExecute();
} catch (Exception e) {
causeException = e;
if (e instanceof ComponentNotReadyException) {
causeGraphElement = ((ComponentNotReadyException) e).getGraphElement();
}
watchDogStatus = Result.ERROR;
logger.error("Graph pre-execute initialization failed.", e);
}
//run all phases
if (watchDogStatus == Result.RUNNING) {
Phase[] phases = graph.getPhases();
Result phaseResult = Result.N_A;
for (int currentPhaseNum = 0; currentPhaseNum < phases.length; currentPhaseNum++) {
//if the graph runs in synchronized mode we need to wait for synchronization event to process next phase
if (runtimeContext.isSynchronizedRun()) {
logger.info("Waiting for phase " + phases[currentPhaseNum] + " approval...");
watchDogStatus = Result.WAITING;
CURRENT_PHASE_LOCK.unlock();
synchronized (cloverJMX) {
while (cloverJMX.getApprovedPhaseNumber() < phases[currentPhaseNum].getPhaseNum()
&& watchDogStatus == Result.WAITING) { //graph was maybe aborted
try {
cloverJMX.wait();
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for phase synchronization event.");
}
}
}
CURRENT_PHASE_LOCK.lock();
//watchdog was aborted while was waiting for next phase approval
if (watchDogStatus == Result.ABORTED) {
logger.warn("!!! Graph execution aborted !!!");
break;
} else {
watchDogStatus = Result.RUNNING;
}
}
cloverJMX.phaseStarted(phases[currentPhaseNum]);
//execute phase
phaseResult = executePhase(phases[currentPhaseNum]);
if(phaseResult == Result.ABORTED) {
cloverJMX.phaseAborted();
logger.warn("!!! Phase execution aborted !!!");
break;
} else if(phaseResult == Result.ERROR) {
cloverJMX.phaseError(getErrorMessage());
logger.error("!!! Phase finished with error - stopping graph run !!!");
break;
}
cloverJMX.phaseFinished();
}
//post-execution of graph
try {
graph.postExecute();
} catch (Exception e) {
causeException = e;
if (e instanceof ComponentNotReadyException) {
causeGraphElement = ((ComponentNotReadyException) e).getGraphElement();
}
watchDogStatus = Result.ERROR;
logger.error("Graph post-execute method failed.", e);
}
//aborted graph does not follow last phase status
if (watchDogStatus == Result.RUNNING) {
watchDogStatus = phaseResult;
}
}
//commit or rollback
if (watchDogStatus == Result.FINISHED_OK) {
try {
graph.commit();
} catch (Exception e) {
causeException = e;
watchDogStatus = Result.ERROR;
logger.fatal("Graph commit failed:" + e.getMessage(), e);
}
} else {
try {
graph.rollback();
} catch (Exception e) {
causeException = e;
watchDogStatus = Result.ERROR;
logger.fatal("Graph rollback failed:" + e.getMessage(), e);
}
}
//print initial dictionary content
graph.getDictionary().printContent(logger, "Final dictionary content:");
sendFinalJmxNotification();
if(finishJMX) {
finishJMX();
}
logger.info("WatchDog thread finished - total execution time: " + (System.currentTimeMillis() - startTimestamp) / 1000 + " (sec)");
} catch (RuntimeException e) {
causeException = e;
causeGraphElement = null;
watchDogStatus = Result.ERROR;
logger.error("Fatal error watchdog execution", e);
throw e;
} finally {
//we have to unregister current watchdog's thread from context provider
ContextProvider.unregister();
CURRENT_PHASE_LOCK.unlock();
MDC.remove("runId");
}
return watchDogStatus;
}
private void sendFinalJmxNotification() {
sendFinalJmxNotification0();
//is there anyone who is really interested in to be informed about the graph is really finished? - at least our clover designer runs graphs with this option
if (runtimeContext.isWaitForJMXClient()) {
//wait for a JMX client (GUI) to download all tracking information
long startWaitingTime = System.currentTimeMillis();
synchronized (cloverJMX) {
while (WAITTIME_FOR_STOP_SIGNAL > (System.currentTimeMillis() - startWaitingTime)
&& !cloverJMX.canCloseServer()) {
try {
cloverJMX.wait(10);
sendFinalJmxNotification0();
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for close signal.");
}
}
if (!cloverJMX.canCloseServer()) {
// give client one last chance to react to final notification and to send close signal before cloverJMX is unregistering
try {
cloverJMX.wait(100);
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for close signal.");
}
if (!cloverJMX.canCloseServer()) {
logger.debug("JMX server close signal timeout; client may have missed final notification");
}
}
}
}
//if the graph was aborted, now the aborting thread is waiting for final notification - this is the way how to send him word about the graph finished right now
synchronized (ABORT_MONITOR) {
abortFinished = true;
ABORT_MONITOR.notifyAll();
}
}
private void sendFinalJmxNotification0() {
switch (watchDogStatus) {
case FINISHED_OK:
cloverJMX.graphFinished();
break;
case ABORTED:
cloverJMX.graphAborted();
break;
case ERROR:
cloverJMX.graphError(getErrorMessage());
break;
default:
break;
}
}
/**
* Register given jmx mbean.
*/
private void registerTrackingMBean(CloverJMX cloverJMX) {
String mbeanId = graph.getId();
// Construct the ObjectName for the MBean we will register
try {
String name = createMBeanName(mbeanId != null ? mbeanId : graph.getName(), this.getGraphRuntimeContext().getRunId());
jmxObjectName = new ObjectName( name );
logger.debug("register MBean with name:"+name);
// Register the MBean
mbs.registerMBean(cloverJMX, jmxObjectName);
} catch (MalformedObjectNameException e) {
logger.error(e);
} catch (InstanceAlreadyExistsException e) {
logger.error(e);
} catch (MBeanRegistrationException e) {
logger.error(e);
} catch (NotCompliantMBeanException e) {
logger.error(e);
}
}
/**
* Creates identifier for shared JMX mbean.
* @param defaultMBeanName
* @return
*/
public static String createMBeanName(String mbeanIdentifier) {
return createMBeanName(mbeanIdentifier, 0);
}
/**
* Creates identifier for shared JMX mbean.
* @param mbeanIdentifier
* @param runId
* @return
*/
public static String createMBeanName(String mbeanIdentifier, long runId) {
return "org.jetel.graph.runtime:type=" + MBEAN_NAME_PREFIX + (mbeanIdentifier != null ? mbeanIdentifier : "") + "_" + runId;
}
/**
* Execute transformation - start-up all Nodes & watch them running
*
* @param phase Description of the Parameter
* @param leafNodes Description of the Parameter
* @return Description of the Return Value
* @since July 29, 2002
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings("UL")
private Result watch(Phase phase) throws InterruptedException {
Message<?> message;
Set<Node> phaseNodes;
// let's create a copy of leaf nodes - we will watch them
phaseNodes = new HashSet<Node>(phase.getNodes().values());
// is there any node running ? - this test is necessary for phases without nodes - empty phase
if (phaseNodes.isEmpty()) {
return watchDogStatus != Result.ABORTED ? Result.FINISHED_OK : Result.ABORTED;
}
// entering the loop awaiting completion of work by all leaf nodes
while (true) {
// wait on error message queue
CURRENT_PHASE_LOCK.unlock();
try {
message = inMsgQueue.poll(runtimeContext.getTrackingInterval(), TimeUnit.MILLISECONDS);
} finally {
CURRENT_PHASE_LOCK.lock();
}
if (message != null) {
switch(message.getType()){
case ERROR:
causeException = ((ErrorMsgBody) message.getBody()).getSourceException();
causeGraphElement = message.getSender();
logger.error("Graph execution finished with error");
logger.error("Node "
+ message.getSender().getId()
+ " finished with status: "
+ ((ErrorMsgBody) message.getBody())
.getErrorMessage() + (causeException != null ? " caused by: " + causeException.getMessage() : ""));
logger.error("Node " + message.getSender().getId() + " error details:", causeException);
return Result.ERROR;
case MESSAGE:
synchronized (_MSG_LOCK) {
if (message.getRecipient() != null) {
outMsgMap.putValue(message.getRecipient(), message);
}
}
break;
case NODE_FINISHED:
phaseNodes.remove(message.getSender());
break;
default:
// do nothing, just wake up
}
}
// is there any node running ?
if (phaseNodes.isEmpty()) {
return watchDogStatus != Result.ABORTED ? Result.FINISHED_OK : Result.ABORTED;
}
// gather graph tracking
//etl graphs are tracked only in regular intervals, jobflows are tracked more precise, whenever something happens
if (message == null || ContextProvider.getJobType() == JobType.JOBFLOW) {
cloverJMX.gatherTrackingDetails();
}
}
}
/**
* Gets the Status of the WatchDog
*
* @return Result of WatchDog run-time
* @since July 30, 2002
* @see org.jetel.graph.Result
*/
public Result getStatus() {
return watchDogStatus;
}
/**
* aborts execution of current phase
*
* @since July 29, 2002
*/
public void abort() {
CURRENT_PHASE_LOCK.lock();
//only running or waiting graph can be aborted
if (watchDogStatus != Result.RUNNING && watchDogStatus != Result.WAITING) {
//if the graph status is not final, so the graph was aborted
if (!watchDogStatus.isStop()) {
watchDogStatus = Result.ABORTED;
}
CURRENT_PHASE_LOCK.unlock();
return;
}
try {
//if the phase is running broadcast all nodes in the phase they should be aborted
if (watchDogStatus == Result.RUNNING) {
watchDogStatus = Result.ABORTED;
// iterate through all the nodes and stop them
for (Node node : currentPhase.getNodes().values()) {
node.abort();
logger.warn("Interrupted node: " + node.getId());
}
}
//if the graph is waiting on a phase synchronization point the watchdog is woken up with current status ABORTED
if (watchDogStatus == Result.WAITING) {
watchDogStatus = Result.ABORTED;
synchronized (cloverJMX) {
cloverJMX.notifyAll();
}
}
} finally {
synchronized (ABORT_MONITOR) {
CURRENT_PHASE_LOCK.unlock();
long startAbort = System.currentTimeMillis();
while (!abortFinished) {
long interval = System.currentTimeMillis() - startAbort;
if (interval > ABORT_TIMEOUT) {
throw new IllegalStateException("Graph aborting error! Timeout "+ABORT_TIMEOUT+"ms exceeded!");
}
try {
//the aborting thread try to wait for end of graph run
ABORT_MONITOR.wait(ABORT_WAIT);
} catch (InterruptedException ignore) { }// catch
}// while
}// synchronized
}// finally
}
/**
* Description of the Method
*
* @param nodesIterator Description of Parameter
* @param leafNodesList Description of Parameter
* @since July 31, 2002
*/
private void startUpNodes(Phase phase) {
synchronized(threadManager) {
while(threadManager.getFreeThreadsCount() < phase.getNodes().size()) { //it is sufficient, not necessary condition - so we have to time to time wake up and check it again
try {
threadManager.wait(); //from time to time thread is woken up to check the condition again
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for free workers for nodes in phase " + phase.getPhaseNum());
}
}
if (phase.getNodes().size() > 0) {
//this barrier can be broken only when all components and wathdog is waiting there
CyclicBarrier preExecuteBarrier = new CyclicBarrier(phase.getNodes().size() + 1);
//this barrier is used for synchronization of all components between pre-execute and execute
//it is necessary to finish all pre-execute's before execution
CyclicBarrier executeBarrier = new CyclicBarrier(phase.getNodes().size());
for (Node node: phase.getNodes().values()) {
node.setPreExecuteBarrier(preExecuteBarrier);
node.setExecuteBarrier(executeBarrier);
threadManager.executeNode(node);
logger.debug(node.getId()+ " ... starting");
}
try {
//now we will wait for all components are really alive - node.getNodeThread() return non-null value
preExecuteBarrier.await();
logger.debug("All components are ready to start.");
} catch (InterruptedException e) {
throw new RuntimeException("WatchDog was interrupted while was waiting for workers startup in phase " + phase.getPhaseNum());
} catch (BrokenBarrierException e) {
throw new RuntimeException("WatchDog or a worker was interrupted while was waiting for nodes tartup in phase " + phase.getPhaseNum());
}
}
}
}
/**
* Description of the Method
*
* @param phase Description of the Parameter
* @return Description of the Return Value
*/
private Result executePhase(Phase phase) {
currentPhase = phase;
//preExecute() invocation
try {
phase.preExecute();
} catch (ComponentNotReadyException e) {
logger.error("Phase pre-execute initialization failed with reason: " + e.getMessage(), e);
causeException = e;
causeGraphElement = e.getGraphElement();
return Result.ERROR;
}
logger.info("Starting up all nodes in phase [" + phase.getPhaseNum() + "]");
startUpNodes(phase);
logger.info("Successfully started all nodes in phase!");
// watch running nodes in phase
Result phaseStatus = Result.N_A;
try{
phaseStatus = watch(phase);
}catch(InterruptedException ex){
phaseStatus = Result.ABORTED;
} finally {
//now we can notify all waiting phases for free threads
synchronized(threadManager) {
threadManager.releaseNodeThreads(phase.getNodes().size());
/////////////////
//is this code really necessary? why?
for (Node node : phase.getNodes().values()) {
synchronized (node) { //this is the guard of Node.nodeThread variable
Thread t = node.getNodeThread();
long runId = this.getGraphRuntimeContext().getRunId();
if (t == null) {
continue;
}
String newThreadName = "exNode_"+runId+"_"+getGraph().getId()+"_"+node.getId();
if (logger.isTraceEnabled())
logger.trace("rename thread "+t.getName()+" to " + newThreadName);
t.setName(newThreadName);
// explicit interruption of threads of failed graph; (some nodes may be still running)
if (!node.getResultCode().isStop()) {
if (logger.isTraceEnabled())
logger.trace("try to abort node "+node);
node.abort();
}
}
}// for
/////////////////
threadManager.notifyAll();
}
//postExecute() invocation
try {
phase.postExecute();
} catch (ComponentNotReadyException e) {
logger.error("Phase post-execute finalization failed with reason: " + e.getMessage(), e);
causeException = e;
causeGraphElement = e.getGraphElement();
phaseStatus = Result.ERROR;
}
}
phase.setResult(phaseStatus);
return phaseStatus;
}
@Override
public void sendMessage(Message<?> msg) {
inMsgQueue.add(msg);
}
@Override
public Message<?>[] receiveMessage(GraphElement recipient, final long wait) {
Message<?>[] msg = null;
synchronized (_MSG_LOCK) {
msg=(Message[])outMsgMap.get(recipient).toArray(new Message<?>[0]);
if (msg!=null) {
outMsgMap.remove(recipient);
}
}
return msg;
}
@Override
public boolean hasMessage(GraphElement recipient) {
synchronized (_MSG_LOCK ){
return outMsgMap.containsKey(recipient);
}
}
/**
* Returns exception (reported by Node) which caused
* graph to stop processing.<br>
*
* @return the causeException
* @since 7.1.2007
*/
public Throwable getCauseException() {
return causeException;
}
/**
* Returns ID of Node which caused
* graph to stop processing.
*
* @return the causeNodeID
* @since 7.1.2007
*/
public IGraphElement getCauseGraphElement() {
return causeGraphElement;
}
public String getErrorMessage() {
StringBuilder message = new StringBuilder();
IGraphElement graphElement = getCauseGraphElement();
if (graphElement != null) {
message.append(graphElement.getId() + ": ");
}
Throwable throwable = getCauseException();
if (throwable != null && !StringUtils.isEmpty(throwable.getMessage())) {
message.append(throwable.getMessage());
}
return message.length() > 0 ? message.toString() : null;
}
/**
* @return the graph
* @since 26.2.2007
*/
public TransformationGraph getTransformationGraph() {
return graph;
}
public void setUseJMX(boolean useJMX) {
this.provideJMX = useJMX;
}
public GraphRuntimeContext getGraphRuntimeContext() {
return runtimeContext;
}
public CloverJMX getCloverJmx() {
return cloverJMX;
}
public boolean isFinishJMX() {
return finishJMX;
}
public void setFinishJMX(boolean finishJMX) {
this.finishJMX = finishJMX;
}
public IThreadManager getThreadManager() {
return threadManager;
}
public void setThreadManager(IThreadManager threadManager) {
this.threadManager = threadManager;
}
public TransformationGraph getGraph() {
return graph;
}
public IAuthorityProxy getAuthorityProxy() {
return getGraphRuntimeContext().getAuthorityProxy();
}
public TokenTracker getTokenTracker() {
return tokenTracker;
}
}
| FIX: fox of DeadLock caused by waiting for ServerThreadManager monitor
git-svn-id: 8d19231102a908be67aa2fa433bbbb760ee64d21@13349 a09ad3ba-1a0f-0410-b1b9-c67202f10d70
| cloveretl.engine/src/org/jetel/graph/runtime/WatchDog.java | FIX: fox of DeadLock caused by waiting for ServerThreadManager monitor |
|
Java | lgpl-2.1 | 4a0260c52205b1681cf07bccc5d2ed15c4251868 | 0 | exedio/copernica,exedio/copernica,exedio/copernica | /*
* Copyright (C) 2004-2006 exedio GmbH (www.exedio.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.exedio.dsmf;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.StringTokenizer;
import com.exedio.dsmf.Node.ResultSetHandler;
public final class MysqlDriver extends Driver
{
final String primaryKeyColumnName;
private final boolean toLowerCase;
public MysqlDriver(final String primaryKeyColumnName, final boolean toLowerCase)
{
super(null, null);
this.primaryKeyColumnName = primaryKeyColumnName;
this.toLowerCase = toLowerCase;
//System.out.println("toLowerCase:"+toLowerCase);
}
private static final char PROTECTOR = '`';
/**
* Use backticks to protect name for mysql.
*/
@Override
public String protectName(final String name)
{
if(name.indexOf(PROTECTOR)>=0)
throw new RuntimeException("database name contains forbidden characters: "+name);
return PROTECTOR + name + PROTECTOR;
}
@Override
public boolean supportsCheckConstraints()
{
return false;
}
@Override
public String canonizeTableName(final String tableName)
{
return toLowerCase ? tableName.toLowerCase() : tableName;
}
@Override
String getColumnType(final int dataType, final ResultSet resultSet) throws SQLException
{
final int columnSize = resultSet.getInt("COLUMN_SIZE");
switch(dataType)
{
case Types.INTEGER:
return "integer";
case Types.BIGINT:
return "bigint";
case Types.DOUBLE:
return "double";
case Types.TIMESTAMP:
return "timestamp";
case Types.DATE:
return "DATE";
case Types.VARCHAR:
return "varchar("+columnSize+") character set utf8 binary";
case Types.LONGVARCHAR:
switch(columnSize)
{
case 65535: return "text character set utf8 binary";
case 16277215: return "mediumtext character set utf8 binary";
case 2147483647: return "longtext character set utf8 binary";
default: return "LONGVARCHAR("+columnSize+")";
}
case Types.BINARY:
switch(columnSize)
{
case 255: return "TINYBLOB";
default: return "BINARY("+columnSize+")";
}
case Types.LONGVARBINARY:
switch(columnSize)
{
case 65535: return "BLOB";
case 16277215: return "MEDIUMBLOB";
case 2147483647: return "LONGBLOB";
default: return "LONGVARBINARY("+columnSize+")";
}
default:
return null;
}
}
private final String unprotectName(final String protectedName)
{
final int length = protectedName.length();
if(length<3)
throw new RuntimeException(protectedName);
if(protectedName.charAt(0)!=MysqlDriver.PROTECTOR)
throw new RuntimeException(protectedName);
if(protectedName.charAt(length-1)!=MysqlDriver.PROTECTOR)
throw new RuntimeException(protectedName);
return protectedName.substring(1, protectedName.length()-1);
}
@Override
void verify(final Schema schema)
{
super.verify(schema);
{
for(final Table table : schema.getTables())
{
if(!table.exists())
continue;
{
final StringBuffer bf = new StringBuffer();
bf.append("show columns from ").
append(protectName(table.name));
schema.querySQL(bf.toString(), new Node.ResultSetHandler()
{
public void run(final ResultSet resultSet) throws SQLException
{
//printMeta(resultSet);
while(resultSet.next())
{
//printRow(resultSet);
final String key = resultSet.getString("Key");
if("PRI".equals(key))
{
final String field = resultSet.getString("Field");
if(primaryKeyColumnName.equals(field) && table.required())
{
for(final Constraint c : table.getConstraints())
{
if(c instanceof PrimaryKeyConstraint)
{
table.notifyExistentPrimaryKeyConstraint(c.name);
break;
}
}
}
else
table.notifyExistentPrimaryKeyConstraint(field+"_Pk");
}
}
}
});
}
{
final StringBuffer bf = new StringBuffer();
bf.append("show create table ").
append(protectName(table.name));
schema.querySQL(bf.toString(), new ResultSetHandler()
{
public void run(final ResultSet resultSet) throws SQLException
{
while(resultSet.next())
{
final String tableName = resultSet.getString("Table");
final String createTable = resultSet.getString("Create Table");
final Table table = schema.notifyExistentTable(tableName);
//System.out.println("----------"+tableName+"----"+createTable);
final StringTokenizer t = new StringTokenizer(createTable);
for(String s = t.nextToken(); t.hasMoreTokens(); s = t.nextToken())
{
//System.out.println("----------"+tableName+"---------------"+s);
if("CONSTRAINT".equals(s))
{
if(!t.hasMoreTokens())
continue;
final String protectedName = t.nextToken();
//System.out.println("----------"+tableName+"--------------------protectedName:"+protectedName);
final String name = unprotectName(protectedName);
//System.out.println("----------"+tableName+"--------------------name:"+name);
if(!t.hasMoreTokens() || !"FOREIGN".equals(t.nextToken()) ||
!t.hasMoreTokens() || !"KEY".equals(t.nextToken()) ||
!t.hasMoreTokens())
continue;
//final String source =
t.nextToken();
//System.out.println("----------"+tableName+"--------------------source:"+source);
if(!t.hasMoreTokens() || !"REFERENCES".equals(t.nextToken()) ||
!t.hasMoreTokens())
continue;
//final String targetTable =
t.nextToken();
//System.out.println("----------"+tableName+"--------------------targetTable:"+targetTable);
if(!t.hasMoreTokens())
continue;
//final String targetAttribute =
t.nextToken();
//System.out.println("----------"+tableName+"--------------------targetAttribute:"+targetAttribute);
table.notifyExistentForeignKeyConstraint(name);
}
//UNIQUE KEY `AttriEmptyItem_parKey_Unq` (`parent`,`key`)
if("UNIQUE".equals(s))
{
if(!t.hasMoreTokens() || !"KEY".equals(t.nextToken()) ||
!t.hasMoreTokens())
continue;
final String protectedName = t.nextToken();
//System.out.println("----------"+tableName+"--------------------protectedName:"+protectedName);
final String name = unprotectName(protectedName);
//System.out.println("----------"+tableName+"--------------------name:"+name);
if(!t.hasMoreTokens())
continue;
final String clause = t.nextToken();
//System.out.println("----------"+tableName+"--------------------clause:"+clause);
final int clauseLengthM1 = clause.length()-1;
table.notifyExistentUniqueConstraint(name, clause.charAt(clauseLengthM1)==',' ? clause.substring(0, clauseLengthM1) : clause);
}
}
}
}
});
}
}
}
}
@Override
void appendTableCreateStatement(final StringBuffer bf)
{
bf.append(" engine=innodb");
}
@Override
boolean needsTargetColumnName()
{
return true;
}
@Override
String getRenameColumnStatement(final String tableName, final String oldColumnName, final String newColumnName, final String columnType)
{
final StringBuffer bf = new StringBuffer();
bf.append("alter table ").
append(tableName).
append(" change ").
append(oldColumnName).
append(' ').
append(newColumnName).
append(' ').
append(columnType);
return bf.toString();
}
// TODO is same as hsqldb
@Override
String getCreateColumnStatement(final String tableName, final String columnName, final String columnType)
{
final StringBuffer bf = new StringBuffer();
bf.append("alter table ").
append(tableName).
append(" add column ").
append(columnName).
append(' ').
append(columnType);
return bf.toString();
}
@Override
String getModifyColumnStatement(final String tableName, final String columnName, final String newColumnType)
{
//ALTER TABLE Item MODIFY ownerType varchar(30);
final StringBuffer bf = new StringBuffer();
bf.append("alter table ").
append(tableName).
append(" modify ").
append(columnName).
append(' ').
append(newColumnType);
//System.out.println("------------------"+bf.toString());
return bf.toString();
}
@Override
String getDropForeignKeyConstraintStatement(final String tableName, final String constraintName)
{
final StringBuffer bf = new StringBuffer();
bf.append("alter table ").
append(tableName).
append(" drop foreign key ").
append(constraintName);
return bf.toString();
}
@Override
boolean canDropUniqueConstraints()
{
return false;
}
@Override
boolean canDropPrimaryKeyConstraints()
{
return false;
}
}
| dsmf/src/com/exedio/dsmf/MysqlDriver.java | /*
* Copyright (C) 2004-2006 exedio GmbH (www.exedio.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.exedio.dsmf;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.StringTokenizer;
import com.exedio.dsmf.Node.ResultSetHandler;
public final class MysqlDriver extends Driver
{
final String primaryKeyColumnName;
private final boolean toLowerCase;
public MysqlDriver(final String primaryKeyColumnName, final boolean toLowerCase)
{
super(null, null);
this.primaryKeyColumnName = primaryKeyColumnName;
this.toLowerCase = toLowerCase;
//System.out.println("toLowerCase:"+toLowerCase);
}
private static final char PROTECTOR = '`';
/**
* Use backticks to protect name for mysql.
*/
@Override
public String protectName(final String name)
{
if(name.indexOf(PROTECTOR)>=0)
throw new RuntimeException("database name contains forbidden characters: "+name);
return PROTECTOR + name + PROTECTOR;
}
@Override
public boolean supportsCheckConstraints()
{
return false;
}
@Override
public String canonizeTableName(final String tableName)
{
return toLowerCase ? tableName.toLowerCase() : tableName;
}
@Override
String getColumnType(final int dataType, final ResultSet resultSet) throws SQLException
{
final int columnSize = resultSet.getInt("COLUMN_SIZE");
switch(dataType)
{
case Types.INTEGER:
return "integer";
case Types.BIGINT:
return "bigint";
case Types.DOUBLE:
return "double";
case Types.TIMESTAMP:
return "timestamp";
case Types.DATE:
return "DATE";
case Types.VARCHAR:
return "varchar("+columnSize+") character set utf8 binary";
case Types.LONGVARCHAR:
switch(columnSize)
{
case 65535: return "text character set utf8 binary";
case 16277215: return "mediumtext character set utf8 binary";
case 2147483647: return "longtext character set utf8 binary";
default: return "LONGVARCHAR("+columnSize+")";
}
case Types.BINARY:
switch(columnSize)
{
case 255: return "TINYBLOB";
default: return "BINARY("+columnSize+")";
}
case Types.LONGVARBINARY:
switch(columnSize)
{
case 65535: return "BLOB";
case 16277215: return "MEDIUMBLOB";
case 2147483647: return "LONGBLOB";
default: return "LONGVARBINARY("+columnSize+")";
}
default:
return null;
}
}
private final String unprotectName(final String protectedName)
{
final int length = protectedName.length();
if(length<3)
throw new RuntimeException(protectedName);
if(protectedName.charAt(0)!=MysqlDriver.PROTECTOR)
throw new RuntimeException(protectedName);
if(protectedName.charAt(length-1)!=MysqlDriver.PROTECTOR)
throw new RuntimeException(protectedName);
return protectedName.substring(1, protectedName.length()-1);
}
@Override
void verify(final Schema schema)
{
super.verify(schema);
{
for(final Table table : schema.getTables())
{
if(!table.exists())
continue;
{
final StringBuffer bf = new StringBuffer();
bf.append("show columns from ").
append(protectName(table.name));
schema.querySQL(bf.toString(), new Node.ResultSetHandler()
{
public void run(final ResultSet resultSet) throws SQLException
{
//printMeta(resultSet);
while(resultSet.next())
{
//printRow(resultSet);
final String key = resultSet.getString("Key");
if("PRI".equals(key))
{
final String field = resultSet.getString("Field");
if(primaryKeyColumnName.equals(field) && table.required())
{
for(final Constraint c : table.getConstraints())
{
if(c instanceof PrimaryKeyConstraint)
{
table.notifyExistentPrimaryKeyConstraint(c.name);
break;
}
}
}
else
table.notifyExistentPrimaryKeyConstraint(field+"_Pk");
}
}
}
});
}
{
final StringBuffer bf = new StringBuffer();
bf.append("show create table ").
append(protectName(table.name));
schema.querySQL(bf.toString(), new ResultSetHandler()
{
public void run(final ResultSet resultSet) throws SQLException
{
while(resultSet.next())
{
final String tableName = resultSet.getString("Table");
final String createTable = resultSet.getString("Create Table");
final Table table = schema.notifyExistentTable(tableName);
//System.out.println("----------"+tableName+"----"+createTable);
final StringTokenizer t = new StringTokenizer(createTable);
for(String s = t.nextToken(); t.hasMoreTokens(); s = t.nextToken())
{
//System.out.println("----------"+tableName+"---------------"+s);
if("CONSTRAINT".equals(s))
{
if(!t.hasMoreTokens())
continue;
final String protectedName = t.nextToken();
//System.out.println("----------"+tableName+"--------------------protectedName:"+protectedName);
final String name = unprotectName(protectedName);
//System.out.println("----------"+tableName+"--------------------name:"+name);
if(!t.hasMoreTokens() || !"FOREIGN".equals(t.nextToken()) ||
!t.hasMoreTokens() || !"KEY".equals(t.nextToken()) ||
!t.hasMoreTokens())
continue;
//final String source =
t.nextToken();
//System.out.println("----------"+tableName+"--------------------source:"+source);
if(!t.hasMoreTokens() || !"REFERENCES".equals(t.nextToken()) ||
!t.hasMoreTokens())
continue;
//final String targetTable =
t.nextToken();
//System.out.println("----------"+tableName+"--------------------targetTable:"+targetTable);
if(!t.hasMoreTokens())
continue;
//final String targetAttribute =
t.nextToken();
//System.out.println("----------"+tableName+"--------------------targetAttribute:"+targetAttribute);
table.notifyExistentForeignKeyConstraint(name);
}
//UNIQUE KEY `AttriEmptyItem_parKey_Unq` (`parent`,`key`)
if("UNIQUE".equals(s))
{
if(!t.hasMoreTokens() || !"KEY".equals(t.nextToken()) ||
!t.hasMoreTokens())
continue;
final String protectedName = t.nextToken();
//System.out.println("----------"+tableName+"--------------------protectedName:"+protectedName);
final String name = unprotectName(protectedName);
//System.out.println("----------"+tableName+"--------------------name:"+name);
if(!t.hasMoreTokens())
continue;
final String clause = t.nextToken();
//System.out.println("----------"+tableName+"--------------------clause:"+clause);
final int clauseLengthM1 = clause.length()-1;
table.notifyExistentUniqueConstraint(name, clause.charAt(clauseLengthM1)==',' ? clause.substring(0, clauseLengthM1) : clause);
}
}
}
}
});
}
}
}
}
@Override
void appendTableCreateStatement(final StringBuffer bf)
{
bf.append(" engine=innodb");
}
@Override
boolean needsTargetColumnName()
{
return true;
}
@Override
String getRenameColumnStatement(final String tableName, final String oldColumnName, final String newColumnName, final String columnType)
{
final StringBuffer bf = new StringBuffer();
bf.append("alter table ").
append(tableName).
append(" change ").
append(oldColumnName).
append(' ').
append(newColumnName).
append(' ').
append(columnType);
return bf.toString();
}
// TODO is same as hsqldb
@Override
String getCreateColumnStatement(final String tableName, final String columnName, final String columnType)
{
final StringBuffer bf = new StringBuffer();
bf.append("alter table ").
append(tableName).
append(" add column ").
append(columnName).
append(' ').
append(columnType);
return bf.toString();
}
@Override
String getModifyColumnStatement(final String tableName, final String columnName, final String newColumnType)
{
throw new RuntimeException("not implemented");
}
@Override
String getDropForeignKeyConstraintStatement(final String tableName, final String constraintName)
{
final StringBuffer bf = new StringBuffer();
bf.append("alter table ").
append(tableName).
append(" drop foreign key ").
append(constraintName);
return bf.toString();
}
@Override
boolean canDropUniqueConstraints()
{
return false;
}
@Override
boolean canDropPrimaryKeyConstraints()
{
return false;
}
}
| implement getModifyColumnStatement, not yet tested
git-svn-id: 9dbc6da3594b32e13bcf3b3752e372ea5bc7c2cc@6021 e7d4fc99-c606-0410-b9bf-843393a9eab7
| dsmf/src/com/exedio/dsmf/MysqlDriver.java | implement getModifyColumnStatement, not yet tested |
|
Java | lgpl-2.1 | 495e3f10faabf6b5e4151c68338b1adb79a7daa0 | 0 | andreasprlic/biojava,lafita/biojava,pwrose/biojava,biojava/biojava,andreasprlic/biojava,pwrose/biojava,emckee2006/biojava,sbliven/biojava-sbliven,sbliven/biojava-sbliven,lafita/biojava,pwrose/biojava,andreasprlic/biojava,heuermh/biojava,heuermh/biojava,andreasprlic/biojava,emckee2006/biojava,heuermh/biojava,biojava/biojava,emckee2006/biojava,lafita/biojava,biojava/biojava,sbliven/biojava-sbliven | package org.biojava.nbio.genome;
import com.google.common.collect.Lists;
import com.google.common.collect.Range;
import junit.framework.TestCase;
import org.biojava.nbio.genome.parsers.genename.GeneChromosomePosition;
import org.biojava.nbio.genome.parsers.genename.GeneChromosomePositionParser;
import org.biojava.nbio.genome.util.ChromosomeMappingTools;
import org.junit.Test;
import java.io.InputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.zip.GZIPInputStream;
/**
* Created by andreas on 7/19/16.
*/
public class TestGenomeMapping extends TestCase {
private static final String geneChromosomeFile = "http://cdn.rcsb.org/gene/hg38/geneChromosome38.tsf.gz";
private List<GeneChromosomePosition> gcps = null;
@Override
protected void setUp() throws Exception {
super.setUp();
InputStream input = new GZIPInputStream(new URL(geneChromosomeFile).openStream());
gcps = GeneChromosomePositionParser.getChromosomeMappings(input);
}
@Test
public void testAK1() {
String geneName = "AK1";
assertNotNull(gcps);
assertTrue("Problems with downloading refFlat file from UCSC browser ", gcps.size() > 100);
int uniProtLength = 194;
try {
for (GeneChromosomePosition pos : gcps) {
//System.out.println(pos.getGeneName());
if (!pos.getGeneName().equals(geneName))
continue;
/// there are three alternative transcripts for AK1.
// we are just testing one here:
if ( ! pos.getGenebankId().equals("NM_000476"))
continue;
assertTrue(pos.getGeneName().equals(geneName));
assertTrue(pos.getOrientation().equals('-'));
assertTrue(pos.getChromosome().equals("chr9"));
List<Range<Integer>> cdsranges = ChromosomeMappingTools.getCDSExonRanges(pos);
validateExon(0,0,7, cdsranges );
validateExon(1,7,43, cdsranges );
validateExon(2,43,207, cdsranges );
validateExon(3,207,324, cdsranges );
validateExon(4,324,516, cdsranges );
validateExon(5,516,585, cdsranges );
int cdslength = ChromosomeMappingTools.getCDSLength(pos);
assertTrue("CDS length should be 582, but is " + cdslength, cdslength == (uniProtLength *3));
List<Range<Integer>> chromranges = ChromosomeMappingTools.getChromosomalRangesForCDS(pos);
// we are reverse strand. reverse the order
chromranges = Lists.reverse(chromranges);
assertTrue(chromranges.size() == 6);
// compare with https://www.ncbi.nlm.nih.gov/CCDS/CcdsBrowse.cgi?REQUEST=CCDS&DATA=CCDS6881
validateExon(0,127868008,127868076, chromranges );
validateExon(1,127868320,127868512, chromranges );
validateExon(2,127871822,127871939, chromranges );
validateExon(3,127872689,127872853, chromranges );
validateExon(4,127873025,127873061, chromranges );
validateExon(5,127874610,127874617, chromranges );
}
} catch (Exception e) {
fail(e.getMessage());
}
}
@Test
public void testHBA(){
String geneName = "HBA1";
assertNotNull(gcps);
assertTrue("Problems with downloading refFlat file from UCSC browser ", gcps.size() > 100);
try {
for ( GeneChromosomePosition pos : gcps){
//System.out.println(pos.getGeneName());
if ( ! pos.getGeneName().equals(geneName))
continue;
assertTrue(pos.getGeneName().equals("HBA1"));
assertTrue(pos.getGenebankId().equals("NM_000558"));
assertTrue(pos.getChromosome().equals("chr16"));
assertTrue(pos.getTranscriptionStart().equals(176650));
assertTrue(pos.getTranscriptionEnd().equals(177522));
assertTrue(pos.getOrientation().equals('+'));
List<Range<Integer>> cdsranges = ChromosomeMappingTools.getCDSExonRanges(pos);
assertTrue(cdsranges.size() == 3);
validateExon(0,0,95,cdsranges);
validateExon(1,95,300,cdsranges);
validateExon(2,300,429,cdsranges);
List<Range<Integer>> chromranges = ChromosomeMappingTools.getChromosomalRangesForCDS(pos);
validateExon(0,176716,176811, chromranges );
validateExon(1,176928,177133, chromranges );
validateExon(2,177282,177411, chromranges );
}
} catch (Exception e){
fail(e.getMessage());
}
}
private void validateExon(int exonNr, int start, int stop, List<Range<Integer>> cdsranges) {
Range<Integer> exon = cdsranges.get(exonNr);
assertTrue("Exon " + exonNr + " boundary "+ exon.lowerEndpoint() + " does not match " +start , exon.lowerEndpoint().equals(start));
assertTrue("Exon " + exonNr + " boundary " + exon.upperEndpoint() + " does not match " + stop, exon.upperEndpoint().equals(stop));
}
/** Get the position of the nucleotide base corresponding to the position of that base on the mRNA sequence
* for a gene living on the reverse DNA strand.
*
* @author Yana Valasatava
*/
private int getPositionInmRNA(String geneName, String genebankId, int posChrom) {
for (GeneChromosomePosition gcp : gcps) {
if ( gcp.getGeneName().equals(geneName) ) {
if ( gcp.getGenebankId().equals(genebankId) ) {
return ChromosomeMappingTools.getCDSPosForChromosomeCoordinate(posChrom, gcp);
}
}
}
return -1;
}
/** Make sure the mapping tool correctly retrieves the mRNA position for a gene
* living on the forward DNA strand for different chromosome positions.
*
* @author Yana Valasatava
*/
@Test
public void testForwardMappingPositions() {
String geneName = "HORMAD2"; // gene on the forward DNA strand
String genebankId = "NM_152510"; // GeneBank ID for the transcript used for testing (ENST00000336726)
List<String> scenarios = Arrays.asList("first1exon", "last1exon", "last3exon");
int cds;
int posExonStart;
int posInmRNA;
for (String scenario : scenarios) {
switch (scenario) {
case "first1exon":
posExonStart = 30093953; // ending position of the last exon coding region (on forward strand)
posInmRNA = 1; // base 1 position in mRNA sequence
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
case "last1exon":
posExonStart = 30094003; // starting position of the last exon coding region (on forward strand)
posInmRNA = 51; // position in mRNA sequence equals to the length of the exon
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
case "last3exon":
posExonStart = 30103500; // starting position of the first base in a coding region (3rd exon)
posInmRNA = 257; // position in mRNA sequence equals to the sum length of the 3 last exons
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
}
}
}
/** Make sure the mapping tool correctly retrieves the mRNA position for a gene
* living on the reverse DNA strand for different chromosome positions.
*
* @author Yana Valasatava
*/
@Test
public void testReverseMappingPositions() {
String geneName = "BCL11B"; // gene on the reverse DNA strand
String genebankId = "NM_138576"; // GeneBank ID for the transcript used for testing (ENST00000357195)
List<String> scenarios = Arrays.asList("first1exon", "last1exon", "last3exon");
int cds;
int posExonStart;
int posInmRNA;
for (String scenario : scenarios) {
switch (scenario) {
case "first1exon":
posExonStart = 99271218; // ending position of the last exon coding region (on forward strand)
posInmRNA = 1; // base 1 position in mRNA sequence
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
case "last1exon":
posExonStart = 99271161; // starting position of the last exon coding region (on forward strand)
posInmRNA = 58; // position in mRNA sequence equals to the length of the exon
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
case "last3exon":
posExonStart = 99231345; // starting position of the first base in a coding region (3rd exon)
posInmRNA = 640; // position in mRNA sequence equals to the sum length of the 3 last exons
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
}
}
}
/** Test to make sure the mapping tool correctly identify that position falls outside the coding region
* for a gene living on the forward DNA strand.
*
* @author Yana Valasatava
*/
@Test
public void testForwardMappingForExonBoundaries() {
String geneName = "HBA1"; // gene on the reverse DNA strand
String genebankId = "NM_000558"; // GeneBank ID for the transcript used for testing (ENST00000320868)
int posExonStart = 176717; // starting position of the first base in a coding region (1st exon)
int posExonEnd = 176811; // ending position of the first base in a coding region (1st exon)
int cdsSE = getPositionInmRNA(geneName, genebankId, posExonStart-1);
assertEquals(cdsSE, -1);
int cdsEE = getPositionInmRNA(geneName, genebankId, posExonEnd+1);
assertEquals(cdsEE, -1);
}
/** Test to make sure the mapping tool correctly identify that position falls outside the coding region
* for a gene living on the reverse DNA strand.
*
* @author Yana Valasatava
*/
@Test
public void testReverseMappingForExonBoundaries() {
String geneName = "BCL11B"; // gene on the reverse DNA strand
String genebankId = "NM_138576"; // GeneBank ID for the transcript used for testing (ENST00000357195)
int posExonStart = 99174151; // starting position of the first base in a coding region (1st exon)
int posExonEnd = 99176195; // ending position of the first base in a coding region (1st exon)
int cdsSE = getPositionInmRNA(geneName, genebankId, posExonStart-1);
assertEquals(cdsSE, -1);
int cdsEE = getPositionInmRNA(geneName, genebankId, posExonEnd+1);
assertEquals(cdsEE, -1);
}
/** Test to make sure the mapping tool correctly converts the genetic position to a position on mRNA
* when multiple UTR regions are consecutive.
*
* @author Yana Valasatava
*/
@Test
public void testMappingCromosomePosTomRNAMultiUTRs() {
String geneName = "ILK"; // gene on the reverse DNA strand
String genebankId = "NM_001278442"; // GeneBank ID for the transcript used for testing (ENST00000532063)
int chromPos = 6608760;
int mRNAPos = 16;
int cds = getPositionInmRNA(geneName, genebankId, chromPos);
assertEquals(cds, mRNAPos);
}
}
| biojava-genome/src/test/java/org/biojava/nbio/genome/TestGenomeMapping.java | package org.biojava.nbio.genome;
import com.google.common.collect.Lists;
import com.google.common.collect.Range;
import junit.framework.TestCase;
import org.biojava.nbio.genome.parsers.genename.GeneChromosomePosition;
import org.biojava.nbio.genome.parsers.genename.GeneChromosomePositionParser;
import org.biojava.nbio.genome.util.ChromosomeMappingTools;
import org.junit.Test;
import java.io.InputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.zip.GZIPInputStream;
/**
* Created by andreas on 7/19/16.
*/
public class TestGenomeMapping extends TestCase {
private static final String geneChromosomeFile = "http://cdn.rcsb.org/gene/hg38/geneChromosome38.tsf.gz";
private List<GeneChromosomePosition> gcps = null;
@Override
protected void setUp() throws Exception {
super.setUp();
InputStream input = new GZIPInputStream(new URL(geneChromosomeFile).openStream());
gcps = GeneChromosomePositionParser.getChromosomeMappings(input);
}
@Test
public void testAK1() {
String geneName = "AK1";
assertNotNull(gcps);
assertTrue("Problems with downloading refFlat file from UCSC browser ", gcps.size() > 100);
int uniProtLength = 194;
try {
for (GeneChromosomePosition pos : gcps) {
//System.out.println(pos.getGeneName());
if (!pos.getGeneName().equals(geneName))
continue;
/// there are three alternative transcripts for AK1.
// we are just testing one here:
if ( ! pos.getGenebankId().equals("NM_000476"))
continue;
assertTrue(pos.getGeneName().equals(geneName));
assertTrue(pos.getOrientation().equals('-'));
assertTrue(pos.getChromosome().equals("chr9"));
List<Range<Integer>> cdsranges = ChromosomeMappingTools.getCDSExonRanges(pos);
validateExon(0,0,7, cdsranges );
validateExon(1,7,43, cdsranges );
validateExon(2,43,207, cdsranges );
validateExon(3,207,324, cdsranges );
validateExon(4,324,516, cdsranges );
validateExon(5,516,585, cdsranges );
int cdslength = ChromosomeMappingTools.getCDSLength(pos);
assertTrue("CDS length should be 582, but is " + cdslength, cdslength == (uniProtLength *3));
List<Range<Integer>> chromranges = ChromosomeMappingTools.getChromosomalRangesForCDS(pos);
// we are reverse strand. reverse the order
chromranges = Lists.reverse(chromranges);
assertTrue(chromranges.size() == 6);
// compare with https://www.ncbi.nlm.nih.gov/CCDS/CcdsBrowse.cgi?REQUEST=CCDS&DATA=CCDS6881
validateExon(0,127868008,127868076, chromranges );
validateExon(1,127868320,127868512, chromranges );
validateExon(2,127871822,127871939, chromranges );
validateExon(3,127872689,127872853, chromranges );
validateExon(4,127873025,127873061, chromranges );
validateExon(5,127874610,127874617, chromranges );
}
} catch (Exception e) {
fail(e.getMessage());
}
}
@Test
public void testHBA(){
String geneName = "HBA1";
assertNotNull(gcps);
assertTrue("Problems with downloading refFlat file from UCSC browser ", gcps.size() > 100);
try {
for ( GeneChromosomePosition pos : gcps){
//System.out.println(pos.getGeneName());
if ( ! pos.getGeneName().equals(geneName))
continue;
assertTrue(pos.getGeneName().equals("HBA1"));
assertTrue(pos.getGenebankId().equals("NM_000558"));
assertTrue(pos.getChromosome().equals("chr16"));
assertTrue(pos.getTranscriptionStart().equals(176650));
assertTrue(pos.getTranscriptionEnd().equals(177522));
assertTrue(pos.getOrientation().equals('+'));
List<Range<Integer>> cdsranges = ChromosomeMappingTools.getCDSExonRanges(pos);
assertTrue(cdsranges.size() == 3);
validateExon(0,0,95,cdsranges);
validateExon(1,95,300,cdsranges);
validateExon(2,300,429,cdsranges);
List<Range<Integer>> chromranges = ChromosomeMappingTools.getChromosomalRangesForCDS(pos);
validateExon(0,176716,176811, chromranges );
validateExon(1,176928,177133, chromranges );
validateExon(2,177282,177411, chromranges );
}
} catch (Exception e){
fail(e.getMessage());
}
}
private void validateExon(int exonNr, int start, int stop, List<Range<Integer>> cdsranges) {
Range exon = cdsranges.get(exonNr);
assertTrue("Exon " + exonNr + " boundary "+ exon.lowerEndpoint() + " does not match " +start , exon.lowerEndpoint().equals(start));
assertTrue("Exon " + exonNr + " boundary " + exon.upperEndpoint() + " does not match " + stop, exon.upperEndpoint().equals(stop));
}
@Test
/** Test to make sure the mapping tool correctly retrieves the mRNA position for a gene
* living on the forward DNA strand for different chromosome positions.
*
* @author Yana Valasatava
*/
public void testForwardMappingPositions() {
String geneName = "HORMAD2"; // gene on the forward DNA strand
String genebankId = "NM_152510"; // GeneBank ID for the transcript used for testing (ENST00000336726)
List<String> scenarios = Arrays.asList("first1exon", "last1exon", "last3exon");
int cds;
int posExonStart;
int posInmRNA;
for (String scenario : scenarios) {
switch (scenario) {
case "first1exon":
posExonStart = 30093953; // ending position of the last exon coding region (on forward strand)
posInmRNA = 1; // base 1 position in mRNA sequence
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
case "last1exon":
posExonStart = 30094003; // starting position of the last exon coding region (on forward strand)
posInmRNA = 51; // position in mRNA sequence equals to the length of the exon
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
case "last3exon":
posExonStart = 30103500; // starting position of the first base in a coding region (3rd exon)
posInmRNA = 257; // position in mRNA sequence equals to the sum length of the 3 last exons
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
}
}
}
@Test
/** Test to make sure the mapping tool correctly retrieves the mRNA position for a gene
* living on the reverse DNA strand for different chromosome positions.
*
* @author Yana Valasatava
*/
public void testReverseMappingPositions() {
String geneName = "BCL11B"; // gene on the reverse DNA strand
String genebankId = "NM_138576"; // GeneBank ID for the transcript used for testing (ENST00000357195)
List<String> scenarios = Arrays.asList("first1exon", "last1exon", "last3exon");
int cds;
int posExonStart;
int posInmRNA;
for (String scenario : scenarios) {
switch (scenario) {
case "first1exon":
posExonStart = 99271218; // ending position of the last exon coding region (on forward strand)
posInmRNA = 1; // base 1 position in mRNA sequence
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
case "last1exon":
posExonStart = 99271161; // starting position of the last exon coding region (on forward strand)
posInmRNA = 58; // position in mRNA sequence equals to the length of the exon
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
case "last3exon":
posExonStart = 99231345; // starting position of the first base in a coding region (3rd exon)
posInmRNA = 640; // position in mRNA sequence equals to the sum length of the 3 last exons
cds = getPositionInmRNA(geneName, genebankId, posExonStart);
assertEquals(cds, posInmRNA);
break;
}
}
}
@Test
/** Test to make sure the mapping tool correctly identify that position falls outside the coding region
* for a gene living on the forward DNA strand.
*
* @author Yana Valasatava
*/
public void testForwardMappingForExonBoundaries() {
String geneName = "HBA1"; // gene on the reverse DNA strand
String genebankId = "NM_000558"; // GeneBank ID for the transcript used for testing (ENST00000320868)
int posExonStart = 176717; // starting position of the first base in a coding region (1st exon)
int posExonEnd = 176811; // ending position of the first base in a coding region (1st exon)
for (GeneChromosomePosition gcp : gcps) {
if ( !gcp.getGeneName().equals(geneName) )
continue;
if ( !gcp.getGenebankId().equals(genebankId) )
continue;
int cdsSE = ChromosomeMappingTools.getCDSPosForChromosomeCoordinate(posExonStart-1, gcp);
assertEquals(cdsSE, -1);
int cdsEE = ChromosomeMappingTools.getCDSPosForChromosomeCoordinate(posExonEnd+1, gcp);
assertEquals(cdsEE, -1);
break;
}
}
@Test
/** Test to make sure the mapping tool correctly identify that position falls outside the coding region
* for a gene living on the reverse DNA strand.
*
* @author Yana Valasatava
*/
public void testReverseMappingForExonBoundaries() {
String geneName = "BCL11B"; // gene on the reverse DNA strand
String genebankId = "NM_138576"; // GeneBank ID for the transcript used for testing (ENST00000357195)
int posExonStart = 99174151; // starting position of the first base in a coding region (1st exon)
int posExonEnd = 99176195; // ending position of the first base in a coding region (1st exon)
for (GeneChromosomePosition gcp : gcps) {
if ( !gcp.getGeneName().equals(geneName) )
continue;
if ( !gcp.getGenebankId().equals(genebankId) )
continue;
int cdsSE = ChromosomeMappingTools.getCDSPosForChromosomeCoordinate(posExonStart-1, gcp);
assertEquals(cdsSE, -1);
int cdsEE = ChromosomeMappingTools.getCDSPosForChromosomeCoordinate(posExonEnd+1, gcp);
assertEquals(cdsEE, -1);
break;
}
}
/** Get the position of the nucleotide base corresponding to the position of that base on the mRNA sequence
* for a gene living on the reverse DNA strand.
*
* @author Yana Valasatava
*/
private int getPositionInmRNA(String geneName, String genebankId, int posChrom) {
for (GeneChromosomePosition gcp : gcps) {
if ( !gcp.getGeneName().equals(geneName) )
continue;
if ( !gcp.getGenebankId().equals(genebankId) )
continue;
return ChromosomeMappingTools.getCDSPosForChromosomeCoordinate(posChrom, gcp);
}
return -1;
}
}
| Chromosome mapping tool: unit test to check handling multiple UTRs preceding the coding region
| biojava-genome/src/test/java/org/biojava/nbio/genome/TestGenomeMapping.java | Chromosome mapping tool: unit test to check handling multiple UTRs preceding the coding region |
|
Java | lgpl-2.1 | e681be1a064e8e23e3134f4df98dba4e2227726b | 0 | wolfgangmm/exist,patczar/exist,zwobit/exist,eXist-db/exist,jensopetersen/exist,hungerburg/exist,wshager/exist,windauer/exist,wshager/exist,kohsah/exist,dizzzz/exist,adamretter/exist,eXist-db/exist,ljo/exist,opax/exist,ljo/exist,zwobit/exist,wolfgangmm/exist,lcahlander/exist,shabanovd/exist,eXist-db/exist,dizzzz/exist,shabanovd/exist,kohsah/exist,eXist-db/exist,joewiz/exist,ambs/exist,joewiz/exist,hungerburg/exist,patczar/exist,olvidalo/exist,joewiz/exist,patczar/exist,zwobit/exist,RemiKoutcherawy/exist,jessealama/exist,windauer/exist,jessealama/exist,ambs/exist,lcahlander/exist,zwobit/exist,dizzzz/exist,ljo/exist,adamretter/exist,hungerburg/exist,wolfgangmm/exist,MjAbuz/exist,olvidalo/exist,ljo/exist,shabanovd/exist,dizzzz/exist,ambs/exist,RemiKoutcherawy/exist,jensopetersen/exist,lcahlander/exist,ambs/exist,lcahlander/exist,opax/exist,wshager/exist,lcahlander/exist,RemiKoutcherawy/exist,patczar/exist,windauer/exist,patczar/exist,windauer/exist,jessealama/exist,opax/exist,wolfgangmm/exist,MjAbuz/exist,joewiz/exist,shabanovd/exist,MjAbuz/exist,eXist-db/exist,jessealama/exist,hungerburg/exist,dizzzz/exist,ambs/exist,kohsah/exist,olvidalo/exist,eXist-db/exist,zwobit/exist,MjAbuz/exist,patczar/exist,adamretter/exist,RemiKoutcherawy/exist,zwobit/exist,joewiz/exist,jensopetersen/exist,opax/exist,ambs/exist,adamretter/exist,dizzzz/exist,wolfgangmm/exist,wshager/exist,MjAbuz/exist,shabanovd/exist,lcahlander/exist,kohsah/exist,shabanovd/exist,ljo/exist,jessealama/exist,jensopetersen/exist,ljo/exist,RemiKoutcherawy/exist,kohsah/exist,windauer/exist,wolfgangmm/exist,wshager/exist,MjAbuz/exist,olvidalo/exist,jensopetersen/exist,windauer/exist,hungerburg/exist,adamretter/exist,adamretter/exist,opax/exist,jessealama/exist,RemiKoutcherawy/exist,kohsah/exist,olvidalo/exist,jensopetersen/exist,wshager/exist,joewiz/exist | /*
* eXist Open Source Native XML Database
* Copyright (C) 2001-2007 The eXist team
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Id$
*/
package org.exist.storage;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Observer;
import java.util.Stack;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.stream.XMLStreamException;
import org.apache.log4j.Logger;
import org.exist.EXistException;
import org.exist.Indexer;
import org.exist.backup.RawDataBackup;
import org.exist.collections.Collection;
import org.exist.collections.Collection.SubCollectionEntry;
import org.exist.collections.CollectionCache;
import org.exist.collections.CollectionConfiguration;
import org.exist.collections.CollectionConfigurationException;
import org.exist.collections.CollectionConfigurationManager;
import org.exist.collections.triggers.CollectionTriggersVisitor;
import org.exist.collections.triggers.DocumentTriggersVisitor;
import org.exist.collections.triggers.TriggerException;
import org.exist.dom.*;
import org.exist.fulltext.FTIndex;
import org.exist.fulltext.FTIndexWorker;
import org.exist.indexing.StreamListener;
import org.exist.indexing.StructuralIndex;
import org.exist.memtree.DOMIndexer;
import org.exist.numbering.NodeId;
import org.exist.security.Account;
import org.exist.security.MessageDigester;
import org.exist.security.Permission;
import org.exist.security.PermissionDeniedException;
import org.exist.security.Subject;
import org.exist.stax.EmbeddedXMLStreamReader;
import org.exist.storage.btree.BTree;
import org.exist.storage.btree.BTreeCallback;
import org.exist.storage.btree.BTreeException;
import org.exist.storage.btree.DBException;
import org.exist.storage.btree.IndexQuery;
import org.exist.storage.btree.Paged;
import org.exist.storage.btree.Value;
import org.exist.storage.btree.Paged.Page;
import org.exist.storage.dom.DOMFile;
import org.exist.storage.dom.DOMTransaction;
import org.exist.storage.dom.NodeIterator;
import org.exist.storage.dom.RawNodeIterator;
import org.exist.storage.index.BFile;
import org.exist.storage.index.CollectionStore;
import org.exist.storage.io.VariableByteInput;
import org.exist.storage.io.VariableByteOutputStream;
import org.exist.storage.journal.Journal;
import org.exist.storage.journal.LogEntryTypes;
import org.exist.storage.journal.Loggable;
import org.exist.storage.lock.Lock;
import org.exist.storage.serializers.NativeSerializer;
import org.exist.storage.serializers.Serializer;
import org.exist.storage.sync.Sync;
import org.exist.storage.txn.TransactionException;
import org.exist.storage.txn.TransactionManager;
import org.exist.storage.txn.Txn;
import org.exist.util.ByteArrayPool;
import org.exist.util.ByteConversion;
import org.exist.util.Configuration;
import org.exist.util.DatabaseConfigurationException;
import org.exist.util.LockException;
import org.exist.util.ReadOnlyException;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.value.Type;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.exist.xquery.TerminatedException;
/**
* Main class for the native XML storage backend.
* By "native" it is meant file-based, embedded backend.
*
* Provides access to all low-level operations required by
* the database. Extends {@link DBBroker}.
*
* Observer Design Pattern: role : this class is the subject (alias observable)
* for various classes that generate indices for the database content :
* @link org.exist.storage.NativeElementIndex
* @link org.exist.storage.NativeTextEngine
* @link org.exist.storage.NativeValueIndex
* @link org.exist.storage.NativeValueIndexByQName
*
* This class dispatches the various events (defined by the methods
* of @link org.exist.storage.ContentLoadingObserver) to indexing classes.
*
*@author Wolfgang Meier
*/
public class NativeBroker extends DBBroker {
public final static String EXIST_STATISTICS_LOGGER = "org.exist.statistics";
protected final static Logger LOGSTATS = Logger.getLogger( EXIST_STATISTICS_LOGGER );
public final static byte LOG_RENAME_BINARY = 0x40;
public final static byte LOG_CREATE_BINARY = 0x41;
public final static byte LOG_UPDATE_BINARY = 0x42;
static {
LogEntryTypes.addEntryType(LOG_RENAME_BINARY, RenameBinaryLoggable.class);
LogEntryTypes.addEntryType(LOG_CREATE_BINARY, CreateBinaryLoggable.class);
LogEntryTypes.addEntryType(LOG_UPDATE_BINARY, UpdateBinaryLoggable.class);
}
public static final byte PREPEND_DB_ALWAYS = 0;
public static final byte PREPEND_DB_NEVER = 1;
public static final byte PREPEND_DB_AS_NEEDED = 2;
public static final byte COLLECTIONS_DBX_ID = 0;
public static final byte VALUES_DBX_ID = 2;
public static final byte DOM_DBX_ID = 3;
//Note : no ID for symbols ? Too bad...
public static final String PAGE_SIZE_ATTRIBUTE = "pageSize";
public static final String INDEX_DEPTH_ATTRIBUTE = "index-depth";
public static final String PROPERTY_INDEX_DEPTH = "indexer.index-depth";
private static final byte[] ALL_STORAGE_FILES = {
COLLECTIONS_DBX_ID, VALUES_DBX_ID, DOM_DBX_ID
};
//private static final String TEMP_FRAGMENT_REMOVE_ERROR = "Could not remove temporary fragment";
// private static final String TEMP_STORE_ERROR = "An error occurred while storing temporary data: ";
private static final String EXCEPTION_DURING_REINDEX = "exception during reindex";
private static final String DATABASE_IS_READ_ONLY = "database is read-only";
public static final String DEFAULT_DATA_DIR = "data";
public static final int DEFAULT_INDEX_DEPTH = 1;
public static final int DEFAULT_MIN_MEMORY = 5000000;
public static final long TEMP_FRAGMENT_TIMEOUT = 60000;
/** default buffer size setting */
public static final int BUFFERS = 256;
/** check available memory after storing DEFAULT_NODES_BEFORE_MEMORY_CHECK nodes */
public static final int DEFAULT_NODES_BEFORE_MEMORY_CHECK = 500;
public static int OFFSET_COLLECTION_ID = 0;
public static int OFFSET_VALUE = OFFSET_COLLECTION_ID + Collection.LENGTH_COLLECTION_ID; //2
public final static String INIT_COLLECTION_CONFIG = "collection.xconf.init";
/** the database files */
protected CollectionStore collectionsDb;
protected DOMFile domDb;
/** the index processors */
protected NativeValueIndex valueIndex;
protected IndexSpec indexConfiguration;
protected int defaultIndexDepth;
protected Serializer xmlSerializer;
/** used to count the nodes inserted after the last memory check */
protected int nodesCount = 0;
protected int nodesCountThreshold = DEFAULT_NODES_BEFORE_MEMORY_CHECK;
protected String dataDir;
protected File fsDir;
protected File fsBackupDir;
protected int pageSize;
protected byte prepend;
private final Runtime run = Runtime.getRuntime();
private NodeProcessor nodeProcessor = new NodeProcessor();
private EmbeddedXMLStreamReader streamReader = null;
protected Journal logManager;
protected boolean incrementalDocIds = false;
/** initialize database; read configuration, etc. */
public NativeBroker(BrokerPool pool, Configuration config) throws EXistException {
super(pool, config);
this.logManager = pool.getTransactionManager().getJournal();
LOG.debug("Initializing broker " + hashCode());
String prependDB = (String) config.getProperty("db-connection.prepend-db");
if ("always".equalsIgnoreCase(prependDB)) {
prepend = PREPEND_DB_ALWAYS;
} else if("never".equalsIgnoreCase(prependDB)) {
prepend = PREPEND_DB_NEVER;
} else {
prepend = PREPEND_DB_AS_NEEDED;
}
dataDir = (String) config.getProperty(BrokerPool.PROPERTY_DATA_DIR);
if (dataDir == null)
dataDir = DEFAULT_DATA_DIR;
fsDir = new File(new File(dataDir),"fs");
if (!fsDir.exists()) {
if (!fsDir.mkdir()) {
throw new EXistException("Cannot make collection filesystem directory: "+fsDir);
}
}
fsBackupDir = new File(new File(dataDir),"fs.journal");
if (!fsBackupDir.exists()) {
if (!fsBackupDir.mkdir()) {
throw new EXistException("Cannot make collection filesystem directory: "+fsBackupDir);
}
}
nodesCountThreshold = config.getInteger(BrokerPool.PROPERTY_NODES_BUFFER);
if (nodesCountThreshold > 0)
nodesCountThreshold = nodesCountThreshold * 1000;
defaultIndexDepth = config.getInteger(PROPERTY_INDEX_DEPTH);
if (defaultIndexDepth < 0)
defaultIndexDepth = DEFAULT_INDEX_DEPTH;
String docIdProp = (String) config.getProperty(BrokerPool.DOC_ID_MODE_PROPERTY);
if (docIdProp != null) {
incrementalDocIds = docIdProp.equalsIgnoreCase("incremental");
}
indexConfiguration = (IndexSpec) config.getProperty(Indexer.PROPERTY_INDEXER_CONFIG);
xmlSerializer = new NativeSerializer(this, config);
setSubject(pool.getSecurityManager().getSystemSubject());
try {
//TODO : refactor so that we can,
//1) customize the different properties (file names, cache settings...)
//2) have a consistent READ-ONLY behaviour (based on *mandatory* files ?)
//3) have consistent file creation behaviour (we can probably avoid some unnecessary files)
//4) use... *customized* factories for a better index plugability ;-)
// Initialize DOM storage
domDb = (DOMFile) config.getProperty(DOMFile.getConfigKeyForFile());
if (domDb == null)
domDb = new DOMFile(pool, DOM_DBX_ID, dataDir, config);
if (domDb.isReadOnly()) {
LOG.warn(domDb.getFile().getName() + " is read-only!");
pool.setReadOnly();
}
//Initialize collections storage
collectionsDb = (CollectionStore) config.getProperty(CollectionStore.getConfigKeyForFile());
if (collectionsDb == null)
collectionsDb = new CollectionStore(pool, COLLECTIONS_DBX_ID, dataDir, config);
if (collectionsDb.isReadOnly()) {
LOG.warn(collectionsDb.getFile().getName() + " is read-only!");
pool.setReadOnly();
}
valueIndex = new NativeValueIndex(this, VALUES_DBX_ID, dataDir, config);
if (pool.isReadOnly())
LOG.info("Database runs in read-only mode");
} catch (DBException e) {
LOG.debug(e.getMessage(), e);
throw new EXistException(e);
}
}
@Override
public ElementIndex getElementIndex() {
return null;
}
@Override
public synchronized void addObserver(Observer o) {
super.addObserver(o);
//textEngine.addObserver(o);
//elementIndex.addObserver(o);
//TODO : what about other indexes observers ?
}
@Override
public synchronized void deleteObservers() {
super.deleteObservers();
//if (elementIndex != null)
//elementIndex.deleteObservers();
//TODO : what about other indexes observers ?
//if (textEngine != null)
//textEngine.deleteObservers();
}
// ============ dispatch the various events to indexing classes ==========
private void notifyRemoveNode(StoredNode node, NodePath currentPath, String content) {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.removeNode(node, currentPath, content);
}
}
//private void notifyStoreAttribute(AttrImpl attr, NodePath currentPath, int indexingHint, RangeIndexSpec spec, boolean remove) {
// for (int i = 0; i < contentLoadingObservers.size(); i++) {
// ContentLoadingObserver observer = (ContentLoadingObserver) contentLoadingObservers.get(i);
// observer.storeAttribute(attr, currentPath, indexingHint, spec, remove);
// }
//}
private void notifyStoreText(TextImpl text, NodePath currentPath, int indexingHint) {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.storeText(text, currentPath, indexingHint);
}
}
private void notifyDropIndex(Collection collection) {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.dropIndex(collection);
}
}
private void notifyDropIndex(DocumentImpl doc) throws ReadOnlyException {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.dropIndex(doc);
}
}
private void notifyRemove() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.remove();
}
}
private void notifySync() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.sync();
}
}
private void notifyFlush() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
try {
observer.flush();
} catch (DBException e) {
LOG.warn(e);
//Ignore the exception ; try to continue on other files
}
}
}
private void notifyPrintStatistics() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.printStatistics();
}
}
private void notifyClose() throws DBException {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.close();
}
clearContentLoadingObservers();
}
private void notifyCloseAndRemove() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.closeAndRemove();
}
clearContentLoadingObservers();
}
/**
* Update indexes for the given element node. This method is called when the indexer
* encounters a closing element tag. It updates any range indexes defined on the
* element value and adds the element id to the structural index.
*
* @param node the current element node
* @param currentPath node path leading to the element
* @param content contains the string value of the element. Needed if a range index
* is defined on it.
*/
@Override
public void endElement(final StoredNode node, NodePath currentPath, String content, boolean remove) {
final int indexType = ((ElementImpl) node).getIndexType();
//TODO : do not care about the current code redundancy : this will move in the (near) future
// TODO : move to NativeValueIndex
if (RangeIndexSpec.hasRangeIndex(indexType)) {
node.getQName().setNameType(ElementValue.ELEMENT);
if (content == null) {
//NodeProxy p = new NodeProxy(node);
//if (node.getOldInternalAddress() != StoredNode.UNKNOWN_NODE_IMPL_ADDRESS)
// p.setInternalAddress(node.getOldInternalAddress());
content = getNodeValue(node, false);
//Curious... I assume getNodeValue() needs the old address
//p.setInternalAddress(node.getInternalAddress());
}
valueIndex.setDocument((DocumentImpl) node.getOwnerDocument());
valueIndex.storeElement((ElementImpl) node, content, RangeIndexSpec.indexTypeToXPath(indexType),
NativeValueIndex.IDX_GENERIC, remove);
}
// TODO : move to NativeValueIndexByQName
if ( RangeIndexSpec.hasQNameIndex(indexType) ) {
node.getQName().setNameType(ElementValue.ELEMENT);
if (content == null) {
//NodeProxy p = new NodeProxy(node);
//if (node.getOldInternalAddress() != StoredNode.UNKNOWN_NODE_IMPL_ADDRESS)
// p.setInternalAddress(node.getOldInternalAddress());
content = getNodeValue(node, false);
//Curious... I assume getNodeValue() needs the old address
//p.setInternalAddress(node.getInternalAddress());
}
valueIndex.setDocument((DocumentImpl) node.getOwnerDocument());
valueIndex.storeElement((ElementImpl) node, content, RangeIndexSpec.indexTypeToXPath(indexType),
NativeValueIndex.IDX_QNAME, remove);
//qnameValueIndex.setDocument((DocumentImpl) node.getOwnerDocument());
//qnameValueIndex.endElement((ElementImpl) node, currentPath, content);
}
}
/*
private String getOldNodeContent(StoredNode node, long oldAddress) {
NodeProxy p = new NodeProxy(node);
if (oldAddress != StoredNode.UNKNOWN_NODE_IMPL_ADDRESS)
p.setInternalAddress(oldAddress);
String content = getNodeValue(node, false);
//Curious... I assume getNodeValue() needs the old address
p.setInternalAddress(node.getInternalAddress());
return content;
}
*/
/** Takes care of actually remove entries from the indices;
* must be called after one or more call to {@link #removeNode(Txn, StoredNode, NodePath, String)}. */
@Override
public void endRemove(Txn transaction) {
notifyRemove();
}
@Override
public boolean isReadOnly() {
return pool.isReadOnly();
}
public DOMFile getDOMFile() {
return domDb;
}
public BTree getStorage(byte id) {
//Notice that there is no entry for the symbols table
switch (id) {
case DOM_DBX_ID :
return domDb;
case COLLECTIONS_DBX_ID :
return collectionsDb;
case VALUES_DBX_ID :
return valueIndex.dbValues;
default:
return null;
}
}
public byte[] getStorageFileIds() {
return ALL_STORAGE_FILES;
}
public int getDefaultIndexDepth() {
return defaultIndexDepth;
}
@Override
public void backupToArchive(RawDataBackup backup) throws IOException, EXistException {
for (byte i : ALL_STORAGE_FILES) {
Paged paged = getStorage(i);
if (paged == null) {
LOG.warn("Storage file is null: " + i);
continue;
}
OutputStream os = backup.newEntry(paged.getFile().getName());
paged.backupToStream(os);
backup.closeEntry();
}
pool.getSymbols().backupToArchive(backup);
backupBinary(backup, fsDir, "");
pool.getIndexManager().backupToArchive(backup);
//TODO backup counters
//TODO USE zip64 or tar to create snapshots larger then 4Gb
}
private void backupBinary(RawDataBackup backup, File file, String path) throws IOException {
path = path + "/" + file.getName();
if (file.isDirectory()) {
for (File f : file.listFiles()) {
backupBinary(backup, f, path);
}
} else {
OutputStream os = backup.newEntry(path);
InputStream is = new FileInputStream(file);
byte[] buf = new byte[4096];
int len;
while ((len = is.read(buf)) > 0) {
os.write(buf, 0, len);
}
is.close();
backup.closeEntry();
}
}
@Override
public IndexSpec getIndexConfiguration() {
return indexConfiguration;
}
@Override
public StructuralIndex getStructuralIndex() {
return (StructuralIndex) getIndexController().getWorkerByIndexName(StructuralIndex.STRUCTURAL_INDEX_ID);
}
@Override
public NativeValueIndex getValueIndex() {
return valueIndex;
}
@Override
public TextSearchEngine getTextEngine() {
FTIndexWorker worker = (FTIndexWorker) indexController.getWorkerByIndexId(FTIndex.ID);
if (worker == null) {
LOG.warn("Fulltext index is not configured. Please check the <modules> section in conf.xml");
return null;
}
return worker.getEngine();
}
@Override
public EmbeddedXMLStreamReader getXMLStreamReader(NodeHandle node, boolean reportAttributes)
throws IOException, XMLStreamException {
if (streamReader == null) {
RawNodeIterator iterator = new RawNodeIterator(this, domDb, node);
streamReader = new EmbeddedXMLStreamReader(this, (DocumentImpl) node.getOwnerDocument(), iterator, node, reportAttributes);
} else {
streamReader.reposition(this, node, reportAttributes);
}
return streamReader;
}
@Override
public EmbeddedXMLStreamReader newXMLStreamReader(NodeHandle node, boolean reportAttributes)
throws IOException, XMLStreamException {
RawNodeIterator iterator = new RawNodeIterator(this, domDb, node);
return new EmbeddedXMLStreamReader(this, (DocumentImpl) node.getOwnerDocument(), iterator, null, reportAttributes);
}
@Override
public Iterator<StoredNode> getNodeIterator(StoredNode node) {
if (node == null)
throw new IllegalArgumentException("The node parameter cannot be null.");
try {
return new NodeIterator(this, domDb, node, false);
} catch (BTreeException e) {
LOG.warn("failed to create node iterator", e);
} catch (IOException e) {
LOG.warn("failed to create node iterator", e);
}
return null;
}
@Override
public Serializer getSerializer() {
xmlSerializer.reset();
return xmlSerializer;
}
@Override
public Serializer newSerializer() {
return new NativeSerializer(this, getConfiguration());
}
public XmldbURI prepend(XmldbURI uri) {
switch(prepend) {
case PREPEND_DB_ALWAYS:
return uri.prepend(XmldbURI.ROOT_COLLECTION_URI);
case PREPEND_DB_AS_NEEDED:
return uri.startsWith(XmldbURI.ROOT_COLLECTION_URI)?
uri:
uri.prepend(XmldbURI.ROOT_COLLECTION_URI);
default:
return uri;
}
}
/**
* Creates a temporary collecion
*
* @param transaction : The transaction, which registers the acquired write locks. The locks should be released on commit/abort.
* @return The temporary collection
* @throws LockException
* @throws PermissionDeniedException
* @throws IOException
* @throws TriggerException
*/
private Collection createTempCollection(Txn transaction)
throws LockException, PermissionDeniedException, IOException, TriggerException {
Subject u = getSubject();
try {
setSubject( pool.getSecurityManager().getSystemSubject() );
Collection temp = getOrCreateCollection(transaction, XmldbURI.TEMP_COLLECTION_URI);
temp.setPermissions(0771);
saveCollection(transaction, temp);
return temp;
} finally {
setSubject( u );
}
}
private final String readInitCollectionConfig() {
final File fInitCollectionConfig = new File(pool.getConfiguration().getExistHome(), INIT_COLLECTION_CONFIG);
if(fInitCollectionConfig.exists() && fInitCollectionConfig.isFile()) {
InputStream is = null;
try {
final StringBuilder initCollectionConfig = new StringBuilder();
is = new FileInputStream(fInitCollectionConfig);
int read = -1;
byte buf[] = new byte[1024];
while((read = is.read(buf)) != -1) {
initCollectionConfig.append(new String(buf, 0, read));
}
return initCollectionConfig.toString();
} catch(final IOException ioe) {
LOG.error(ioe.getMessage(), ioe);
} finally {
if(is != null) {
try {
is.close();
} catch(final IOException ioe) {
LOG.warn(ioe.getMessage(), ioe);
}
}
}
};
return null;
}
/* (non-Javadoc)
* @see org.exist.storage.DBBroker#getOrCreateCollection(org.exist.storage.txn.Txn, org.exist.xmldb.XmldbURI)
*/
@Override
public Collection getOrCreateCollection(Txn transaction, XmldbURI name) throws PermissionDeniedException, IOException, TriggerException {
name = prepend(name.normalizeCollectionPath());
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
try {
//TODO : resolve URIs !
XmldbURI[] segments = name.getPathSegments();
XmldbURI path = XmldbURI.ROOT_COLLECTION_URI;
Collection sub;
Collection current = getCollection(XmldbURI.ROOT_COLLECTION_URI);
if (current == null) {
LOG.debug("Creating root collection '" + XmldbURI.ROOT_COLLECTION_URI + "'");
pool.getCollectionTrigger().beforeCreateCollection(this, transaction, XmldbURI.ROOT_COLLECTION_URI);
current = new Collection(this, XmldbURI.ROOT_COLLECTION_URI);
current.setId(getNextCollectionId(transaction));
current.setCreationTime(System.currentTimeMillis());
if(transaction != null) {
transaction.acquireLock(current.getLock(), Lock.WRITE_LOCK);
}
//TODO : acquire lock manually if transaction is null ?
saveCollection(transaction, current);
pool.getCollectionTrigger().afterCreateCollection(this, transaction, current);
//import an initial collection configuration
try {
final String initCollectionConfig = readInitCollectionConfig();
if(initCollectionConfig != null) {
CollectionConfigurationManager collectionConfigurationManager = pool.getConfigurationManager();
if(collectionConfigurationManager == null) {
//might not yet have been initialised
pool.initCollectionConfigurationManager(this);
collectionConfigurationManager = pool.getConfigurationManager();
}
if(collectionConfigurationManager != null) {
collectionConfigurationManager.addConfiguration(transaction, this, current, initCollectionConfig);
}
}
} catch(final CollectionConfigurationException cce) {
LOG.error("Could not load initial collection configuration for /db: " + cce.getMessage(), cce);
}
}
for(int i=1;i<segments.length;i++) {
XmldbURI temp = segments[i];
path = path.append(temp);
if(current.hasSubcollectionNoLock(this, temp)) {
current = getCollection(path);
if (current == null) {
LOG.debug("Collection '" + path + "' not found!");
}
} else {
if (pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
if(!current.getPermissionsNoLock().validate(getSubject(), Permission.WRITE)) {
LOG.error("Permission denied to create collection '" + path + "'");
throw new PermissionDeniedException("Account '"+ getSubject().getName() + "' not allowed to write to collection '" + current.getURI() + "'");
}
if (!current.getPermissionsNoLock().validate(getSubject(), Permission.EXECUTE)) {
LOG.error("Permission denied to create collection '" + path + "'");
throw new PermissionDeniedException("Account '"+ getSubject().getName() + "' not allowed to execute to collection '" + current.getURI() + "'");
}
if (current.hasDocument(this, path.lastSegment())) {
LOG.error("Collection '" + current.getURI() + "' have document '" + path.lastSegment() + "'");
throw new PermissionDeniedException("Collection '" + current.getURI() + "' have document '" + path.lastSegment() + "'.");
}
LOG.debug("Creating collection '" + path + "'...");
CollectionConfiguration colConf = current.getConfiguration(this);
pool.getCollectionTrigger().beforeCreateCollection(this, transaction, path);
CollectionTriggersVisitor triggersVisitor = null;
if(colConf != null) {
triggersVisitor = colConf.getCollectionTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeCreateCollection(this, transaction, path);
}
sub = new Collection(this, path);
sub.setId(getNextCollectionId(transaction));
if (transaction != null) {
transaction.acquireLock(sub.getLock(), Lock.WRITE_LOCK);
}
//TODO : acquire lock manually if transaction is null ?
current.addCollection(this, sub, true);
saveCollection(transaction, current);
pool.getCollectionTrigger().afterCreateCollection(this, transaction, sub);
if(colConf != null) {
triggersVisitor.afterCreateCollection(this, transaction, sub);
}
current = sub;
}
}
return current;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
return null;
} catch (ReadOnlyException e) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
}
}
@Override
public Collection getCollection(XmldbURI uri) throws PermissionDeniedException {
return openCollection(uri, Lock.NO_LOCK);
}
@Override
public Collection openCollection(XmldbURI uri, int lockMode) throws PermissionDeniedException {
return openCollection(uri, BFile.UNKNOWN_ADDRESS, lockMode);
}
@Override
public List<String> findCollectionsMatching(String regexp) {
final List<String> collections = new ArrayList<String>();
final Pattern p = Pattern.compile(regexp);
final Matcher m = p.matcher("");
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
//TODO write a regexp lookup for key data in BTree.query
//IndexQuery idxQuery = new IndexQuery(IndexQuery.REGEXP, regexp);
//List<Value> keys = collectionsDb.findKeysByCollectionName(idxQuery);
List<Value> keys = collectionsDb.getKeys();
for(Value key : keys) {
//TODO restrict keys to just collection uri's
final String collectionName = new String(key.getData());
m.reset(collectionName);
if(m.matches()) {
collections.add(collectionName);
}
}
} catch (UnsupportedEncodingException e) {
//LOG.error("Unable to encode '" + uri + "' in UTF-8");
//return null;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
//return null;
} catch (TerminatedException e) {
LOG.error(e.getMessage(), e);
//return null;
} catch (BTreeException e) {
LOG.error(e.getMessage(), e);
//return null;
} catch (IOException e) {
LOG.error(e.getMessage(), e);
//return null;
} finally {
lock.release(Lock.READ_LOCK);
}
return collections;
}
@Override
public void readCollectionEntry(SubCollectionEntry entry) {
final XmldbURI uri = prepend(entry.getUri().toCollectionPathURI());
Collection collection;
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
collection = collectionsCache.get(uri);
if (collection == null) {
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.CollectionKey(uri.toString());
VariableByteInput is = collectionsDb.getAsStream(key);
if (is == null) {
LOG.warn("Could not read collection entry for: " + uri);
return;
}
//read the entry details
entry.read(is);
} catch (UnsupportedEncodingException e) {
LOG.error("Unable to encode '" + uri + "' in UTF-8");
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.error(e.getMessage(), e);
} finally {
lock.release(Lock.READ_LOCK);
}
} else {
if (!collection.getURI().equalsInternal(uri)) {
LOG.error("The collection received from the cache is not the requested: " + uri +
"; received: " + collection.getURI());
return;
}
entry.read(collection);
collectionsCache.add(collection);
}
}
}
/**
* Get collection object. If the collection does not exist, null is
* returned.
*
*@param uri collection URI
*@return The collection value
*/
private Collection openCollection(XmldbURI uri, long addr, int lockMode) throws PermissionDeniedException {
uri = prepend(uri.toCollectionPathURI());
//We *must* declare it here (see below)
Collection collection;
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
collection = collectionsCache.get(uri);
if (collection == null) {
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
VariableByteInput is;
if (addr == BFile.UNKNOWN_ADDRESS) {
Value key = new CollectionStore.CollectionKey(uri.toString());
is = collectionsDb.getAsStream(key);
} else {
is = collectionsDb.getAsStream(addr);
}
if (is == null)
return null;
collection = new Collection(this, uri);
collection.read(this, is);
//TODO : manage this from within the cache -pb
if(!pool.isInitializing())
collectionsCache.add(collection);
//TODO : rethrow exceptions ? -pb
} catch (UnsupportedEncodingException e) {
LOG.error("Unable to encode '" + uri + "' in UTF-8");
return null;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
return null;
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return null;
} finally {
lock.release(Lock.READ_LOCK);
}
} else {
if (!collection.getURI().equalsInternal(uri)) {
LOG.error("The collection received from the cache is not the requested: " + uri +
"; received: " + collection.getURI());
}
collectionsCache.add(collection);
if(!collection.getPermissionsNoLock().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Permission denied to open collection: " + collection.getURI().toString() + " by " + getSubject().getName());
}
}
}
//Important :
//This code must remain outside of the synchonized block
//because another thread may already own a lock on the collection
//This would result in a deadlock... until the time-out raises the Exception
//TODO : make an attempt to an immediate lock ?
//TODO : manage a collection of requests for locks ?
//TODO : another yet smarter solution ?
if(lockMode != Lock.NO_LOCK) {
try {
collection.getLock().acquire(lockMode);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on collection '" + uri + "'");
}
}
return collection;
}
/**
* Checks all permissions in the tree to ensure that a copy operation will succeed
*/
final void checkPermissionsForCopy(final Collection src, final XmldbURI destUri) throws PermissionDeniedException, LockException {
if(!src.getPermissions().validate(getSubject(), Permission.EXECUTE | Permission.READ)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " by " + getSubject().getName());
}
final Collection dest = getCollection(destUri);
final XmldbURI newDestUri = destUri.append(src.getURI().lastSegment());
final Collection newDest = getCollection(newDestUri);
if(dest != null) {
if(!dest.getPermissions().validate(getSubject(), Permission.EXECUTE | Permission.WRITE | Permission.READ)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getSubject().getName());
}
if(newDest != null) {
if(!dest.getPermissions().validate(getSubject(), Permission.EXECUTE | Permission.READ)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getSubject().getName());
}
if(newDest.isEmpty(this)) {
if(!dest.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getSubject().getName());
}
}
}
}
for(Iterator<DocumentImpl> itSrcSubDoc = src.iterator(this); itSrcSubDoc.hasNext();) {
final DocumentImpl srcSubDoc = itSrcSubDoc.next();
if(!srcSubDoc.getPermissions().validate(getSubject(), Permission.READ)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " for resource " + srcSubDoc.getURI() + " by " + getSubject().getName());
}
if(newDest != null && !newDest.isEmpty(this)) {
final DocumentImpl newDestSubDoc = newDest.getDocument(this, srcSubDoc.getFileURI()); //TODO check this uri is just the filename!
if(newDestSubDoc != null) {
if(!newDestSubDoc.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " for resource " + newDestSubDoc.getURI() + " by " + getSubject().getName());
}
} else {
if(!dest.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getSubject().getName());
}
}
}
}
for(Iterator<XmldbURI> itSrcSubColUri = src.collectionIterator(this); itSrcSubColUri.hasNext();) {
final XmldbURI srcSubColUri = itSrcSubColUri.next();
final Collection srcSubCol = getCollection(src.getURI().append(srcSubColUri));
checkPermissionsForCopy(srcSubCol, newDestUri);
}
}
/* (non-Javadoc)
* @see org.exist.storage.DBBroker#copyCollection(org.exist.storage.txn.Txn, org.exist.collections.Collection, org.exist.collections.Collection, org.exist.xmldb.XmldbURI)
*/
@Override
public void copyCollection(final Txn transaction, final Collection collection, final Collection destination, final XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
//TODO : resolve URIs !!!
if(newName != null && newName.numSegments() != 1) {
throw new PermissionDeniedException("New collection name must have one segment!");
}
final XmldbURI srcURI = collection.getURI();
final XmldbURI dstURI = destination.getURI().append(newName);
if(collection.getURI().equals(dstURI)) {
throw new PermissionDeniedException("Cannot move collection to itself '"+collection.getURI()+"'.");
}
if(collection.getId() == destination.getId()) {
throw new PermissionDeniedException("Cannot move collection to itself '"+collection.getURI()+"'.");
}
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
final Lock lock = collectionsDb.getLock();
try {
pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_COPY_COLLECTION, collection.getURI());
lock.acquire(Lock.WRITE_LOCK);
final XmldbURI parentName = collection.getParentURI();
final Collection parent = parentName == null ? collection : getCollection(parentName);
final CollectionTriggersVisitor triggersVisitor = parent.getConfiguration(this).getCollectionTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeCopyCollection(this, transaction, collection, dstURI);
//atomically check all permissions in the tree to ensure a copy operation will succeed before starting copying
checkPermissionsForCopy(collection, destination.getURI());
Collection newCollection = doCopyCollection(transaction, collection, destination, newName);
triggersVisitor.afterCopyCollection(this, transaction, newCollection, srcURI);
} finally {
lock.release(Lock.WRITE_LOCK);
pool.getProcessMonitor().endJob();
}
}
}
private Collection doCopyCollection(final Txn transaction, final Collection collection, final Collection destination, XmldbURI newName) throws PermissionDeniedException, IOException, EXistException, TriggerException, LockException {
if(newName == null)
newName = collection.getURI().lastSegment();
newName = destination.getURI().append(newName);
if (LOG.isDebugEnabled())
LOG.debug("Copying collection to '" + newName + "'");
final Collection destCollection = getOrCreateCollection(transaction, newName);
for(Iterator<DocumentImpl> i = collection.iterator(this); i.hasNext(); ) {
final DocumentImpl child = i.next();
if (LOG.isDebugEnabled())
LOG.debug("Copying resource: '" + child.getURI() + "'");
XmldbURI newUri = destCollection.getURI().append(child.getFileURI());
pool.getDocumentTrigger().beforeCopyDocument(this, transaction, child, newUri);
DocumentImpl createdDoc;
if (child.getResourceType() == DocumentImpl.XML_FILE) {
//TODO : put a lock on newDoc ?
final DocumentImpl newDoc = new DocumentImpl(pool, destCollection, child.getFileURI());
newDoc.copyOf(child);
newDoc.setDocId(getNextResourceId(transaction, destination));
copyXMLResource(transaction, child, newDoc);
storeXMLResource(transaction, newDoc);
destCollection.addDocument(transaction, this, newDoc);
createdDoc = newDoc;
} else {
final BinaryDocument newDoc = new BinaryDocument(pool, destCollection, child.getFileURI());
newDoc.copyOf(child);
newDoc.setDocId(getNextResourceId(transaction, destination));
InputStream is = null;
try {
is = getBinaryResource((BinaryDocument)child);
storeBinaryResource(transaction,newDoc,is);
} finally {
is.close();
}
storeXMLResource(transaction, newDoc);
destCollection.addDocument(transaction, this, newDoc);
createdDoc = newDoc;
}
pool.getDocumentTrigger().afterCopyDocument(this, transaction, createdDoc, child.getURI());
}
saveCollection(transaction, destCollection);
final XmldbURI name = collection.getURI();
for(Iterator<XmldbURI> i = collection.collectionIterator(this); i.hasNext(); ) {
final XmldbURI childName = i.next();
//TODO : resolve URIs ! collection.getURI().resolve(childName)
final Collection child = openCollection(name.append(childName), Lock.WRITE_LOCK);
if(child == null) {
LOG.warn("Child collection '" + childName + "' not found");
} else {
try {
doCopyCollection(transaction, child, destCollection, childName);
} finally {
child.release(Lock.WRITE_LOCK);
}
}
}
saveCollection(transaction, destCollection);
saveCollection(transaction, destination);
return destCollection;
}
@Override
public void moveCollection(Txn transaction, Collection collection, Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
if(newName != null && newName.numSegments() != 1) {
throw new PermissionDeniedException("New collection name must have one segment!");
}
if(collection.getId() == destination.getId()) {
throw new PermissionDeniedException("Cannot move collection to itself '"+collection.getURI()+"'.");
}
if(collection.getURI().equals(destination.getURI().append(newName))) {
throw new PermissionDeniedException("Cannot move collection to itself '"+collection.getURI()+"'.");
}
if(collection.getURI().equals(XmldbURI.ROOT_COLLECTION_URI)) {
throw new PermissionDeniedException("Cannot move the db root collection");
}
final XmldbURI parentName = collection.getParentURI();
final Collection parent = parentName == null ? collection : getCollection(parentName);
if(!parent.getPermissions().validate(getSubject(), Permission.WRITE | Permission.EXECUTE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection " + parent.getURI() + " to move collection " + collection.getURI());
}
if(!collection.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection to move collection " + collection.getURI());
}
if(!destination.getPermissions().validate(getSubject(), Permission.WRITE | Permission.EXECUTE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection " + parent.getURI() + " to move collection " + collection.getURI());
}
/*
* If replacing another collection in the move i.e. /db/col1/A -> /db/col2 (where /db/col2/A exists)
* we have to make sure the permissions to remove /db/col2/A are okay!
*
* So we must call removeCollection on /db/col2/A
* Which will ensure that collection can be removed and then remove it.
*/
final XmldbURI movedToCollectionUri = destination.getURI().append(newName);
final Collection existingMovedToCollection = getCollection(movedToCollectionUri);
if(existingMovedToCollection != null) {
removeCollection(transaction, existingMovedToCollection);
}
pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_MOVE_COLLECTION, collection.getURI());
try {
final XmldbURI srcURI = collection.getURI();
final XmldbURI dstURI = destination.getURI().append(newName);
final CollectionTriggersVisitor triggersVisitor = parent.getConfiguration(this).getCollectionTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeMoveCollection(this, transaction, collection, dstURI);
// sourceDir must be known in advance, because once moveCollectionRecursive
// is called, both collection and destination can point to the same resource
final File fsSourceDir = getCollectionFile(fsDir, collection.getURI(),false);
// Need to move each collection in the source tree individually, so recurse.
moveCollectionRecursive(transaction, collection, destination, newName);
// For binary resources, though, just move the top level directory and all descendants come with it.
moveBinaryFork(transaction, fsSourceDir, destination, newName);
triggersVisitor.afterMoveCollection(this, transaction, collection, srcURI);
} finally {
pool.getProcessMonitor().endJob();
}
}
private void moveBinaryFork(Txn transaction, File sourceDir, Collection destination, XmldbURI newName) throws IOException {
final File targetDir = getCollectionFile(fsDir,destination.getURI().append(newName),false);
if (sourceDir.exists()) {
if(targetDir.exists()) {
final File targetDelDir = getCollectionFile(fsBackupDir,transaction,destination.getURI().append(newName),true);
targetDelDir.getParentFile().mkdirs();
if (targetDir.renameTo(targetDelDir)) {
Loggable loggable = new RenameBinaryLoggable(this,transaction,targetDir,targetDelDir);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
LOG.fatal("Cannot rename "+targetDir+" to "+targetDelDir);
}
}
targetDir.getParentFile().mkdirs();
if (sourceDir.renameTo(targetDir)) {
Loggable loggable = new RenameBinaryLoggable(this,transaction,sourceDir,targetDir);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
LOG.fatal("Cannot move "+sourceDir+" to "+targetDir);
}
}
}
private void moveCollectionRecursive(Txn transaction, Collection collection, Collection destination, XmldbURI newName) throws PermissionDeniedException, IOException, LockException, TriggerException {
final XmldbURI uri = collection.getURI();
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
final XmldbURI srcURI = collection.getURI();
final XmldbURI dstURI = destination.getURI().append(newName);
pool.getCollectionTrigger().beforeMoveCollection(this, transaction, collection, dstURI);
final XmldbURI parentName = collection.getParentURI();
final Collection parent = openCollection(parentName, Lock.WRITE_LOCK);
if(parent != null) {
try {
//TODO : resolve URIs
parent.removeCollection(this, uri.lastSegment());
} finally {
parent.release(Lock.WRITE_LOCK);
}
}
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
collectionsCache.remove(collection);
final Value key = new CollectionStore.CollectionKey(uri.toString());
collectionsDb.remove(transaction, key);
//TODO : resolve URIs destination.getURI().resolve(newName)
collection.setPath(destination.getURI().append(newName));
collection.setCreationTime(System.currentTimeMillis());
destination.addCollection(this, collection, false);
if(parent != null) {
saveCollection(transaction, parent);
}
if(parent != destination) {
saveCollection(transaction, destination);
}
saveCollection(transaction, collection);
//} catch (ReadOnlyException e) {
//throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
} finally {
lock.release(Lock.WRITE_LOCK);
}
pool.getCollectionTrigger().afterMoveCollection(this, transaction, collection, srcURI);
for(Iterator<XmldbURI> i = collection.collectionIterator(this); i.hasNext(); ) {
final XmldbURI childName = i.next();
//TODO : resolve URIs !!! name.resolve(childName)
final Collection child = openCollection(uri.append(childName), Lock.WRITE_LOCK);
if(child == null) {
LOG.warn("Child collection " + childName + " not found");
} else {
try {
moveCollectionRecursive(transaction, child, collection, childName);
} finally {
child.release(Lock.WRITE_LOCK);
}
}
}
}
}
/**
* Removes a collection and all child collections and resources
*
* We first traverse down the Collection tree to ensure that the Permissions
* enable the Collection Tree to be removed. We then return back up the Collection
* tree, removing each child as we progresses upwards.
*
* @param transaction the transaction to use
* @param collection the collection to remove
* @return true if the collection was removed, false otherwise
* @throws TriggerException
*/
@Override
public boolean removeCollection(final Txn transaction, Collection collection) throws PermissionDeniedException, IOException, TriggerException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
final XmldbURI parentName = collection.getParentURI();
final boolean isRoot = parentName == null;
final Collection parent = isRoot ? collection : getCollection(parentName);
//parent collection permissions
if(!parent.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
if(!parent.getPermissions().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
//this collection permissions
if(!collection.getPermissions().validate(getSubject(), Permission.READ)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
if(!collection.isEmpty(this)) {
if(!collection.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
if(!collection.getPermissions().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
}
try {
pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_COLLECTION, collection.getURI());
pool.getCollectionTrigger().beforeDeleteCollection(this, transaction, collection);
final CollectionTriggersVisitor triggersVisitor = parent.getConfiguration(this).getCollectionTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeDeleteCollection(this, transaction, collection);
long start = System.currentTimeMillis();
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
final XmldbURI uri = collection.getURI();
final String collName = uri.getRawCollectionPath();
// Notify the collection configuration manager
pool.getConfigurationManager().invalidateAll(uri);
if(LOG.isDebugEnabled()) {
LOG.debug("Removing children collections from their parent '" + collName + "'...");
}
for(Iterator<XmldbURI> i = collection.collectionIterator(this); i.hasNext();) {
final XmldbURI childName = i.next();
//TODO : resolve from collection's base URI
//TODO : resulve URIs !!! (uri.resolve(childName))
Collection childCollection = openCollection(uri.append(childName), Lock.WRITE_LOCK);
try {
removeCollection(transaction, childCollection);
} finally {
if (childCollection != null) {
childCollection.getLock().release(Lock.WRITE_LOCK);
} else {
LOG.warn("childCollection is null !");
}
}
}
//Drop all index entries
notifyDropIndex(collection);
// Drop custom indexes
indexController.removeCollection(collection, this);
if(!isRoot) {
// remove from parent collection
//TODO : resolve URIs ! (uri.resolve(".."))
Collection parentCollection = openCollection(collection.getParentURI(), Lock.WRITE_LOCK);
// keep the lock for the transaction
if(transaction != null) {
transaction.registerLock(parentCollection.getLock(), Lock.WRITE_LOCK);
}
if(parentCollection != null) {
try {
LOG.debug("Removing collection '" + collName + "' from its parent...");
//TODO : resolve from collection's base URI
parentCollection.removeCollection(this, uri.lastSegment());
saveCollection(transaction, parentCollection);
} catch(LockException e) {
LOG.warn("LockException while removing collection '" + collName + "'");
}
finally {
if(transaction == null){
parentCollection.getLock().release(Lock.WRITE_LOCK);
}
}
}
}
//Update current state
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
// remove the metadata of all documents in the collection
Value docKey = new CollectionStore.DocumentKey(collection.getId());
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, docKey);
collectionsDb.removeAll(transaction, query);
// if this is not the root collection remove it...
if(!isRoot) {
Value key = new CollectionStore.CollectionKey(collName);
//... from the disk
collectionsDb.remove(transaction, key);
//... from the cache
collectionsCache.remove(collection);
//and free its id for any futher use
freeCollectionId(transaction, collection.getId());
} else {
//Simply save the collection on disk
//It will remain cached
//and its id well never be made available
saveCollection(transaction, collection);
}
}
catch(LockException e) {
LOG.warn("Failed to acquire lock on '" + collectionsDb.getFile().getName() + "'");
}
//catch(ReadOnlyException e) {
//throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
//}
catch(BTreeException e) {
LOG.warn("Exception while removing collection: " + e.getMessage(), e);
}
catch(IOException e) {
LOG.warn("Exception while removing collection: " + e.getMessage(), e);
}
finally {
lock.release(Lock.WRITE_LOCK);
}
//Remove child resources
if (LOG.isDebugEnabled()) {
LOG.debug("Removing resources in '" + collName + "'...");
}
for(Iterator<DocumentImpl> i = collection.iterator(this); i.hasNext();) {
final DocumentImpl doc = i.next();
pool.getDocumentTrigger().beforeDeleteDocument(this, transaction, doc);
//Remove doc's metadata
// WM: now removed in one step. see above.
//removeResourceMetadata(transaction, doc);
//Remove document nodes' index entries
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
Value ref = new NodeRef(doc.getDocId());
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
domDb.remove(transaction, query, null);
} catch(BTreeException e) {
LOG.warn("btree error while removing document", e);
} catch(IOException e) {
LOG.warn("io error while removing document", e);
}
catch(TerminatedException e) {
LOG.warn("method terminated", e);
}
return null;
}
}.run();
//Remove nodes themselves
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
if(doc.getResourceType() == DocumentImpl.BINARY_FILE) {
long page = ((BinaryDocument)doc).getPage();
if (page > Page.NO_PAGE)
domDb.removeOverflowValue(transaction, page);
} else {
StoredNode node = (StoredNode)doc.getFirstChild();
domDb.removeAll(transaction, node.getInternalAddress());
}
return null;
}
}.run();
pool.getDocumentTrigger().afterDeleteDocument(this, transaction, doc.getURI());
//Make doc's id available again
freeResourceId(transaction, doc.getDocId());
}
//now that the database has been updated, update the binary collections on disk
final File fsSourceDir = getCollectionFile(fsDir,collection.getURI(),false);
final File fsTargetDir = getCollectionFile(fsBackupDir,transaction,collection.getURI(),true);
// remove child binary collections
if (fsSourceDir.exists()) {
fsTargetDir.getParentFile().mkdirs();
//XXX: log first, rename second ??? -shabanovd
if(fsSourceDir.renameTo(fsTargetDir)) {
final Loggable loggable = new RenameBinaryLoggable(this,transaction,fsSourceDir,fsTargetDir);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
//XXX: throw IOException -shabanovd
LOG.fatal("Cannot rename "+fsSourceDir+" to "+fsTargetDir);
}
}
if(LOG.isDebugEnabled()) {
LOG.debug("Removing collection '" + collName + "' took " + (System.currentTimeMillis() - start));
}
triggersVisitor.afterDeleteCollection(this, transaction, collection.getURI());
pool.getCollectionTrigger().afterDeleteCollection(this, transaction, collection.getURI());
return true;
}
} finally {
pool.getProcessMonitor().endJob();
}
}
/**
* Saves the specified collection to storage. Collections are usually cached in
* memory. If a collection is modified, this method needs to be called to make
* the changes persistent.
*
* Note: appending a new document to a collection does not require a save.
*
* @throws PermissionDeniedException
* @throws IOException
* @throws TriggerException
*/
@Override
public void saveCollection(Txn transaction, Collection collection) throws PermissionDeniedException, IOException, TriggerException {
if (collection == null) {
LOG.error("NativeBroker.saveCollection called with collection == null! Aborting.");
return;
}
if (pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
if (!pool.isInitializing()) {
// don't cache the collection during initialization: SecurityManager is not yet online
pool.getCollectionsCache().add(collection);
}
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
if(collection.getId() == Collection.UNKNOWN_COLLECTION_ID) {
collection.setId(getNextCollectionId(transaction));
}
Value name = new CollectionStore.CollectionKey(collection.getURI().toString());
final VariableByteOutputStream ostream = new VariableByteOutputStream(8);
collection.write(this, ostream);
final long addr = collectionsDb.put(transaction, name, ostream.data(), true);
if (addr == BFile.UNKNOWN_ADDRESS) {
//TODO : exception !!! -pb
LOG.warn("could not store collection data for '" + collection.getURI()+ "'");
return;
}
collection.setAddress(addr);
ostream.close();
} catch (ReadOnlyException e) {
LOG.warn(DATABASE_IS_READ_ONLY);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
/**
* Release the collection id assigned to a collection so it can be
* reused later.
*
* @param id
* @throws PermissionDeniedException
*/
protected void freeCollectionId(Txn transaction, int id) throws PermissionDeniedException {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.FREE_COLLECTION_ID_KEY);
Value value = collectionsDb.get(key);
if (value != null) {
byte[] data = value.getData();
byte[] ndata = new byte[data.length + Collection.LENGTH_COLLECTION_ID];
System.arraycopy(data, 0, ndata, OFFSET_VALUE, data.length);
ByteConversion.intToByte(id, ndata, OFFSET_COLLECTION_ID);
collectionsDb.put(transaction, key, ndata, true);
} else {
byte[] data = new byte[Collection.LENGTH_COLLECTION_ID];
ByteConversion.intToByte(id, data, OFFSET_COLLECTION_ID);
collectionsDb.put(transaction, key, data, true);
}
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
//TODO : rethrow ? -pb
//} catch (ReadOnlyException e) {
//throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
/**
* Get the next free collection id. If a collection is removed, its collection id
* is released so it can be reused.
*
* @return next free collection id.
* @throws ReadOnlyException
*/
public int getFreeCollectionId(Txn transaction) throws ReadOnlyException {
int freeCollectionId = Collection.UNKNOWN_COLLECTION_ID;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.FREE_COLLECTION_ID_KEY);
Value value = collectionsDb.get(key);
if (value != null) {
byte[] data = value.getData();
freeCollectionId = ByteConversion.byteToInt(data, data.length - Collection.LENGTH_COLLECTION_ID);
//LOG.debug("reusing collection id: " + freeCollectionId);
if(data.length - Collection.LENGTH_COLLECTION_ID > 0) {
byte[] ndata = new byte[data.length - Collection.LENGTH_COLLECTION_ID];
System.arraycopy(data, 0, ndata, OFFSET_COLLECTION_ID, ndata.length);
collectionsDb.put(transaction, key, ndata, true);
} else {
collectionsDb.remove(transaction, key);
}
}
return freeCollectionId;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
return Collection.UNKNOWN_COLLECTION_ID;
//TODO : rethrow ? -pb
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
/**
* Get the next available unique collection id.
*
* @return next available unique collection id
* @throws ReadOnlyException
*/
public int getNextCollectionId(Txn transaction) throws ReadOnlyException {
int nextCollectionId = getFreeCollectionId(transaction);
if (nextCollectionId != Collection.UNKNOWN_COLLECTION_ID)
return nextCollectionId;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.NEXT_COLLECTION_ID_KEY);
Value data = collectionsDb.get(key);
if (data != null) {
nextCollectionId = ByteConversion.byteToInt(data.getData(), OFFSET_COLLECTION_ID);
++nextCollectionId;
}
byte[] d = new byte[Collection.LENGTH_COLLECTION_ID];
ByteConversion.intToByte(nextCollectionId, d, OFFSET_COLLECTION_ID);
collectionsDb.put(transaction, key, d, true);
return nextCollectionId;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
return Collection.UNKNOWN_COLLECTION_ID;
//TODO : rethrow ? -pb
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
@Override
public void reindexCollection(XmldbURI collectionName) throws PermissionDeniedException {
if (pool.isReadOnly())
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
collectionName = prepend(collectionName.toCollectionPathURI());
Collection collection = getCollection(collectionName);
if (collection == null) {
LOG.debug("collection " + collectionName + " not found!");
return;
}
reindexCollection(collection, NodeProcessor.MODE_STORE);
}
public void reindexCollection(Collection collection, int mode) throws PermissionDeniedException {
TransactionManager transact = pool.getTransactionManager();
Txn transaction = transact.beginTransaction();
try {
pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_REINDEX_COLLECTION, collection.getURI());
reindexCollection(transaction, collection, mode);
transact.commit(transaction);
} catch (TransactionException e) {
transact.abort(transaction);
LOG.warn("An error occurred during reindex: " + e.getMessage(), e);
} finally {
pool.getProcessMonitor().endJob();
}
}
public void reindexCollection(Txn transaction, Collection collection, int mode) throws PermissionDeniedException {
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
if (!collection.getPermissions().validate(getSubject(), Permission.WRITE))
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection " + collection.getURI());
LOG.debug("Reindexing collection " + collection.getURI());
if (mode == NodeProcessor.MODE_STORE)
dropCollectionIndex(transaction, collection);
for(Iterator<DocumentImpl> i = collection.iterator(this); i.hasNext(); ) {
DocumentImpl next = i.next();
reindexXMLResource(transaction, next, mode);
}
for(Iterator<XmldbURI> i = collection.collectionIterator(this); i.hasNext(); ) {
XmldbURI next = i.next();
//TODO : resolve URIs !!! (collection.getURI().resolve(next))
Collection child = getCollection(collection.getURI().append(next));
if(child == null)
LOG.warn("Collection '" + next + "' not found");
else {
reindexCollection(transaction, child, mode);
}
}
}
}
public void dropCollectionIndex(final Txn transaction, Collection collection) throws PermissionDeniedException {
if (pool.isReadOnly())
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
if (!collection.getPermissions().validate(getSubject(), Permission.WRITE))
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection " +collection.getURI());
notifyDropIndex(collection);
indexController.removeCollection(collection, this);
for (Iterator<DocumentImpl> i = collection.iterator(this); i.hasNext();) {
final DocumentImpl doc = i.next();
LOG.debug("Dropping index for document " + doc.getFileURI());
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
Value ref = new NodeRef(doc.getDocId());
IndexQuery query =
new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
domDb.remove(transaction, query, null);
domDb.flush();
} catch (BTreeException e) {
LOG.warn("btree error while removing document", e);
} catch (DBException e) {
LOG.warn("db error while removing document", e);
} catch (IOException e) {
LOG.warn("io error while removing document", e);
} catch (TerminatedException e) {
LOG.warn("method terminated", e);
}
return null;
}
}
.run();
}
}
/** Store into the temporary collection of the database a given in-memory Document
*
* The in-memory Document is stored without a transaction and is not journalled,
* if there is no temporary collection, this will first be created with a transaction
*
* @param doc The in-memory Document to store
* @return The document stored in the temp collection
*/
@Override
public DocumentImpl storeTempResource(org.exist.memtree.DocumentImpl doc)
throws EXistException, PermissionDeniedException, LockException {
//store the currentUser
Subject currentUser = getSubject();
//elevate getUser() to DBA_USER
setSubject(pool.getSecurityManager().getSystemSubject() );
//start a transaction
TransactionManager transact = pool.getTransactionManager();
Txn transaction = transact.beginTransaction();
//create a name for the temporary document
XmldbURI docName = XmldbURI.create(MessageDigester.md5(Thread.currentThread().getName() + Long.toString(System.currentTimeMillis()),false) + ".xml");
//get the temp collection
Collection temp = openCollection(XmldbURI.TEMP_COLLECTION_URI, Lock.WRITE_LOCK);
boolean created = false;
try {
//if no temp collection
if(temp == null) {
//creates temp collection (with write lock)
temp = createTempCollection(transaction);
if(temp == null) {
LOG.warn("Failed to create temporary collection");
//TODO : emergency exit?
}
created = true;
}
//create a temporary document
DocumentImpl targetDoc = new DocumentImpl(pool, temp, docName);
targetDoc.getPermissions().setMode(Permission.DEFAULT_TEMPORARY_DOCUMENT_PERM);
long now = System.currentTimeMillis();
DocumentMetadata metadata = new DocumentMetadata();
metadata.setLastModified(now);
metadata.setCreated(now);
targetDoc.setMetadata(metadata);
targetDoc.setDocId(getNextResourceId(transaction, temp));
//index the temporary document
DOMIndexer indexer = new DOMIndexer(this, transaction, doc, targetDoc); //NULL transaction, so temporary fragment is not journalled - AR
indexer.scan();
indexer.store();
//store the temporary document
temp.addDocument(transaction, this, targetDoc); //NULL transaction, so temporary fragment is not journalled - AR
// unlock the temp collection
if(transaction == null)
temp.getLock().release(Lock.WRITE_LOCK);
else if (!created)
transaction.registerLock(temp.getLock(), Lock.WRITE_LOCK);
//NULL transaction, so temporary fragment is not journalled - AR
storeXMLResource(transaction, targetDoc);
flush();
closeDocument();
//commit the transaction
transact.commit(transaction);
return targetDoc;
} catch (Exception e) {
LOG.warn("Failed to store temporary fragment: " + e.getMessage(), e);
//abort the transaction
transact.abort(transaction);
}
finally {
//restore the user
setUser(currentUser);
}
return null;
}
/** remove all documents from temporary collection
*
* @param forceRemoval Should temporary resources be forcefully removed
*/
@Override
public void cleanUpTempResources(boolean forceRemoval) throws PermissionDeniedException {
Collection temp = getCollection(XmldbURI.TEMP_COLLECTION_URI);
if(temp == null)
return;
TransactionManager transact = pool.getTransactionManager();
Txn transaction = transact.beginTransaction();
try {
removeCollection(transaction, temp);
transact.commit(transaction);
} catch(Exception e) {
transact.abort(transaction);
LOG.warn("Failed to remove temp collection: " + e.getMessage(), e);
}
}
@Override
public DocumentImpl getResourceById(int collectionId, byte resourceType, int documentId) throws PermissionDeniedException {
XmldbURI uri = null;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
//final VariableByteOutputStream ostream = new VariableByteOutputStream(8);
//doc.write(ostream);
//Value key = new CollectionStore.DocumentKey(doc.getCollection().getId(), doc.getResourceType(), doc.getDocId());
//collectionsDb.put(transaction, key, ostream.data(), true);
//Value collectionKey = new CollectionStore.CollectionKey
//collectionsDb.get(Value.EMPTY_VALUE)
//get the collection uri
String collectionUri = null;
if(collectionId == 0) {
collectionUri = "/db";
} else {
for(Value collectionDbKey : collectionsDb.getKeys()) {
if(collectionDbKey.data()[0] == CollectionStore.KEY_TYPE_COLLECTION) {
//Value collectionDbValue = collectionsDb.get(collectionDbKey);
VariableByteInput vbi = collectionsDb.getAsStream(collectionDbKey);
int id = vbi.readInt();
//check if the collection id matches (first 4 bytes)
if(collectionId == id) {
collectionUri = new String(Arrays.copyOfRange(collectionDbKey.data(), 1, collectionDbKey.data().length));
break;
}
}
}
}
//get the resource uri
Value key = new CollectionStore.DocumentKey(collectionId, resourceType, documentId);
VariableByteInput vbi = collectionsDb.getAsStream(key);
vbi.readInt(); //skip doc id
final String resourceUri = vbi.readUTF();
//get the resource
uri = XmldbURI.createInternal(collectionUri + "/" + resourceUri);
} catch (TerminatedException te) {
LOG.error("Query Terminated", te);
return null;
} catch (BTreeException bte) {
LOG.error("Problem reading btree", bte);
return null;
} catch (LockException e) {
LOG.error("Failed to acquire lock on " + collectionsDb.getFile().getName());
return null;
} catch (IOException e) {
LOG.error("IOException while reading recource data", e);
return null;
} finally {
lock.release(Lock.READ_LOCK);
}
return getResource(uri, Permission.READ);
}
/** store Document entry into its collection. */
@Override
public void storeXMLResource(final Txn transaction, final DocumentImpl doc) {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
final VariableByteOutputStream ostream = new VariableByteOutputStream(8);
doc.write(ostream);
Value key = new CollectionStore.DocumentKey(doc.getCollection().getId(), doc.getResourceType(), doc.getDocId());
collectionsDb.put(transaction, key, ostream.data(), true);
//} catch (ReadOnlyException e) {
//LOG.warn(DATABASE_IS_READ_ONLY);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while writing document data", e);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
public void storeMetadata(final Txn transaction, final DocumentImpl doc) throws TriggerException {
Collection col = doc.getCollection();
DocumentTriggersVisitor triggersVisitor = null;
if(col.isTriggersEnabled()) {
triggersVisitor = col.getConfiguration(this).getDocumentTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeUpdateDocumentMetadata(this, transaction, doc);
}
storeXMLResource(transaction, doc);
if (triggersVisitor != null)
triggersVisitor.afterUpdateDocumentMetadata(this, transaction, doc);
}
private File getCollectionFile(File dir,XmldbURI uri,boolean create) throws IOException {
return getCollectionFile(dir,null,uri,create);
}
public File getCollectionBinaryFileFsPath(XmldbURI uri) {
return new File(fsDir, uri.getURI().toString());
}
private File getCollectionFile(File dir,Txn transaction,XmldbURI uri,boolean create)
throws IOException {
if (transaction!=null) {
dir = new File(dir,"txn."+transaction.getId());
if (create && !dir.exists()) {
if (!dir.mkdir()) {
throw new IOException("Cannot make transaction filesystem directory: "+dir);
}
}
}
XmldbURI [] segments = uri.getPathSegments();
File binFile = dir;
int last = segments.length-1;
for (int i=0; i<segments.length; i++) {
binFile = new File(binFile,segments[i].toString());
if (create && i!=last && !binFile.exists()) {
if (!binFile.mkdir()) {
throw new IOException("Cannot make collection filesystem directory: "+binFile);
}
}
}
return binFile;
}
@Deprecated
@Override
public void storeBinaryResource(final Txn transaction, final BinaryDocument blob, final byte[] data)
throws IOException {
blob.setPage(Page.NO_PAGE);
File binFile = getCollectionFile(fsDir,blob.getURI(),true);
File backupFile = null;
boolean exists = binFile.exists();
if (exists) {
backupFile = getCollectionFile(fsBackupDir,transaction,blob.getURI(),true);
if (!binFile.renameTo(backupFile)) {
throw new IOException("Cannot backup binary resource for journal to "+backupFile);
}
}
OutputStream os = new FileOutputStream(binFile);
os.write(data,0,data.length);
os.close();
if (exists) {
Loggable loggable = new UpdateBinaryLoggable(this,transaction,binFile,backupFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
Loggable loggable = new CreateBinaryLoggable(this,transaction,binFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
}
}
@Override
public void storeBinaryResource(final Txn transaction, final BinaryDocument blob, final InputStream is)
throws IOException {
blob.setPage(Page.NO_PAGE);
File binFile = getCollectionFile(fsDir,blob.getURI(),true);
File backupFile = null;
boolean exists = binFile.exists();
if (exists) {
backupFile = getCollectionFile(fsBackupDir,transaction,blob.getURI(),true);
if (!binFile.renameTo(backupFile)) {
throw new IOException("Cannot backup binary resource for journal to "+backupFile);
}
}
byte [] buffer = new byte[65536];
OutputStream os = new FileOutputStream(binFile);
int len;
while ((len = is.read(buffer))>=0) {
if (len>0) {
os.write(buffer,0,len);
}
}
os.close();
if (exists) {
Loggable loggable = new UpdateBinaryLoggable(this,transaction,binFile,backupFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
Loggable loggable = new CreateBinaryLoggable(this,transaction,binFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
}
}
public Document getXMLResource(XmldbURI fileName) throws PermissionDeniedException {
return getResource(fileName, Permission.READ);
}
/**
* get a document by its file name. The document's file name is used to
* identify a document.
*
*@param fileName absolute file name in the database;
*name can be given with or without the leading path /db/shakespeare.
*@return The document value
*@exception PermissionDeniedException
*/
@Override
public DocumentImpl getResource(XmldbURI fileName, int accessType) throws PermissionDeniedException {
fileName = prepend(fileName.toCollectionPathURI());
//TODO : resolve URIs !!!
XmldbURI collUri = fileName.removeLastSegment();
XmldbURI docUri = fileName.lastSegment();
Collection collection = getCollection(collUri);
if (collection == null) {
LOG.debug("collection '" + collUri + "' not found!");
return null;
}
if(!collection.getPermissions().validate(getSubject(), Permission.READ)) {
throw new PermissionDeniedException("Permission denied to read collection '" + collUri + "' by " + getSubject().getName());
}
DocumentImpl doc = collection.getDocument(this, docUri);
if (doc == null) {
LOG.debug("document '" + fileName + "' not found!");
return null;
}
if(!doc.getPermissions().validate(getSubject(), accessType)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' not allowed requested access to document '" + fileName + "'");
}
if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
BinaryDocument bin = (BinaryDocument)doc;
try {
bin.setContentLength(getBinaryResourceSize(bin));
} catch (IOException ex) {
LOG.fatal("Cannot get content size for "+bin.getURI(),ex);
}
}
return doc;
}
@Override
public DocumentImpl getXMLResource(XmldbURI fileName, int lockMode) throws PermissionDeniedException {
if(fileName==null) {
return null;
}
fileName = prepend(fileName.toCollectionPathURI());
//TODO : resolve URIs !
XmldbURI collUri = fileName.removeLastSegment();
XmldbURI docUri = fileName.lastSegment();
Collection collection = openCollection(collUri, lockMode);
if (collection == null) {
LOG.debug("collection '" + collUri + "' not found!");
return null;
}
try {
if (!collection.getPermissions().validate(getSubject(), Permission.READ))
throw new PermissionDeniedException("Permission denied to read collection '" + collUri + "' by " + getSubject().getName());
DocumentImpl doc = collection.getDocumentWithLock(this, docUri, lockMode);
if (doc == null) {
//LOG.debug("document '" + fileName + "' not found!");
return null;
}
//if (!doc.getMode().validate(getUser(), Permission.READ))
//throw new PermissionDeniedException("not allowed to read document");
if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
BinaryDocument bin = (BinaryDocument)doc;
try {
bin.setContentLength(getBinaryResourceSize(bin));
} catch (IOException ex) {
LOG.fatal("Cannot get content size for "+bin.getURI(),ex);
}
}
return doc;
} catch (LockException e) {
LOG.warn("Could not acquire lock on document " + fileName, e);
//TODO : exception ? -pb
} finally {
//TOUNDERSTAND : by whom is this lock acquired ? -pb
// If we don't check for the NO_LOCK we'll pop someone else's lock off
if(lockMode != Lock.NO_LOCK)
collection.release(lockMode);
}
return null;
}
@Override
public void readBinaryResource(final BinaryDocument blob, final OutputStream os)
throws IOException {
InputStream is = null;
try {
is = getBinaryResource(blob);
byte [] buffer = new byte[655360];
int len;
while ((len=is.read(buffer))>=0) {
os.write(buffer,0,len);
}
} finally {
if(is != null)
is.close();
}
}
@Override
public long getBinaryResourceSize(final BinaryDocument blob)
throws IOException {
File binFile = getCollectionFile(fsDir,blob.getURI(),false);
return binFile.length();
}
@Override
public InputStream getBinaryResource(final BinaryDocument blob)
throws IOException {
File binFile = getCollectionFile(fsDir,blob.getURI(),false);
return new FileInputStream(binFile);
}
//TODO : consider a better cooperation with Collection -pb
@Override
public void getCollectionResources(Collection.InternalAccess collectionInternalAccess) {
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.DocumentKey(collectionInternalAccess.getId());
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key);
collectionsDb.query(query, new DocumentCallback(collectionInternalAccess));
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while reading document data", e);
} catch (BTreeException e) {
LOG.warn("Exception while reading document data", e);
} catch (TerminatedException e) {
LOG.warn("Exception while reading document data", e);
} finally {
lock.release(Lock.READ_LOCK);
}
}
@Override
public void getResourcesFailsafe(BTreeCallback callback, boolean fullScan) throws TerminatedException {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.DocumentKey();
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key);
if (fullScan)
collectionsDb.rawScan(query, callback);
else
collectionsDb.query(query, callback);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while reading document data", e);
} catch (BTreeException e) {
LOG.warn("Exception while reading document data", e);
} finally {
lock.release(Lock.READ_LOCK);
}
}
@Override
public void getCollectionsFailsafe(BTreeCallback callback) throws TerminatedException {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.CollectionKey();
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key);
collectionsDb.query(query, callback);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while reading document data", e);
} catch (BTreeException e) {
LOG.warn("Exception while reading document data", e);
} finally {
lock.release(Lock.READ_LOCK);
}
}
/**
* Get all the documents in this database matching the given
* document-type's name.
* @return The documentsByDoctype value
*/
@Override
public MutableDocumentSet getXMLResourcesByDoctype(String doctypeName, MutableDocumentSet result) throws PermissionDeniedException {
MutableDocumentSet docs = getAllXMLResources(new DefaultDocumentSet());
for (Iterator<DocumentImpl> i = docs.getDocumentIterator(); i.hasNext();) {
DocumentImpl doc = i.next();
DocumentType doctype = doc.getDoctype();
if (doctype == null)
continue;
if (doctypeName.equals(doctype.getName())
&& doc.getCollection().getPermissions().validate(getSubject(), Permission.READ)
&& doc.getPermissions().validate(getSubject(), Permission.READ))
result.add(doc);
}
return result;
}
/**
* Adds all the documents in the database to the specified DocumentSet.
*
* @param docs a (possibly empty) document set to which the found
* documents are added.
*/
@Override
public MutableDocumentSet getAllXMLResources(MutableDocumentSet docs) throws PermissionDeniedException {
long start = System.currentTimeMillis();
Collection rootCollection = null;
try {
rootCollection = openCollection(XmldbURI.ROOT_COLLECTION_URI, Lock.READ_LOCK);
rootCollection.allDocs(this, docs, true);
if (LOG.isDebugEnabled()) {
LOG.debug("getAllDocuments(DocumentSet) - end - "
+ "loading "
+ docs.getDocumentCount()
+ " documents took "
+ (System.currentTimeMillis() - start)
+ "ms.");
}
return docs;
} finally {
if (rootCollection != null)
rootCollection.release(Lock.READ_LOCK);
}
}
//TODO : consider a better cooperation with Collection -pb
@Override
public void getResourceMetadata(DocumentImpl document) {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.DocumentKey(document.getCollection().getId(), document.getResourceType(), document.getDocId());
VariableByteInput istream = collectionsDb.getAsStream(key);
if(istream != null) {
document.readDocumentMeta(istream);
}
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while reading document data", e);
} finally {
lock.release(Lock.READ_LOCK);
}
}
/**
* @param doc src document
* @param destination destination collection
* @param newName the new name for the document
*/
@Override
public void copyResource(Txn transaction, DocumentImpl doc, Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, EXistException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
Collection collection = doc.getCollection();
if(!collection.getPermissions().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' has insufficient privileges to copy the resource '" + doc.getFileURI() + "'.");
}
if(!doc.getPermissions().validate(getSubject(), Permission.READ)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' has insufficient privileges to copy the resource '" + doc.getFileURI() + "'.");
}
if(newName == null) {
newName = doc.getFileURI();
}
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
DocumentImpl oldDoc = destination.getDocument(this, newName);
if(!destination.getPermissions().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' does not have execute access on the destination collection '" + destination.getURI() + "'.");
}
if(destination.hasChildCollection(this, newName.lastSegment())) {
throw new EXistException(
"The collection '" + destination.getURI() + "' already has a sub-collection named '" + newName.lastSegment() + "', you cannot create a Document with the same name as an existing collection."
);
}
final XmldbURI newURI = destination.getURI().append(newName);
final XmldbURI oldUri = doc.getURI();
if(oldDoc == null) {
if(!destination.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' does not have write access on the destination collection '" + destination.getURI() + "'.");
}
} else {
//overwrite existing document
if(doc.getDocId() == oldDoc.getDocId()){
throw new EXistException("Cannot copy resource to itself '" +doc.getURI() + "'.");
}
if(!oldDoc.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("A resource with the same name already exists in the target collection '" + oldDoc.getURI() + "', and you do not have write access on that resource.");
}
getDatabase().getDocumentTrigger().beforeDeleteDocument(this, transaction, oldDoc);
getDatabase().getDocumentTrigger().afterDeleteDocument(this, transaction, newURI);
}
getDatabase().getDocumentTrigger().beforeCopyDocument(this, transaction, doc, newURI);
final DocumentTriggersVisitor triggersVisitor = collection.getConfiguration(this).getDocumentTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeCopyDocument(this, transaction, doc, newURI);
DocumentImpl newDocument = null;
if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
InputStream is = null;
try {
is = getBinaryResource((BinaryDocument) doc);
newDocument = destination.addBinaryResource(transaction, this, newName, is, doc.getMetadata().getMimeType(),-1);
} finally {
if(is != null)
is.close();
}
} else {
DocumentImpl newDoc = new DocumentImpl(pool, destination, newName);
newDoc.copyOf(doc);
newDoc.setDocId(getNextResourceId(transaction, destination));
newDoc.setPermissions(doc.getPermissions());
newDoc.getUpdateLock().acquire(Lock.WRITE_LOCK);
try {
copyXMLResource(transaction, doc, newDoc);
destination.addDocument(transaction, this, newDoc);
storeXMLResource(transaction, newDoc);
} finally {
newDoc.getUpdateLock().release(Lock.WRITE_LOCK);
}
newDocument = newDoc;
}
getDatabase().getDocumentTrigger().afterCopyDocument(this, transaction, newDocument, oldUri);
triggersVisitor.afterCopyDocument(this, transaction, newDocument, oldUri);
} catch (IOException e) {
LOG.warn("An error occurred while copying resource", e);
} catch (TriggerException e) {
throw new PermissionDeniedException(e.getMessage(), e);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
}
private void copyXMLResource(Txn transaction, DocumentImpl oldDoc, DocumentImpl newDoc) {
LOG.debug("Copying document " + oldDoc.getFileURI() + " to " +
newDoc.getURI());
final long start = System.currentTimeMillis();
indexController.setDocument(newDoc, StreamListener.STORE);
StreamListener listener = indexController.getStreamListener();
NodeList nodes = oldDoc.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
copyNodes(transaction, iterator, node, new NodePath(), newDoc, false, true, listener);
}
flush();
closeDocument();
LOG.debug("Copy took " + (System.currentTimeMillis() - start) + "ms.");
}
/**
* Move (and/or rename) a Resource to another collection
*
* @param doc source document
* @param destination the destination collection
* @param newName the new name for the resource
*
* @throws TriggerException
*/
@Override
public void moveResource(Txn transaction, DocumentImpl doc, Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
final Account docUser = doc.getUserLock();
if(docUser != null) {
if(!(getSubject().getName()).equals(docUser.getName())) {
throw new PermissionDeniedException("Cannot move '" + doc.getFileURI() + " because is locked by getUser() '" + docUser.getName() + "'");
}
}
final Collection collection = doc.getCollection();
if(!collection.getPermissions().validate(getSubject(), Permission.WRITE | Permission.EXECUTE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on source Collection to move resource " + doc.getFileURI());
}
//must be owner of have execute access for the rename
if(!((doc.getPermissions().getOwner().getId() != getSubject().getId()) | (doc.getPermissions().validate(getSubject(), Permission.EXECUTE)))) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on destination Collection to move resource " + doc.getFileURI());
}
if(!destination.getPermissions().validate(getSubject(), Permission.WRITE | Permission.EXECUTE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on destination Collection to move resource " + doc.getFileURI());
}
/* Copy reference to original document */
final File fsOriginalDocument = getCollectionFile(fsDir, doc.getURI(), true);
final XmldbURI oldName = doc.getFileURI();
if(newName == null) {
newName = oldName;
}
try {
if(destination.hasChildCollection(this, newName.lastSegment())) {
throw new PermissionDeniedException(
"The collection '" + destination.getURI() + "' have collection '" + newName.lastSegment() + "'. " +
"Document with same name can't be created."
);
}
// check if the move would overwrite a collection
//TODO : resolve URIs : destination.getURI().resolve(newName)
final DocumentImpl oldDoc = destination.getDocument(this, newName);
if(oldDoc != null) {
if(doc.getDocId() == oldDoc.getDocId()) {
throw new PermissionDeniedException("Cannot move resource to itself '"+doc.getURI()+"'.");
}
// GNU mv command would prompt for Confirmation here, you can say yes or pass the '-f' flag. As we cant prompt for confirmation we assume OK
/* if(!oldDoc.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Resource with same name exists in target collection and write is denied");
}
*/
pool.getDocumentTrigger().beforeDeleteDocument(this, transaction, oldDoc);
pool.getDocumentTrigger().afterDeleteDocument(this, transaction, oldDoc.getURI());
}
boolean renameOnly = collection.getId() == destination.getId();
final XmldbURI oldURI = doc.getURI();
final XmldbURI newURI = destination.getURI().append(newName);
pool.getDocumentTrigger().beforeMoveDocument(this, transaction, doc, newURI);
final DocumentTriggersVisitor triggersVisitor = collection.getConfiguration(this).getDocumentTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeMoveDocument(this, transaction, doc, newURI);
collection.unlinkDocument(this, doc);
removeResourceMetadata(transaction, doc);
doc.setFileURI(newName);
if(doc.getResourceType() == DocumentImpl.XML_FILE) {
if(!renameOnly) {
//XXX: BUG: doc have new uri here!
dropIndex(transaction, doc);
saveCollection(transaction, collection);
}
doc.setCollection(destination);
destination.addDocument(transaction, this, doc);
if(!renameOnly) {
// reindexing
reindexXMLResource(transaction, doc, NodeProcessor.MODE_REPAIR);
}
} else {
// binary resource
doc.setCollection(destination);
destination.addDocument(transaction, this, doc);
final File colDir = getCollectionFile(fsDir,destination.getURI(),true);
final File binFile = new File(colDir,newName.lastSegment().toString());
final File sourceFile = getCollectionFile(fsDir,doc.getURI(),false);
/* Create required directories */
binFile.getParentFile().mkdirs();
/* Rename original file to new location */
if(fsOriginalDocument.renameTo(binFile)) {
final Loggable loggable = new RenameBinaryLoggable(this,transaction,sourceFile,binFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
LOG.fatal("Cannot rename "+sourceFile+" to "+binFile+" for journaling of binary resource move.");
}
}
storeXMLResource(transaction, doc);
saveCollection(transaction, destination);
pool.getDocumentTrigger().afterMoveDocument(this, transaction, doc, oldURI);
triggersVisitor.afterMoveDocument(this, transaction, doc, oldURI);
} catch (ReadOnlyException e) {
throw new PermissionDeniedException(e.getMessage(), e);
}
}
@Override
public void removeXMLResource(final Txn transaction, final DocumentImpl document, boolean freeDocId) throws PermissionDeniedException {
if (pool.isReadOnly())
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
try {
if (LOG.isInfoEnabled()) {
LOG.info("Removing document " + document.getFileURI() +
" (" + document.getDocId() + ") ...");
}
if (freeDocId) {
pool.getDocumentTrigger().beforeDeleteDocument(this, transaction, document);
}
dropIndex(transaction, document);
if (LOG.isDebugEnabled()) {
LOG.debug("removeDocument() - removing dom");
}
if (!document.getMetadata().isReferenced()) {
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
StoredNode node = (StoredNode)document.getFirstChild();
domDb.removeAll(transaction, node.getInternalAddress());
return null;
}
}.run();
}
NodeRef ref = new NodeRef(document.getDocId());
final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
domDb.remove(transaction, idx, null);
} catch (BTreeException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (IOException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (TerminatedException e) {
LOG.warn("method terminated", e);
}
return null;
}
}.run();
removeResourceMetadata(transaction, document);
if (freeDocId) {
freeResourceId(transaction, document.getDocId());
pool.getDocumentTrigger().afterDeleteDocument(this, transaction, document.getURI());
}
} catch (ReadOnlyException e) {
LOG.warn("removeDocument(String) - " + DATABASE_IS_READ_ONLY);
} catch (TriggerException e) {
LOG.warn(e);
}
}
private void dropIndex(Txn transaction, DocumentImpl document) throws ReadOnlyException {
indexController.setDocument(document, StreamListener.REMOVE_ALL_NODES);
StreamListener listener = indexController.getStreamListener();
NodeList nodes = document.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
scanNodes(transaction, iterator, node, new NodePath(), NodeProcessor.MODE_REMOVE, listener);
}
notifyDropIndex(document);
indexController.flush();
}
@Override
public void removeBinaryResource(final Txn transaction, final BinaryDocument blob) throws PermissionDeniedException,IOException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
if(LOG.isDebugEnabled()) {
LOG.debug("removing binary resource " + blob.getDocId() + "...");
}
File binFile = getCollectionFile(fsDir,blob.getURI(),false);
if (binFile.exists()) {
File binBackupFile = getCollectionFile(fsBackupDir, transaction, blob.getURI(), true);
Loggable loggable = new RenameBinaryLoggable(this, transaction, binFile, binBackupFile);
if (!binFile.renameTo(binBackupFile)) {
// Workaround for Java bug 6213298 - renameTo() sometimes doesn't work
// See http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6213298
System.gc();
try {
Thread.sleep(50);
} catch (Exception e) {
//ignore
}
if (!binFile.renameTo(binBackupFile)) {
throw new IOException("Cannot move file " + binFile
+ " for delete journal to " + binBackupFile);
}
}
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
}
removeResourceMetadata(transaction, blob);
getIndexController().setDocument(blob, StreamListener.REMOVE_BINARY);
getIndexController().flush();
}
/**
* @param transaction
* @param document
*/
private void removeResourceMetadata(final Txn transaction, final DocumentImpl document) {
// remove document metadata
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
if (LOG.isDebugEnabled())
LOG.debug("Removing resource metadata for " + document.getDocId());
Value key = new CollectionStore.DocumentKey(document.getCollection().getId(), document.getResourceType(), document.getDocId());
collectionsDb.remove(transaction, key);
//} catch (ReadOnlyException e) {
//LOG.warn(DATABASE_IS_READ_ONLY);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} finally {
lock.release(Lock.READ_LOCK);
}
}
/**
* Release the document id reserved for a document so it
* can be reused.
*
* @param id
* @throws PermissionDeniedException
*/
protected void freeResourceId(Txn transaction, int id) throws PermissionDeniedException {
if (incrementalDocIds)
return;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.FREE_DOC_ID_KEY);
Value value = collectionsDb.get(key);
if (value != null) {
byte[] data = value.getData();
byte[] ndata = new byte[data.length + 4];
System.arraycopy(data, 0, ndata, 4, data.length);
ByteConversion.intToByte(id, ndata, 0);
collectionsDb.put(transaction, key, ndata, true);
} else {
byte[] data = new byte[4];
ByteConversion.intToByte(id, data, 0);
collectionsDb.put(transaction, key, data, true);
}
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
//TODO : rethrow ? -pb
//} catch (ReadOnlyException e) {
//throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
/**
* Get the next unused document id. If a document is removed, its doc id is
* released, so it can be reused.
*
* @return Next unused document id
* @throws ReadOnlyException
*/
public int getFreeResourceId(Txn transaction) throws ReadOnlyException {
int freeDocId = DocumentImpl.UNKNOWN_DOCUMENT_ID;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.FREE_DOC_ID_KEY);
Value value = collectionsDb.get(key);
if (value != null) {
byte[] data = value.getData();
freeDocId = ByteConversion.byteToInt(data, data.length - 4);
//LOG.debug("reusing document id: " + freeDocId);
if(data.length - 4 > 0) {
byte[] ndata = new byte[data.length - 4];
System.arraycopy(data, 0, ndata, 0, ndata.length);
collectionsDb.put(transaction, key, ndata, true);
} else {
collectionsDb.remove(transaction, key);
}
}
//TODO : maybe something ? -pb
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
return DocumentImpl.UNKNOWN_DOCUMENT_ID;
//TODO : rethrow ? -pb
} finally {
lock.release(Lock.WRITE_LOCK);
}
return freeDocId;
}
/** get next Free Doc Id
* @throws EXistException If there's no free document id */
@Override
public int getNextResourceId(Txn transaction, Collection collection) throws EXistException {
int nextDocId;
try {
nextDocId = getFreeResourceId(transaction);
} catch (ReadOnlyException e) {
//TODO : rethrow ? -pb
return 1;
}
if (nextDocId != DocumentImpl.UNKNOWN_DOCUMENT_ID)
return nextDocId;
nextDocId = 1;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.NEXT_DOC_ID_KEY);
Value data = collectionsDb.get(key);
if (data != null) {
nextDocId = ByteConversion.byteToInt(data.getData(), 0);
++nextDocId;
if (nextDocId == 0x7FFFFFFF) {
pool.setReadOnly();
throw new EXistException("Max. number of document ids reached. Database is set to " +
"read-only state. Please do a complete backup/restore to compact the db and " +
"free document ids.");
}
}
byte[] d = new byte[4];
ByteConversion.intToByte(nextDocId, d, 0);
collectionsDb.put(transaction, key, d, true);
//} catch (ReadOnlyException e) {
//LOG.warn("Database is read-only");
//return DocumentImpl.UNKNOWN_DOCUMENT_ID;
//TODO : rethrow ? -pb
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
//TODO : rethrow ? -pb
} finally {
lock.release(Lock.WRITE_LOCK);
}
return nextDocId;
}
/**
* Reindex the nodes in the document. This method will either reindex all
* descendant nodes of the passed node, or all nodes below some level of
* the document if node is null.
*/
private void reindexXMLResource(Txn transaction, DocumentImpl doc, int mode) {
if(doc.isCollectionConfig())
doc.getCollection().setCollectionConfigEnabled(false);
indexController.setDocument(doc, StreamListener.STORE);
StreamListener listener = indexController.getStreamListener();
NodeList nodes = doc.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
scanNodes(transaction, iterator, node, new NodePath(), mode, listener);
}
flush();
if(doc.isCollectionConfig())
doc.getCollection().setCollectionConfigEnabled(true);
}
@Override
public void defragXMLResource(final Txn transaction, final DocumentImpl doc) {
//TODO : use dedicated function in XmldbURI
LOG.debug("============> Defragmenting document " +
doc.getCollection().getURI() + "/" + doc.getFileURI());
final long start = System.currentTimeMillis();
try {
final long firstChild = doc.getFirstChildAddress();
// dropping old structure index
dropIndex(transaction, doc);
// dropping dom index
NodeRef ref = new NodeRef(doc.getDocId());
final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
domDb.remove(transaction, idx, null);
domDb.flush();
} catch (BTreeException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (IOException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (TerminatedException e) {
LOG.warn("method terminated", e);
} catch (DBException e) {
LOG.warn("start() - " + "error while removing doc", e);
}
return null;
}
}.run();
// create a copy of the old doc to copy the nodes into it
DocumentImpl tempDoc = new DocumentImpl(pool, doc.getCollection(), doc.getFileURI());
tempDoc.copyOf(doc);
tempDoc.setDocId(doc.getDocId());
indexController.setDocument(doc, StreamListener.STORE);
StreamListener listener = indexController.getStreamListener();
// copy the nodes
NodeList nodes = doc.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
copyNodes(transaction, iterator, node, new NodePath(), tempDoc, true, true, listener);
}
flush();
// remove the old nodes
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
domDb.removeAll(transaction, firstChild);
try {
domDb.flush();
} catch (DBException e) {
LOG.warn("start() - " + "error while removing doc", e);
}
return null;
}
}.run();
doc.copyChildren(tempDoc);
doc.getMetadata().setSplitCount(0);
doc.getMetadata().setPageCount(tempDoc.getMetadata().getPageCount());
storeXMLResource(transaction, doc);
closeDocument();
LOG.debug("Defragmentation took " + (System.currentTimeMillis() - start) + "ms.");
} catch (ReadOnlyException e) {
LOG.warn(DATABASE_IS_READ_ONLY, e);
}
}
/** consistency Check of the database; useful after XUpdates;
* called if xupdate.consistency-checks is true in configuration */
@Override
public void checkXMLResourceConsistency(DocumentImpl doc) throws EXistException {
boolean xupdateConsistencyChecks = false;
Object property = pool.getConfiguration().getProperty(PROPERTY_XUPDATE_CONSISTENCY_CHECKS);
if (property != null)
xupdateConsistencyChecks = ((Boolean) property).booleanValue();
if(xupdateConsistencyChecks) {
LOG.debug("Checking document " + doc.getFileURI());
checkXMLResourceTree(doc);
}
}
/** consistency Check of the database; useful after XUpdates;
* called by {@link #checkXMLResourceConsistency(DocumentImpl)} */
@Override
public void checkXMLResourceTree(final DocumentImpl doc) {
LOG.debug("Checking DOM tree for document " + doc.getFileURI());
boolean xupdateConsistencyChecks = false;
Object property = pool.getConfiguration().getProperty(PROPERTY_XUPDATE_CONSISTENCY_CHECKS);
if (property != null)
xupdateConsistencyChecks = ((Boolean) property).booleanValue();
if(xupdateConsistencyChecks) {
new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() throws ReadOnlyException {
LOG.debug("Pages used: " + domDb.debugPages(doc, false));
return null;
}
}.run();
NodeList nodes = doc.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
StringBuilder buf = new StringBuilder();
//Pass buf to the following method to get a dump of all node ids in the document
if (!checkNodeTree(iterator, node, buf)) {
LOG.debug("node tree: " + buf.toString());
throw new RuntimeException("Error in document tree structure");
}
}
NodeRef ref = new NodeRef(doc.getDocId());
final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() {
try {
domDb.findKeys(idx);
} catch (BTreeException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (IOException e) {
LOG.warn("start() - " + "error while removing doc", e);
}
return null;
}
}.run();
}
}
/**
* Store a node into the database. This method is called by the parser to
* write a node to the storage backend.
*
*@param node the node to be stored
*@param currentPath path expression which points to this node's
* element-parent or to itself if it is an element (currently used by
* the Broker to determine if a node's content should be
* fulltext-indexed). @param index switch to activate fulltext indexation
*/
@Override
public void storeNode(final Txn transaction, final StoredNode node, NodePath currentPath, IndexSpec indexSpec, boolean fullTextIndex) {
checkAvailableMemory();
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
final short nodeType = node.getNodeType();
final byte data[] = node.serialize();
new DOMTransaction(this, domDb, Lock.WRITE_LOCK, doc) {
@Override
public Object start() throws ReadOnlyException {
long address;
if (nodeType == Node.TEXT_NODE
|| nodeType == Node.ATTRIBUTE_NODE
|| nodeType == Node.CDATA_SECTION_NODE
|| node.getNodeId().getTreeLevel() > defaultIndexDepth)
address = domDb.add(transaction, data);
else {
address = domDb.put(transaction, new NodeRef(doc.getDocId(), node.getNodeId()), data);
}
if (address == BFile.UNKNOWN_ADDRESS)
LOG.warn("address is missing");
//TODO : how can we continue here ? -pb
node.setInternalAddress(address);
return null;
}
}.run();
++nodesCount;
ByteArrayPool.releaseByteArray(data);
nodeProcessor.reset(transaction, node, currentPath, indexSpec, fullTextIndex);
nodeProcessor.doIndex();
}
@Override
public void updateNode(final Txn transaction, final StoredNode node, boolean reindex) {
try {
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
final long internalAddress = node.getInternalAddress();
final byte[] data = node.serialize();
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() throws ReadOnlyException {
if (StorageAddress.hasAddress(internalAddress))
domDb.update(transaction, internalAddress, data);
else {
domDb.update(transaction, new NodeRef(doc.getDocId(), node.getNodeId()), data);
}
return null;
}
}.run();
ByteArrayPool.releaseByteArray(data);
} catch (Exception e) {
Value oldVal = domDb.get(node.getInternalAddress());
StoredNode old = StoredNode.deserialize(oldVal.data(),
oldVal.start(), oldVal.getLength(),
(DocumentImpl)node.getOwnerDocument(), false);
LOG.warn(
"Exception while storing "
+ node.getNodeName()
+ "; gid = "
+ node.getNodeId()
+ "; old = " + old.getNodeName(),
e);
}
}
/**
* Physically insert a node into the DOM storage.
*/
@Override
public void insertNodeAfter(final Txn transaction, final StoredNode previous, final StoredNode node) {
final byte data[] = node.serialize();
final DocumentImpl doc = (DocumentImpl)previous.getOwnerDocument();
new DOMTransaction(this, domDb, Lock.WRITE_LOCK, doc) {
@Override
public Object start() {
long address = previous.getInternalAddress();
if (address != BFile.UNKNOWN_ADDRESS) {
address = domDb.insertAfter(transaction, doc, address, data);
} else {
NodeRef ref = new NodeRef(doc.getDocId(), previous.getNodeId());
address = domDb.insertAfter(transaction, doc, ref, data);
}
node.setInternalAddress(address);
return null;
}
}.run();
}
private void copyNodes(Txn transaction, Iterator<StoredNode> iterator, StoredNode node,
NodePath currentPath, DocumentImpl newDoc, boolean defrag, boolean index,
StreamListener listener) {
copyNodes(transaction, iterator, node, currentPath, newDoc, defrag, index, listener, null);
}
private void copyNodes(Txn transaction, Iterator<StoredNode> iterator, StoredNode node,
NodePath currentPath, DocumentImpl newDoc, boolean defrag, boolean index,
StreamListener listener, NodeId oldNodeId) {
if (node.getNodeType() == Node.ELEMENT_NODE)
currentPath.addComponent(node.getQName());
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
final long oldAddress = node.getInternalAddress();
node.setOwnerDocument(newDoc);
node.setInternalAddress(BFile.UNKNOWN_ADDRESS);
storeNode(transaction, node, currentPath, null, index);
if (defrag && oldNodeId != null)
pool.getNotificationService().notifyMove(oldNodeId, node);
if (node.getNodeType() == Node.ELEMENT_NODE) {
//save old value, whatever it is
long address = node.getInternalAddress();
node.setInternalAddress(oldAddress);
endElement(node, currentPath, null);
//restore old value, whatever it was
node.setInternalAddress(address);
node.setDirty(false);
}
if (node.getNodeId().getTreeLevel() == 1)
newDoc.appendChild(node);
node.setOwnerDocument(doc);
if (listener != null) {
switch (node.getNodeType()) {
case Node.TEXT_NODE :
listener.characters(transaction, (TextImpl) node, currentPath);
break;
case Node.ELEMENT_NODE :
listener.startElement(transaction, (ElementImpl) node, currentPath);
break;
case Node.ATTRIBUTE_NODE :
listener.attribute(transaction, (AttrImpl) node, currentPath);
break;
case Node.COMMENT_NODE :
case Node.PROCESSING_INSTRUCTION_NODE :
break;
default :
LOG.debug("Unhandled node type: " + node.getNodeType());
}
}
if (node.hasChildNodes()) {
int count = node.getChildCount();
NodeId nodeId = node.getNodeId();
for (int i = 0; i < count; i++) {
StoredNode child = iterator.next();
oldNodeId = child.getNodeId();
if (defrag) {
if (i == 0)
nodeId = nodeId.newChild();
else
nodeId = nodeId.nextSibling();
child.setNodeId(nodeId);
}
copyNodes(transaction, iterator, child, currentPath, newDoc, defrag, index, listener, oldNodeId);
}
}
if(node.getNodeType() == Node.ELEMENT_NODE) {
if (listener != null)
listener.endElement(transaction, (ElementImpl) node, currentPath);
currentPath.removeLastComponent();
}
}
/** Removes the Node Reference from the database.
* The index will be updated later, i.e. after all nodes have been physically
* removed. See {@link #endRemove(org.exist.storage.txn.Txn)}.
* removeNode() just adds the node ids to the list in elementIndex
* for later removal.
*/
@Override
public void removeNode(final Txn transaction, final StoredNode node, NodePath currentPath,
String content) {
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
new DOMTransaction(this, domDb, Lock.WRITE_LOCK, doc) {
@Override
public Object start() {
final long address = node.getInternalAddress();
if (StorageAddress.hasAddress(address))
domDb.remove(transaction, new NodeRef(doc.getDocId(), node.getNodeId()), address);
else
domDb.remove(transaction, new NodeRef(doc.getDocId(), node.getNodeId()));
return null;
}
}.run();
notifyRemoveNode(node, currentPath, content);
NodeProxy p = new NodeProxy(node);
QName qname;
switch (node.getNodeType()) {
case Node.ELEMENT_NODE :
qname = node.getQName();
qname.setNameType(ElementValue.ELEMENT);
GeneralRangeIndexSpec spec1 = doc.getCollection().getIndexByPathConfiguration(this, currentPath);
if(spec1 != null) {
valueIndex.setDocument(doc);
valueIndex.storeElement((ElementImpl) node, content, spec1.getType(), NativeValueIndex.IDX_GENERIC, false);
}
QNameRangeIndexSpec qnSpec = doc.getCollection().getIndexByQNameConfiguration(this, qname);
if (qnSpec != null) {
valueIndex.setDocument(doc);
valueIndex.storeElement((ElementImpl) node, content, qnSpec.getType(),
NativeValueIndex.IDX_QNAME, false);
}
break;
case Node.ATTRIBUTE_NODE :
qname = node.getQName();
qname.setNameType(ElementValue.ATTRIBUTE);
currentPath.addComponent(qname);
//Strange : does it mean that the node is added 2 times under 2 different identities ?
AttrImpl attr;
attr = (AttrImpl) node;
switch(attr.getType()) {
case AttrImpl.ID:
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, attr.getValue(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.ID, NativeValueIndex.IDX_GENERIC, false);
break;
case AttrImpl.IDREF:
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, attr.getValue(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.IDREF, NativeValueIndex.IDX_GENERIC, false);
break;
case AttrImpl.IDREFS:
valueIndex.setDocument(doc);
StringTokenizer tokenizer = new StringTokenizer(attr.getValue(), " ");
while (tokenizer.hasMoreTokens()) {
valueIndex.storeAttribute(attr, tokenizer.nextToken(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.IDREF, NativeValueIndex.IDX_GENERIC, false);
}
break;
default:
// do nothing special
}
RangeIndexSpec spec2 = doc.getCollection().getIndexByPathConfiguration(this, currentPath);
if(spec2 != null) {
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, null, NativeValueIndex.WITHOUT_PATH, spec2, false);
}
qnSpec = doc.getCollection().getIndexByQNameConfiguration(this, qname);
if (qnSpec != null) {
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, null, NativeValueIndex.WITHOUT_PATH, qnSpec, false);
}
currentPath.removeLastComponent();
break;
case Node.TEXT_NODE :
break;
}
}
@Override
public void removeAllNodes(Txn transaction, StoredNode node, NodePath currentPath,
StreamListener listener) {
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
Stack<RemovedNode> stack = new Stack<RemovedNode>();
collectNodesForRemoval(transaction, stack, iterator, listener, node, currentPath);
while (!stack.isEmpty()) {
RemovedNode next = stack.pop();
removeNode(transaction, next.node, next.path, next.content);
}
}
private void collectNodesForRemoval(Txn transaction, Stack<RemovedNode> stack,
Iterator<StoredNode> iterator, StreamListener listener, StoredNode node, NodePath currentPath) {
RemovedNode removed;
switch (node.getNodeType()) {
case Node.ELEMENT_NODE:
DocumentImpl doc = node.getDocument();
String content = null;
GeneralRangeIndexSpec spec = doc.getCollection().getIndexByPathConfiguration(this, currentPath);
if (spec != null) {
content = getNodeValue(node, false);
} else {
QNameRangeIndexSpec qnIdx = doc.getCollection().getIndexByQNameConfiguration(this, node.getQName());
if (qnIdx != null) {
content = getNodeValue(node, false);
}
}
removed = new RemovedNode(node, new NodePath(currentPath), content);
stack.push(removed);
if (listener != null) {
listener.startElement(transaction, (ElementImpl) node, currentPath);
}
if (node.hasChildNodes()) {
int childCount = node.getChildCount();
for (int i = 0; i < childCount; i++) {
StoredNode child = iterator.next();
if (child.getNodeType() == Node.ELEMENT_NODE)
currentPath.addComponent(child.getQName());
collectNodesForRemoval(transaction, stack, iterator, listener, child, currentPath);
if (child.getNodeType() == Node.ELEMENT_NODE)
currentPath.removeLastComponent();
}
}
if (listener != null) {
listener.endElement(transaction, (ElementImpl) node, currentPath);
}
break;
case Node.TEXT_NODE :
if (listener != null) {
listener.characters(transaction, (TextImpl) node, currentPath);
}
break;
case Node.ATTRIBUTE_NODE :
if (listener != null) {
listener.attribute(transaction, (AttrImpl) node, currentPath);
}
break;
}
if (node.getNodeType() != Node.ELEMENT_NODE) {
removed = new RemovedNode(node, new NodePath(currentPath), null);
stack.push(removed);
}
}
/**
* Index a single node, which has been added through an XUpdate
* operation. This method is only called if inserting the node is possible
* without changing the node identifiers of sibling or parent nodes. In other
* cases, reindex will be called.
*/
@Override
public void indexNode(Txn transaction, StoredNode node, NodePath currentPath) {
indexNode(transaction, node, currentPath, NodeProcessor.MODE_STORE);
}
public void indexNode(final Txn transaction, final StoredNode node, NodePath currentPath, int repairMode) {
nodeProcessor.reset(transaction, node, currentPath, null, true);
nodeProcessor.setMode(repairMode);
nodeProcessor.index();
}
private boolean checkNodeTree(Iterator<StoredNode> iterator, StoredNode node, StringBuilder buf) {
if (buf != null) {
if (buf.length() > 0)
buf.append(", ");
buf.append(node.getNodeId());
}
boolean docIsValid = true;
if (node.hasChildNodes()) {
int count = node.getChildCount();
if (buf != null)
buf.append('[').append(count).append(']');
StoredNode previous = null;
for (int i = 0; i < count; i++) {
StoredNode child = iterator.next();
if (i > 0 && !(child.getNodeId().isSiblingOf(previous.getNodeId()) &&
child.getNodeId().compareTo(previous.getNodeId()) > 0)) {
LOG.fatal("node " + child.getNodeId() + " cannot be a sibling of " + previous.getNodeId() +
"; node read from " + StorageAddress.toString(child.getInternalAddress()));
docIsValid = false;
}
previous = child;
if(child == null) {
LOG.fatal("child " + i + " not found for node: " + node.getNodeName() +
": " + node.getNodeId() + "; children = " + node.getChildCount());
docIsValid = false;
//TODO : emergency exit ?
}
NodeId parentId = child.getNodeId().getParentId();
if (!parentId.equals(node.getNodeId())) {
LOG.fatal(child.getNodeId() + " is not a child of " + node.getNodeId());
docIsValid = false;
}
boolean check = checkNodeTree(iterator, child, buf);
if (docIsValid)
docIsValid = check;
}
}
return docIsValid;
}
/**
* Called by reindex to walk through all nodes in the tree and reindex them
* if necessary.
*
* @param iterator
* @param node
* @param currentPath
*/
private void scanNodes(Txn transaction, Iterator<StoredNode> iterator, StoredNode node,
NodePath currentPath, int mode, StreamListener listener) {
if (node.getNodeType() == Node.ELEMENT_NODE)
currentPath.addComponent(node.getQName());
indexNode(transaction, node, currentPath, mode);
if (listener != null) {
switch (node.getNodeType()) {
case Node.TEXT_NODE :
case Node.CDATA_SECTION_NODE :
listener.characters(transaction, (CharacterDataImpl) node, currentPath);
break;
case Node.ELEMENT_NODE :
listener.startElement(transaction, (ElementImpl) node, currentPath);
break;
case Node.ATTRIBUTE_NODE :
listener.attribute(transaction, (AttrImpl) node, currentPath);
break;
case Node.COMMENT_NODE :
case Node.PROCESSING_INSTRUCTION_NODE :
break;
default :
LOG.debug("Unhandled node type: " + node.getNodeType());
}
}
if (node.hasChildNodes()) {
final int count = node.getChildCount();
for (int i = 0; i < count; i++) {
StoredNode child = iterator.next();
if (child == null) {
LOG.fatal("child " + i + " not found for node: " + node.getNodeName() +
"; children = " + node.getChildCount());
throw new IllegalStateException("Wrong node id");
}
scanNodes(transaction, iterator, child, currentPath, mode, listener);
}
}
if (node.getNodeType() == Node.ELEMENT_NODE) {
endElement(node, currentPath, null, mode == NodeProcessor.MODE_REMOVE);
if (listener != null)
listener.endElement(transaction, (ElementImpl) node, currentPath);
currentPath.removeLastComponent();
}
}
@Override
public String getNodeValue(final StoredNode node, final boolean addWhitespace) {
return (String) new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() {
return domDb.getNodeValue(NativeBroker.this, node, addWhitespace);
}
}.run();
}
@Override
public StoredNode objectWith(final Document doc, final NodeId nodeId) {
return (StoredNode) new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() {
Value val = domDb.get(NativeBroker.this, new NodeProxy((DocumentImpl) doc, nodeId));
if (val == null) {
if (LOG.isDebugEnabled())
LOG.debug("Node " + nodeId + " not found. This is usually not an error.");
return null;
}
StoredNode node = StoredNode.deserialize(val.getData(), 0, val.getLength(), (DocumentImpl) doc);
node.setOwnerDocument((DocumentImpl)doc);
node.setInternalAddress(val.getAddress());
return node;
}
}.run();
}
@Override
public StoredNode objectWith(final NodeProxy p) {
if (!StorageAddress.hasAddress(p.getInternalAddress()))
return objectWith(p.getDocument(), p.getNodeId());
return (StoredNode) new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() {
// DocumentImpl sets the nodeId to DOCUMENT_NODE when it's trying to find its top-level
// children (for which it doesn't persist the actual node ids), so ignore that. Nobody else
// should be passing DOCUMENT_NODE into here.
boolean fakeNodeId = p.getNodeId().equals(NodeId.DOCUMENT_NODE);
Value val = domDb.get(p.getInternalAddress(), false);
if (val == null) {
LOG.debug("Node " + p.getNodeId() + " not found in document " + p.getDocument().getURI() +
"; docId = " + p.getDocument().getDocId() + ": " + StorageAddress.toString(p.getInternalAddress()));
if (fakeNodeId)
return null;
} else {
StoredNode node = StoredNode.deserialize(val.getData(), 0, val.getLength(), p.getDocument());
node.setOwnerDocument((DocumentImpl)p.getOwnerDocument());
node.setInternalAddress(p.getInternalAddress());
if (fakeNodeId) return node;
if (p.getDocument().getDocId() == node.getDocId() &&
p.getNodeId().equals(node.getNodeId())) {
return node;
}
LOG.debug(
"Node " + p.getNodeId() + " not found in document " + p.getDocument().getURI() +
"; docId = " + p.getDocument().getDocId() + ": " + StorageAddress.toString(p.getInternalAddress()) +
"; found node " + node.getNodeId() + " instead"
);
}
// retry based on nodeid
StoredNode node = objectWith(p.getDocument(), p.getNodeId());
if (node != null) p.setInternalAddress(node.getInternalAddress()); // update proxy with correct address
return node;
}
}.run();
}
@Override
public void repair() throws PermissionDeniedException {
if (pool.isReadOnly())
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
LOG.info("Removing index files ...");
notifyCloseAndRemove();
try {
pool.getIndexManager().removeIndexes();
} catch (DBException e) {
LOG.warn("Failed to remove index failes during repair: " + e.getMessage(), e);
}
LOG.info("Recreating index files ...");
try {
valueIndex = new NativeValueIndex(this, VALUES_DBX_ID, dataDir, config);
} catch (DBException e) {
LOG.warn("Exception during repair: " + e.getMessage(), e);
}
try {
pool.getIndexManager().reopenIndexes();
} catch (DatabaseConfigurationException e) {
LOG.warn("Failed to reopen index files after repair: " + e.getMessage(), e);
}
initIndexModules();
LOG.info("Reindexing database files ...");
//Reindex from root collection
reindexCollection(null, getCollection(XmldbURI.ROOT_COLLECTION_URI), NodeProcessor.MODE_REPAIR);
}
@Override
public void flush() {
notifyFlush();
try {
pool.getSymbols().flush();
} catch (EXistException e) {
LOG.warn(e);
}
indexController.flush();
nodesCount = 0;
}
@Override
public void sync(int syncEvent) {
if (isReadOnly())
return;
try {
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
domDb.flush();
} catch (DBException e) {
LOG.warn("error while flushing dom.dbx", e);
}
return null;
}
}.run();
if(syncEvent == Sync.MAJOR_SYNC) {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
collectionsDb.flush();
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
} finally {
lock.release(Lock.WRITE_LOCK);
}
notifySync();
pool.getIndexManager().sync();
NumberFormat nf = NumberFormat.getNumberInstance();
LOGSTATS.info("Memory: " + nf.format(run.totalMemory() / 1024) + "K total; " +
nf.format(run.maxMemory() / 1024) + "K max; " +
nf.format(run.freeMemory() / 1024) + "K free");
domDb.printStatistics();
collectionsDb.printStatistics();
notifyPrintStatistics();
}
} catch (DBException dbe) {
dbe.printStackTrace();
LOG.warn(dbe);
}
}
@Override
public void shutdown() {
try {
flush();
sync(Sync.MAJOR_SYNC);
domDb.close();
collectionsDb.close();
notifyClose();
} catch (Exception e) {
LOG.warn(e.getMessage(), e);
}
super.shutdown();
}
/** check available memory */
@Override
public void checkAvailableMemory() {
if (nodesCountThreshold <= 0) {
if (nodesCount > DEFAULT_NODES_BEFORE_MEMORY_CHECK) {
if (run.totalMemory() >= run.maxMemory() && run.freeMemory() < pool.getReservedMem()) {
NumberFormat nf = NumberFormat.getNumberInstance();
LOG.info("total memory: " + nf.format(run.totalMemory()) +
"; max: " + nf.format(run.maxMemory()) +
"; free: " + nf.format(run.freeMemory()) +
"; reserved: " + nf.format(pool.getReservedMem()) +
"; used: " + nf.format(pool.getCacheManager().getSizeInBytes()));
flush();
System.gc();
}
nodesCount = 0;
}
} else if (nodesCount > nodesCountThreshold) {
flush();
nodesCount = 0;
}
}
//TOUNDERSTAND : why not use shutdown ? -pb
@Override
public void closeDocument() {
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
domDb.closeDocument();
return null;
}
}.run();
}
public final static class NodeRef extends Value {
public static int OFFSET_DOCUMENT_ID = 0;
public static int OFFSET_NODE_ID = OFFSET_DOCUMENT_ID + DocumentImpl.LENGTH_DOCUMENT_ID;
public NodeRef(int docId) {
len = DocumentImpl.LENGTH_DOCUMENT_ID;
data = new byte[len];
ByteConversion.intToByte(docId, data, OFFSET_DOCUMENT_ID);
pos = OFFSET_DOCUMENT_ID;
}
public NodeRef(int docId, NodeId nodeId) {
len = DocumentImpl.LENGTH_DOCUMENT_ID + nodeId.size();
data = new byte[len];
ByteConversion.intToByte(docId, data, OFFSET_DOCUMENT_ID);
nodeId.serialize(data, OFFSET_NODE_ID);
pos = OFFSET_DOCUMENT_ID;
}
int getDocId() {
return ByteConversion.byteToInt(data, OFFSET_DOCUMENT_ID);
}
}
private final static class RemovedNode {
StoredNode node;
String content;
NodePath path;
RemovedNode(StoredNode node, NodePath path, String content) {
this.node = node;
this.path = path;
this.content = content;
}
}
/** Delegate for Node Processings : indexing */
private class NodeProcessor {
final static int MODE_STORE = 0;
final static int MODE_REPAIR = 1;
final static int MODE_REMOVE = 2;
private Txn transaction;
private StoredNode node;
private NodePath currentPath;
/** work variables */
private DocumentImpl doc;
private long address;
private IndexSpec idxSpec;
//private FulltextIndexSpec ftIdx;
private int level;
private int mode = MODE_STORE;
/** overall switch to activate fulltext indexation */
private boolean fullTextIndex = true;
NodeProcessor() {
//ignore
}
public void reset(Txn transaction, StoredNode node, NodePath currentPath, IndexSpec indexSpec, boolean fullTextIndex) {
if (node.getNodeId() == null)
LOG.warn("illegal node: " + node.getNodeName());
//TODO : why continue processing ? return ? -pb
this.transaction = transaction;
this.node = node;
this.currentPath = currentPath;
this.mode = MODE_STORE;
doc = (DocumentImpl) node.getOwnerDocument();
address = node.getInternalAddress();
if (indexSpec == null)
indexSpec = doc.getCollection().getIndexConfiguration(NativeBroker.this);
idxSpec = indexSpec;
//ftIdx = idxSpec == null ? null : idxSpec.getFulltextIndexSpec();
level = node.getNodeId().getTreeLevel();
this.fullTextIndex = fullTextIndex;
}
public void setMode(int mode) {
this.mode = mode;
}
/** Updates the various indices */
public void doIndex() {
//TODO : resolve URI !
final boolean isTemp = XmldbURI.TEMP_COLLECTION_URI.equalsInternal(((DocumentImpl)node.getOwnerDocument()).getCollection().getURI());
int indexType;
switch (node.getNodeType()) {
case Node.ELEMENT_NODE :
//Compute index type
//TODO : let indexers OR it themselves
//we'd need to notify the ElementIndexer at the very end then...
indexType = RangeIndexSpec.NO_INDEX;
if (idxSpec != null && idxSpec.getIndexByPath(currentPath) != null) {
indexType |= idxSpec.getIndexByPath(currentPath).getIndexType();
}
if (idxSpec != null) {
QNameRangeIndexSpec qnIdx = idxSpec.getIndexByQName(node.getQName());
if (qnIdx != null) {
indexType |= RangeIndexSpec.QNAME_INDEX;
if (!RangeIndexSpec.hasRangeIndex(indexType))
indexType |= qnIdx.getIndexType();
}
}
((ElementImpl) node).setIndexType(indexType);
//notifyStartElement((ElementImpl)node, currentPath, fullTextIndex);
break;
case Node.ATTRIBUTE_NODE :
QName qname = node.getQName();
if (currentPath != null)
currentPath.addComponent(qname);
//Compute index type
//TODO : let indexers OR it themselves
//we'd need to notify the ElementIndexer at the very end then...
indexType = RangeIndexSpec.NO_INDEX;
if (idxSpec != null) {
RangeIndexSpec rangeSpec = idxSpec.getIndexByPath(currentPath);
if (rangeSpec != null) {
indexType |= rangeSpec.getIndexType();
}
if (rangeSpec != null) {
valueIndex.setDocument((DocumentImpl)node.getOwnerDocument());
//Oh dear : is it the right semantics then ?
valueIndex.storeAttribute((AttrImpl) node, currentPath,
NativeValueIndex.WITHOUT_PATH,
rangeSpec, mode == MODE_REMOVE);
}
QNameRangeIndexSpec qnIdx = idxSpec.getIndexByQName(node.getQName());
if (qnIdx != null) {
indexType |= RangeIndexSpec.QNAME_INDEX;
if (!RangeIndexSpec.hasRangeIndex(indexType))
indexType |= qnIdx.getIndexType();
valueIndex.setDocument((DocumentImpl)node.getOwnerDocument());
//Oh dear : is it the right semantics then ?
valueIndex.storeAttribute((AttrImpl) node, currentPath, NativeValueIndex.WITHOUT_PATH,
qnIdx, mode == MODE_REMOVE);
}
}
final NodeProxy tempProxy = new NodeProxy(doc, node.getNodeId(), address);
tempProxy.setIndexType(indexType);
qname.setNameType(ElementValue.ATTRIBUTE);
AttrImpl attr = (AttrImpl) node;
attr.setIndexType(indexType);
switch(attr.getType()) {
case AttrImpl.ID:
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, attr.getValue(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.ID, NativeValueIndex.IDX_GENERIC, mode == MODE_REMOVE);
break;
case AttrImpl.IDREF:
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, attr.getValue(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.IDREF, NativeValueIndex.IDX_GENERIC, mode == MODE_REMOVE);
break;
case AttrImpl.IDREFS:
valueIndex.setDocument(doc);
StringTokenizer tokenizer = new StringTokenizer(attr.getValue(), " ");
while (tokenizer.hasMoreTokens()) {
valueIndex.storeAttribute(attr, tokenizer.nextToken(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.IDREF, NativeValueIndex.IDX_GENERIC, mode == MODE_REMOVE);
}
break;
default:
// do nothing special
}
if (currentPath != null)
currentPath.removeLastComponent();
break;
case Node.TEXT_NODE:
notifyStoreText( (TextImpl)node, currentPath,
fullTextIndex ? NativeTextEngine.DO_NOT_TOKENIZE : NativeTextEngine.TOKENIZE);
break;
}
}
/** Stores this node into the database, if it's an element */
public void store() {
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
if (mode == MODE_STORE && node.getNodeType() == Node.ELEMENT_NODE && level <= defaultIndexDepth) {
//TODO : used to be this, but NativeBroker.this avoids an owner change
new DOMTransaction(NativeBroker.this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() throws ReadOnlyException {
try {
domDb.addValue(transaction, new NodeRef(doc.getDocId(), node.getNodeId()), address);
} catch (BTreeException e) {
LOG.warn(EXCEPTION_DURING_REINDEX, e);
} catch (IOException e) {
LOG.warn(EXCEPTION_DURING_REINDEX, e);
}
return null;
}
}.run();
}
}
/** check available memory */
private void checkAvailableMemory() {
if (mode != MODE_REMOVE && nodesCount > DEFAULT_NODES_BEFORE_MEMORY_CHECK) {
if (run.totalMemory() >= run.maxMemory() && run.freeMemory() < pool.getReservedMem()) {
//LOG.info("total memory: " + run.totalMemory() + "; free: " + run.freeMemory());
flush();
System.gc();
LOG.info("total memory: " + run.totalMemory() + "; free: " + run.freeMemory());
}
nodesCount = 0;
}
}
/** Updates the various indices and stores this node into the database */
public void index() {
++nodesCount;
checkAvailableMemory();
doIndex();
store();
}
}
private final class DocumentCallback implements BTreeCallback {
private final Collection.InternalAccess collectionInternalAccess;
private DocumentCallback(final Collection.InternalAccess collectionInternalAccess) {
this.collectionInternalAccess = collectionInternalAccess;
}
@Override
public boolean indexInfo(final Value key, final long pointer) throws TerminatedException {
try {
final byte type = key.data()[key.start() + Collection.LENGTH_COLLECTION_ID + DocumentImpl.LENGTH_DOCUMENT_TYPE];
final VariableByteInput istream = collectionsDb.getAsStream(pointer);
final DocumentImpl doc;
if (type == DocumentImpl.BINARY_FILE) {
doc = new BinaryDocument(pool);
} else {
doc = new DocumentImpl(pool);
}
doc.read(istream);
collectionInternalAccess.addDocument(doc);
} catch (EOFException e) {
LOG.error("EOFException while reading document data", e);
} catch (IOException e) {
LOG.error("IOException while reading document data", e);
} catch(EXistException ee) {
LOG.error("EXistException while reading document data", ee);
}
return true;
}
}
}
| src/org/exist/storage/NativeBroker.java | /*
* eXist Open Source Native XML Database
* Copyright (C) 2001-2007 The eXist team
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Id$
*/
package org.exist.storage;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Observer;
import java.util.Stack;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.stream.XMLStreamException;
import org.apache.log4j.Logger;
import org.exist.EXistException;
import org.exist.Indexer;
import org.exist.backup.RawDataBackup;
import org.exist.collections.Collection;
import org.exist.collections.Collection.SubCollectionEntry;
import org.exist.collections.CollectionCache;
import org.exist.collections.CollectionConfiguration;
import org.exist.collections.CollectionConfigurationException;
import org.exist.collections.CollectionConfigurationManager;
import org.exist.collections.triggers.CollectionTriggersVisitor;
import org.exist.collections.triggers.DocumentTriggersVisitor;
import org.exist.collections.triggers.TriggerException;
import org.exist.dom.*;
import org.exist.fulltext.FTIndex;
import org.exist.fulltext.FTIndexWorker;
import org.exist.indexing.StreamListener;
import org.exist.indexing.StructuralIndex;
import org.exist.memtree.DOMIndexer;
import org.exist.numbering.NodeId;
import org.exist.security.Account;
import org.exist.security.MessageDigester;
import org.exist.security.Permission;
import org.exist.security.PermissionDeniedException;
import org.exist.security.Subject;
import org.exist.stax.EmbeddedXMLStreamReader;
import org.exist.storage.btree.BTree;
import org.exist.storage.btree.BTreeCallback;
import org.exist.storage.btree.BTreeException;
import org.exist.storage.btree.DBException;
import org.exist.storage.btree.IndexQuery;
import org.exist.storage.btree.Paged;
import org.exist.storage.btree.Value;
import org.exist.storage.btree.Paged.Page;
import org.exist.storage.dom.DOMFile;
import org.exist.storage.dom.DOMTransaction;
import org.exist.storage.dom.NodeIterator;
import org.exist.storage.dom.RawNodeIterator;
import org.exist.storage.index.BFile;
import org.exist.storage.index.CollectionStore;
import org.exist.storage.io.VariableByteInput;
import org.exist.storage.io.VariableByteOutputStream;
import org.exist.storage.journal.Journal;
import org.exist.storage.journal.LogEntryTypes;
import org.exist.storage.journal.Loggable;
import org.exist.storage.lock.Lock;
import org.exist.storage.serializers.NativeSerializer;
import org.exist.storage.serializers.Serializer;
import org.exist.storage.sync.Sync;
import org.exist.storage.txn.TransactionException;
import org.exist.storage.txn.TransactionManager;
import org.exist.storage.txn.Txn;
import org.exist.util.ByteArrayPool;
import org.exist.util.ByteConversion;
import org.exist.util.Configuration;
import org.exist.util.DatabaseConfigurationException;
import org.exist.util.LockException;
import org.exist.util.ReadOnlyException;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.value.Type;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.exist.xquery.TerminatedException;
/**
* Main class for the native XML storage backend.
* By "native" it is meant file-based, embedded backend.
*
* Provides access to all low-level operations required by
* the database. Extends {@link DBBroker}.
*
* Observer Design Pattern: role : this class is the subject (alias observable)
* for various classes that generate indices for the database content :
* @link org.exist.storage.NativeElementIndex
* @link org.exist.storage.NativeTextEngine
* @link org.exist.storage.NativeValueIndex
* @link org.exist.storage.NativeValueIndexByQName
*
* This class dispatches the various events (defined by the methods
* of @link org.exist.storage.ContentLoadingObserver) to indexing classes.
*
*@author Wolfgang Meier
*/
public class NativeBroker extends DBBroker {
public final static String EXIST_STATISTICS_LOGGER = "org.exist.statistics";
protected final static Logger LOGSTATS = Logger.getLogger( EXIST_STATISTICS_LOGGER );
public final static byte LOG_RENAME_BINARY = 0x40;
public final static byte LOG_CREATE_BINARY = 0x41;
public final static byte LOG_UPDATE_BINARY = 0x42;
static {
LogEntryTypes.addEntryType(LOG_RENAME_BINARY, RenameBinaryLoggable.class);
LogEntryTypes.addEntryType(LOG_CREATE_BINARY, CreateBinaryLoggable.class);
LogEntryTypes.addEntryType(LOG_UPDATE_BINARY, UpdateBinaryLoggable.class);
}
public static final byte PREPEND_DB_ALWAYS = 0;
public static final byte PREPEND_DB_NEVER = 1;
public static final byte PREPEND_DB_AS_NEEDED = 2;
public static final byte COLLECTIONS_DBX_ID = 0;
public static final byte VALUES_DBX_ID = 2;
public static final byte DOM_DBX_ID = 3;
//Note : no ID for symbols ? Too bad...
public static final String PAGE_SIZE_ATTRIBUTE = "pageSize";
public static final String INDEX_DEPTH_ATTRIBUTE = "index-depth";
public static final String PROPERTY_INDEX_DEPTH = "indexer.index-depth";
private static final byte[] ALL_STORAGE_FILES = {
COLLECTIONS_DBX_ID, VALUES_DBX_ID, DOM_DBX_ID
};
//private static final String TEMP_FRAGMENT_REMOVE_ERROR = "Could not remove temporary fragment";
// private static final String TEMP_STORE_ERROR = "An error occurred while storing temporary data: ";
private static final String EXCEPTION_DURING_REINDEX = "exception during reindex";
private static final String DATABASE_IS_READ_ONLY = "database is read-only";
public static final String DEFAULT_DATA_DIR = "data";
public static final int DEFAULT_INDEX_DEPTH = 1;
public static final int DEFAULT_MIN_MEMORY = 5000000;
public static final long TEMP_FRAGMENT_TIMEOUT = 60000;
/** default buffer size setting */
public static final int BUFFERS = 256;
/** check available memory after storing DEFAULT_NODES_BEFORE_MEMORY_CHECK nodes */
public static final int DEFAULT_NODES_BEFORE_MEMORY_CHECK = 500;
public static int OFFSET_COLLECTION_ID = 0;
public static int OFFSET_VALUE = OFFSET_COLLECTION_ID + Collection.LENGTH_COLLECTION_ID; //2
public final static String INIT_COLLECTION_CONFIG = "collection.xconf.init";
/** the database files */
protected CollectionStore collectionsDb;
protected DOMFile domDb;
/** the index processors */
protected NativeValueIndex valueIndex;
protected IndexSpec indexConfiguration;
protected int defaultIndexDepth;
protected Serializer xmlSerializer;
/** used to count the nodes inserted after the last memory check */
protected int nodesCount = 0;
protected int nodesCountThreshold = DEFAULT_NODES_BEFORE_MEMORY_CHECK;
protected String dataDir;
protected File fsDir;
protected File fsBackupDir;
protected int pageSize;
protected byte prepend;
private final Runtime run = Runtime.getRuntime();
private NodeProcessor nodeProcessor = new NodeProcessor();
private EmbeddedXMLStreamReader streamReader = null;
protected Journal logManager;
protected boolean incrementalDocIds = false;
/** initialize database; read configuration, etc. */
public NativeBroker(BrokerPool pool, Configuration config) throws EXistException {
super(pool, config);
this.logManager = pool.getTransactionManager().getJournal();
LOG.debug("Initializing broker " + hashCode());
String prependDB = (String) config.getProperty("db-connection.prepend-db");
if ("always".equalsIgnoreCase(prependDB)) {
prepend = PREPEND_DB_ALWAYS;
} else if("never".equalsIgnoreCase(prependDB)) {
prepend = PREPEND_DB_NEVER;
} else {
prepend = PREPEND_DB_AS_NEEDED;
}
dataDir = (String) config.getProperty(BrokerPool.PROPERTY_DATA_DIR);
if (dataDir == null)
dataDir = DEFAULT_DATA_DIR;
fsDir = new File(new File(dataDir),"fs");
if (!fsDir.exists()) {
if (!fsDir.mkdir()) {
throw new EXistException("Cannot make collection filesystem directory: "+fsDir);
}
}
fsBackupDir = new File(new File(dataDir),"fs.journal");
if (!fsBackupDir.exists()) {
if (!fsBackupDir.mkdir()) {
throw new EXistException("Cannot make collection filesystem directory: "+fsBackupDir);
}
}
nodesCountThreshold = config.getInteger(BrokerPool.PROPERTY_NODES_BUFFER);
if (nodesCountThreshold > 0)
nodesCountThreshold = nodesCountThreshold * 1000;
defaultIndexDepth = config.getInteger(PROPERTY_INDEX_DEPTH);
if (defaultIndexDepth < 0)
defaultIndexDepth = DEFAULT_INDEX_DEPTH;
String docIdProp = (String) config.getProperty(BrokerPool.DOC_ID_MODE_PROPERTY);
if (docIdProp != null) {
incrementalDocIds = docIdProp.equalsIgnoreCase("incremental");
}
indexConfiguration = (IndexSpec) config.getProperty(Indexer.PROPERTY_INDEXER_CONFIG);
xmlSerializer = new NativeSerializer(this, config);
setSubject(pool.getSecurityManager().getSystemSubject());
try {
//TODO : refactor so that we can,
//1) customize the different properties (file names, cache settings...)
//2) have a consistent READ-ONLY behaviour (based on *mandatory* files ?)
//3) have consistent file creation behaviour (we can probably avoid some unnecessary files)
//4) use... *customized* factories for a better index plugability ;-)
// Initialize DOM storage
domDb = (DOMFile) config.getProperty(DOMFile.getConfigKeyForFile());
if (domDb == null)
domDb = new DOMFile(pool, DOM_DBX_ID, dataDir, config);
if (domDb.isReadOnly()) {
LOG.warn(domDb.getFile().getName() + " is read-only!");
pool.setReadOnly();
}
//Initialize collections storage
collectionsDb = (CollectionStore) config.getProperty(CollectionStore.getConfigKeyForFile());
if (collectionsDb == null)
collectionsDb = new CollectionStore(pool, COLLECTIONS_DBX_ID, dataDir, config);
if (collectionsDb.isReadOnly()) {
LOG.warn(collectionsDb.getFile().getName() + " is read-only!");
pool.setReadOnly();
}
valueIndex = new NativeValueIndex(this, VALUES_DBX_ID, dataDir, config);
if (pool.isReadOnly())
LOG.info("Database runs in read-only mode");
} catch (DBException e) {
LOG.debug(e.getMessage(), e);
throw new EXistException(e);
}
}
@Override
public ElementIndex getElementIndex() {
return null;
}
@Override
public synchronized void addObserver(Observer o) {
super.addObserver(o);
//textEngine.addObserver(o);
//elementIndex.addObserver(o);
//TODO : what about other indexes observers ?
}
@Override
public synchronized void deleteObservers() {
super.deleteObservers();
//if (elementIndex != null)
//elementIndex.deleteObservers();
//TODO : what about other indexes observers ?
//if (textEngine != null)
//textEngine.deleteObservers();
}
// ============ dispatch the various events to indexing classes ==========
private void notifyRemoveNode(StoredNode node, NodePath currentPath, String content) {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.removeNode(node, currentPath, content);
}
}
//private void notifyStoreAttribute(AttrImpl attr, NodePath currentPath, int indexingHint, RangeIndexSpec spec, boolean remove) {
// for (int i = 0; i < contentLoadingObservers.size(); i++) {
// ContentLoadingObserver observer = (ContentLoadingObserver) contentLoadingObservers.get(i);
// observer.storeAttribute(attr, currentPath, indexingHint, spec, remove);
// }
//}
private void notifyStoreText(TextImpl text, NodePath currentPath, int indexingHint) {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.storeText(text, currentPath, indexingHint);
}
}
private void notifyDropIndex(Collection collection) {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.dropIndex(collection);
}
}
private void notifyDropIndex(DocumentImpl doc) throws ReadOnlyException {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.dropIndex(doc);
}
}
private void notifyRemove() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.remove();
}
}
private void notifySync() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.sync();
}
}
private void notifyFlush() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
try {
observer.flush();
} catch (DBException e) {
LOG.warn(e);
//Ignore the exception ; try to continue on other files
}
}
}
private void notifyPrintStatistics() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.printStatistics();
}
}
private void notifyClose() throws DBException {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.close();
}
clearContentLoadingObservers();
}
private void notifyCloseAndRemove() {
for (int i = 0; i < contentLoadingObservers.size(); i++) {
ContentLoadingObserver observer = contentLoadingObservers.get(i);
observer.closeAndRemove();
}
clearContentLoadingObservers();
}
/**
* Update indexes for the given element node. This method is called when the indexer
* encounters a closing element tag. It updates any range indexes defined on the
* element value and adds the element id to the structural index.
*
* @param node the current element node
* @param currentPath node path leading to the element
* @param content contains the string value of the element. Needed if a range index
* is defined on it.
*/
@Override
public void endElement(final StoredNode node, NodePath currentPath, String content, boolean remove) {
final int indexType = ((ElementImpl) node).getIndexType();
//TODO : do not care about the current code redundancy : this will move in the (near) future
// TODO : move to NativeValueIndex
if (RangeIndexSpec.hasRangeIndex(indexType)) {
node.getQName().setNameType(ElementValue.ELEMENT);
if (content == null) {
//NodeProxy p = new NodeProxy(node);
//if (node.getOldInternalAddress() != StoredNode.UNKNOWN_NODE_IMPL_ADDRESS)
// p.setInternalAddress(node.getOldInternalAddress());
content = getNodeValue(node, false);
//Curious... I assume getNodeValue() needs the old address
//p.setInternalAddress(node.getInternalAddress());
}
valueIndex.setDocument((DocumentImpl) node.getOwnerDocument());
valueIndex.storeElement((ElementImpl) node, content, RangeIndexSpec.indexTypeToXPath(indexType),
NativeValueIndex.IDX_GENERIC, remove);
}
// TODO : move to NativeValueIndexByQName
if ( RangeIndexSpec.hasQNameIndex(indexType) ) {
node.getQName().setNameType(ElementValue.ELEMENT);
if (content == null) {
//NodeProxy p = new NodeProxy(node);
//if (node.getOldInternalAddress() != StoredNode.UNKNOWN_NODE_IMPL_ADDRESS)
// p.setInternalAddress(node.getOldInternalAddress());
content = getNodeValue(node, false);
//Curious... I assume getNodeValue() needs the old address
//p.setInternalAddress(node.getInternalAddress());
}
valueIndex.setDocument((DocumentImpl) node.getOwnerDocument());
valueIndex.storeElement((ElementImpl) node, content, RangeIndexSpec.indexTypeToXPath(indexType),
NativeValueIndex.IDX_QNAME, remove);
//qnameValueIndex.setDocument((DocumentImpl) node.getOwnerDocument());
//qnameValueIndex.endElement((ElementImpl) node, currentPath, content);
}
}
/*
private String getOldNodeContent(StoredNode node, long oldAddress) {
NodeProxy p = new NodeProxy(node);
if (oldAddress != StoredNode.UNKNOWN_NODE_IMPL_ADDRESS)
p.setInternalAddress(oldAddress);
String content = getNodeValue(node, false);
//Curious... I assume getNodeValue() needs the old address
p.setInternalAddress(node.getInternalAddress());
return content;
}
*/
/** Takes care of actually remove entries from the indices;
* must be called after one or more call to {@link #removeNode(Txn, StoredNode, NodePath, String)}. */
@Override
public void endRemove(Txn transaction) {
notifyRemove();
}
@Override
public boolean isReadOnly() {
return pool.isReadOnly();
}
public DOMFile getDOMFile() {
return domDb;
}
public BTree getStorage(byte id) {
//Notice that there is no entry for the symbols table
switch (id) {
case DOM_DBX_ID :
return domDb;
case COLLECTIONS_DBX_ID :
return collectionsDb;
case VALUES_DBX_ID :
return valueIndex.dbValues;
default:
return null;
}
}
public byte[] getStorageFileIds() {
return ALL_STORAGE_FILES;
}
public int getDefaultIndexDepth() {
return defaultIndexDepth;
}
@Override
public void backupToArchive(RawDataBackup backup) throws IOException, EXistException {
for (byte i : ALL_STORAGE_FILES) {
Paged paged = getStorage(i);
if (paged == null) {
LOG.warn("Storage file is null: " + i);
continue;
}
OutputStream os = backup.newEntry(paged.getFile().getName());
paged.backupToStream(os);
backup.closeEntry();
}
pool.getSymbols().backupToArchive(backup);
backupBinary(backup, fsDir, "");
pool.getIndexManager().backupToArchive(backup);
//TODO backup counters
//TODO USE zip64 or tar to create snapshots larger then 4Gb
}
private void backupBinary(RawDataBackup backup, File file, String path) throws IOException {
path = path + "/" + file.getName();
if (file.isDirectory()) {
for (File f : file.listFiles()) {
backupBinary(backup, f, path);
}
} else {
OutputStream os = backup.newEntry(path);
InputStream is = new FileInputStream(file);
byte[] buf = new byte[4096];
int len;
while ((len = is.read(buf)) > 0) {
os.write(buf, 0, len);
}
is.close();
backup.closeEntry();
}
}
@Override
public IndexSpec getIndexConfiguration() {
return indexConfiguration;
}
@Override
public StructuralIndex getStructuralIndex() {
return (StructuralIndex) getIndexController().getWorkerByIndexName(StructuralIndex.STRUCTURAL_INDEX_ID);
}
@Override
public NativeValueIndex getValueIndex() {
return valueIndex;
}
@Override
public TextSearchEngine getTextEngine() {
FTIndexWorker worker = (FTIndexWorker) indexController.getWorkerByIndexId(FTIndex.ID);
if (worker == null) {
LOG.warn("Fulltext index is not configured. Please check the <modules> section in conf.xml");
return null;
}
return worker.getEngine();
}
@Override
public EmbeddedXMLStreamReader getXMLStreamReader(NodeHandle node, boolean reportAttributes)
throws IOException, XMLStreamException {
if (streamReader == null) {
RawNodeIterator iterator = new RawNodeIterator(this, domDb, node);
streamReader = new EmbeddedXMLStreamReader(this, (DocumentImpl) node.getOwnerDocument(), iterator, node, reportAttributes);
} else {
streamReader.reposition(this, node, reportAttributes);
}
return streamReader;
}
@Override
public EmbeddedXMLStreamReader newXMLStreamReader(NodeHandle node, boolean reportAttributes)
throws IOException, XMLStreamException {
RawNodeIterator iterator = new RawNodeIterator(this, domDb, node);
return new EmbeddedXMLStreamReader(this, (DocumentImpl) node.getOwnerDocument(), iterator, null, reportAttributes);
}
@Override
public Iterator<StoredNode> getNodeIterator(StoredNode node) {
if (node == null)
throw new IllegalArgumentException("The node parameter cannot be null.");
try {
return new NodeIterator(this, domDb, node, false);
} catch (BTreeException e) {
LOG.warn("failed to create node iterator", e);
} catch (IOException e) {
LOG.warn("failed to create node iterator", e);
}
return null;
}
@Override
public Serializer getSerializer() {
xmlSerializer.reset();
return xmlSerializer;
}
@Override
public Serializer newSerializer() {
return new NativeSerializer(this, getConfiguration());
}
public XmldbURI prepend(XmldbURI uri) {
switch(prepend) {
case PREPEND_DB_ALWAYS:
return uri.prepend(XmldbURI.ROOT_COLLECTION_URI);
case PREPEND_DB_AS_NEEDED:
return uri.startsWith(XmldbURI.ROOT_COLLECTION_URI)?
uri:
uri.prepend(XmldbURI.ROOT_COLLECTION_URI);
default:
return uri;
}
}
/**
* Creates a temporary collecion
*
* @param transaction : The transaction, which registers the acquired write locks. The locks should be released on commit/abort.
* @return The temporary collection
* @throws LockException
* @throws PermissionDeniedException
* @throws IOException
* @throws TriggerException
*/
private Collection createTempCollection(Txn transaction)
throws LockException, PermissionDeniedException, IOException, TriggerException {
Subject u = getSubject();
try {
setSubject( pool.getSecurityManager().getSystemSubject() );
Collection temp = getOrCreateCollection(transaction, XmldbURI.TEMP_COLLECTION_URI);
temp.setPermissions(0771);
saveCollection(transaction, temp);
return temp;
} finally {
setSubject( u );
}
}
private final String readInitCollectionConfig() {
final File fInitCollectionConfig = new File(pool.getConfiguration().getExistHome(), INIT_COLLECTION_CONFIG);
if(fInitCollectionConfig.exists() && fInitCollectionConfig.isFile()) {
InputStream is = null;
try {
final StringBuilder initCollectionConfig = new StringBuilder();
is = new FileInputStream(fInitCollectionConfig);
int read = -1;
byte buf[] = new byte[1024];
while((read = is.read(buf)) != -1) {
initCollectionConfig.append(new String(buf, 0, read));
}
return initCollectionConfig.toString();
} catch(final IOException ioe) {
LOG.error(ioe.getMessage(), ioe);
} finally {
if(is != null) {
try {
is.close();
} catch(final IOException ioe) {
LOG.warn(ioe.getMessage(), ioe);
}
}
}
};
return null;
}
/* (non-Javadoc)
* @see org.exist.storage.DBBroker#getOrCreateCollection(org.exist.storage.txn.Txn, org.exist.xmldb.XmldbURI)
*/
@Override
public Collection getOrCreateCollection(Txn transaction, XmldbURI name) throws PermissionDeniedException, IOException, TriggerException {
name = prepend(name.normalizeCollectionPath());
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
try {
//TODO : resolve URIs !
XmldbURI[] segments = name.getPathSegments();
XmldbURI path = XmldbURI.ROOT_COLLECTION_URI;
Collection sub;
Collection current = getCollection(XmldbURI.ROOT_COLLECTION_URI);
if (current == null) {
LOG.debug("Creating root collection '" + XmldbURI.ROOT_COLLECTION_URI + "'");
pool.getCollectionTrigger().beforeCreateCollection(this, transaction, XmldbURI.ROOT_COLLECTION_URI);
current = new Collection(this, XmldbURI.ROOT_COLLECTION_URI);
current.setId(getNextCollectionId(transaction));
current.setCreationTime(System.currentTimeMillis());
if(transaction != null) {
transaction.acquireLock(current.getLock(), Lock.WRITE_LOCK);
}
//TODO : acquire lock manually if transaction is null ?
saveCollection(transaction, current);
pool.getCollectionTrigger().afterCreateCollection(this, transaction, current);
//import an initial collection configuration
try {
final String initCollectionConfig = readInitCollectionConfig();
if(initCollectionConfig != null) {
CollectionConfigurationManager collectionConfigurationManager = pool.getConfigurationManager();
if(collectionConfigurationManager == null) {
//might not yet have been initialised
pool.initCollectionConfigurationManager(this);
collectionConfigurationManager = pool.getConfigurationManager();
}
if(collectionConfigurationManager != null) {
collectionConfigurationManager.addConfiguration(transaction, this, current, initCollectionConfig);
}
}
} catch(final CollectionConfigurationException cce) {
LOG.error("Could not load initial collection configuration for /db: " + cce.getMessage(), cce);
}
}
for(int i=1;i<segments.length;i++) {
XmldbURI temp = segments[i];
path = path.append(temp);
if(current.hasSubcollectionNoLock(this, temp)) {
current = getCollection(path);
if (current == null) {
LOG.debug("Collection '" + path + "' not found!");
}
} else {
if (pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
if(!current.getPermissionsNoLock().validate(getSubject(), Permission.WRITE)) {
LOG.error("Permission denied to create collection '" + path + "'");
throw new PermissionDeniedException("Account '"+ getSubject().getName() + "' not allowed to write to collection '" + current.getURI() + "'");
}
if (!current.getPermissionsNoLock().validate(getSubject(), Permission.EXECUTE)) {
LOG.error("Permission denied to create collection '" + path + "'");
throw new PermissionDeniedException("Account '"+ getSubject().getName() + "' not allowed to execute to collection '" + current.getURI() + "'");
}
if (current.hasDocument(this, path.lastSegment())) {
LOG.error("Collection '" + current.getURI() + "' have document '" + path.lastSegment() + "'");
throw new PermissionDeniedException("Collection '" + current.getURI() + "' have document '" + path.lastSegment() + "'.");
}
LOG.debug("Creating collection '" + path + "'...");
CollectionConfiguration colConf = current.getConfiguration(this);
pool.getCollectionTrigger().beforeCreateCollection(this, transaction, path);
CollectionTriggersVisitor triggersVisitor = null;
if(colConf != null) {
triggersVisitor = colConf.getCollectionTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeCreateCollection(this, transaction, path);
}
sub = new Collection(this, path);
sub.setId(getNextCollectionId(transaction));
if (transaction != null) {
transaction.acquireLock(sub.getLock(), Lock.WRITE_LOCK);
}
//TODO : acquire lock manually if transaction is null ?
current.addCollection(this, sub, true);
saveCollection(transaction, current);
pool.getCollectionTrigger().afterCreateCollection(this, transaction, sub);
if(colConf != null) {
triggersVisitor.afterCreateCollection(this, transaction, sub);
}
current = sub;
}
}
return current;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
return null;
} catch (ReadOnlyException e) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
}
}
@Override
public Collection getCollection(XmldbURI uri) throws PermissionDeniedException {
return openCollection(uri, Lock.NO_LOCK);
}
@Override
public Collection openCollection(XmldbURI uri, int lockMode) throws PermissionDeniedException {
return openCollection(uri, BFile.UNKNOWN_ADDRESS, lockMode);
}
@Override
public List<String> findCollectionsMatching(String regexp) {
final List<String> collections = new ArrayList<String>();
final Pattern p = Pattern.compile(regexp);
final Matcher m = p.matcher("");
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
//TODO write a regexp lookup for key data in BTree.query
//IndexQuery idxQuery = new IndexQuery(IndexQuery.REGEXP, regexp);
//List<Value> keys = collectionsDb.findKeysByCollectionName(idxQuery);
List<Value> keys = collectionsDb.getKeys();
for(Value key : keys) {
//TODO restrict keys to just collection uri's
final String collectionName = new String(key.getData());
m.reset(collectionName);
if(m.matches()) {
collections.add(collectionName);
}
}
} catch (UnsupportedEncodingException e) {
//LOG.error("Unable to encode '" + uri + "' in UTF-8");
//return null;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
//return null;
} catch (TerminatedException e) {
LOG.error(e.getMessage(), e);
//return null;
} catch (BTreeException e) {
LOG.error(e.getMessage(), e);
//return null;
} catch (IOException e) {
LOG.error(e.getMessage(), e);
//return null;
} finally {
lock.release(Lock.READ_LOCK);
}
return collections;
}
@Override
public void readCollectionEntry(SubCollectionEntry entry) {
final XmldbURI uri = prepend(entry.getUri().toCollectionPathURI());
Collection collection;
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
collection = collectionsCache.get(uri);
if (collection == null) {
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.CollectionKey(uri.toString());
VariableByteInput is = collectionsDb.getAsStream(key);
if (is == null) {
LOG.warn("Could not read collection entry for: " + uri);
return;
}
//read the entry details
entry.read(is);
} catch (UnsupportedEncodingException e) {
LOG.error("Unable to encode '" + uri + "' in UTF-8");
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.error(e.getMessage(), e);
} finally {
lock.release(Lock.READ_LOCK);
}
} else {
if (!collection.getURI().equalsInternal(uri)) {
LOG.error("The collection received from the cache is not the requested: " + uri +
"; received: " + collection.getURI());
return;
}
entry.read(collection);
collectionsCache.add(collection);
}
}
}
/**
* Get collection object. If the collection does not exist, null is
* returned.
*
*@param uri collection URI
*@return The collection value
*/
private Collection openCollection(XmldbURI uri, long addr, int lockMode) throws PermissionDeniedException {
uri = prepend(uri.toCollectionPathURI());
//We *must* declare it here (see below)
Collection collection;
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
collection = collectionsCache.get(uri);
if (collection == null) {
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
VariableByteInput is;
if (addr == BFile.UNKNOWN_ADDRESS) {
Value key = new CollectionStore.CollectionKey(uri.toString());
is = collectionsDb.getAsStream(key);
} else {
is = collectionsDb.getAsStream(addr);
}
if (is == null)
return null;
collection = new Collection(this, uri);
collection.read(this, is);
//TODO : manage this from within the cache -pb
if(!pool.isInitializing())
collectionsCache.add(collection);
//TODO : rethrow exceptions ? -pb
} catch (UnsupportedEncodingException e) {
LOG.error("Unable to encode '" + uri + "' in UTF-8");
return null;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
return null;
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return null;
} finally {
lock.release(Lock.READ_LOCK);
}
} else {
if (!collection.getURI().equalsInternal(uri)) {
LOG.error("The collection received from the cache is not the requested: " + uri +
"; received: " + collection.getURI());
}
collectionsCache.add(collection);
if(!collection.getPermissionsNoLock().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Permission denied to open collection: " + collection.getURI().toString() + " by " + getSubject().getName());
}
}
}
//Important :
//This code must remain outside of the synchonized block
//because another thread may already own a lock on the collection
//This would result in a deadlock... until the time-out raises the Exception
//TODO : make an attempt to an immediate lock ?
//TODO : manage a collection of requests for locks ?
//TODO : another yet smarter solution ?
if(lockMode != Lock.NO_LOCK) {
try {
collection.getLock().acquire(lockMode);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on collection '" + uri + "'");
}
}
return collection;
}
/**
* Checks all permissions in the tree to ensure that a copy operation will succeed
*/
final void checkPermissionsForCopy(final Collection src, final XmldbURI destUri) throws PermissionDeniedException, LockException {
if(!src.getPermissions().validate(getSubject(), Permission.EXECUTE | Permission.READ)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " by " + getSubject().getName());
}
final Collection dest = getCollection(destUri);
final XmldbURI newDestUri = destUri.append(src.getURI().lastSegment());
final Collection newDest = getCollection(newDestUri);
if(dest != null) {
if(!dest.getPermissions().validate(getSubject(), Permission.EXECUTE | Permission.WRITE | Permission.READ)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getSubject().getName());
}
if(newDest != null) {
if(!dest.getPermissions().validate(getSubject(), Permission.EXECUTE | Permission.READ)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getSubject().getName());
}
if(newDest.isEmpty(this)) {
if(!dest.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getSubject().getName());
}
}
}
}
for(Iterator<DocumentImpl> itSrcSubDoc = src.iterator(this); itSrcSubDoc.hasNext();) {
final DocumentImpl srcSubDoc = itSrcSubDoc.next();
if(!srcSubDoc.getPermissions().validate(getSubject(), Permission.READ)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " for resource " + srcSubDoc.getURI() + " by " + getSubject().getName());
}
if(newDest != null && !newDest.isEmpty(this)) {
final DocumentImpl newDestSubDoc = newDest.getDocument(this, srcSubDoc.getFileURI()); //TODO check this uri is just the filename!
if(newDestSubDoc != null) {
if(!newDestSubDoc.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " for resource " + newDestSubDoc.getURI() + " by " + getSubject().getName());
}
} else {
if(!dest.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getSubject().getName());
}
}
}
}
for(Iterator<XmldbURI> itSrcSubColUri = src.collectionIterator(this); itSrcSubColUri.hasNext();) {
final XmldbURI srcSubColUri = itSrcSubColUri.next();
final Collection srcSubCol = getCollection(src.getURI().append(srcSubColUri));
checkPermissionsForCopy(srcSubCol, newDestUri);
}
}
/* (non-Javadoc)
* @see org.exist.storage.DBBroker#copyCollection(org.exist.storage.txn.Txn, org.exist.collections.Collection, org.exist.collections.Collection, org.exist.xmldb.XmldbURI)
*/
@Override
public void copyCollection(final Txn transaction, final Collection collection, final Collection destination, final XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
//TODO : resolve URIs !!!
if(newName != null && newName.numSegments() != 1) {
throw new PermissionDeniedException("New collection name must have one segment!");
}
final XmldbURI srcURI = collection.getURI();
final XmldbURI dstURI = destination.getURI().append(newName);
if(collection.getURI().equals(dstURI)) {
throw new PermissionDeniedException("Cannot move collection to itself '"+collection.getURI()+"'.");
}
if(collection.getId() == destination.getId()) {
throw new PermissionDeniedException("Cannot move collection to itself '"+collection.getURI()+"'.");
}
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
final Lock lock = collectionsDb.getLock();
try {
pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_COPY_COLLECTION, collection.getURI());
lock.acquire(Lock.WRITE_LOCK);
final XmldbURI parentName = collection.getParentURI();
final Collection parent = parentName == null ? collection : getCollection(parentName);
final CollectionTriggersVisitor triggersVisitor = parent.getConfiguration(this).getCollectionTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeCopyCollection(this, transaction, collection, dstURI);
//atomically check all permissions in the tree to ensure a copy operation will succeed before starting copying
checkPermissionsForCopy(collection, destination.getURI());
Collection newCollection = doCopyCollection(transaction, collection, destination, newName);
triggersVisitor.afterCopyCollection(this, transaction, newCollection, srcURI);
} finally {
lock.release(Lock.WRITE_LOCK);
pool.getProcessMonitor().endJob();
}
}
}
private Collection doCopyCollection(final Txn transaction, final Collection collection, final Collection destination, XmldbURI newName) throws PermissionDeniedException, IOException, EXistException, TriggerException, LockException {
if(newName == null)
newName = collection.getURI().lastSegment();
newName = destination.getURI().append(newName);
if (LOG.isDebugEnabled())
LOG.debug("Copying collection to '" + newName + "'");
final Collection destCollection = getOrCreateCollection(transaction, newName);
for(Iterator<DocumentImpl> i = collection.iterator(this); i.hasNext(); ) {
final DocumentImpl child = i.next();
if (LOG.isDebugEnabled())
LOG.debug("Copying resource: '" + child.getURI() + "'");
XmldbURI newUri = destCollection.getURI().append(child.getFileURI());
pool.getDocumentTrigger().beforeCopyDocument(this, transaction, child, newUri);
DocumentImpl createdDoc;
if (child.getResourceType() == DocumentImpl.XML_FILE) {
//TODO : put a lock on newDoc ?
final DocumentImpl newDoc = new DocumentImpl(pool, destCollection, child.getFileURI());
newDoc.copyOf(child);
newDoc.setDocId(getNextResourceId(transaction, destination));
copyXMLResource(transaction, child, newDoc);
storeXMLResource(transaction, newDoc);
destCollection.addDocument(transaction, this, newDoc);
createdDoc = newDoc;
} else {
final BinaryDocument newDoc = new BinaryDocument(pool, destCollection, child.getFileURI());
newDoc.copyOf(child);
newDoc.setDocId(getNextResourceId(transaction, destination));
InputStream is = null;
try {
is = getBinaryResource((BinaryDocument)child);
storeBinaryResource(transaction,newDoc,is);
} finally {
is.close();
}
storeXMLResource(transaction, newDoc);
destCollection.addDocument(transaction, this, newDoc);
createdDoc = newDoc;
}
pool.getDocumentTrigger().afterCopyDocument(this, transaction, createdDoc, child.getURI());
}
saveCollection(transaction, destCollection);
final XmldbURI name = collection.getURI();
for(Iterator<XmldbURI> i = collection.collectionIterator(this); i.hasNext(); ) {
final XmldbURI childName = i.next();
//TODO : resolve URIs ! collection.getURI().resolve(childName)
final Collection child = openCollection(name.append(childName), Lock.WRITE_LOCK);
if(child == null) {
LOG.warn("Child collection '" + childName + "' not found");
} else {
try {
doCopyCollection(transaction, child, destCollection, childName);
} finally {
child.release(Lock.WRITE_LOCK);
}
}
}
saveCollection(transaction, destCollection);
saveCollection(transaction, destination);
return destCollection;
}
@Override
public void moveCollection(Txn transaction, Collection collection, Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
if(newName != null && newName.numSegments() != 1) {
throw new PermissionDeniedException("New collection name must have one segment!");
}
if(collection.getId() == destination.getId()) {
throw new PermissionDeniedException("Cannot move collection to itself '"+collection.getURI()+"'.");
}
if(collection.getURI().equals(destination.getURI().append(newName))) {
throw new PermissionDeniedException("Cannot move collection to itself '"+collection.getURI()+"'.");
}
if(collection.getURI().equals(XmldbURI.ROOT_COLLECTION_URI)) {
throw new PermissionDeniedException("Cannot move the db root collection");
}
final XmldbURI parentName = collection.getParentURI();
final Collection parent = parentName == null ? collection : getCollection(parentName);
if(!parent.getPermissions().validate(getSubject(), Permission.WRITE | Permission.EXECUTE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection " + parent.getURI() + " to move collection " + collection.getURI());
}
if(!collection.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection to move collection " + collection.getURI());
}
if(!destination.getPermissions().validate(getSubject(), Permission.WRITE | Permission.EXECUTE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection " + parent.getURI() + " to move collection " + collection.getURI());
}
/*
* If replacing another collection in the move i.e. /db/col1/A -> /db/col2 (where /db/col2/A exists)
* we have to make sure the permissions to remove /db/col2/A are okay!
*
* So we must call removeCollection on /db/col2/A
* Which will ensure that collection can be removed and then remove it.
*/
final XmldbURI movedToCollectionUri = destination.getURI().append(newName);
final Collection existingMovedToCollection = getCollection(movedToCollectionUri);
if(existingMovedToCollection != null) {
removeCollection(transaction, existingMovedToCollection);
}
pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_MOVE_COLLECTION, collection.getURI());
try {
final XmldbURI srcURI = collection.getURI();
final XmldbURI dstURI = destination.getURI().append(newName);
final CollectionTriggersVisitor triggersVisitor = parent.getConfiguration(this).getCollectionTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeMoveCollection(this, transaction, collection, dstURI);
// sourceDir must be known in advance, because once moveCollectionRecursive
// is called, both collection and destination can point to the same resource
final File fsSourceDir = getCollectionFile(fsDir, collection.getURI(),false);
// Need to move each collection in the source tree individually, so recurse.
moveCollectionRecursive(transaction, collection, destination, newName);
// For binary resources, though, just move the top level directory and all descendants come with it.
moveBinaryFork(transaction, fsSourceDir, destination, newName);
triggersVisitor.afterMoveCollection(this, transaction, collection, srcURI);
} finally {
pool.getProcessMonitor().endJob();
}
}
private void moveBinaryFork(Txn transaction, File sourceDir, Collection destination, XmldbURI newName) throws IOException {
final File targetDir = getCollectionFile(fsDir,destination.getURI().append(newName),false);
if (sourceDir.exists()) {
if(targetDir.exists()) {
final File targetDelDir = getCollectionFile(fsBackupDir,transaction,destination.getURI().append(newName),true);
targetDelDir.getParentFile().mkdirs();
if (targetDir.renameTo(targetDelDir)) {
Loggable loggable = new RenameBinaryLoggable(this,transaction,targetDir,targetDelDir);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
LOG.fatal("Cannot rename "+targetDir+" to "+targetDelDir);
}
}
targetDir.getParentFile().mkdirs();
if (sourceDir.renameTo(targetDir)) {
Loggable loggable = new RenameBinaryLoggable(this,transaction,sourceDir,targetDir);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
LOG.fatal("Cannot move "+sourceDir+" to "+targetDir);
}
}
}
private void moveCollectionRecursive(Txn transaction, Collection collection, Collection destination, XmldbURI newName) throws PermissionDeniedException, IOException, LockException, TriggerException {
final XmldbURI uri = collection.getURI();
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
final XmldbURI srcURI = collection.getURI();
final XmldbURI dstURI = destination.getURI().append(newName);
pool.getCollectionTrigger().beforeMoveCollection(this, transaction, collection, dstURI);
final XmldbURI parentName = collection.getParentURI();
final Collection parent = openCollection(parentName, Lock.WRITE_LOCK);
if(parent != null) {
try {
//TODO : resolve URIs
parent.removeCollection(this, uri.lastSegment());
} finally {
parent.release(Lock.WRITE_LOCK);
}
}
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
collectionsCache.remove(collection);
final Value key = new CollectionStore.CollectionKey(uri.toString());
collectionsDb.remove(transaction, key);
//TODO : resolve URIs destination.getURI().resolve(newName)
collection.setPath(destination.getURI().append(newName));
collection.setCreationTime(System.currentTimeMillis());
destination.addCollection(this, collection, false);
if(parent != null) {
saveCollection(transaction, parent);
}
if(parent != destination) {
saveCollection(transaction, destination);
}
saveCollection(transaction, collection);
//} catch (ReadOnlyException e) {
//throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
} finally {
lock.release(Lock.WRITE_LOCK);
}
pool.getCollectionTrigger().afterMoveCollection(this, transaction, collection, srcURI);
for(Iterator<XmldbURI> i = collection.collectionIterator(this); i.hasNext(); ) {
final XmldbURI childName = i.next();
//TODO : resolve URIs !!! name.resolve(childName)
final Collection child = openCollection(uri.append(childName), Lock.WRITE_LOCK);
if(child == null) {
LOG.warn("Child collection " + childName + " not found");
} else {
try {
moveCollectionRecursive(transaction, child, collection, childName);
} finally {
child.release(Lock.WRITE_LOCK);
}
}
}
}
}
/**
* Removes a collection and all child collections and resources
*
* We first traverse down the Collection tree to ensure that the Permissions
* enable the Collection Tree to be removed. We then return back up the Collection
* tree, removing each child as we progresses upwards.
*
* @param transaction the transaction to use
* @param collection the collection to remove
* @return true if the collection was removed, false otherwise
* @throws TriggerException
*/
@Override
public boolean removeCollection(final Txn transaction, Collection collection) throws PermissionDeniedException, IOException, TriggerException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
final XmldbURI parentName = collection.getParentURI();
final boolean isRoot = parentName == null;
final Collection parent = isRoot ? collection : getCollection(parentName);
//parent collection permissions
if(!parent.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
if(!parent.getPermissions().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
//this collection permissions
if(!collection.getPermissions().validate(getSubject(), Permission.READ)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
if(!collection.isEmpty(this)) {
if(!collection.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
if(!collection.getPermissions().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'");
}
}
try {
pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_COLLECTION, collection.getURI());
pool.getCollectionTrigger().beforeDeleteCollection(this, transaction, collection);
final CollectionTriggersVisitor triggersVisitor = parent.getConfiguration(this).getCollectionTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeDeleteCollection(this, transaction, collection);
long start = System.currentTimeMillis();
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
final XmldbURI uri = collection.getURI();
final String collName = uri.getRawCollectionPath();
// Notify the collection configuration manager
pool.getConfigurationManager().invalidateAll(uri);
if(LOG.isDebugEnabled()) {
LOG.debug("Removing children collections from their parent '" + collName + "'...");
}
for(Iterator<XmldbURI> i = collection.collectionIterator(this); i.hasNext();) {
final XmldbURI childName = i.next();
//TODO : resolve from collection's base URI
//TODO : resulve URIs !!! (uri.resolve(childName))
Collection childCollection = openCollection(uri.append(childName), Lock.WRITE_LOCK);
try {
removeCollection(transaction, childCollection);
} finally {
if (childCollection != null) {
childCollection.getLock().release(Lock.WRITE_LOCK);
} else {
LOG.warn("childCollection is null !");
}
}
}
//Drop all index entries
notifyDropIndex(collection);
// Drop custom indexes
indexController.removeCollection(collection, this);
if(!isRoot) {
// remove from parent collection
//TODO : resolve URIs ! (uri.resolve(".."))
Collection parentCollection = openCollection(collection.getParentURI(), Lock.WRITE_LOCK);
// keep the lock for the transaction
if(transaction != null) {
transaction.registerLock(parentCollection.getLock(), Lock.WRITE_LOCK);
}
if(parentCollection != null) {
try {
LOG.debug("Removing collection '" + collName + "' from its parent...");
//TODO : resolve from collection's base URI
parentCollection.removeCollection(this, uri.lastSegment());
saveCollection(transaction, parentCollection);
} catch(LockException e) {
LOG.warn("LockException while removing collection '" + collName + "'");
}
finally {
if(transaction == null){
parentCollection.getLock().release(Lock.WRITE_LOCK);
}
}
}
}
//Update current state
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
// remove the metadata of all documents in the collection
Value docKey = new CollectionStore.DocumentKey(collection.getId());
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, docKey);
collectionsDb.removeAll(transaction, query);
// if this is not the root collection remove it...
if(!isRoot) {
Value key = new CollectionStore.CollectionKey(collName);
//... from the disk
collectionsDb.remove(transaction, key);
//... from the cache
collectionsCache.remove(collection);
//and free its id for any futher use
freeCollectionId(transaction, collection.getId());
} else {
//Simply save the collection on disk
//It will remain cached
//and its id well never be made available
saveCollection(transaction, collection);
}
}
catch(LockException e) {
LOG.warn("Failed to acquire lock on '" + collectionsDb.getFile().getName() + "'");
}
//catch(ReadOnlyException e) {
//throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
//}
catch(BTreeException e) {
LOG.warn("Exception while removing collection: " + e.getMessage(), e);
}
catch(IOException e) {
LOG.warn("Exception while removing collection: " + e.getMessage(), e);
}
finally {
lock.release(Lock.WRITE_LOCK);
}
//Remove child resources
if (LOG.isDebugEnabled()) {
LOG.debug("Removing resources in '" + collName + "'...");
}
for(Iterator<DocumentImpl> i = collection.iterator(this); i.hasNext();) {
final DocumentImpl doc = i.next();
pool.getDocumentTrigger().beforeDeleteDocument(this, transaction, doc);
//Remove doc's metadata
// WM: now removed in one step. see above.
//removeResourceMetadata(transaction, doc);
//Remove document nodes' index entries
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
Value ref = new NodeRef(doc.getDocId());
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
domDb.remove(transaction, query, null);
} catch(BTreeException e) {
LOG.warn("btree error while removing document", e);
} catch(IOException e) {
LOG.warn("io error while removing document", e);
}
catch(TerminatedException e) {
LOG.warn("method terminated", e);
}
return null;
}
}.run();
//Remove nodes themselves
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
if(doc.getResourceType() == DocumentImpl.BINARY_FILE) {
long page = ((BinaryDocument)doc).getPage();
if (page > Page.NO_PAGE)
domDb.removeOverflowValue(transaction, page);
} else {
StoredNode node = (StoredNode)doc.getFirstChild();
domDb.removeAll(transaction, node.getInternalAddress());
}
return null;
}
}.run();
pool.getDocumentTrigger().afterDeleteDocument(this, transaction, doc.getURI());
//Make doc's id available again
freeResourceId(transaction, doc.getDocId());
}
//now that the database has been updated, update the binary collections on disk
final File fsSourceDir = getCollectionFile(fsDir,collection.getURI(),false);
final File fsTargetDir = getCollectionFile(fsBackupDir,transaction,collection.getURI(),true);
// remove child binary collections
if (fsSourceDir.exists()) {
fsTargetDir.getParentFile().mkdirs();
//XXX: log first, rename second ??? -shabanovd
if(fsSourceDir.renameTo(fsTargetDir)) {
final Loggable loggable = new RenameBinaryLoggable(this,transaction,fsSourceDir,fsTargetDir);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
//XXX: throw IOException -shabanovd
LOG.fatal("Cannot rename "+fsSourceDir+" to "+fsTargetDir);
}
}
if(LOG.isDebugEnabled()) {
LOG.debug("Removing collection '" + collName + "' took " + (System.currentTimeMillis() - start));
}
triggersVisitor.afterDeleteCollection(this, transaction, collection.getURI());
pool.getCollectionTrigger().afterDeleteCollection(this, transaction, collection.getURI());
return true;
}
} finally {
pool.getProcessMonitor().endJob();
}
}
/**
* Saves the specified collection to storage. Collections are usually cached in
* memory. If a collection is modified, this method needs to be called to make
* the changes persistent.
*
* Note: appending a new document to a collection does not require a save.
*
* @throws PermissionDeniedException
* @throws IOException
* @throws TriggerException
*/
@Override
public void saveCollection(Txn transaction, Collection collection) throws PermissionDeniedException, IOException, TriggerException {
if (collection == null) {
LOG.error("NativeBroker.saveCollection called with collection == null! Aborting.");
return;
}
if (pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
if (!pool.isInitializing()) {
// don't cache the collection during initialization: SecurityManager is not yet online
pool.getCollectionsCache().add(collection);
}
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
if(collection.getId() == Collection.UNKNOWN_COLLECTION_ID) {
collection.setId(getNextCollectionId(transaction));
}
Value name = new CollectionStore.CollectionKey(collection.getURI().toString());
final VariableByteOutputStream ostream = new VariableByteOutputStream(8);
collection.write(this, ostream);
final long addr = collectionsDb.put(transaction, name, ostream.data(), true);
if (addr == BFile.UNKNOWN_ADDRESS) {
//TODO : exception !!! -pb
LOG.warn("could not store collection data for '" + collection.getURI()+ "'");
return;
}
collection.setAddress(addr);
ostream.close();
} catch (ReadOnlyException e) {
LOG.warn(DATABASE_IS_READ_ONLY);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
/**
* Release the collection id assigned to a collection so it can be
* reused later.
*
* @param id
* @throws PermissionDeniedException
*/
protected void freeCollectionId(Txn transaction, int id) throws PermissionDeniedException {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.FREE_COLLECTION_ID_KEY);
Value value = collectionsDb.get(key);
if (value != null) {
byte[] data = value.getData();
byte[] ndata = new byte[data.length + Collection.LENGTH_COLLECTION_ID];
System.arraycopy(data, 0, ndata, OFFSET_VALUE, data.length);
ByteConversion.intToByte(id, ndata, OFFSET_COLLECTION_ID);
collectionsDb.put(transaction, key, ndata, true);
} else {
byte[] data = new byte[Collection.LENGTH_COLLECTION_ID];
ByteConversion.intToByte(id, data, OFFSET_COLLECTION_ID);
collectionsDb.put(transaction, key, data, true);
}
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
//TODO : rethrow ? -pb
//} catch (ReadOnlyException e) {
//throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
/**
* Get the next free collection id. If a collection is removed, its collection id
* is released so it can be reused.
*
* @return next free collection id.
* @throws ReadOnlyException
*/
public int getFreeCollectionId(Txn transaction) throws ReadOnlyException {
int freeCollectionId = Collection.UNKNOWN_COLLECTION_ID;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.FREE_COLLECTION_ID_KEY);
Value value = collectionsDb.get(key);
if (value != null) {
byte[] data = value.getData();
freeCollectionId = ByteConversion.byteToInt(data, data.length - Collection.LENGTH_COLLECTION_ID);
//LOG.debug("reusing collection id: " + freeCollectionId);
if(data.length - Collection.LENGTH_COLLECTION_ID > 0) {
byte[] ndata = new byte[data.length - Collection.LENGTH_COLLECTION_ID];
System.arraycopy(data, 0, ndata, OFFSET_COLLECTION_ID, ndata.length);
collectionsDb.put(transaction, key, ndata, true);
} else {
collectionsDb.remove(transaction, key);
}
}
return freeCollectionId;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
return Collection.UNKNOWN_COLLECTION_ID;
//TODO : rethrow ? -pb
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
/**
* Get the next available unique collection id.
*
* @return next available unique collection id
* @throws ReadOnlyException
*/
public int getNextCollectionId(Txn transaction) throws ReadOnlyException {
int nextCollectionId = getFreeCollectionId(transaction);
if (nextCollectionId != Collection.UNKNOWN_COLLECTION_ID)
return nextCollectionId;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.NEXT_COLLECTION_ID_KEY);
Value data = collectionsDb.get(key);
if (data != null) {
nextCollectionId = ByteConversion.byteToInt(data.getData(), OFFSET_COLLECTION_ID);
++nextCollectionId;
}
byte[] d = new byte[Collection.LENGTH_COLLECTION_ID];
ByteConversion.intToByte(nextCollectionId, d, OFFSET_COLLECTION_ID);
collectionsDb.put(transaction, key, d, true);
return nextCollectionId;
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
return Collection.UNKNOWN_COLLECTION_ID;
//TODO : rethrow ? -pb
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
@Override
public void reindexCollection(XmldbURI collectionName) throws PermissionDeniedException {
if (pool.isReadOnly())
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
collectionName = prepend(collectionName.toCollectionPathURI());
Collection collection = getCollection(collectionName);
if (collection == null) {
LOG.debug("collection " + collectionName + " not found!");
return;
}
reindexCollection(collection, NodeProcessor.MODE_STORE);
}
public void reindexCollection(Collection collection, int mode) throws PermissionDeniedException {
TransactionManager transact = pool.getTransactionManager();
Txn transaction = transact.beginTransaction();
try {
pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_REINDEX_COLLECTION, collection.getURI());
reindexCollection(transaction, collection, mode);
transact.commit(transaction);
} catch (TransactionException e) {
transact.abort(transaction);
LOG.warn("An error occurred during reindex: " + e.getMessage(), e);
} finally {
pool.getProcessMonitor().endJob();
}
}
public void reindexCollection(Txn transaction, Collection collection, int mode) throws PermissionDeniedException {
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
if (!collection.getPermissions().validate(getSubject(), Permission.WRITE))
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection " + collection.getURI());
LOG.debug("Reindexing collection " + collection.getURI());
if (mode == NodeProcessor.MODE_STORE)
dropCollectionIndex(transaction, collection);
for(Iterator<DocumentImpl> i = collection.iterator(this); i.hasNext(); ) {
DocumentImpl next = i.next();
reindexXMLResource(transaction, next, mode);
}
for(Iterator<XmldbURI> i = collection.collectionIterator(this); i.hasNext(); ) {
XmldbURI next = i.next();
//TODO : resolve URIs !!! (collection.getURI().resolve(next))
Collection child = getCollection(collection.getURI().append(next));
if(child == null)
LOG.warn("Collection '" + next + "' not found");
else {
reindexCollection(transaction, child, mode);
}
}
}
}
public void dropCollectionIndex(final Txn transaction, Collection collection) throws PermissionDeniedException {
if (pool.isReadOnly())
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
if (!collection.getPermissions().validate(getSubject(), Permission.WRITE))
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on collection " +collection.getURI());
notifyDropIndex(collection);
indexController.removeCollection(collection, this);
for (Iterator<DocumentImpl> i = collection.iterator(this); i.hasNext();) {
final DocumentImpl doc = i.next();
LOG.debug("Dropping index for document " + doc.getFileURI());
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
Value ref = new NodeRef(doc.getDocId());
IndexQuery query =
new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
domDb.remove(transaction, query, null);
domDb.flush();
} catch (BTreeException e) {
LOG.warn("btree error while removing document", e);
} catch (DBException e) {
LOG.warn("db error while removing document", e);
} catch (IOException e) {
LOG.warn("io error while removing document", e);
} catch (TerminatedException e) {
LOG.warn("method terminated", e);
}
return null;
}
}
.run();
}
}
/** Store into the temporary collection of the database a given in-memory Document
*
* The in-memory Document is stored without a transaction and is not journalled,
* if there is no temporary collection, this will first be created with a transaction
*
* @param doc The in-memory Document to store
* @return The document stored in the temp collection
*/
@Override
public DocumentImpl storeTempResource(org.exist.memtree.DocumentImpl doc)
throws EXistException, PermissionDeniedException, LockException {
//store the currentUser
Subject currentUser = getSubject();
//elevate getUser() to DBA_USER
setSubject(pool.getSecurityManager().getSystemSubject() );
//start a transaction
TransactionManager transact = pool.getTransactionManager();
Txn transaction = transact.beginTransaction();
//create a name for the temporary document
XmldbURI docName = XmldbURI.create(MessageDigester.md5(Thread.currentThread().getName() + Long.toString(System.currentTimeMillis()),false) + ".xml");
//get the temp collection
Collection temp = openCollection(XmldbURI.TEMP_COLLECTION_URI, Lock.WRITE_LOCK);
boolean created = false;
try {
//if no temp collection
if(temp == null) {
//creates temp collection (with write lock)
temp = createTempCollection(transaction);
if(temp == null) {
LOG.warn("Failed to create temporary collection");
//TODO : emergency exit?
}
created = true;
}
//create a temporary document
DocumentImpl targetDoc = new DocumentImpl(pool, temp, docName);
targetDoc.getPermissions().setMode(Permission.DEFAULT_TEMPORARY_DOCUMENT_PERM);
long now = System.currentTimeMillis();
DocumentMetadata metadata = new DocumentMetadata();
metadata.setLastModified(now);
metadata.setCreated(now);
targetDoc.setMetadata(metadata);
targetDoc.setDocId(getNextResourceId(transaction, temp));
//index the temporary document
DOMIndexer indexer = new DOMIndexer(this, transaction, doc, targetDoc); //NULL transaction, so temporary fragment is not journalled - AR
indexer.scan();
indexer.store();
//store the temporary document
temp.addDocument(transaction, this, targetDoc); //NULL transaction, so temporary fragment is not journalled - AR
// unlock the temp collection
if(transaction == null)
temp.getLock().release(Lock.WRITE_LOCK);
else if (!created)
transaction.registerLock(temp.getLock(), Lock.WRITE_LOCK);
//NULL transaction, so temporary fragment is not journalled - AR
storeXMLResource(transaction, targetDoc);
flush();
closeDocument();
//commit the transaction
transact.commit(transaction);
return targetDoc;
} catch (Exception e) {
LOG.warn("Failed to store temporary fragment: " + e.getMessage(), e);
//abort the transaction
transact.abort(transaction);
}
finally {
//restore the user
setUser(currentUser);
}
return null;
}
/** remove all documents from temporary collection
*
* @param forceRemoval Should temporary resources be forcefully removed
*/
@Override
public void cleanUpTempResources(boolean forceRemoval) throws PermissionDeniedException {
Collection temp = getCollection(XmldbURI.TEMP_COLLECTION_URI);
if(temp == null)
return;
TransactionManager transact = pool.getTransactionManager();
Txn transaction = transact.beginTransaction();
try {
removeCollection(transaction, temp);
transact.commit(transaction);
} catch(Exception e) {
transact.abort(transaction);
LOG.warn("Failed to remove temp collection: " + e.getMessage(), e);
}
}
@Override
public DocumentImpl getResourceById(int collectionId, byte resourceType, int documentId) throws PermissionDeniedException {
XmldbURI uri = null;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
//final VariableByteOutputStream ostream = new VariableByteOutputStream(8);
//doc.write(ostream);
//Value key = new CollectionStore.DocumentKey(doc.getCollection().getId(), doc.getResourceType(), doc.getDocId());
//collectionsDb.put(transaction, key, ostream.data(), true);
//Value collectionKey = new CollectionStore.CollectionKey
//collectionsDb.get(Value.EMPTY_VALUE)
//get the collection uri
String collectionUri = null;
if(collectionId == 0) {
collectionUri = "/db";
} else {
for(Value collectionDbKey : collectionsDb.getKeys()) {
if(collectionDbKey.data()[0] == CollectionStore.KEY_TYPE_COLLECTION) {
//Value collectionDbValue = collectionsDb.get(collectionDbKey);
VariableByteInput vbi = collectionsDb.getAsStream(collectionDbKey);
int id = vbi.readInt();
//check if the collection id matches (first 4 bytes)
if(collectionId == id) {
collectionUri = new String(Arrays.copyOfRange(collectionDbKey.data(), 1, collectionDbKey.data().length));
break;
}
}
}
}
//get the resource uri
Value key = new CollectionStore.DocumentKey(collectionId, resourceType, documentId);
VariableByteInput vbi = collectionsDb.getAsStream(key);
vbi.readInt(); //skip doc id
final String resourceUri = vbi.readUTF();
//get the resource
uri = XmldbURI.createInternal(collectionUri + "/" + resourceUri);
} catch (TerminatedException te) {
LOG.error("Query Terminated", te);
return null;
} catch (BTreeException bte) {
LOG.error("Problem reading btree", bte);
return null;
} catch (LockException e) {
LOG.error("Failed to acquire lock on " + collectionsDb.getFile().getName());
return null;
} catch (IOException e) {
LOG.error("IOException while reading recource data", e);
return null;
} finally {
lock.release(Lock.READ_LOCK);
}
return getResource(uri, Permission.READ);
}
/** store Document entry into its collection. */
@Override
public void storeXMLResource(final Txn transaction, final DocumentImpl doc) {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
final VariableByteOutputStream ostream = new VariableByteOutputStream(8);
doc.write(ostream);
Value key = new CollectionStore.DocumentKey(doc.getCollection().getId(), doc.getResourceType(), doc.getDocId());
collectionsDb.put(transaction, key, ostream.data(), true);
//} catch (ReadOnlyException e) {
//LOG.warn(DATABASE_IS_READ_ONLY);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while writing document data", e);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
public void storeMetadata(final Txn transaction, final DocumentImpl doc) throws TriggerException {
Collection col = doc.getCollection();
DocumentTriggersVisitor triggersVisitor = null;
if(col.isTriggersEnabled()) {
triggersVisitor = col.getConfiguration(this).getDocumentTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeUpdateDocumentMetadata(this, transaction, doc);
}
storeXMLResource(transaction, doc);
if (triggersVisitor != null)
triggersVisitor.afterUpdateDocumentMetadata(this, transaction, doc);
}
private File getCollectionFile(File dir,XmldbURI uri,boolean create) throws IOException {
return getCollectionFile(dir,null,uri,create);
}
public File getCollectionBinaryFileFsPath(XmldbURI uri) {
return new File(fsDir, uri.getURI().toString());
}
private File getCollectionFile(File dir,Txn transaction,XmldbURI uri,boolean create)
throws IOException {
if (transaction!=null) {
dir = new File(dir,"txn."+transaction.getId());
if (create && !dir.exists()) {
if (!dir.mkdir()) {
throw new IOException("Cannot make transaction filesystem directory: "+dir);
}
}
}
XmldbURI [] segments = uri.getPathSegments();
File binFile = dir;
int last = segments.length-1;
for (int i=0; i<segments.length; i++) {
binFile = new File(binFile,segments[i].toString());
if (create && i!=last && !binFile.exists()) {
if (!binFile.mkdir()) {
throw new IOException("Cannot make collection filesystem directory: "+binFile);
}
}
}
return binFile;
}
@Deprecated
@Override
public void storeBinaryResource(final Txn transaction, final BinaryDocument blob, final byte[] data)
throws IOException {
blob.setPage(Page.NO_PAGE);
File binFile = getCollectionFile(fsDir,blob.getURI(),true);
File backupFile = null;
boolean exists = binFile.exists();
if (exists) {
backupFile = getCollectionFile(fsBackupDir,transaction,blob.getURI(),true);
if (!binFile.renameTo(backupFile)) {
throw new IOException("Cannot backup binary resource for journal to "+backupFile);
}
}
OutputStream os = new FileOutputStream(binFile);
os.write(data,0,data.length);
os.close();
if (exists) {
Loggable loggable = new UpdateBinaryLoggable(this,transaction,binFile,backupFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
Loggable loggable = new CreateBinaryLoggable(this,transaction,binFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
}
}
@Override
public void storeBinaryResource(final Txn transaction, final BinaryDocument blob, final InputStream is)
throws IOException {
blob.setPage(Page.NO_PAGE);
File binFile = getCollectionFile(fsDir,blob.getURI(),true);
File backupFile = null;
boolean exists = binFile.exists();
if (exists) {
backupFile = getCollectionFile(fsBackupDir,transaction,blob.getURI(),true);
if (!binFile.renameTo(backupFile)) {
throw new IOException("Cannot backup binary resource for journal to "+backupFile);
}
}
byte [] buffer = new byte[65536];
OutputStream os = new FileOutputStream(binFile);
int len;
while ((len = is.read(buffer))>=0) {
if (len>0) {
os.write(buffer,0,len);
}
}
os.close();
if (exists) {
Loggable loggable = new UpdateBinaryLoggable(this,transaction,binFile,backupFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
Loggable loggable = new CreateBinaryLoggable(this,transaction,binFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
}
}
public Document getXMLResource(XmldbURI fileName) throws PermissionDeniedException {
return getResource(fileName, Permission.READ);
}
/**
* get a document by its file name. The document's file name is used to
* identify a document.
*
*@param fileName absolute file name in the database;
*name can be given with or without the leading path /db/shakespeare.
*@return The document value
*@exception PermissionDeniedException
*/
@Override
public DocumentImpl getResource(XmldbURI fileName, int accessType) throws PermissionDeniedException {
fileName = prepend(fileName.toCollectionPathURI());
//TODO : resolve URIs !!!
XmldbURI collUri = fileName.removeLastSegment();
XmldbURI docUri = fileName.lastSegment();
Collection collection = getCollection(collUri);
if (collection == null) {
LOG.debug("collection '" + collUri + "' not found!");
return null;
}
if(!collection.getPermissions().validate(getSubject(), Permission.READ)) {
throw new PermissionDeniedException("Permission denied to read collection '" + collUri + "' by " + getSubject().getName());
}
DocumentImpl doc = collection.getDocument(this, docUri);
if (doc == null) {
LOG.debug("document '" + fileName + "' not found!");
return null;
}
if(!doc.getPermissions().validate(getSubject(), accessType)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' not allowed requested access to document '" + fileName + "'");
}
if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
BinaryDocument bin = (BinaryDocument)doc;
try {
bin.setContentLength(getBinaryResourceSize(bin));
} catch (IOException ex) {
LOG.fatal("Cannot get content size for "+bin.getURI(),ex);
}
}
return doc;
}
@Override
public DocumentImpl getXMLResource(XmldbURI fileName, int lockMode) throws PermissionDeniedException {
if(fileName==null) {
return null;
}
fileName = prepend(fileName.toCollectionPathURI());
//TODO : resolve URIs !
XmldbURI collUri = fileName.removeLastSegment();
XmldbURI docUri = fileName.lastSegment();
Collection collection = openCollection(collUri, lockMode);
if (collection == null) {
LOG.debug("collection '" + collUri + "' not found!");
return null;
}
try {
if (!collection.getPermissions().validate(getSubject(), Permission.READ))
throw new PermissionDeniedException("Permission denied to read collection '" + collUri + "' by " + getSubject().getName());
DocumentImpl doc = collection.getDocumentWithLock(this, docUri, lockMode);
if (doc == null) {
//LOG.debug("document '" + fileName + "' not found!");
return null;
}
//if (!doc.getMode().validate(getUser(), Permission.READ))
//throw new PermissionDeniedException("not allowed to read document");
if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
BinaryDocument bin = (BinaryDocument)doc;
try {
bin.setContentLength(getBinaryResourceSize(bin));
} catch (IOException ex) {
LOG.fatal("Cannot get content size for "+bin.getURI(),ex);
}
}
return doc;
} catch (LockException e) {
LOG.warn("Could not acquire lock on document " + fileName, e);
//TODO : exception ? -pb
} finally {
//TOUNDERSTAND : by whom is this lock acquired ? -pb
// If we don't check for the NO_LOCK we'll pop someone else's lock off
if(lockMode != Lock.NO_LOCK)
collection.release(lockMode);
}
return null;
}
@Override
public void readBinaryResource(final BinaryDocument blob, final OutputStream os)
throws IOException {
InputStream is = null;
try {
is = getBinaryResource(blob);
byte [] buffer = new byte[655360];
int len;
while ((len=is.read(buffer))>=0) {
os.write(buffer,0,len);
}
} finally {
if(is != null)
is.close();
}
}
@Override
public long getBinaryResourceSize(final BinaryDocument blob)
throws IOException {
File binFile = getCollectionFile(fsDir,blob.getURI(),false);
return binFile.length();
}
@Override
public InputStream getBinaryResource(final BinaryDocument blob)
throws IOException {
File binFile = getCollectionFile(fsDir,blob.getURI(),false);
return new FileInputStream(binFile);
}
//TODO : consider a better cooperation with Collection -pb
@Override
public void getCollectionResources(Collection.InternalAccess collectionInternalAccess) {
final Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.DocumentKey(collectionInternalAccess.getId());
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key);
collectionsDb.query(query, new DocumentCallback(collectionInternalAccess));
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while reading document data", e);
} catch (BTreeException e) {
LOG.warn("Exception while reading document data", e);
} catch (TerminatedException e) {
LOG.warn("Exception while reading document data", e);
} finally {
lock.release(Lock.READ_LOCK);
}
}
@Override
public void getResourcesFailsafe(BTreeCallback callback, boolean fullScan) throws TerminatedException {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.DocumentKey();
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key);
if (fullScan)
collectionsDb.rawScan(query, callback);
else
collectionsDb.query(query, callback);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while reading document data", e);
} catch (BTreeException e) {
LOG.warn("Exception while reading document data", e);
} finally {
lock.release(Lock.READ_LOCK);
}
}
@Override
public void getCollectionsFailsafe(BTreeCallback callback) throws TerminatedException {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.CollectionKey();
IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key);
collectionsDb.query(query, callback);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while reading document data", e);
} catch (BTreeException e) {
LOG.warn("Exception while reading document data", e);
} finally {
lock.release(Lock.READ_LOCK);
}
}
/**
* Get all the documents in this database matching the given
* document-type's name.
* @return The documentsByDoctype value
*/
@Override
public MutableDocumentSet getXMLResourcesByDoctype(String doctypeName, MutableDocumentSet result) throws PermissionDeniedException {
MutableDocumentSet docs = getAllXMLResources(new DefaultDocumentSet());
for (Iterator<DocumentImpl> i = docs.getDocumentIterator(); i.hasNext();) {
DocumentImpl doc = i.next();
DocumentType doctype = doc.getDoctype();
if (doctype == null)
continue;
if (doctypeName.equals(doctype.getName())
&& doc.getCollection().getPermissions().validate(getSubject(), Permission.READ)
&& doc.getPermissions().validate(getSubject(), Permission.READ))
result.add(doc);
}
return result;
}
/**
* Adds all the documents in the database to the specified DocumentSet.
*
* @param docs a (possibly empty) document set to which the found
* documents are added.
*/
@Override
public MutableDocumentSet getAllXMLResources(MutableDocumentSet docs) throws PermissionDeniedException {
long start = System.currentTimeMillis();
Collection rootCollection = null;
try {
rootCollection = openCollection(XmldbURI.ROOT_COLLECTION_URI, Lock.READ_LOCK);
rootCollection.allDocs(this, docs, true);
if (LOG.isDebugEnabled()) {
LOG.debug("getAllDocuments(DocumentSet) - end - "
+ "loading "
+ docs.getDocumentCount()
+ " documents took "
+ (System.currentTimeMillis() - start)
+ "ms.");
}
return docs;
} finally {
if (rootCollection != null)
rootCollection.release(Lock.READ_LOCK);
}
}
//TODO : consider a better cooperation with Collection -pb
@Override
public void getResourceMetadata(DocumentImpl document) {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
Value key = new CollectionStore.DocumentKey(document.getCollection().getId(), document.getResourceType(), document.getDocId());
VariableByteInput istream = collectionsDb.getAsStream(key);
if(istream != null) {
document.readDocumentMeta(istream);
}
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} catch (IOException e) {
LOG.warn("IOException while reading document data", e);
} finally {
lock.release(Lock.READ_LOCK);
}
}
/**
* @param doc src document
* @param destination destination collection
* @param newName the new name for the document
*/
@Override
public void copyResource(Txn transaction, DocumentImpl doc, Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, EXistException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
Collection collection = doc.getCollection();
if(!collection.getPermissions().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' has insufficient privileges to copy the resource '" + doc.getFileURI() + "'.");
}
if(!doc.getPermissions().validate(getSubject(), Permission.READ)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' has insufficient privileges to copy the resource '" + doc.getFileURI() + "'.");
}
if(newName == null) {
newName = doc.getFileURI();
}
final CollectionCache collectionsCache = pool.getCollectionsCache();
synchronized(collectionsCache) {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
DocumentImpl oldDoc = destination.getDocument(this, newName);
if(!destination.getPermissions().validate(getSubject(), Permission.EXECUTE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' does not have execute access on the destination collection '" + destination.getURI() + "'.");
}
if(!destination.hasChildCollection(this, newName.lastSegment())) {
throw new EXistException(
"The collection '" + destination.getURI() + "' already has a sub-collection named '" + newName.lastSegment() + "', you cannot create a Document with the same name as an existing collection."
);
}
final XmldbURI newURI = destination.getURI().append(newName);
final XmldbURI oldUri = doc.getURI();
if(oldDoc == null) {
if(!destination.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Account '" + getSubject().getName() + "' does not have write access on the destination collection '" + destination.getURI() + "'.");
}
} else {
//overwrite existing document
if(doc.getDocId() == oldDoc.getDocId()){
throw new EXistException("Cannot copy resource to itself '" +doc.getURI() + "'.");
}
if(!oldDoc.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("A resource with the same name already exists in the target collection '" + oldDoc.getURI() + "', and you do not have write access on that resource.");
}
getDatabase().getDocumentTrigger().beforeDeleteDocument(this, transaction, oldDoc);
getDatabase().getDocumentTrigger().afterDeleteDocument(this, transaction, newURI);
}
getDatabase().getDocumentTrigger().beforeCopyDocument(this, transaction, doc, newURI);
final DocumentTriggersVisitor triggersVisitor = collection.getConfiguration(this).getDocumentTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeCopyDocument(this, transaction, doc, newURI);
DocumentImpl newDocument = null;
if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
InputStream is = null;
try {
is = getBinaryResource((BinaryDocument) doc);
newDocument = destination.addBinaryResource(transaction, this, newName, is, doc.getMetadata().getMimeType(),-1);
} finally {
if(is != null)
is.close();
}
} else {
DocumentImpl newDoc = new DocumentImpl(pool, destination, newName);
newDoc.copyOf(doc);
newDoc.setDocId(getNextResourceId(transaction, destination));
newDoc.setPermissions(doc.getPermissions());
newDoc.getUpdateLock().acquire(Lock.WRITE_LOCK);
try {
copyXMLResource(transaction, doc, newDoc);
destination.addDocument(transaction, this, newDoc);
storeXMLResource(transaction, newDoc);
} finally {
newDoc.getUpdateLock().release(Lock.WRITE_LOCK);
}
newDocument = newDoc;
}
getDatabase().getDocumentTrigger().afterCopyDocument(this, transaction, newDocument, oldUri);
triggersVisitor.afterCopyDocument(this, transaction, newDocument, oldUri);
} catch (IOException e) {
LOG.warn("An error occurred while copying resource", e);
} catch (TriggerException e) {
throw new PermissionDeniedException(e.getMessage(), e);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
}
private void copyXMLResource(Txn transaction, DocumentImpl oldDoc, DocumentImpl newDoc) {
LOG.debug("Copying document " + oldDoc.getFileURI() + " to " +
newDoc.getURI());
final long start = System.currentTimeMillis();
indexController.setDocument(newDoc, StreamListener.STORE);
StreamListener listener = indexController.getStreamListener();
NodeList nodes = oldDoc.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
copyNodes(transaction, iterator, node, new NodePath(), newDoc, false, true, listener);
}
flush();
closeDocument();
LOG.debug("Copy took " + (System.currentTimeMillis() - start) + "ms.");
}
/**
* Move (and/or rename) a Resource to another collection
*
* @param doc source document
* @param destination the destination collection
* @param newName the new name for the resource
*
* @throws TriggerException
*/
@Override
public void moveResource(Txn transaction, DocumentImpl doc, Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
final Account docUser = doc.getUserLock();
if(docUser != null) {
if(!(getSubject().getName()).equals(docUser.getName())) {
throw new PermissionDeniedException("Cannot move '" + doc.getFileURI() + " because is locked by getUser() '" + docUser.getName() + "'");
}
}
final Collection collection = doc.getCollection();
if(!collection.getPermissions().validate(getSubject(), Permission.WRITE | Permission.EXECUTE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on source Collection to move resource " + doc.getFileURI());
}
//must be owner of have execute access for the rename
if(!((doc.getPermissions().getOwner().getId() != getSubject().getId()) | (doc.getPermissions().validate(getSubject(), Permission.EXECUTE)))) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on destination Collection to move resource " + doc.getFileURI());
}
if(!destination.getPermissions().validate(getSubject(), Permission.WRITE | Permission.EXECUTE)) {
throw new PermissionDeniedException("Account "+getSubject().getName()+" have insufficient privileges on destination Collection to move resource " + doc.getFileURI());
}
/* Copy reference to original document */
final File fsOriginalDocument = getCollectionFile(fsDir, doc.getURI(), true);
final XmldbURI oldName = doc.getFileURI();
if(newName == null) {
newName = oldName;
}
try {
if(!destination.hasChildCollection(this, newName.lastSegment())) {
throw new PermissionDeniedException(
"The collection '" + destination.getURI() + "' have collection '" + newName.lastSegment() + "'. " +
"Document with same name can't be created."
);
}
// check if the move would overwrite a collection
//TODO : resolve URIs : destination.getURI().resolve(newName)
final DocumentImpl oldDoc = destination.getDocument(this, newName);
if(oldDoc != null) {
if(doc.getDocId() == oldDoc.getDocId()) {
throw new PermissionDeniedException("Cannot move resource to itself '"+doc.getURI()+"'.");
}
// GNU mv command would prompt for Confirmation here, you can say yes or pass the '-f' flag. As we cant prompt for confirmation we assume OK
/* if(!oldDoc.getPermissions().validate(getSubject(), Permission.WRITE)) {
throw new PermissionDeniedException("Resource with same name exists in target collection and write is denied");
}
*/
pool.getDocumentTrigger().beforeDeleteDocument(this, transaction, oldDoc);
pool.getDocumentTrigger().afterDeleteDocument(this, transaction, oldDoc.getURI());
}
boolean renameOnly = collection.getId() == destination.getId();
final XmldbURI oldURI = doc.getURI();
final XmldbURI newURI = destination.getURI().append(newName);
pool.getDocumentTrigger().beforeMoveDocument(this, transaction, doc, newURI);
final DocumentTriggersVisitor triggersVisitor = collection.getConfiguration(this).getDocumentTriggerProxies().instantiateVisitor(this);
triggersVisitor.beforeMoveDocument(this, transaction, doc, newURI);
collection.unlinkDocument(this, doc);
removeResourceMetadata(transaction, doc);
doc.setFileURI(newName);
if(doc.getResourceType() == DocumentImpl.XML_FILE) {
if(!renameOnly) {
//XXX: BUG: doc have new uri here!
dropIndex(transaction, doc);
saveCollection(transaction, collection);
}
doc.setCollection(destination);
destination.addDocument(transaction, this, doc);
if(!renameOnly) {
// reindexing
reindexXMLResource(transaction, doc, NodeProcessor.MODE_REPAIR);
}
} else {
// binary resource
doc.setCollection(destination);
destination.addDocument(transaction, this, doc);
final File colDir = getCollectionFile(fsDir,destination.getURI(),true);
final File binFile = new File(colDir,newName.lastSegment().toString());
final File sourceFile = getCollectionFile(fsDir,doc.getURI(),false);
/* Create required directories */
binFile.getParentFile().mkdirs();
/* Rename original file to new location */
if(fsOriginalDocument.renameTo(binFile)) {
final Loggable loggable = new RenameBinaryLoggable(this,transaction,sourceFile,binFile);
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
} else {
LOG.fatal("Cannot rename "+sourceFile+" to "+binFile+" for journaling of binary resource move.");
}
}
storeXMLResource(transaction, doc);
saveCollection(transaction, destination);
pool.getDocumentTrigger().afterMoveDocument(this, transaction, doc, oldURI);
triggersVisitor.afterMoveDocument(this, transaction, doc, oldURI);
} catch (ReadOnlyException e) {
throw new PermissionDeniedException(e.getMessage(), e);
}
}
@Override
public void removeXMLResource(final Txn transaction, final DocumentImpl document, boolean freeDocId) throws PermissionDeniedException {
if (pool.isReadOnly())
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
try {
if (LOG.isInfoEnabled()) {
LOG.info("Removing document " + document.getFileURI() +
" (" + document.getDocId() + ") ...");
}
if (freeDocId) {
pool.getDocumentTrigger().beforeDeleteDocument(this, transaction, document);
}
dropIndex(transaction, document);
if (LOG.isDebugEnabled()) {
LOG.debug("removeDocument() - removing dom");
}
if (!document.getMetadata().isReferenced()) {
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
StoredNode node = (StoredNode)document.getFirstChild();
domDb.removeAll(transaction, node.getInternalAddress());
return null;
}
}.run();
}
NodeRef ref = new NodeRef(document.getDocId());
final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
domDb.remove(transaction, idx, null);
} catch (BTreeException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (IOException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (TerminatedException e) {
LOG.warn("method terminated", e);
}
return null;
}
}.run();
removeResourceMetadata(transaction, document);
if (freeDocId) {
freeResourceId(transaction, document.getDocId());
pool.getDocumentTrigger().afterDeleteDocument(this, transaction, document.getURI());
}
} catch (ReadOnlyException e) {
LOG.warn("removeDocument(String) - " + DATABASE_IS_READ_ONLY);
} catch (TriggerException e) {
LOG.warn(e);
}
}
private void dropIndex(Txn transaction, DocumentImpl document) throws ReadOnlyException {
indexController.setDocument(document, StreamListener.REMOVE_ALL_NODES);
StreamListener listener = indexController.getStreamListener();
NodeList nodes = document.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
scanNodes(transaction, iterator, node, new NodePath(), NodeProcessor.MODE_REMOVE, listener);
}
notifyDropIndex(document);
indexController.flush();
}
@Override
public void removeBinaryResource(final Txn transaction, final BinaryDocument blob) throws PermissionDeniedException,IOException {
if(pool.isReadOnly()) {
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
}
if(LOG.isDebugEnabled()) {
LOG.debug("removing binary resource " + blob.getDocId() + "...");
}
File binFile = getCollectionFile(fsDir,blob.getURI(),false);
if (binFile.exists()) {
File binBackupFile = getCollectionFile(fsBackupDir, transaction, blob.getURI(), true);
Loggable loggable = new RenameBinaryLoggable(this, transaction, binFile, binBackupFile);
if (!binFile.renameTo(binBackupFile)) {
// Workaround for Java bug 6213298 - renameTo() sometimes doesn't work
// See http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6213298
System.gc();
try {
Thread.sleep(50);
} catch (Exception e) {
//ignore
}
if (!binFile.renameTo(binBackupFile)) {
throw new IOException("Cannot move file " + binFile
+ " for delete journal to " + binBackupFile);
}
}
try {
logManager.writeToLog(loggable);
} catch (TransactionException e) {
LOG.warn(e.getMessage(), e);
}
}
removeResourceMetadata(transaction, blob);
getIndexController().setDocument(blob, StreamListener.REMOVE_BINARY);
getIndexController().flush();
}
/**
* @param transaction
* @param document
*/
private void removeResourceMetadata(final Txn transaction, final DocumentImpl document) {
// remove document metadata
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.READ_LOCK);
if (LOG.isDebugEnabled())
LOG.debug("Removing resource metadata for " + document.getDocId());
Value key = new CollectionStore.DocumentKey(document.getCollection().getId(), document.getResourceType(), document.getDocId());
collectionsDb.remove(transaction, key);
//} catch (ReadOnlyException e) {
//LOG.warn(DATABASE_IS_READ_ONLY);
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
} finally {
lock.release(Lock.READ_LOCK);
}
}
/**
* Release the document id reserved for a document so it
* can be reused.
*
* @param id
* @throws PermissionDeniedException
*/
protected void freeResourceId(Txn transaction, int id) throws PermissionDeniedException {
if (incrementalDocIds)
return;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.FREE_DOC_ID_KEY);
Value value = collectionsDb.get(key);
if (value != null) {
byte[] data = value.getData();
byte[] ndata = new byte[data.length + 4];
System.arraycopy(data, 0, ndata, 4, data.length);
ByteConversion.intToByte(id, ndata, 0);
collectionsDb.put(transaction, key, ndata, true);
} else {
byte[] data = new byte[4];
ByteConversion.intToByte(id, data, 0);
collectionsDb.put(transaction, key, data, true);
}
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
//TODO : rethrow ? -pb
//} catch (ReadOnlyException e) {
//throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
} finally {
lock.release(Lock.WRITE_LOCK);
}
}
/**
* Get the next unused document id. If a document is removed, its doc id is
* released, so it can be reused.
*
* @return Next unused document id
* @throws ReadOnlyException
*/
public int getFreeResourceId(Txn transaction) throws ReadOnlyException {
int freeDocId = DocumentImpl.UNKNOWN_DOCUMENT_ID;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.FREE_DOC_ID_KEY);
Value value = collectionsDb.get(key);
if (value != null) {
byte[] data = value.getData();
freeDocId = ByteConversion.byteToInt(data, data.length - 4);
//LOG.debug("reusing document id: " + freeDocId);
if(data.length - 4 > 0) {
byte[] ndata = new byte[data.length - 4];
System.arraycopy(data, 0, ndata, 0, ndata.length);
collectionsDb.put(transaction, key, ndata, true);
} else {
collectionsDb.remove(transaction, key);
}
}
//TODO : maybe something ? -pb
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
return DocumentImpl.UNKNOWN_DOCUMENT_ID;
//TODO : rethrow ? -pb
} finally {
lock.release(Lock.WRITE_LOCK);
}
return freeDocId;
}
/** get next Free Doc Id
* @throws EXistException If there's no free document id */
@Override
public int getNextResourceId(Txn transaction, Collection collection) throws EXistException {
int nextDocId;
try {
nextDocId = getFreeResourceId(transaction);
} catch (ReadOnlyException e) {
//TODO : rethrow ? -pb
return 1;
}
if (nextDocId != DocumentImpl.UNKNOWN_DOCUMENT_ID)
return nextDocId;
nextDocId = 1;
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
Value key = new CollectionStore.CollectionKey(CollectionStore.NEXT_DOC_ID_KEY);
Value data = collectionsDb.get(key);
if (data != null) {
nextDocId = ByteConversion.byteToInt(data.getData(), 0);
++nextDocId;
if (nextDocId == 0x7FFFFFFF) {
pool.setReadOnly();
throw new EXistException("Max. number of document ids reached. Database is set to " +
"read-only state. Please do a complete backup/restore to compact the db and " +
"free document ids.");
}
}
byte[] d = new byte[4];
ByteConversion.intToByte(nextDocId, d, 0);
collectionsDb.put(transaction, key, d, true);
//} catch (ReadOnlyException e) {
//LOG.warn("Database is read-only");
//return DocumentImpl.UNKNOWN_DOCUMENT_ID;
//TODO : rethrow ? -pb
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
//TODO : rethrow ? -pb
} finally {
lock.release(Lock.WRITE_LOCK);
}
return nextDocId;
}
/**
* Reindex the nodes in the document. This method will either reindex all
* descendant nodes of the passed node, or all nodes below some level of
* the document if node is null.
*/
private void reindexXMLResource(Txn transaction, DocumentImpl doc, int mode) {
if(doc.isCollectionConfig())
doc.getCollection().setCollectionConfigEnabled(false);
indexController.setDocument(doc, StreamListener.STORE);
StreamListener listener = indexController.getStreamListener();
NodeList nodes = doc.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
scanNodes(transaction, iterator, node, new NodePath(), mode, listener);
}
flush();
if(doc.isCollectionConfig())
doc.getCollection().setCollectionConfigEnabled(true);
}
@Override
public void defragXMLResource(final Txn transaction, final DocumentImpl doc) {
//TODO : use dedicated function in XmldbURI
LOG.debug("============> Defragmenting document " +
doc.getCollection().getURI() + "/" + doc.getFileURI());
final long start = System.currentTimeMillis();
try {
final long firstChild = doc.getFirstChildAddress();
// dropping old structure index
dropIndex(transaction, doc);
// dropping dom index
NodeRef ref = new NodeRef(doc.getDocId());
final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
domDb.remove(transaction, idx, null);
domDb.flush();
} catch (BTreeException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (IOException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (TerminatedException e) {
LOG.warn("method terminated", e);
} catch (DBException e) {
LOG.warn("start() - " + "error while removing doc", e);
}
return null;
}
}.run();
// create a copy of the old doc to copy the nodes into it
DocumentImpl tempDoc = new DocumentImpl(pool, doc.getCollection(), doc.getFileURI());
tempDoc.copyOf(doc);
tempDoc.setDocId(doc.getDocId());
indexController.setDocument(doc, StreamListener.STORE);
StreamListener listener = indexController.getStreamListener();
// copy the nodes
NodeList nodes = doc.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
copyNodes(transaction, iterator, node, new NodePath(), tempDoc, true, true, listener);
}
flush();
// remove the old nodes
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
domDb.removeAll(transaction, firstChild);
try {
domDb.flush();
} catch (DBException e) {
LOG.warn("start() - " + "error while removing doc", e);
}
return null;
}
}.run();
doc.copyChildren(tempDoc);
doc.getMetadata().setSplitCount(0);
doc.getMetadata().setPageCount(tempDoc.getMetadata().getPageCount());
storeXMLResource(transaction, doc);
closeDocument();
LOG.debug("Defragmentation took " + (System.currentTimeMillis() - start) + "ms.");
} catch (ReadOnlyException e) {
LOG.warn(DATABASE_IS_READ_ONLY, e);
}
}
/** consistency Check of the database; useful after XUpdates;
* called if xupdate.consistency-checks is true in configuration */
@Override
public void checkXMLResourceConsistency(DocumentImpl doc) throws EXistException {
boolean xupdateConsistencyChecks = false;
Object property = pool.getConfiguration().getProperty(PROPERTY_XUPDATE_CONSISTENCY_CHECKS);
if (property != null)
xupdateConsistencyChecks = ((Boolean) property).booleanValue();
if(xupdateConsistencyChecks) {
LOG.debug("Checking document " + doc.getFileURI());
checkXMLResourceTree(doc);
}
}
/** consistency Check of the database; useful after XUpdates;
* called by {@link #checkXMLResourceConsistency(DocumentImpl)} */
@Override
public void checkXMLResourceTree(final DocumentImpl doc) {
LOG.debug("Checking DOM tree for document " + doc.getFileURI());
boolean xupdateConsistencyChecks = false;
Object property = pool.getConfiguration().getProperty(PROPERTY_XUPDATE_CONSISTENCY_CHECKS);
if (property != null)
xupdateConsistencyChecks = ((Boolean) property).booleanValue();
if(xupdateConsistencyChecks) {
new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() throws ReadOnlyException {
LOG.debug("Pages used: " + domDb.debugPages(doc, false));
return null;
}
}.run();
NodeList nodes = doc.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++) {
StoredNode node = (StoredNode) nodes.item(i);
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
StringBuilder buf = new StringBuilder();
//Pass buf to the following method to get a dump of all node ids in the document
if (!checkNodeTree(iterator, node, buf)) {
LOG.debug("node tree: " + buf.toString());
throw new RuntimeException("Error in document tree structure");
}
}
NodeRef ref = new NodeRef(doc.getDocId());
final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() {
try {
domDb.findKeys(idx);
} catch (BTreeException e) {
LOG.warn("start() - " + "error while removing doc", e);
} catch (IOException e) {
LOG.warn("start() - " + "error while removing doc", e);
}
return null;
}
}.run();
}
}
/**
* Store a node into the database. This method is called by the parser to
* write a node to the storage backend.
*
*@param node the node to be stored
*@param currentPath path expression which points to this node's
* element-parent or to itself if it is an element (currently used by
* the Broker to determine if a node's content should be
* fulltext-indexed). @param index switch to activate fulltext indexation
*/
@Override
public void storeNode(final Txn transaction, final StoredNode node, NodePath currentPath, IndexSpec indexSpec, boolean fullTextIndex) {
checkAvailableMemory();
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
final short nodeType = node.getNodeType();
final byte data[] = node.serialize();
new DOMTransaction(this, domDb, Lock.WRITE_LOCK, doc) {
@Override
public Object start() throws ReadOnlyException {
long address;
if (nodeType == Node.TEXT_NODE
|| nodeType == Node.ATTRIBUTE_NODE
|| nodeType == Node.CDATA_SECTION_NODE
|| node.getNodeId().getTreeLevel() > defaultIndexDepth)
address = domDb.add(transaction, data);
else {
address = domDb.put(transaction, new NodeRef(doc.getDocId(), node.getNodeId()), data);
}
if (address == BFile.UNKNOWN_ADDRESS)
LOG.warn("address is missing");
//TODO : how can we continue here ? -pb
node.setInternalAddress(address);
return null;
}
}.run();
++nodesCount;
ByteArrayPool.releaseByteArray(data);
nodeProcessor.reset(transaction, node, currentPath, indexSpec, fullTextIndex);
nodeProcessor.doIndex();
}
@Override
public void updateNode(final Txn transaction, final StoredNode node, boolean reindex) {
try {
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
final long internalAddress = node.getInternalAddress();
final byte[] data = node.serialize();
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() throws ReadOnlyException {
if (StorageAddress.hasAddress(internalAddress))
domDb.update(transaction, internalAddress, data);
else {
domDb.update(transaction, new NodeRef(doc.getDocId(), node.getNodeId()), data);
}
return null;
}
}.run();
ByteArrayPool.releaseByteArray(data);
} catch (Exception e) {
Value oldVal = domDb.get(node.getInternalAddress());
StoredNode old = StoredNode.deserialize(oldVal.data(),
oldVal.start(), oldVal.getLength(),
(DocumentImpl)node.getOwnerDocument(), false);
LOG.warn(
"Exception while storing "
+ node.getNodeName()
+ "; gid = "
+ node.getNodeId()
+ "; old = " + old.getNodeName(),
e);
}
}
/**
* Physically insert a node into the DOM storage.
*/
@Override
public void insertNodeAfter(final Txn transaction, final StoredNode previous, final StoredNode node) {
final byte data[] = node.serialize();
final DocumentImpl doc = (DocumentImpl)previous.getOwnerDocument();
new DOMTransaction(this, domDb, Lock.WRITE_LOCK, doc) {
@Override
public Object start() {
long address = previous.getInternalAddress();
if (address != BFile.UNKNOWN_ADDRESS) {
address = domDb.insertAfter(transaction, doc, address, data);
} else {
NodeRef ref = new NodeRef(doc.getDocId(), previous.getNodeId());
address = domDb.insertAfter(transaction, doc, ref, data);
}
node.setInternalAddress(address);
return null;
}
}.run();
}
private void copyNodes(Txn transaction, Iterator<StoredNode> iterator, StoredNode node,
NodePath currentPath, DocumentImpl newDoc, boolean defrag, boolean index,
StreamListener listener) {
copyNodes(transaction, iterator, node, currentPath, newDoc, defrag, index, listener, null);
}
private void copyNodes(Txn transaction, Iterator<StoredNode> iterator, StoredNode node,
NodePath currentPath, DocumentImpl newDoc, boolean defrag, boolean index,
StreamListener listener, NodeId oldNodeId) {
if (node.getNodeType() == Node.ELEMENT_NODE)
currentPath.addComponent(node.getQName());
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
final long oldAddress = node.getInternalAddress();
node.setOwnerDocument(newDoc);
node.setInternalAddress(BFile.UNKNOWN_ADDRESS);
storeNode(transaction, node, currentPath, null, index);
if (defrag && oldNodeId != null)
pool.getNotificationService().notifyMove(oldNodeId, node);
if (node.getNodeType() == Node.ELEMENT_NODE) {
//save old value, whatever it is
long address = node.getInternalAddress();
node.setInternalAddress(oldAddress);
endElement(node, currentPath, null);
//restore old value, whatever it was
node.setInternalAddress(address);
node.setDirty(false);
}
if (node.getNodeId().getTreeLevel() == 1)
newDoc.appendChild(node);
node.setOwnerDocument(doc);
if (listener != null) {
switch (node.getNodeType()) {
case Node.TEXT_NODE :
listener.characters(transaction, (TextImpl) node, currentPath);
break;
case Node.ELEMENT_NODE :
listener.startElement(transaction, (ElementImpl) node, currentPath);
break;
case Node.ATTRIBUTE_NODE :
listener.attribute(transaction, (AttrImpl) node, currentPath);
break;
case Node.COMMENT_NODE :
case Node.PROCESSING_INSTRUCTION_NODE :
break;
default :
LOG.debug("Unhandled node type: " + node.getNodeType());
}
}
if (node.hasChildNodes()) {
int count = node.getChildCount();
NodeId nodeId = node.getNodeId();
for (int i = 0; i < count; i++) {
StoredNode child = iterator.next();
oldNodeId = child.getNodeId();
if (defrag) {
if (i == 0)
nodeId = nodeId.newChild();
else
nodeId = nodeId.nextSibling();
child.setNodeId(nodeId);
}
copyNodes(transaction, iterator, child, currentPath, newDoc, defrag, index, listener, oldNodeId);
}
}
if(node.getNodeType() == Node.ELEMENT_NODE) {
if (listener != null)
listener.endElement(transaction, (ElementImpl) node, currentPath);
currentPath.removeLastComponent();
}
}
/** Removes the Node Reference from the database.
* The index will be updated later, i.e. after all nodes have been physically
* removed. See {@link #endRemove(org.exist.storage.txn.Txn)}.
* removeNode() just adds the node ids to the list in elementIndex
* for later removal.
*/
@Override
public void removeNode(final Txn transaction, final StoredNode node, NodePath currentPath,
String content) {
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
new DOMTransaction(this, domDb, Lock.WRITE_LOCK, doc) {
@Override
public Object start() {
final long address = node.getInternalAddress();
if (StorageAddress.hasAddress(address))
domDb.remove(transaction, new NodeRef(doc.getDocId(), node.getNodeId()), address);
else
domDb.remove(transaction, new NodeRef(doc.getDocId(), node.getNodeId()));
return null;
}
}.run();
notifyRemoveNode(node, currentPath, content);
NodeProxy p = new NodeProxy(node);
QName qname;
switch (node.getNodeType()) {
case Node.ELEMENT_NODE :
qname = node.getQName();
qname.setNameType(ElementValue.ELEMENT);
GeneralRangeIndexSpec spec1 = doc.getCollection().getIndexByPathConfiguration(this, currentPath);
if(spec1 != null) {
valueIndex.setDocument(doc);
valueIndex.storeElement((ElementImpl) node, content, spec1.getType(), NativeValueIndex.IDX_GENERIC, false);
}
QNameRangeIndexSpec qnSpec = doc.getCollection().getIndexByQNameConfiguration(this, qname);
if (qnSpec != null) {
valueIndex.setDocument(doc);
valueIndex.storeElement((ElementImpl) node, content, qnSpec.getType(),
NativeValueIndex.IDX_QNAME, false);
}
break;
case Node.ATTRIBUTE_NODE :
qname = node.getQName();
qname.setNameType(ElementValue.ATTRIBUTE);
currentPath.addComponent(qname);
//Strange : does it mean that the node is added 2 times under 2 different identities ?
AttrImpl attr;
attr = (AttrImpl) node;
switch(attr.getType()) {
case AttrImpl.ID:
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, attr.getValue(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.ID, NativeValueIndex.IDX_GENERIC, false);
break;
case AttrImpl.IDREF:
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, attr.getValue(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.IDREF, NativeValueIndex.IDX_GENERIC, false);
break;
case AttrImpl.IDREFS:
valueIndex.setDocument(doc);
StringTokenizer tokenizer = new StringTokenizer(attr.getValue(), " ");
while (tokenizer.hasMoreTokens()) {
valueIndex.storeAttribute(attr, tokenizer.nextToken(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.IDREF, NativeValueIndex.IDX_GENERIC, false);
}
break;
default:
// do nothing special
}
RangeIndexSpec spec2 = doc.getCollection().getIndexByPathConfiguration(this, currentPath);
if(spec2 != null) {
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, null, NativeValueIndex.WITHOUT_PATH, spec2, false);
}
qnSpec = doc.getCollection().getIndexByQNameConfiguration(this, qname);
if (qnSpec != null) {
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, null, NativeValueIndex.WITHOUT_PATH, qnSpec, false);
}
currentPath.removeLastComponent();
break;
case Node.TEXT_NODE :
break;
}
}
@Override
public void removeAllNodes(Txn transaction, StoredNode node, NodePath currentPath,
StreamListener listener) {
Iterator<StoredNode> iterator = getNodeIterator(node);
iterator.next();
Stack<RemovedNode> stack = new Stack<RemovedNode>();
collectNodesForRemoval(transaction, stack, iterator, listener, node, currentPath);
while (!stack.isEmpty()) {
RemovedNode next = stack.pop();
removeNode(transaction, next.node, next.path, next.content);
}
}
private void collectNodesForRemoval(Txn transaction, Stack<RemovedNode> stack,
Iterator<StoredNode> iterator, StreamListener listener, StoredNode node, NodePath currentPath) {
RemovedNode removed;
switch (node.getNodeType()) {
case Node.ELEMENT_NODE:
DocumentImpl doc = node.getDocument();
String content = null;
GeneralRangeIndexSpec spec = doc.getCollection().getIndexByPathConfiguration(this, currentPath);
if (spec != null) {
content = getNodeValue(node, false);
} else {
QNameRangeIndexSpec qnIdx = doc.getCollection().getIndexByQNameConfiguration(this, node.getQName());
if (qnIdx != null) {
content = getNodeValue(node, false);
}
}
removed = new RemovedNode(node, new NodePath(currentPath), content);
stack.push(removed);
if (listener != null) {
listener.startElement(transaction, (ElementImpl) node, currentPath);
}
if (node.hasChildNodes()) {
int childCount = node.getChildCount();
for (int i = 0; i < childCount; i++) {
StoredNode child = iterator.next();
if (child.getNodeType() == Node.ELEMENT_NODE)
currentPath.addComponent(child.getQName());
collectNodesForRemoval(transaction, stack, iterator, listener, child, currentPath);
if (child.getNodeType() == Node.ELEMENT_NODE)
currentPath.removeLastComponent();
}
}
if (listener != null) {
listener.endElement(transaction, (ElementImpl) node, currentPath);
}
break;
case Node.TEXT_NODE :
if (listener != null) {
listener.characters(transaction, (TextImpl) node, currentPath);
}
break;
case Node.ATTRIBUTE_NODE :
if (listener != null) {
listener.attribute(transaction, (AttrImpl) node, currentPath);
}
break;
}
if (node.getNodeType() != Node.ELEMENT_NODE) {
removed = new RemovedNode(node, new NodePath(currentPath), null);
stack.push(removed);
}
}
/**
* Index a single node, which has been added through an XUpdate
* operation. This method is only called if inserting the node is possible
* without changing the node identifiers of sibling or parent nodes. In other
* cases, reindex will be called.
*/
@Override
public void indexNode(Txn transaction, StoredNode node, NodePath currentPath) {
indexNode(transaction, node, currentPath, NodeProcessor.MODE_STORE);
}
public void indexNode(final Txn transaction, final StoredNode node, NodePath currentPath, int repairMode) {
nodeProcessor.reset(transaction, node, currentPath, null, true);
nodeProcessor.setMode(repairMode);
nodeProcessor.index();
}
private boolean checkNodeTree(Iterator<StoredNode> iterator, StoredNode node, StringBuilder buf) {
if (buf != null) {
if (buf.length() > 0)
buf.append(", ");
buf.append(node.getNodeId());
}
boolean docIsValid = true;
if (node.hasChildNodes()) {
int count = node.getChildCount();
if (buf != null)
buf.append('[').append(count).append(']');
StoredNode previous = null;
for (int i = 0; i < count; i++) {
StoredNode child = iterator.next();
if (i > 0 && !(child.getNodeId().isSiblingOf(previous.getNodeId()) &&
child.getNodeId().compareTo(previous.getNodeId()) > 0)) {
LOG.fatal("node " + child.getNodeId() + " cannot be a sibling of " + previous.getNodeId() +
"; node read from " + StorageAddress.toString(child.getInternalAddress()));
docIsValid = false;
}
previous = child;
if(child == null) {
LOG.fatal("child " + i + " not found for node: " + node.getNodeName() +
": " + node.getNodeId() + "; children = " + node.getChildCount());
docIsValid = false;
//TODO : emergency exit ?
}
NodeId parentId = child.getNodeId().getParentId();
if (!parentId.equals(node.getNodeId())) {
LOG.fatal(child.getNodeId() + " is not a child of " + node.getNodeId());
docIsValid = false;
}
boolean check = checkNodeTree(iterator, child, buf);
if (docIsValid)
docIsValid = check;
}
}
return docIsValid;
}
/**
* Called by reindex to walk through all nodes in the tree and reindex them
* if necessary.
*
* @param iterator
* @param node
* @param currentPath
*/
private void scanNodes(Txn transaction, Iterator<StoredNode> iterator, StoredNode node,
NodePath currentPath, int mode, StreamListener listener) {
if (node.getNodeType() == Node.ELEMENT_NODE)
currentPath.addComponent(node.getQName());
indexNode(transaction, node, currentPath, mode);
if (listener != null) {
switch (node.getNodeType()) {
case Node.TEXT_NODE :
case Node.CDATA_SECTION_NODE :
listener.characters(transaction, (CharacterDataImpl) node, currentPath);
break;
case Node.ELEMENT_NODE :
listener.startElement(transaction, (ElementImpl) node, currentPath);
break;
case Node.ATTRIBUTE_NODE :
listener.attribute(transaction, (AttrImpl) node, currentPath);
break;
case Node.COMMENT_NODE :
case Node.PROCESSING_INSTRUCTION_NODE :
break;
default :
LOG.debug("Unhandled node type: " + node.getNodeType());
}
}
if (node.hasChildNodes()) {
final int count = node.getChildCount();
for (int i = 0; i < count; i++) {
StoredNode child = iterator.next();
if (child == null) {
LOG.fatal("child " + i + " not found for node: " + node.getNodeName() +
"; children = " + node.getChildCount());
throw new IllegalStateException("Wrong node id");
}
scanNodes(transaction, iterator, child, currentPath, mode, listener);
}
}
if (node.getNodeType() == Node.ELEMENT_NODE) {
endElement(node, currentPath, null, mode == NodeProcessor.MODE_REMOVE);
if (listener != null)
listener.endElement(transaction, (ElementImpl) node, currentPath);
currentPath.removeLastComponent();
}
}
@Override
public String getNodeValue(final StoredNode node, final boolean addWhitespace) {
return (String) new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() {
return domDb.getNodeValue(NativeBroker.this, node, addWhitespace);
}
}.run();
}
@Override
public StoredNode objectWith(final Document doc, final NodeId nodeId) {
return (StoredNode) new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() {
Value val = domDb.get(NativeBroker.this, new NodeProxy((DocumentImpl) doc, nodeId));
if (val == null) {
if (LOG.isDebugEnabled())
LOG.debug("Node " + nodeId + " not found. This is usually not an error.");
return null;
}
StoredNode node = StoredNode.deserialize(val.getData(), 0, val.getLength(), (DocumentImpl) doc);
node.setOwnerDocument((DocumentImpl)doc);
node.setInternalAddress(val.getAddress());
return node;
}
}.run();
}
@Override
public StoredNode objectWith(final NodeProxy p) {
if (!StorageAddress.hasAddress(p.getInternalAddress()))
return objectWith(p.getDocument(), p.getNodeId());
return (StoredNode) new DOMTransaction(this, domDb, Lock.READ_LOCK) {
@Override
public Object start() {
// DocumentImpl sets the nodeId to DOCUMENT_NODE when it's trying to find its top-level
// children (for which it doesn't persist the actual node ids), so ignore that. Nobody else
// should be passing DOCUMENT_NODE into here.
boolean fakeNodeId = p.getNodeId().equals(NodeId.DOCUMENT_NODE);
Value val = domDb.get(p.getInternalAddress(), false);
if (val == null) {
LOG.debug("Node " + p.getNodeId() + " not found in document " + p.getDocument().getURI() +
"; docId = " + p.getDocument().getDocId() + ": " + StorageAddress.toString(p.getInternalAddress()));
if (fakeNodeId)
return null;
} else {
StoredNode node = StoredNode.deserialize(val.getData(), 0, val.getLength(), p.getDocument());
node.setOwnerDocument((DocumentImpl)p.getOwnerDocument());
node.setInternalAddress(p.getInternalAddress());
if (fakeNodeId) return node;
if (p.getDocument().getDocId() == node.getDocId() &&
p.getNodeId().equals(node.getNodeId())) {
return node;
}
LOG.debug(
"Node " + p.getNodeId() + " not found in document " + p.getDocument().getURI() +
"; docId = " + p.getDocument().getDocId() + ": " + StorageAddress.toString(p.getInternalAddress()) +
"; found node " + node.getNodeId() + " instead"
);
}
// retry based on nodeid
StoredNode node = objectWith(p.getDocument(), p.getNodeId());
if (node != null) p.setInternalAddress(node.getInternalAddress()); // update proxy with correct address
return node;
}
}.run();
}
@Override
public void repair() throws PermissionDeniedException {
if (pool.isReadOnly())
throw new PermissionDeniedException(DATABASE_IS_READ_ONLY);
LOG.info("Removing index files ...");
notifyCloseAndRemove();
try {
pool.getIndexManager().removeIndexes();
} catch (DBException e) {
LOG.warn("Failed to remove index failes during repair: " + e.getMessage(), e);
}
LOG.info("Recreating index files ...");
try {
valueIndex = new NativeValueIndex(this, VALUES_DBX_ID, dataDir, config);
} catch (DBException e) {
LOG.warn("Exception during repair: " + e.getMessage(), e);
}
try {
pool.getIndexManager().reopenIndexes();
} catch (DatabaseConfigurationException e) {
LOG.warn("Failed to reopen index files after repair: " + e.getMessage(), e);
}
initIndexModules();
LOG.info("Reindexing database files ...");
//Reindex from root collection
reindexCollection(null, getCollection(XmldbURI.ROOT_COLLECTION_URI), NodeProcessor.MODE_REPAIR);
}
@Override
public void flush() {
notifyFlush();
try {
pool.getSymbols().flush();
} catch (EXistException e) {
LOG.warn(e);
}
indexController.flush();
nodesCount = 0;
}
@Override
public void sync(int syncEvent) {
if (isReadOnly())
return;
try {
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
try {
domDb.flush();
} catch (DBException e) {
LOG.warn("error while flushing dom.dbx", e);
}
return null;
}
}.run();
if(syncEvent == Sync.MAJOR_SYNC) {
Lock lock = collectionsDb.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
collectionsDb.flush();
} catch (LockException e) {
LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName(), e);
} finally {
lock.release(Lock.WRITE_LOCK);
}
notifySync();
pool.getIndexManager().sync();
NumberFormat nf = NumberFormat.getNumberInstance();
LOGSTATS.info("Memory: " + nf.format(run.totalMemory() / 1024) + "K total; " +
nf.format(run.maxMemory() / 1024) + "K max; " +
nf.format(run.freeMemory() / 1024) + "K free");
domDb.printStatistics();
collectionsDb.printStatistics();
notifyPrintStatistics();
}
} catch (DBException dbe) {
dbe.printStackTrace();
LOG.warn(dbe);
}
}
@Override
public void shutdown() {
try {
flush();
sync(Sync.MAJOR_SYNC);
domDb.close();
collectionsDb.close();
notifyClose();
} catch (Exception e) {
LOG.warn(e.getMessage(), e);
}
super.shutdown();
}
/** check available memory */
@Override
public void checkAvailableMemory() {
if (nodesCountThreshold <= 0) {
if (nodesCount > DEFAULT_NODES_BEFORE_MEMORY_CHECK) {
if (run.totalMemory() >= run.maxMemory() && run.freeMemory() < pool.getReservedMem()) {
NumberFormat nf = NumberFormat.getNumberInstance();
LOG.info("total memory: " + nf.format(run.totalMemory()) +
"; max: " + nf.format(run.maxMemory()) +
"; free: " + nf.format(run.freeMemory()) +
"; reserved: " + nf.format(pool.getReservedMem()) +
"; used: " + nf.format(pool.getCacheManager().getSizeInBytes()));
flush();
System.gc();
}
nodesCount = 0;
}
} else if (nodesCount > nodesCountThreshold) {
flush();
nodesCount = 0;
}
}
//TOUNDERSTAND : why not use shutdown ? -pb
@Override
public void closeDocument() {
new DOMTransaction(this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() {
domDb.closeDocument();
return null;
}
}.run();
}
public final static class NodeRef extends Value {
public static int OFFSET_DOCUMENT_ID = 0;
public static int OFFSET_NODE_ID = OFFSET_DOCUMENT_ID + DocumentImpl.LENGTH_DOCUMENT_ID;
public NodeRef(int docId) {
len = DocumentImpl.LENGTH_DOCUMENT_ID;
data = new byte[len];
ByteConversion.intToByte(docId, data, OFFSET_DOCUMENT_ID);
pos = OFFSET_DOCUMENT_ID;
}
public NodeRef(int docId, NodeId nodeId) {
len = DocumentImpl.LENGTH_DOCUMENT_ID + nodeId.size();
data = new byte[len];
ByteConversion.intToByte(docId, data, OFFSET_DOCUMENT_ID);
nodeId.serialize(data, OFFSET_NODE_ID);
pos = OFFSET_DOCUMENT_ID;
}
int getDocId() {
return ByteConversion.byteToInt(data, OFFSET_DOCUMENT_ID);
}
}
private final static class RemovedNode {
StoredNode node;
String content;
NodePath path;
RemovedNode(StoredNode node, NodePath path, String content) {
this.node = node;
this.path = path;
this.content = content;
}
}
/** Delegate for Node Processings : indexing */
private class NodeProcessor {
final static int MODE_STORE = 0;
final static int MODE_REPAIR = 1;
final static int MODE_REMOVE = 2;
private Txn transaction;
private StoredNode node;
private NodePath currentPath;
/** work variables */
private DocumentImpl doc;
private long address;
private IndexSpec idxSpec;
//private FulltextIndexSpec ftIdx;
private int level;
private int mode = MODE_STORE;
/** overall switch to activate fulltext indexation */
private boolean fullTextIndex = true;
NodeProcessor() {
//ignore
}
public void reset(Txn transaction, StoredNode node, NodePath currentPath, IndexSpec indexSpec, boolean fullTextIndex) {
if (node.getNodeId() == null)
LOG.warn("illegal node: " + node.getNodeName());
//TODO : why continue processing ? return ? -pb
this.transaction = transaction;
this.node = node;
this.currentPath = currentPath;
this.mode = MODE_STORE;
doc = (DocumentImpl) node.getOwnerDocument();
address = node.getInternalAddress();
if (indexSpec == null)
indexSpec = doc.getCollection().getIndexConfiguration(NativeBroker.this);
idxSpec = indexSpec;
//ftIdx = idxSpec == null ? null : idxSpec.getFulltextIndexSpec();
level = node.getNodeId().getTreeLevel();
this.fullTextIndex = fullTextIndex;
}
public void setMode(int mode) {
this.mode = mode;
}
/** Updates the various indices */
public void doIndex() {
//TODO : resolve URI !
final boolean isTemp = XmldbURI.TEMP_COLLECTION_URI.equalsInternal(((DocumentImpl)node.getOwnerDocument()).getCollection().getURI());
int indexType;
switch (node.getNodeType()) {
case Node.ELEMENT_NODE :
//Compute index type
//TODO : let indexers OR it themselves
//we'd need to notify the ElementIndexer at the very end then...
indexType = RangeIndexSpec.NO_INDEX;
if (idxSpec != null && idxSpec.getIndexByPath(currentPath) != null) {
indexType |= idxSpec.getIndexByPath(currentPath).getIndexType();
}
if (idxSpec != null) {
QNameRangeIndexSpec qnIdx = idxSpec.getIndexByQName(node.getQName());
if (qnIdx != null) {
indexType |= RangeIndexSpec.QNAME_INDEX;
if (!RangeIndexSpec.hasRangeIndex(indexType))
indexType |= qnIdx.getIndexType();
}
}
((ElementImpl) node).setIndexType(indexType);
//notifyStartElement((ElementImpl)node, currentPath, fullTextIndex);
break;
case Node.ATTRIBUTE_NODE :
QName qname = node.getQName();
if (currentPath != null)
currentPath.addComponent(qname);
//Compute index type
//TODO : let indexers OR it themselves
//we'd need to notify the ElementIndexer at the very end then...
indexType = RangeIndexSpec.NO_INDEX;
if (idxSpec != null) {
RangeIndexSpec rangeSpec = idxSpec.getIndexByPath(currentPath);
if (rangeSpec != null) {
indexType |= rangeSpec.getIndexType();
}
if (rangeSpec != null) {
valueIndex.setDocument((DocumentImpl)node.getOwnerDocument());
//Oh dear : is it the right semantics then ?
valueIndex.storeAttribute((AttrImpl) node, currentPath,
NativeValueIndex.WITHOUT_PATH,
rangeSpec, mode == MODE_REMOVE);
}
QNameRangeIndexSpec qnIdx = idxSpec.getIndexByQName(node.getQName());
if (qnIdx != null) {
indexType |= RangeIndexSpec.QNAME_INDEX;
if (!RangeIndexSpec.hasRangeIndex(indexType))
indexType |= qnIdx.getIndexType();
valueIndex.setDocument((DocumentImpl)node.getOwnerDocument());
//Oh dear : is it the right semantics then ?
valueIndex.storeAttribute((AttrImpl) node, currentPath, NativeValueIndex.WITHOUT_PATH,
qnIdx, mode == MODE_REMOVE);
}
}
final NodeProxy tempProxy = new NodeProxy(doc, node.getNodeId(), address);
tempProxy.setIndexType(indexType);
qname.setNameType(ElementValue.ATTRIBUTE);
AttrImpl attr = (AttrImpl) node;
attr.setIndexType(indexType);
switch(attr.getType()) {
case AttrImpl.ID:
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, attr.getValue(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.ID, NativeValueIndex.IDX_GENERIC, mode == MODE_REMOVE);
break;
case AttrImpl.IDREF:
valueIndex.setDocument(doc);
valueIndex.storeAttribute(attr, attr.getValue(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.IDREF, NativeValueIndex.IDX_GENERIC, mode == MODE_REMOVE);
break;
case AttrImpl.IDREFS:
valueIndex.setDocument(doc);
StringTokenizer tokenizer = new StringTokenizer(attr.getValue(), " ");
while (tokenizer.hasMoreTokens()) {
valueIndex.storeAttribute(attr, tokenizer.nextToken(), currentPath, NativeValueIndex.WITHOUT_PATH, Type.IDREF, NativeValueIndex.IDX_GENERIC, mode == MODE_REMOVE);
}
break;
default:
// do nothing special
}
if (currentPath != null)
currentPath.removeLastComponent();
break;
case Node.TEXT_NODE:
notifyStoreText( (TextImpl)node, currentPath,
fullTextIndex ? NativeTextEngine.DO_NOT_TOKENIZE : NativeTextEngine.TOKENIZE);
break;
}
}
/** Stores this node into the database, if it's an element */
public void store() {
final DocumentImpl doc = (DocumentImpl)node.getOwnerDocument();
if (mode == MODE_STORE && node.getNodeType() == Node.ELEMENT_NODE && level <= defaultIndexDepth) {
//TODO : used to be this, but NativeBroker.this avoids an owner change
new DOMTransaction(NativeBroker.this, domDb, Lock.WRITE_LOCK) {
@Override
public Object start() throws ReadOnlyException {
try {
domDb.addValue(transaction, new NodeRef(doc.getDocId(), node.getNodeId()), address);
} catch (BTreeException e) {
LOG.warn(EXCEPTION_DURING_REINDEX, e);
} catch (IOException e) {
LOG.warn(EXCEPTION_DURING_REINDEX, e);
}
return null;
}
}.run();
}
}
/** check available memory */
private void checkAvailableMemory() {
if (mode != MODE_REMOVE && nodesCount > DEFAULT_NODES_BEFORE_MEMORY_CHECK) {
if (run.totalMemory() >= run.maxMemory() && run.freeMemory() < pool.getReservedMem()) {
//LOG.info("total memory: " + run.totalMemory() + "; free: " + run.freeMemory());
flush();
System.gc();
LOG.info("total memory: " + run.totalMemory() + "; free: " + run.freeMemory());
}
nodesCount = 0;
}
}
/** Updates the various indices and stores this node into the database */
public void index() {
++nodesCount;
checkAvailableMemory();
doIndex();
store();
}
}
private final class DocumentCallback implements BTreeCallback {
private final Collection.InternalAccess collectionInternalAccess;
private DocumentCallback(final Collection.InternalAccess collectionInternalAccess) {
this.collectionInternalAccess = collectionInternalAccess;
}
@Override
public boolean indexInfo(final Value key, final long pointer) throws TerminatedException {
try {
final byte type = key.data()[key.start() + Collection.LENGTH_COLLECTION_ID + DocumentImpl.LENGTH_DOCUMENT_TYPE];
final VariableByteInput istream = collectionsDb.getAsStream(pointer);
final DocumentImpl doc;
if (type == DocumentImpl.BINARY_FILE) {
doc = new BinaryDocument(pool);
} else {
doc = new DocumentImpl(pool);
}
doc.read(istream);
collectionInternalAccess.addDocument(doc);
} catch (EOFException e) {
LOG.error("EOFException while reading document data", e);
} catch (IOException e) {
LOG.error("IOException while reading document data", e);
} catch(EXistException ee) {
LOG.error("EXistException while reading document data", ee);
}
return true;
}
}
}
| [ignore] typos
svn path=/trunk/eXist/; revision=17839
| src/org/exist/storage/NativeBroker.java | [ignore] typos |
|
Java | lgpl-2.1 | 2933315082218ce1b42de50f58bc1a2964ccf6dc | 0 | RockManJoe64/swingx,RockManJoe64/swingx | /*
* $Id$
*
* Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*/
package org.jdesktop.swingx.util;
import java.awt.Component;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.GridBagConstraints;
import java.awt.Insets;
import java.awt.MouseInfo;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Window;
import javax.swing.JComponent;
import javax.swing.JDesktopPane;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JInternalFrame;
import javax.swing.RootPaneContainer;
import org.jdesktop.swingx.util.Spatial;
/**
* Encapsulates various utilities for windows (ie: <code>Frame</code> and
* <code>Dialog</code> objects and descendants, in particular).
* @author Richard Bair
*/
public final class WindowUtils {
/**
* Hide the constructor - don't wan't anybody creating an instance of this
*/
private WindowUtils() {
}
/**
* <p>
* Returns the <code>Point</code> at which a window should be placed to
* center that window on the screen.
* </p>
* <p>
* Some thought was taken as to whether to implement a method such as this,
* or to simply make a method that, given a window, will center it. It was
* decided that it is better to not alter an object within a method.
* </p>
* @param window The window to calculate the center point for. This object
* can not be null.
* @return the <code>Point</code> at which the window should be placed to
* center that window on the screen.
*/
public static Point getPointForCentering(Window window) {
//assert window != null;
try {
Point mousePoint = MouseInfo.getPointerInfo().getLocation();
GraphicsDevice[] devices = GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices();
for (GraphicsDevice device : devices) {
Rectangle bounds = device.getDefaultConfiguration().getBounds();
//check to see if the mouse cursor is within these bounds
if (mousePoint.x >= bounds.x && mousePoint.y >= bounds.y
&& mousePoint.x <= (bounds.x + bounds.width)
&& mousePoint.y <= (bounds.y + bounds.height)) {
//this is it
int screenWidth = bounds.width;
int screenHeight = bounds.height;
int width = window.getWidth();
int height = window.getHeight();
Point p = new Point(((screenWidth - width) / 2) + bounds.x, ((screenHeight - height) / 2) + bounds.y);
return p;
}
}
} catch (Exception e) {
//this can occur do to a Security exception in sandboxed apps
e.printStackTrace();
}
return new Point(0,0);
}
/**
* <p>
* Returns the <code>Point</code> at which a window should be placed to
* center that window on the given desktop.
* </p>
* <p>
* Some thought was taken as to whether to implement a method such as this,
* or to simply make a method that, given a window, will center it. It was
* decided that it is better to not alter an object within a method.
* </p>
* @param window The window (JInternalFrame) to calculate the center point
* for. This object can not be null.
* @param desktop The JDesktopPane that houses this window.
* @return the <code>Point</code> at which the window should be placed to
* center that window on the given desktop
*/
public static Point getPointForCentering(JInternalFrame window, JDesktopPane desktop) {
try {
//assert window != null;
Point mousePoint = MouseInfo.getPointerInfo().getLocation();
GraphicsDevice[] devices = GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices();
for (GraphicsDevice device : devices) {
Rectangle bounds = device.getDefaultConfiguration().getBounds();
//check to see if the mouse cursor is within these bounds
if (mousePoint.x >= bounds.x && mousePoint.y >= bounds.y
&& mousePoint.x <= (bounds.x + bounds.width)
&& mousePoint.y <= (bounds.y + bounds.height)) {
//this is it
int screenWidth = bounds.width;
int screenHeight = bounds.height;
int width = window.getWidth();
int height = window.getHeight();
Point p = new Point(((screenWidth - width) / 2) + bounds.x, ((screenHeight - height) / 2) + bounds.y);
return p;
}
}
} catch (Exception e) {
//this can occur do to a Security exception in sandboxed apps
e.printStackTrace();
}
return new Point(0,0);
}
/**
* Utility method used to load a GridBagConstraints object (param gbc) with the
* data in the other parameters. This method saves code space over doing the
* assignments by hand, and also allows you to reuse the same GridBagConstraints
* object reducing temporary object creating (at the expense of a method call.
* Go figure).
*/
public static void setConstraints(GridBagConstraints gbc, int gridx, int gridy, int gridwidth, int gridheight,
double weightx, double weighty, int anchor, int fill, int top, int left, int bottom, int right) {
gbc.gridx = gridx;
gbc.gridy = gridy;
gbc.gridwidth = gridwidth;
gbc.gridheight = gridheight;
gbc.weightx = weightx;
gbc.weighty = weighty;
gbc.anchor = anchor;
gbc.fill = fill;
gbc.insets = new Insets(top, left, bottom, right);
}
/**
* Get a <code>Spatial</code> object representing the given window's position and
* magnitude in space.
* @param win The window to get a Spatial object for
* @return a Spatial object. @see com.jgui.Spatial
*/
public static Spatial getSpatial(Window win) {
Spatial spatial = new Spatial(win.getY(), win.getX(), win.getWidth(), win.getHeight());
return spatial;
}
/**
* Get a <code>Spatial</code> object representing the given JComponent's position and
* magnitude in space.
* @param comp The JComponent to get a Spatial object for
* @return a Spatial object. @see com.jgui.Spatial
*/
public static Spatial getSpatial(JComponent comp) {
Spatial spatial = new Spatial(comp.getY(), comp.getX(), comp.getWidth(), comp.getHeight());
return spatial;
}
/**
* Locates the RootPaneContainer for the given component
* @param c
* @return
*/
public static RootPaneContainer findRootPaneContainer(Component c) {
if (c == null) {
return null;
} else if (c instanceof RootPaneContainer) {
return (RootPaneContainer)c;
} else {
return findRootPaneContainer(c.getParent());
}
}
/**
* Locates the JFrame for the given component
* @param c
* @return
*/
public static JFrame findJFrame(Component c) {
if (c == null) {
return null;
} else if (c instanceof RootPaneContainer) {
return (JFrame)c;
} else {
return findJFrame(c.getParent());
}
}
/**
* Locates the JDialog for the given component
* @param c
* @return
*/
public static JDialog findJDialog(Component c) {
if (c == null) {
return null;
} else if (c instanceof JDialog) {
return (JDialog)c;
} else {
return findJDialog(c.getParent());
}
}
}
| src/java/org/jdesktop/swingx/util/WindowUtils.java | /*
* $Id$
*
* Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*/
package org.jdesktop.swingx.util;
import java.awt.Component;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.GridBagConstraints;
import java.awt.Insets;
import java.awt.MouseInfo;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Window;
import javax.swing.JComponent;
import javax.swing.JDesktopPane;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JInternalFrame;
import javax.swing.RootPaneContainer;
import org.jdesktop.swingx.util.Spatial;
/**
* Encapsulates various utilities for windows (ie: <code>Frame</code> and
* <code>Dialog</code> objects and descendants, in particular).
* @author Richard Bair
*/
public final class WindowUtils {
/**
* Hide the constructor - don't wan't anybody creating an instance of this
*/
private WindowUtils() {
}
/**
* <p>
* Returns the <code>Point</code> at which a window should be placed to
* center that window on the screen.
* </p>
* <p>
* Some thought was taken as to whether to implement a method such as this,
* or to simply make a method that, given a window, will center it. It was
* decided that it is better to not alter an object within a method.
* </p>
* @param window The window to calculate the center point for. This object
* can not be null.
* @return the <code>Point</code> at which the window should be placed to
* center that window on the screen.
*/
public static Point getPointForCentering(Window window) {
//assert window != null;
Point mousePoint = MouseInfo.getPointerInfo().getLocation();
GraphicsDevice[] devices = GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices();
for (GraphicsDevice device : devices) {
Rectangle bounds = device.getDefaultConfiguration().getBounds();
//check to see if the mouse cursor is within these bounds
if (mousePoint.x >= bounds.x && mousePoint.y >= bounds.y
&& mousePoint.x <= (bounds.x + bounds.width)
&& mousePoint.y <= (bounds.y + bounds.height)) {
//this is it
int screenWidth = bounds.width;
int screenHeight = bounds.height;
int width = window.getWidth();
int height = window.getHeight();
Point p = new Point(((screenWidth - width) / 2) + bounds.x, ((screenHeight - height) / 2) + bounds.y);
return p;
}
}
return new Point(0,0);
}
/**
* <p>
* Returns the <code>Point</code> at which a window should be placed to
* center that window on the given desktop.
* </p>
* <p>
* Some thought was taken as to whether to implement a method such as this,
* or to simply make a method that, given a window, will center it. It was
* decided that it is better to not alter an object within a method.
* </p>
* @param window The window (JInternalFrame) to calculate the center point
* for. This object can not be null.
* @param desktop The JDesktopPane that houses this window.
* @return the <code>Point</code> at which the window should be placed to
* center that window on the given desktop
*/
public static Point getPointForCentering(JInternalFrame window, JDesktopPane desktop) {
//assert window != null;
Point mousePoint = MouseInfo.getPointerInfo().getLocation();
GraphicsDevice[] devices = GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices();
for (GraphicsDevice device : devices) {
Rectangle bounds = device.getDefaultConfiguration().getBounds();
//check to see if the mouse cursor is within these bounds
if (mousePoint.x >= bounds.x && mousePoint.y >= bounds.y
&& mousePoint.x <= (bounds.x + bounds.width)
&& mousePoint.y <= (bounds.y + bounds.height)) {
//this is it
int screenWidth = bounds.width;
int screenHeight = bounds.height;
int width = window.getWidth();
int height = window.getHeight();
Point p = new Point(((screenWidth - width) / 2) + bounds.x, ((screenHeight - height) / 2) + bounds.y);
return p;
}
}
return new Point(0,0);
}
/**
* Utility method used to load a GridBagConstraints object (param gbc) with the
* data in the other parameters. This method saves code space over doing the
* assignments by hand, and also allows you to reuse the same GridBagConstraints
* object reducing temporary object creating (at the expense of a method call.
* Go figure).
*/
public static void setConstraints(GridBagConstraints gbc, int gridx, int gridy, int gridwidth, int gridheight,
double weightx, double weighty, int anchor, int fill, int top, int left, int bottom, int right) {
gbc.gridx = gridx;
gbc.gridy = gridy;
gbc.gridwidth = gridwidth;
gbc.gridheight = gridheight;
gbc.weightx = weightx;
gbc.weighty = weighty;
gbc.anchor = anchor;
gbc.fill = fill;
gbc.insets = new Insets(top, left, bottom, right);
}
/**
* Get a <code>Spatial</code> object representing the given window's position and
* magnitude in space.
* @param win The window to get a Spatial object for
* @return a Spatial object. @see com.jgui.Spatial
*/
public static Spatial getSpatial(Window win) {
Spatial spatial = new Spatial(win.getY(), win.getX(), win.getWidth(), win.getHeight());
return spatial;
}
/**
* Get a <code>Spatial</code> object representing the given JComponent's position and
* magnitude in space.
* @param comp The JComponent to get a Spatial object for
* @return a Spatial object. @see com.jgui.Spatial
*/
public static Spatial getSpatial(JComponent comp) {
Spatial spatial = new Spatial(comp.getY(), comp.getX(), comp.getWidth(), comp.getHeight());
return spatial;
}
/**
* Locates the RootPaneContainer for the given component
* @param c
* @return
*/
public static RootPaneContainer findRootPaneContainer(Component c) {
if (c == null) {
return null;
} else if (c instanceof RootPaneContainer) {
return (RootPaneContainer)c;
} else {
return findRootPaneContainer(c.getParent());
}
}
/**
* Locates the JFrame for the given component
* @param c
* @return
*/
public static JFrame findJFrame(Component c) {
if (c == null) {
return null;
} else if (c instanceof RootPaneContainer) {
return (JFrame)c;
} else {
return findJFrame(c.getParent());
}
}
/**
* Locates the JDialog for the given component
* @param c
* @return
*/
public static JDialog findJDialog(Component c) {
if (c == null) {
return null;
} else if (c instanceof JDialog) {
return (JDialog)c;
} else {
return findJDialog(c.getParent());
}
}
}
| Added try/catch block to getPointForCentering methods due to an access exception -- I can't get the mouse pointer location while within the sandbox
| src/java/org/jdesktop/swingx/util/WindowUtils.java | Added try/catch block to getPointForCentering methods due to an access exception -- I can't get the mouse pointer location while within the sandbox |
|
Java | apache-2.0 | 9638acdd7a171be2e3d3f258c8fdbb560d5e376c | 0 | shils/incubator-groovy,shils/groovy,traneHead/groovy-core,jwagenleitner/groovy,jwagenleitner/incubator-groovy,shils/groovy,shils/incubator-groovy,apache/groovy,paulk-asert/incubator-groovy,paulk-asert/groovy,shils/incubator-groovy,russel/groovy,apache/groovy,jwagenleitner/incubator-groovy,traneHead/groovy-core,armsargis/groovy,russel/groovy,paulk-asert/incubator-groovy,paulk-asert/incubator-groovy,armsargis/groovy,paulk-asert/groovy,armsargis/groovy,apache/groovy,traneHead/groovy-core,russel/incubator-groovy,russel/incubator-groovy,apache/incubator-groovy,shils/groovy,apache/groovy,shils/groovy,paulk-asert/groovy,paulk-asert/incubator-groovy,apache/incubator-groovy,russel/incubator-groovy,jwagenleitner/incubator-groovy,russel/incubator-groovy,traneHead/groovy-core,jwagenleitner/groovy,russel/groovy,paulk-asert/groovy,shils/incubator-groovy,apache/incubator-groovy,russel/groovy,jwagenleitner/incubator-groovy,jwagenleitner/groovy,armsargis/groovy,paulk-asert/incubator-groovy,apache/incubator-groovy,jwagenleitner/groovy | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.sql;
import groovy.lang.Tuple;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Extracts and indexes named parameters from a sql string.
*
* This class is package-private scoped and is only intended for internal use.
*
* @see groovy.sql.Sql
*/
class ExtractIndexAndSql {
private static final Pattern NAMED_QUERY_PATTERN = Pattern.compile("(?<!:)(:)(\\w+)|\\?(\\d*)(?:\\.(\\w+))?");
private static final char QUOTE = '\'';
private final String sql;
private List<Tuple> indexPropList;
private String newSql;
/**
* Used to track the current position within the sql while parsing
*/
private int index = 0;
/**
* Static factory method used to create a new instance. Since parsing of the input
* is required, this ensures the object is fully initialized.
*
* @param sql statement to be parsed
* @return an instance of {@link ExtractIndexAndSql}
*/
static ExtractIndexAndSql from(String sql) {
return new ExtractIndexAndSql(sql).invoke();
}
/**
* Checks a sql statement to determine whether it contains parameters.
*
* @param sql statement
* @return {@code true} if the statement contains named parameters, otherwise {@code false}
*/
static boolean hasNamedParameters(String sql) {
return NAMED_QUERY_PATTERN.matcher(sql).find();
}
private ExtractIndexAndSql(String sql) {
this.sql = sql;
}
List<Tuple> getIndexPropList() {
return indexPropList;
}
String getNewSql() {
return newSql;
}
private ExtractIndexAndSql invoke() {
indexPropList = new ArrayList<Tuple>();
StringBuilder sb = new StringBuilder();
StringBuilder currentChunk = new StringBuilder();
while (index < sql.length()) {
switch (sql.charAt(index)) {
case QUOTE:
sb.append(adaptForNamedParams(currentChunk.toString(), indexPropList));
currentChunk = new StringBuilder();
appendToEndOfString(sb);
break;
case '-':
if (next() == '-') {
sb.append(adaptForNamedParams(currentChunk.toString(), indexPropList));
currentChunk = new StringBuilder();
appendToEndOfLine(sb);
} else {
currentChunk.append(sql.charAt(index));
}
break;
case '/':
if (next() == '*') {
sb.append(adaptForNamedParams(currentChunk.toString(), indexPropList));
currentChunk = new StringBuilder();
appendToEndOfComment(sb);
} else {
currentChunk.append(sql.charAt(index));
}
break;
default:
currentChunk.append(sql.charAt(index));
}
index++;
}
sb.append(adaptForNamedParams(currentChunk.toString(), indexPropList));
newSql = sb.toString();
return this;
}
private void appendToEndOfString(StringBuilder buffer) {
buffer.append(QUOTE);
int startQuoteIndex = index;
++index;
boolean foundClosingQuote = false;
while (index < sql.length()) {
char c = sql.charAt(index);
buffer.append(c);
if (c == QUOTE && next() != QUOTE) {
if (startQuoteIndex == (index - 1)) { // empty quote ''
foundClosingQuote = true;
break;
}
int previousQuotes = countPreviousRepeatingChars(QUOTE);
if (previousQuotes == 0 ||
(previousQuotes % 2 == 0 && (index - previousQuotes) != startQuoteIndex) ||
(previousQuotes % 2 != 0 && (index - previousQuotes) == startQuoteIndex)) {
foundClosingQuote = true;
break;
}
}
++index;
}
if (!foundClosingQuote) {
throw new IllegalStateException("Failed to process query. Unterminated ' character?");
}
}
private int countPreviousRepeatingChars(char c) {
int pos = index - 1;
while (pos >= 0) {
if (sql.charAt(pos) != c) {
break;
}
--pos;
}
return (index - 1) - pos;
}
private void appendToEndOfComment(StringBuilder buffer) {
while (index < sql.length()) {
char c = sql.charAt(index);
buffer.append(c);
if (c == '*' && next() == '/') {
buffer.append('/');
++index;
break;
}
++index;
}
}
private void appendToEndOfLine(StringBuilder buffer) {
while (index < sql.length()) {
char c = sql.charAt(index);
buffer.append(c);
if (c == '\n' || c == '\r') {
break;
}
++index;
}
}
private char next() {
return ((index + 1) < sql.length()) ? sql.charAt(index + 1) : '\0';
}
private static String adaptForNamedParams(String sql, List<Tuple> indexPropList) {
StringBuilder newSql = new StringBuilder();
int txtIndex = 0;
Matcher matcher = NAMED_QUERY_PATTERN.matcher(sql);
while (matcher.find()) {
newSql.append(sql, txtIndex, matcher.start()).append('?');
String indexStr = matcher.group(1);
if (indexStr == null) indexStr = matcher.group(3);
int index = (indexStr == null || indexStr.length() == 0 || ":".equals(indexStr)) ? 0 : Integer.parseInt(indexStr) - 1;
String prop = matcher.group(2);
if (prop == null) prop = matcher.group(4);
indexPropList.add(new Tuple(index, prop == null || prop.length() == 0 ? "<this>" : prop));
txtIndex = matcher.end();
}
newSql.append(sql.substring(txtIndex)); // append ending SQL after last param.
return newSql.toString();
}
}
| subprojects/groovy-sql/src/main/java/groovy/sql/ExtractIndexAndSql.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.sql;
import groovy.lang.Tuple;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Extracts and indexes named parameters from a sql string.
*
* This class is package-private scoped and is only intended for internal use.
*
* @see groovy.sql.Sql
*/
class ExtractIndexAndSql {
private static final Pattern NAMED_QUERY_PATTERN = Pattern.compile("(?<!:)(:)(\\w+)|\\?(\\d*)(?:\\.(\\w+))?");
private static final char QUOTE = '\'';
private final String sql;
private List<Tuple> indexPropList;
private String newSql;
/**
* Used to track the current position within the sql while parsing
*/
private int index = 0;
/**
* Static factory method used to create a new instance. Since parsing of the input
* is required, this ensures the object is fully initialized.
*
* @param sql statement to be parsed
* @return an instance of {@link ExtractIndexAndSql}
*/
static ExtractIndexAndSql from(String sql) {
return new ExtractIndexAndSql(sql).invoke();
}
/**
* Checks a sql statement to determine whether it contains parameters.
*
* @param sql statement
* @return {@code true} if the statement contains named parameters, otherwise {@code false}
*/
static boolean hasNamedParameters(String sql) {
return NAMED_QUERY_PATTERN.matcher(sql).find();
}
private ExtractIndexAndSql(String sql) {
this.sql = sql;
}
List<Tuple> getIndexPropList() {
return indexPropList;
}
String getNewSql() {
return newSql;
}
private ExtractIndexAndSql invoke() {
indexPropList = new ArrayList<Tuple>();
StringBuilder sb = new StringBuilder();
StringBuilder currentChunk = new StringBuilder();
while (index < sql.length()) {
switch (sql.charAt(index)) {
case QUOTE:
sb.append(adaptForNamedParams(currentChunk.toString(), indexPropList));
currentChunk = new StringBuilder();
appendToEndOfString(sb);
break;
case '-':
if (next() == '-') {
sb.append(adaptForNamedParams(currentChunk.toString(), indexPropList));
currentChunk = new StringBuilder();
appendToEndOfLine(sb);
} else {
currentChunk.append(sql.charAt(index));
}
break;
case '/':
if (next() == '*') {
sb.append(adaptForNamedParams(currentChunk.toString(), indexPropList));
currentChunk = new StringBuilder();
appendToEndOfComment(sb);
} else {
currentChunk.append(sql.charAt(index));
}
break;
default:
currentChunk.append(sql.charAt(index));
}
index++;
}
sb.append(adaptForNamedParams(currentChunk.toString(), indexPropList));
newSql = sb.toString();
return this;
}
private void appendToEndOfString(StringBuilder buffer) {
buffer.append(QUOTE);
int startQuoteIndex = index;
++index;
boolean foundClosingQuote = false;
while (index < sql.length()) {
char c = sql.charAt(index);
buffer.append(c);
if (c == QUOTE && next() != QUOTE) {
if (startQuoteIndex == (index - 1)) { // empty quote ''
foundClosingQuote = true;
break;
}
int previousQuotes = countPreviousRepeatingChars(QUOTE);
if (previousQuotes == 0 ||
(previousQuotes % 2 == 0 && (index - previousQuotes) != startQuoteIndex) ||
(previousQuotes % 2 != 0 && (index - previousQuotes) == startQuoteIndex)) {
foundClosingQuote = true;
break;
}
}
++index;
}
if (!foundClosingQuote) {
throw new IllegalStateException("Failed to process query. Unterminated ' character?");
}
}
private int countPreviousRepeatingChars(char c) {
int pos = index - 1;
while (pos >= 0) {
if (sql.charAt(pos) != c) {
break;
}
--pos;
}
return (index - 1) - pos;
}
private void appendToEndOfComment(StringBuilder buffer) {
while (index < sql.length()) {
char c = sql.charAt(index);
buffer.append(c);
if (c == '*' && next() == '/') {
buffer.append('/');
++index;
break;
}
++index;
}
}
private void appendToEndOfLine(StringBuilder buffer) {
while (index < sql.length()) {
char c = sql.charAt(index);
buffer.append(c);
if (c == '\n' || c == '\r') {
break;
}
++index;
}
}
private char next() {
return ((index + 1) < sql.length()) ? sql.charAt(index + 1) : '\0';
}
private static String adaptForNamedParams(String sql, List<Tuple> indexPropList) {
StringBuilder newSql = new StringBuilder();
int txtIndex = 0;
Matcher matcher = NAMED_QUERY_PATTERN.matcher(sql);
while (matcher.find()) {
newSql.append(sql.substring(txtIndex, matcher.start())).append('?');
String indexStr = matcher.group(1);
if (indexStr == null) indexStr = matcher.group(3);
int index = (indexStr == null || indexStr.length() == 0 || ":".equals(indexStr)) ? 0 : Integer.parseInt(indexStr) - 1;
String prop = matcher.group(2);
if (prop == null) prop = matcher.group(4);
indexPropList.add(new Tuple(index, prop == null || prop.length() == 0 ? "<this>" : prop));
txtIndex = matcher.end();
}
newSql.append(sql.substring(txtIndex)); // append ending SQL after last param.
return newSql.toString();
}
}
| Trivial refactoring: remove redundant String operation
| subprojects/groovy-sql/src/main/java/groovy/sql/ExtractIndexAndSql.java | Trivial refactoring: remove redundant String operation |
|
Java | apache-2.0 | 2ad4a8ac2ac9e4a6bcef62ee59d17131dfc5048d | 0 | ojai/ojai,ojai/ojai | /**
* Copyright (c) 2015 MapR, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ojai.store;
import java.math.BigDecimal;
import org.ojai.Document;
import org.ojai.DocumentStream;
import org.ojai.FieldPath;
import org.ojai.Value;
import org.ojai.annotation.API;
import org.ojai.annotation.API.NonNullable;
import org.ojai.store.exceptions.DocumentExistsException;
import org.ojai.store.exceptions.DocumentNotFoundException;
import org.ojai.store.exceptions.MultiOpException;
import org.ojai.store.exceptions.StoreException;
@API.Public
@API.NotThreadSafe
public interface DocumentStore extends AutoCloseable {
/**
* Returns {@code true} if this Document store does not support any write
* operations like insert/update/delete, etc.
*/
public boolean isReadOnly();
/**
* Flushes any buffered writes operations for this DocumentStore.
*
* @throws StoreException if the flush failed or if the flush of any
* buffered operation resulted in an error.
*/
public void flush() throws StoreException;
/**
* Begin tracking a commit-context over the ensuing write operations performed through
* this instance of {@link DocumentStore}.
*
* @see #commitAndGetContext()
* @see #clearCommitContext()
* @see Query#setCommitContext(String)
*
* @throws IllegalStateException if a beginCommitContext() was already called
* and a corresponding commitAndGetContext()/clearCommitContext() wasn't.
*/
public void beginCommitContext() throws StoreException;
/**
* Begin tracking a commit-context over the ensuing write operations performed through
* this instance of {@link DocumentStore}.
*
* @param previousContext a previous commit-context that was retrieved from this document-store,
* including through other DocumentStore instances. The tracking begins by using this
* context as the base state.
*
* @see #commitAndGetContext()
* @see #clearCommitContext()
* @see Query#setCommitContext(String)
*
* @throws NullPointerException if the previous commit context is {@code null}
* @throws IllegalStateException if a beginCommitContext() was already called
* and a corresponding commitAndGetContext()/clearCommitContext() wasn't.
* @throws IllegalArgumentException if the specified commit-context can not be parsed
* or was not obtained from this document-store.
*/
public void beginCommitContext(@NonNullable String previousContext) throws StoreException;
/**
* Flushes any buffered writes operations for this DocumentStore and returns a commit-context
* which can be used to ensure that such writes are visible to ensuing queries.
* <p/>
* The commit-context is cleared and tracking is stopped.
* <p/>
* This call does not isolates the writes originating from this instance of DocumentStore
* from other instances and as a side-effect other writes issued to the same document-store
* through other DocumentStore instances could get flushed.
*
* @see #beginCommitContext()
* @see #clearCommitContext()
* @see Query#setCommitContext(String)
*
* @return An encoded string representing the commit-context of all writes issued,
* until now, through this instance of {@link DocumentStore}.
*
* @throws StoreException if the flush failed or if the flush of any
* buffered operation resulted in an error.
* @throws IllegalStateException if a corresponding {@link #beginCommitContext()} was not
* called before calling this method.
*/
public String commitAndGetContext() throws StoreException;
/**
* Stop the commit tracking and clear any state on this {@link DocumentStore} instance.
* <p/>
* This API should be called to stop tracking the commit context in case where
* {@link #beginCommitContext()} was previously called but a commit context is not needed
* anymore, for example in case of an error in any of the mutation.
*
* @throws IllegalStateException if a corresponding {@link #beginCommitContext()} was not
* called before calling this method.
*/
public void clearCommitContext() throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore.
*
* @param _id document id
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(String _id) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore.
*
* @param _id Document _id
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(Value _id) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore. The returned Document will include only the specified
* fields.
*
* @param _id Document _id
* @param fieldPaths Array of of field paths that should be returned
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(String _id, String...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore. The returned Document will include only the specified
* fields.
*
* @param _id Document _id
* @param fieldPaths Array of of field paths that should be returned
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(String _id, FieldPath...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore. The returned Document will include only the specified
* fields.
*
* @param _id Document _id
* @param fieldPaths Array of of field paths that should be returned
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(Value _id, String...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore. The returned Document will include only the specified
* fields.
*
* @param _id Document _id
* @param fieldPaths Array of of field paths that should be returned
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(Value _id, FieldPath...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned.
*
* @param _id document id
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(String _id, QueryCondition condition) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned.
*
* @param _id document id
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(Value _id, QueryCondition condition) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned. The returned Document will include only the specified fields.
*
* @param _id document id
* @param fieldPaths list of fields that should be returned in the read document
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(String _id, QueryCondition condition, String...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned. The returned Document will include only the specified fields.
*
* @param _id document id
* @param fieldPaths list of fields that should be returned in the read document
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(String _id, QueryCondition condition, FieldPath...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned. The returned Document will include only the specified fields.
*
* @param _id document id
* @param fieldPaths list of fields that should be returned in the read document
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(Value _id, QueryCondition condition, String...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned. The returned Document will include only the specified fields.
*
* @param _id document id
* @param fieldPaths list of fields that should be returned in the read document
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(Value _id, QueryCondition condition, FieldPath...fieldPaths) throws StoreException;
/**
* <p>Executes a query to return all Documents in the DocumentStore.
* <p>The returned DocumentStream must be closed after retrieving the documents.
*
* @return A DocumentStream of all documents in the this DocumentStore.
*
* @throws StoreException
*/
public DocumentStream find() throws StoreException;
/**
* <p>Executes the specified query on the DocumentStore and return a DocumentStream of the result.
* <p>The returned DocumentStream must be closed after retrieving the documents.
*
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
* @throws StoreException
*/
public DocumentStream findQuery(@NonNullable Query query) throws StoreException;
/**
* <p>Executes the specified query on the DocumentStore and return a DocumentStream of the result.
* <p>The returned DocumentStream must be closed after retrieving the documents.
*
* @param queryJSON a Json string representation of OJAI Query.
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
* @throws StoreException
*/
public DocumentStream findQuery(@NonNullable String queryJSON) throws StoreException;
/**
* <p>Executes a query to return all Documents in the DocumentStore.
* <p>Each Document will contain only those field paths that are specified in the
* argument. If no fields are specified then it returns a full document.
*
* @param fieldPaths list of fields that should be returned in the read document
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
* @throws StoreException
*/
public DocumentStream find(@NonNullable String... fieldPaths) throws StoreException;
/**
* <p>Executes a query to return all Documents in the DocumentStore.
* <p>Each Document will contain only those field paths that are specified in the
* argument. If no fields are specified then it returns a full document.
*
* @param fieldPaths list of fields that should be returned in the read document
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
* @throws StoreException
*/
public DocumentStream find(@NonNullable FieldPath... fieldPaths) throws StoreException;
/**
* Returns a DocumentStream with all the documents in the DocumentStore that
* satisfies the QueryCondition.
*
* @param condition The QueryCondition to match the documents
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
*/
public DocumentStream find(@NonNullable QueryCondition condition) throws StoreException;
/**
* <p>Execute a query on the DocumentStore and return a DocumentStream of the Document
* matching the specified QueryCondition.
* <p>Each Document will contain only those field paths that are specified in the
* argument. If no fields are specified then it returns a full document.
*
* @param condition The QueryCondition to match the documents
* @param fieldPaths list of fields that should be returned in the read document
*
* @return A DocumentStream that can be used to retrieve the documents in the result.
* @throws StoreException
*/
public DocumentStream find(@NonNullable QueryCondition condition, @NonNullable String...fieldPaths)
throws StoreException;
/**
* <p>Execute a query on the DocumentStore and return a DocumentStream of the Document
* matching the specified QueryCondition.
* <p>Each Document will contain only those field paths that are specified in the
* argument. If no fields are specified then it returns a full document.
*
* @param condition The QueryCondition to match the documents
* @param fieldPaths list of fields that should be returned in the read document
*
* @return A DocumentStream that can be used to retrieve the documents in the result.
* @throws StoreException
*/
public DocumentStream find(@NonNullable QueryCondition condition, @NonNullable FieldPath... fieldPaths)
throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore.
* <br/><br/>
* The specified Document must contain an {@code "_id"} field or the operation
* will fail.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @throws StoreException
*/
public void insertOrReplace(@NonNullable Document doc) throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore with the given _id.
* <br/><br/>
* The specified document should either not contain an {@code "_id"} field or
* its value should be same as the specified _id or the operation will fail.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @param _id value to be used as the _id for this document
* @throws StoreException
*/
public void insertOrReplace(@NonNullable String _id, @NonNullable Document r) throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore with the given _id.
* <br/><br/>
* The specified document should either not contain an {@code "_id"} field or
* its value should be same as the specified _id or the operation will fail.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @param _id value to be used as the _id for this document
* @throws StoreException
*/
public void insertOrReplace(@NonNullable Value _id, @NonNullable Document doc) throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore with the value of
* the specified Field as the {@code _id}.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @param fieldAsKey document's field to be used as the key when an id is not
* passed in and the document doesn't have an "_id" field or
* a different field is desired to be used as _id.
* @throws StoreException
*/
public void insertOrReplace(@NonNullable Document doc, @NonNullable FieldPath fieldAsKey) throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore with the value of
* the specified Field as the {@code _id}.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @param fieldAsKey document's field to be used as the key when an id is not
* passed in and the document doesn't have an "_id" field or
* a different field is desired to be used as _id.
* @throws StoreException
*/
public void insertOrReplace(@NonNullable Document doc, @NonNullable String fieldAsKey) throws StoreException;
/**
* Inserts all documents from the specified DocumentStream into this DocumentStore.
* <br/><br/>
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error has
* occurred while storing the documents. Each document read from the DocumentStream
* must have a field "_id"; otherwise, the operation will fail.
*
* If there is an error in reading from the stream or in writing to the DocumentStore
* then a MultiOpException will be thrown containing the list of documents that
* failed to be stored in the DocumentStore. Reading from a stream stops on the
* first read error. If only write errors occur, the iterator will stop and the
* rest of the documents will remain un-consumed in the DocumentStream.
*
* @param stream The DocumentStream to read the documents from.
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors.
*/
public void insertOrReplace(@NonNullable DocumentStream stream) throws MultiOpException;
/**
* Inserts all documents from the specified DocumentStream into this DocumentStore
* using the field specified by parameter {@code fieldAsKey} as the "_id" field.
* If an "_id" field is present in the documents, an exception will be thrown.
* <br/><br/>
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error has
* occurred while storing the documents. Each document read from the DocumentStream
* must have a field "_id"; otherwise, the operation will fail.
*
* If there is an error in reading from the stream or in writing to the DocumentStore
* then a MultiOpException will be thrown containing the list of documents that
* failed to be stored in the DocumentStore. Reading from a stream stops on the
* first read error. If only write errors occur, the iterator will stop and the
* rest of the documents will remain un-consumed in the DocumentStream.
*
* @param stream The DocumentStream to read the documents from.
* @param fieldAsKey field from each document whose value is to be used as
* the document key for insertion
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors.
*/
public void insertOrReplace(@NonNullable DocumentStream stream, @NonNullable FieldPath fieldAsKey)
throws MultiOpException;
/**
* Inserts all documents from the specified DocumentStream into this DocumentStore
* using the field specified by parameter {@code fieldAsKey} as the "_id" field.
* If an "_id" field is present in the documents, an exception will be thrown.
* <br/><br/>
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error has
* occurred while storing the documents. Each document read from the DocumentStream
* must have a field "_id"; otherwise, the operation will fail.
*
* If there is an error in reading from the stream or in writing to the DocumentStore
* then a MultiOpException will be thrown containing the list of documents that
* failed to be stored in the DocumentStore. Reading from a stream stops on the
* first read error. If only write errors occur, the iterator will stop and the
* rest of the documents will remain un-consumed in the DocumentStream.
*
* @param stream The DocumentStream to read the documents from.
* @param fieldAsKey field from each document whose value is to be used as
* the document key for insertion
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors.
*/
public void insertOrReplace(@NonNullable DocumentStream stream, @NonNullable String fieldAsKey)
throws MultiOpException;
/**
* Applies a mutation on the document identified by the document id.<br/><br/>
* All updates specified by the mutation object should be applied atomically,
* and consistently meaning either all of the updates in mutation are applied
* or none of them is applied and a partial update should not be visible to an
* observer.
*
* @param _id document id
* @param m a mutation object specifying the mutation operations on the document
* @throws StoreException
*/
public void update(@NonNullable String _id, @NonNullable DocumentMutation mutation) throws StoreException;
/**
* Applies a mutation on the document identified by the document id.<br/><br/>
* All updates specified by the mutation object should be applied atomically,
* and consistently meaning either all of the updates in mutation are applied
* or none of them is applied and a partial update should not be visible to an
* observer.
*
* @param _id document id
* @param m a mutation object specifying the mutation operations on the document
* @throws StoreException
*/
public void update(@NonNullable Value _id, @NonNullable DocumentMutation mutation) throws StoreException;
/**
* Deletes a document with the given id. This operation is successful even
* when the document with the given id doesn't exist.
*
* If the parameter {@code fieldAsKey} is provided, its value will be used as
* the "_id" to delete the document.
*
* @param _id document id
* @param doc JSON document to be deleted
* @param fieldAsKey document's field to be used as the key when an id is not
* passed in and a document doesn't have an "_id" field
* @throws StoreException
*/
public void delete(@NonNullable String _id) throws StoreException;
public void delete(@NonNullable Value _id) throws StoreException;
public void delete(@NonNullable Document doc) throws StoreException;
public void delete(@NonNullable Document doc, @NonNullable FieldPath fieldAsKey) throws StoreException;
public void delete(@NonNullable Document doc, @NonNullable String fieldAsKey) throws StoreException;
/**
* Deletes a set of documents from the DocumentStore represented by the DocumentStream.
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error occurs while
* writing the documents. Each document read from the DocumentStream must have a
* field "_id" of type BINARY or UTF8-string; otherwise the operation will fail.
*
* If there is an error in reading from the stream or in writing to
* the DocumentStore then a MultiOpException will be thrown that contains a list of
* documents that failed to write to the DocumentStore. Reading from a stream stops on
* the first read error. If only write errors occur, the iterator will stop
* and the current list of failed document is returned in a multi op exception.
* The untouched documents will remain in the DocumentStream.
*
* @param stream DocumentStream
* @param fieldAsKey a field from each document whose value is to be used as
* the document key for deletion
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors
*/
public void delete(@NonNullable DocumentStream stream) throws MultiOpException;
public void delete(@NonNullable DocumentStream stream, @NonNullable FieldPath fieldAsKey)
throws MultiOpException;
public void delete(@NonNullable DocumentStream stream, @NonNullable String fieldAsKey)
throws MultiOpException;
/**
* Inserts a document with the given id. This operation is successful only
* when the document with the given id doesn't exist.
*
* "fieldAsKey", when provided, will also be stored as the "_id" field in the
* written document for the document. If "_id" already existed in the document, then
* an error will be thrown. When reading the document back from the DB, the
* key will be returned back as usual as the "_id" field.
*
* Note that an insertOrReplace() operation would be more efficient than an
* insert() call.
*
* @param doc JSON document as the new value for the given document
* @param _id to be used as the key for the document
* @param fieldAsKey document's field to be used as the key when the id is not
* passed in and document doesn't have an "_id" field
* @throws TableNotFoundException when a DocumentStore does not exist to add this document
* @throws ReadOnlyException when a DocumentStore is not accepting writes
* @throws OpNotPermittedException when the server returned EPERM
* @throws DocumentExistsException when a document with id already exists in DocumentStore
*/
public void insert(@NonNullable String _id, @NonNullable Document doc) throws StoreException;
public void insert(@NonNullable Value _id, @NonNullable Document doc) throws StoreException;
public void insert(@NonNullable Document doc) throws StoreException;
public void insert(@NonNullable Document doc, @NonNullable FieldPath fieldAsKey) throws StoreException;
public void insert(@NonNullable Document doc, @NonNullable String fieldAsKey) throws StoreException;
/**
* Inserts a set of documents represented by the DocumentStream into the DocumentStore.
* This is a synchronous API that won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error occurs while
* writing the documents. Each document read from the DocumentStream must have a
* field "_id" of type BINARY or UTF8-string; otherwise, the operation will
* fail or it will be of the Document type.
*
* If a document with the given key exists on the server then it throws a document
* exists exception, similar to the non-DocumentStream based insert() API.
*
* If there is an error in reading from the stream or in writing to
* the DocumentStore then a MultiOpException will be thrown that contains a list of
* documents that failed to write to the DocumentStore. Reading from a stream stops on
* the first read error. If only write errors occur, the iterator will stop
* and the current list of failed document is returned in a multi op exception.
* The untouched documents will remain in the DocumentStream.
*
* @param stream DocumentStream
* @param fieldAsKey a field from each document whose value is to be used as
* the document key for deletion
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors
*/
public void insert(@NonNullable DocumentStream stream) throws MultiOpException;
public void insert(@NonNullable DocumentStream stream, @NonNullable FieldPath fieldAsKey)
throws MultiOpException;
public void insert(@NonNullable DocumentStream stream, @NonNullable String fieldAsKey)
throws MultiOpException;
/**
* Replaces a document in the DocumentStore. The document id is either explicitly specified
* as parameter "id" or it is implicitly specified as the field "_id" in the
* passed document. If the document id is explicitly passed then the document should
* not contain "_id" field or its value should be the same as the explicitly
* specified id; otherwise, the operation will fail.
*
* If the document with the given key does not exist on the server then it will
* throw DocumentNotFoundException.
*
* "fieldAsKey", when provided, will also be stored as the "_id" field in the
* written document for the document. If "_id" already existed in the document, then
* an error will be thrown. When reading the document back from the DB, the
* key would be returned back as usual as "_id" field.
*
* Note that an insertOrReplace() operation would be more efficient than an
* replace() call.
* @param doc JSON document as the new value for the given document
* @param _id to be used as the key for the document
* @param fieldAsKey document's field to be used as the key when an id is not
* passed in and document doesn't have an "_id" field
* @throws TableNotFoundException when a DocumentStore does not exist to which to add this document
* @throws ReadOnlyException when a DocumentStore is not accepting writes
* @throws OpNotPermittedException when the server returns EPERM
* @throws DocumentNotFoundException when a document with the id does not exist in DocumentStore
*/
public void replace(@NonNullable String _id, @NonNullable Document doc) throws StoreException;
public void replace(@NonNullable Value _id, @NonNullable Document doc) throws StoreException;
public void replace(@NonNullable Document doc) throws StoreException;
public void replace(@NonNullable Document doc, @NonNullable FieldPath fieldAsKey) throws StoreException;
public void replace(@NonNullable Document doc, @NonNullable String fieldAsKey) throws StoreException;
/**
* Replaces a set of documents represented by the DocumentStream into the DocumentStore.
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error occurs while
* writing the documents. Each document read from the DocumentStream must have a
* field "_id" of type BINARY or UTF8-string; otherwise, the operation will
* fail or it will be of Document type.
*
* If the document with the given key does not exist on the server then it throws,
* a document not exists exception, similar to the non-DocumentStream based
* replace() API.
*
* If there is an error in reading from the stream or in writing to
* the DocumentStore then a MultiOpException will be thrown that contains a list of
* documents that failed to write to the DocumentStore. Reading from a stream stops on
* the first read error. If only write errors occur, the iterator will stop
* and the current list of failed document is returned in a multi op exception.
* The untouched documents will remain in the DocumentStream.
*
* @param stream A DocumentStream to read the documents from
* @param fieldAsKey field from each document whose value is to be used as
* the document key for deletion
*
* @throws MultiOpException which has list of write-failed documents and
* their errors
*/
public void replace(@NonNullable DocumentStream stream) throws MultiOpException;
public void replace(@NonNullable DocumentStream stream, @NonNullable FieldPath fieldAsKey)
throws MultiOpException;
public void replace(@NonNullable DocumentStream stream, @NonNullable String fieldAsKey)
throws MultiOpException;
/**
* Atomically applies an increment to a given field (in dot separated notation)
* of the given document id. If the field doesn't exist on the server
* then it will be created with the type of the incremental value.
* The increment operation can be applied on any of the numeric
* types, such as byte, short, int, long, float, double, or decimal,
* of a field. The operation will fail if the increment is applied to a
* field that is of a non-numeric type.
*
* If an id doesn't exist, it gets created (similar to the insertOrReplace
* behavior). And it is created, with the value of 'inc' parameter. The same
* logic applies to intermittent paths in the path: they get created top to
* bottom.
*
* If the type is different than the field in the operation, it fails.
*
* The increment operation won't change the type of existing value stored in
* the given field for the document. The resultant value of the field will be
* truncated based on the original type of the field.
*
* For example, if a field 'score' is of type int and contains 60 and an
* increment of double '5.675' is applied, then the resultant value of the
* field will be 65 (65.675 will be truncated to 65).
*
* If the type to which the increment is applied is a byte, short, or int,
* then it needs to use long as the operation.
*
* @param _id document id
* @param field the field name in dot separated notation
* @param inc increment to apply to a field. Can be positive or negative
* @throws StoreException
*/
public void increment(@NonNullable String _id,
@NonNullable String field, byte inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, short inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, int inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, long inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, float inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, double inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, @NonNullable BigDecimal inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, byte inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, short inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, int inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, long inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, float inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, double inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, @NonNullable BigDecimal inc) throws StoreException;
/**
* Atomically evaluates the condition on a given document and if the
* condition holds true for the document then a mutation is applied on the document.
*
* If an id doesn't exist, the function returns false (no exception is thrown).
* If the mutation operation fails, it throws exception.
*
* @param _id document id
* @param condition the condition to evaluate on the document
* @param m mutation to apply on the document
* @return True if the condition is true for the document; otherwise, false
* @throws StoreException if the condition passes but the mutate fails
*/
public boolean checkAndMutate(@NonNullable String _id,
@NonNullable QueryCondition condition, @NonNullable DocumentMutation mutation) throws StoreException;
public boolean checkAndMutate(@NonNullable Value _id, @NonNullable QueryCondition condition,
@NonNullable DocumentMutation mutation) throws StoreException;
/**
* Atomically evaluates the condition on given document and if the
* condition holds true for the document then it is atomically deleted.
*
* If id doesnt exist, returns false (no exception is thrown).
* If deletion operation fails, it throws exception.
*
* @param _id document id
* @param condition condition to evaluate on the document
* @return True if the condition is valid for the document, otherwise false.
* @throws StoreException if the condition passes but the delete fails
*/
public boolean checkAndDelete(@NonNullable String _id, @NonNullable
QueryCondition condition) throws StoreException;
public boolean checkAndDelete(@NonNullable Value _id, @NonNullable
QueryCondition condition) throws StoreException;
/**
* Atomically evaluates the condition on the given document and if the
* condition holds true for the document then it atomically replaces the document
* with the given document.
*
* If the id doesn't exist, the function returns false (no exception is thrown).
* If the replace operation fails, it throws an exception.
*
* @param _id document id
* @param condition the condition to evaluate on the document
* @param doc document to replace
* @return True if the condition is true for the document otherwise false
* @throws StoreException if the condition passes but the replace fails
*/
public boolean checkAndReplace(@NonNullable String _id,
@NonNullable QueryCondition condition, @NonNullable Document doc) throws StoreException;
public boolean checkAndReplace(@NonNullable Value _id,
@NonNullable QueryCondition condition, @NonNullable Document doc) throws StoreException;
/**
* Override {@link AutoCloseable#close()} to avoid declaring a checked exception.
*/
void close() throws StoreException;
}
| core/src/main/java/org/ojai/store/DocumentStore.java | /**
* Copyright (c) 2015 MapR, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ojai.store;
import java.math.BigDecimal;
import org.ojai.Document;
import org.ojai.DocumentStream;
import org.ojai.FieldPath;
import org.ojai.Value;
import org.ojai.annotation.API;
import org.ojai.annotation.API.NonNullable;
import org.ojai.store.exceptions.DocumentExistsException;
import org.ojai.store.exceptions.DocumentNotFoundException;
import org.ojai.store.exceptions.MultiOpException;
import org.ojai.store.exceptions.StoreException;
@API.Public
@API.NotThreadSafe
public interface DocumentStore extends AutoCloseable {
/**
* Returns {@code true} if this Document store does not support any write
* operations like insert/update/delete, etc.
*/
public boolean isReadOnly();
/**
* Flushes any buffered writes operations for this DocumentStore.
*
* @throws StoreException if the flush failed or if the flush of any
* buffered operation resulted in an error.
*/
public void flush() throws StoreException;
/**
* Begin tracking a commit-context over the ensuing write operations performed through
* this instance of {@link DocumentStore}.
*
* @see #commitAndGetContext()
* @see #clearCommitContext()
* @see Query#setCommitContext(String)
*
* @throws IllegalStateException if a beginCommitContext() was already called
* and a corresponding commitAndGetContext()/clearCommitContext() wasn't.
*/
public void beginCommitContext() throws StoreException;
/**
* Begin tracking a commit-context over the ensuing write operations performed through
* this instance of {@link DocumentStore}.
*
* @param previousContext a previous commit-context that was retrieved from this document-store,
* including through other DocumentStore instances. The tracking begins by using this
* context as the base state.
*
* @see #commitAndGetContext()
* @see #clearCommitContext()
* @see Query#setCommitContext(String)
*
* @throws NullPointerException if the previous commit context is {@code null}
* @throws IllegalStateException if a beginCommitContext() was already called
* and a corresponding commitAndGetContext()/clearCommitContext() wasn't.
* @throws IllegalArgumentException if the specified commit-context can not be parsed
* or was not obtained from this document-store.
*/
public void beginCommitContext(@NonNullable String previousContext) throws StoreException;
/**
* Flushes any buffered writes operations for this DocumentStore and returns a commit-context
* which can be used to ensure that such writes are visible to ensuing queries.
* <p/>
* The commit-context is cleared and tracking is stopped.
* <p/>
* This call does not isolates the writes originating from this instance of DocumentStore
* from other instances and as a side-effect other writes issued to the same document-store
* through other DocumentStore instances could get flushed.
*
* @see #beginCommitContext()
* @see #clearCommitContext()
* @see Query#setCommitContext(String)
*
* @return An encoded string representing the commit-context of all writes issued,
* until now, through this instance of {@link DocumentStore}.
*
* @throws StoreException if the flush failed or if the flush of any
* buffered operation resulted in an error.
* @throws IllegalStateException if a corresponding {@link #beginCommitContext()} was not
* called before calling this method.
*/
public String commitAndGetContext() throws StoreException;
/**
* Stop the commit tracking and clear any state on this {@link DocumentStore} instance.
*
* @throws IllegalStateException if a corresponding {@link #beginCommitContext()} was not
* called before calling this method.
*/
public void clearCommitContext() throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore.
*
* @param _id document id
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(String _id) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore.
*
* @param _id Document _id
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(Value _id) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore. The returned Document will include only the specified
* fields.
*
* @param _id Document _id
* @param fieldPaths Array of of field paths that should be returned
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(String _id, String...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore. The returned Document will include only the specified
* fields.
*
* @param _id Document _id
* @param fieldPaths Array of of field paths that should be returned
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(String _id, FieldPath...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore. The returned Document will include only the specified
* fields.
*
* @param _id Document _id
* @param fieldPaths Array of of field paths that should be returned
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(Value _id, String...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` or {@code null} if the document with that `_id`
* doesn't exist in this DocumentStore. The returned Document will include only the specified
* fields.
*
* @param _id Document _id
* @param fieldPaths Array of of field paths that should be returned
*
* @return An OJAI Document with the specified id or {@code null} if one does not exist in
* this DocumentStore.
*
* @throws StoreException
*/
public Document findById(Value _id, FieldPath...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned.
*
* @param _id document id
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(String _id, QueryCondition condition) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned.
*
* @param _id document id
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(Value _id, QueryCondition condition) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned. The returned Document will include only the specified fields.
*
* @param _id document id
* @param fieldPaths list of fields that should be returned in the read document
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(String _id, QueryCondition condition, String...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned. The returned Document will include only the specified fields.
*
* @param _id document id
* @param fieldPaths list of fields that should be returned in the read document
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(String _id, QueryCondition condition, FieldPath...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned. The returned Document will include only the specified fields.
*
* @param _id document id
* @param fieldPaths list of fields that should be returned in the read document
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(Value _id, QueryCondition condition, String...fieldPaths) throws StoreException;
/**
* Return the Document with the given `_id` if it matches the specified condition. If a Document
* with the specified `_id` does not exist in this DocumentStore or does not satisfy the specified
* condition, {@code null} is returned. The returned Document will include only the specified fields.
*
* @param _id document id
* @param fieldPaths list of fields that should be returned in the read document
* @param condition query condition to test the document
*
* @return An OJAI Document with the specified _id
*
* @throws StoreException
*/
public Document findById(Value _id, QueryCondition condition, FieldPath...fieldPaths) throws StoreException;
/**
* <p>Executes a query to return all Documents in the DocumentStore.
* <p>The returned DocumentStream must be closed after retrieving the documents.
*
* @return A DocumentStream of all documents in the this DocumentStore.
*
* @throws StoreException
*/
public DocumentStream find() throws StoreException;
/**
* <p>Executes the specified query on the DocumentStore and return a DocumentStream of the result.
* <p>The returned DocumentStream must be closed after retrieving the documents.
*
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
* @throws StoreException
*/
public DocumentStream findQuery(@NonNullable Query query) throws StoreException;
/**
* <p>Executes the specified query on the DocumentStore and return a DocumentStream of the result.
* <p>The returned DocumentStream must be closed after retrieving the documents.
*
* @param queryJSON a Json string representation of OJAI Query.
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
* @throws StoreException
*/
public DocumentStream findQuery(@NonNullable String queryJSON) throws StoreException;
/**
* <p>Executes a query to return all Documents in the DocumentStore.
* <p>Each Document will contain only those field paths that are specified in the
* argument. If no fields are specified then it returns a full document.
*
* @param fieldPaths list of fields that should be returned in the read document
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
* @throws StoreException
*/
public DocumentStream find(@NonNullable String... fieldPaths) throws StoreException;
/**
* <p>Executes a query to return all Documents in the DocumentStore.
* <p>Each Document will contain only those field paths that are specified in the
* argument. If no fields are specified then it returns a full document.
*
* @param fieldPaths list of fields that should be returned in the read document
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
* @throws StoreException
*/
public DocumentStream find(@NonNullable FieldPath... fieldPaths) throws StoreException;
/**
* Returns a DocumentStream with all the documents in the DocumentStore that
* satisfies the QueryCondition.
*
* @param condition The QueryCondition to match the documents
* @return A DocumentStream that can be used to retrieve the documents in the result.
*
*/
public DocumentStream find(@NonNullable QueryCondition condition) throws StoreException;
/**
* <p>Execute a query on the DocumentStore and return a DocumentStream of the Document
* matching the specified QueryCondition.
* <p>Each Document will contain only those field paths that are specified in the
* argument. If no fields are specified then it returns a full document.
*
* @param condition The QueryCondition to match the documents
* @param fieldPaths list of fields that should be returned in the read document
*
* @return A DocumentStream that can be used to retrieve the documents in the result.
* @throws StoreException
*/
public DocumentStream find(@NonNullable QueryCondition condition, @NonNullable String...fieldPaths)
throws StoreException;
/**
* <p>Execute a query on the DocumentStore and return a DocumentStream of the Document
* matching the specified QueryCondition.
* <p>Each Document will contain only those field paths that are specified in the
* argument. If no fields are specified then it returns a full document.
*
* @param condition The QueryCondition to match the documents
* @param fieldPaths list of fields that should be returned in the read document
*
* @return A DocumentStream that can be used to retrieve the documents in the result.
* @throws StoreException
*/
public DocumentStream find(@NonNullable QueryCondition condition, @NonNullable FieldPath... fieldPaths)
throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore.
* <br/><br/>
* The specified Document must contain an {@code "_id"} field or the operation
* will fail.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @throws StoreException
*/
public void insertOrReplace(@NonNullable Document doc) throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore with the given _id.
* <br/><br/>
* The specified document should either not contain an {@code "_id"} field or
* its value should be same as the specified _id or the operation will fail.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @param _id value to be used as the _id for this document
* @throws StoreException
*/
public void insertOrReplace(@NonNullable String _id, @NonNullable Document r) throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore with the given _id.
* <br/><br/>
* The specified document should either not contain an {@code "_id"} field or
* its value should be same as the specified _id or the operation will fail.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @param _id value to be used as the _id for this document
* @throws StoreException
*/
public void insertOrReplace(@NonNullable Value _id, @NonNullable Document doc) throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore with the value of
* the specified Field as the {@code _id}.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @param fieldAsKey document's field to be used as the key when an id is not
* passed in and the document doesn't have an "_id" field or
* a different field is desired to be used as _id.
* @throws StoreException
*/
public void insertOrReplace(@NonNullable Document doc, @NonNullable FieldPath fieldAsKey) throws StoreException;
/**
* Inserts or replace a new document in this DocumentStore with the value of
* the specified Field as the {@code _id}.
* <br/><br/>
* If the document with the given _id exists in the DocumentStore then that
* document will be replaced by the specified document.
*
* @param doc The Document to be inserted or replaced in the DocumentStore.
* @param fieldAsKey document's field to be used as the key when an id is not
* passed in and the document doesn't have an "_id" field or
* a different field is desired to be used as _id.
* @throws StoreException
*/
public void insertOrReplace(@NonNullable Document doc, @NonNullable String fieldAsKey) throws StoreException;
/**
* Inserts all documents from the specified DocumentStream into this DocumentStore.
* <br/><br/>
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error has
* occurred while storing the documents. Each document read from the DocumentStream
* must have a field "_id"; otherwise, the operation will fail.
*
* If there is an error in reading from the stream or in writing to the DocumentStore
* then a MultiOpException will be thrown containing the list of documents that
* failed to be stored in the DocumentStore. Reading from a stream stops on the
* first read error. If only write errors occur, the iterator will stop and the
* rest of the documents will remain un-consumed in the DocumentStream.
*
* @param stream The DocumentStream to read the documents from.
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors.
*/
public void insertOrReplace(@NonNullable DocumentStream stream) throws MultiOpException;
/**
* Inserts all documents from the specified DocumentStream into this DocumentStore
* using the field specified by parameter {@code fieldAsKey} as the "_id" field.
* If an "_id" field is present in the documents, an exception will be thrown.
* <br/><br/>
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error has
* occurred while storing the documents. Each document read from the DocumentStream
* must have a field "_id"; otherwise, the operation will fail.
*
* If there is an error in reading from the stream or in writing to the DocumentStore
* then a MultiOpException will be thrown containing the list of documents that
* failed to be stored in the DocumentStore. Reading from a stream stops on the
* first read error. If only write errors occur, the iterator will stop and the
* rest of the documents will remain un-consumed in the DocumentStream.
*
* @param stream The DocumentStream to read the documents from.
* @param fieldAsKey field from each document whose value is to be used as
* the document key for insertion
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors.
*/
public void insertOrReplace(@NonNullable DocumentStream stream, @NonNullable FieldPath fieldAsKey)
throws MultiOpException;
/**
* Inserts all documents from the specified DocumentStream into this DocumentStore
* using the field specified by parameter {@code fieldAsKey} as the "_id" field.
* If an "_id" field is present in the documents, an exception will be thrown.
* <br/><br/>
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error has
* occurred while storing the documents. Each document read from the DocumentStream
* must have a field "_id"; otherwise, the operation will fail.
*
* If there is an error in reading from the stream or in writing to the DocumentStore
* then a MultiOpException will be thrown containing the list of documents that
* failed to be stored in the DocumentStore. Reading from a stream stops on the
* first read error. If only write errors occur, the iterator will stop and the
* rest of the documents will remain un-consumed in the DocumentStream.
*
* @param stream The DocumentStream to read the documents from.
* @param fieldAsKey field from each document whose value is to be used as
* the document key for insertion
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors.
*/
public void insertOrReplace(@NonNullable DocumentStream stream, @NonNullable String fieldAsKey)
throws MultiOpException;
/**
* Applies a mutation on the document identified by the document id.<br/><br/>
* All updates specified by the mutation object should be applied atomically,
* and consistently meaning either all of the updates in mutation are applied
* or none of them is applied and a partial update should not be visible to an
* observer.
*
* @param _id document id
* @param m a mutation object specifying the mutation operations on the document
* @throws StoreException
*/
public void update(@NonNullable String _id, @NonNullable DocumentMutation mutation) throws StoreException;
/**
* Applies a mutation on the document identified by the document id.<br/><br/>
* All updates specified by the mutation object should be applied atomically,
* and consistently meaning either all of the updates in mutation are applied
* or none of them is applied and a partial update should not be visible to an
* observer.
*
* @param _id document id
* @param m a mutation object specifying the mutation operations on the document
* @throws StoreException
*/
public void update(@NonNullable Value _id, @NonNullable DocumentMutation mutation) throws StoreException;
/**
* Deletes a document with the given id. This operation is successful even
* when the document with the given id doesn't exist.
*
* If the parameter {@code fieldAsKey} is provided, its value will be used as
* the "_id" to delete the document.
*
* @param _id document id
* @param doc JSON document to be deleted
* @param fieldAsKey document's field to be used as the key when an id is not
* passed in and a document doesn't have an "_id" field
* @throws StoreException
*/
public void delete(@NonNullable String _id) throws StoreException;
public void delete(@NonNullable Value _id) throws StoreException;
public void delete(@NonNullable Document doc) throws StoreException;
public void delete(@NonNullable Document doc, @NonNullable FieldPath fieldAsKey) throws StoreException;
public void delete(@NonNullable Document doc, @NonNullable String fieldAsKey) throws StoreException;
/**
* Deletes a set of documents from the DocumentStore represented by the DocumentStream.
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error occurs while
* writing the documents. Each document read from the DocumentStream must have a
* field "_id" of type BINARY or UTF8-string; otherwise the operation will fail.
*
* If there is an error in reading from the stream or in writing to
* the DocumentStore then a MultiOpException will be thrown that contains a list of
* documents that failed to write to the DocumentStore. Reading from a stream stops on
* the first read error. If only write errors occur, the iterator will stop
* and the current list of failed document is returned in a multi op exception.
* The untouched documents will remain in the DocumentStream.
*
* @param stream DocumentStream
* @param fieldAsKey a field from each document whose value is to be used as
* the document key for deletion
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors
*/
public void delete(@NonNullable DocumentStream stream) throws MultiOpException;
public void delete(@NonNullable DocumentStream stream, @NonNullable FieldPath fieldAsKey)
throws MultiOpException;
public void delete(@NonNullable DocumentStream stream, @NonNullable String fieldAsKey)
throws MultiOpException;
/**
* Inserts a document with the given id. This operation is successful only
* when the document with the given id doesn't exist.
*
* "fieldAsKey", when provided, will also be stored as the "_id" field in the
* written document for the document. If "_id" already existed in the document, then
* an error will be thrown. When reading the document back from the DB, the
* key will be returned back as usual as the "_id" field.
*
* Note that an insertOrReplace() operation would be more efficient than an
* insert() call.
*
* @param doc JSON document as the new value for the given document
* @param _id to be used as the key for the document
* @param fieldAsKey document's field to be used as the key when the id is not
* passed in and document doesn't have an "_id" field
* @throws TableNotFoundException when a DocumentStore does not exist to add this document
* @throws ReadOnlyException when a DocumentStore is not accepting writes
* @throws OpNotPermittedException when the server returned EPERM
* @throws DocumentExistsException when a document with id already exists in DocumentStore
*/
public void insert(@NonNullable String _id, @NonNullable Document doc) throws StoreException;
public void insert(@NonNullable Value _id, @NonNullable Document doc) throws StoreException;
public void insert(@NonNullable Document doc) throws StoreException;
public void insert(@NonNullable Document doc, @NonNullable FieldPath fieldAsKey) throws StoreException;
public void insert(@NonNullable Document doc, @NonNullable String fieldAsKey) throws StoreException;
/**
* Inserts a set of documents represented by the DocumentStream into the DocumentStore.
* This is a synchronous API that won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error occurs while
* writing the documents. Each document read from the DocumentStream must have a
* field "_id" of type BINARY or UTF8-string; otherwise, the operation will
* fail or it will be of the Document type.
*
* If a document with the given key exists on the server then it throws a document
* exists exception, similar to the non-DocumentStream based insert() API.
*
* If there is an error in reading from the stream or in writing to
* the DocumentStore then a MultiOpException will be thrown that contains a list of
* documents that failed to write to the DocumentStore. Reading from a stream stops on
* the first read error. If only write errors occur, the iterator will stop
* and the current list of failed document is returned in a multi op exception.
* The untouched documents will remain in the DocumentStream.
*
* @param stream DocumentStream
* @param fieldAsKey a field from each document whose value is to be used as
* the document key for deletion
*
* @throws MultiOpException which has a list of write-failed documents and
* their errors
*/
public void insert(@NonNullable DocumentStream stream) throws MultiOpException;
public void insert(@NonNullable DocumentStream stream, @NonNullable FieldPath fieldAsKey)
throws MultiOpException;
public void insert(@NonNullable DocumentStream stream, @NonNullable String fieldAsKey)
throws MultiOpException;
/**
* Replaces a document in the DocumentStore. The document id is either explicitly specified
* as parameter "id" or it is implicitly specified as the field "_id" in the
* passed document. If the document id is explicitly passed then the document should
* not contain "_id" field or its value should be the same as the explicitly
* specified id; otherwise, the operation will fail.
*
* If the document with the given key does not exist on the server then it will
* throw DocumentNotFoundException.
*
* "fieldAsKey", when provided, will also be stored as the "_id" field in the
* written document for the document. If "_id" already existed in the document, then
* an error will be thrown. When reading the document back from the DB, the
* key would be returned back as usual as "_id" field.
*
* Note that an insertOrReplace() operation would be more efficient than an
* replace() call.
* @param doc JSON document as the new value for the given document
* @param _id to be used as the key for the document
* @param fieldAsKey document's field to be used as the key when an id is not
* passed in and document doesn't have an "_id" field
* @throws TableNotFoundException when a DocumentStore does not exist to which to add this document
* @throws ReadOnlyException when a DocumentStore is not accepting writes
* @throws OpNotPermittedException when the server returns EPERM
* @throws DocumentNotFoundException when a document with the id does not exist in DocumentStore
*/
public void replace(@NonNullable String _id, @NonNullable Document doc) throws StoreException;
public void replace(@NonNullable Value _id, @NonNullable Document doc) throws StoreException;
public void replace(@NonNullable Document doc) throws StoreException;
public void replace(@NonNullable Document doc, @NonNullable FieldPath fieldAsKey) throws StoreException;
public void replace(@NonNullable Document doc, @NonNullable String fieldAsKey) throws StoreException;
/**
* Replaces a set of documents represented by the DocumentStream into the DocumentStore.
* This is a synchronous API and it won't return until all the documents
* in the DocumentStream are written to the DocumentStore or some error occurs while
* writing the documents. Each document read from the DocumentStream must have a
* field "_id" of type BINARY or UTF8-string; otherwise, the operation will
* fail or it will be of Document type.
*
* If the document with the given key does not exist on the server then it throws,
* a document not exists exception, similar to the non-DocumentStream based
* replace() API.
*
* If there is an error in reading from the stream or in writing to
* the DocumentStore then a MultiOpException will be thrown that contains a list of
* documents that failed to write to the DocumentStore. Reading from a stream stops on
* the first read error. If only write errors occur, the iterator will stop
* and the current list of failed document is returned in a multi op exception.
* The untouched documents will remain in the DocumentStream.
*
* @param stream A DocumentStream to read the documents from
* @param fieldAsKey field from each document whose value is to be used as
* the document key for deletion
*
* @throws MultiOpException which has list of write-failed documents and
* their errors
*/
public void replace(@NonNullable DocumentStream stream) throws MultiOpException;
public void replace(@NonNullable DocumentStream stream, @NonNullable FieldPath fieldAsKey)
throws MultiOpException;
public void replace(@NonNullable DocumentStream stream, @NonNullable String fieldAsKey)
throws MultiOpException;
/**
* Atomically applies an increment to a given field (in dot separated notation)
* of the given document id. If the field doesn't exist on the server
* then it will be created with the type of the incremental value.
* The increment operation can be applied on any of the numeric
* types, such as byte, short, int, long, float, double, or decimal,
* of a field. The operation will fail if the increment is applied to a
* field that is of a non-numeric type.
*
* If an id doesn't exist, it gets created (similar to the insertOrReplace
* behavior). And it is created, with the value of 'inc' parameter. The same
* logic applies to intermittent paths in the path: they get created top to
* bottom.
*
* If the type is different than the field in the operation, it fails.
*
* The increment operation won't change the type of existing value stored in
* the given field for the document. The resultant value of the field will be
* truncated based on the original type of the field.
*
* For example, if a field 'score' is of type int and contains 60 and an
* increment of double '5.675' is applied, then the resultant value of the
* field will be 65 (65.675 will be truncated to 65).
*
* If the type to which the increment is applied is a byte, short, or int,
* then it needs to use long as the operation.
*
* @param _id document id
* @param field the field name in dot separated notation
* @param inc increment to apply to a field. Can be positive or negative
* @throws StoreException
*/
public void increment(@NonNullable String _id,
@NonNullable String field, byte inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, short inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, int inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, long inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, float inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, double inc) throws StoreException;
public void increment(@NonNullable String _id,
@NonNullable String field, @NonNullable BigDecimal inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, byte inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, short inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, int inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, long inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, float inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, double inc) throws StoreException;
public void increment(@NonNullable Value _id,
@NonNullable String field, @NonNullable BigDecimal inc) throws StoreException;
/**
* Atomically evaluates the condition on a given document and if the
* condition holds true for the document then a mutation is applied on the document.
*
* If an id doesn't exist, the function returns false (no exception is thrown).
* If the mutation operation fails, it throws exception.
*
* @param _id document id
* @param condition the condition to evaluate on the document
* @param m mutation to apply on the document
* @return True if the condition is true for the document; otherwise, false
* @throws StoreException if the condition passes but the mutate fails
*/
public boolean checkAndMutate(@NonNullable String _id,
@NonNullable QueryCondition condition, @NonNullable DocumentMutation mutation) throws StoreException;
public boolean checkAndMutate(@NonNullable Value _id, @NonNullable QueryCondition condition,
@NonNullable DocumentMutation mutation) throws StoreException;
/**
* Atomically evaluates the condition on given document and if the
* condition holds true for the document then it is atomically deleted.
*
* If id doesnt exist, returns false (no exception is thrown).
* If deletion operation fails, it throws exception.
*
* @param _id document id
* @param condition condition to evaluate on the document
* @return True if the condition is valid for the document, otherwise false.
* @throws StoreException if the condition passes but the delete fails
*/
public boolean checkAndDelete(@NonNullable String _id, @NonNullable
QueryCondition condition) throws StoreException;
public boolean checkAndDelete(@NonNullable Value _id, @NonNullable
QueryCondition condition) throws StoreException;
/**
* Atomically evaluates the condition on the given document and if the
* condition holds true for the document then it atomically replaces the document
* with the given document.
*
* If the id doesn't exist, the function returns false (no exception is thrown).
* If the replace operation fails, it throws an exception.
*
* @param _id document id
* @param condition the condition to evaluate on the document
* @param doc document to replace
* @return True if the condition is true for the document otherwise false
* @throws StoreException if the condition passes but the replace fails
*/
public boolean checkAndReplace(@NonNullable String _id,
@NonNullable QueryCondition condition, @NonNullable Document doc) throws StoreException;
public boolean checkAndReplace(@NonNullable Value _id,
@NonNullable QueryCondition condition, @NonNullable Document doc) throws StoreException;
/**
* Override {@link AutoCloseable#close()} to avoid declaring a checked exception.
*/
void close() throws StoreException;
}
| Updated java-doc for DocumentStore.clearCommitContext()
| core/src/main/java/org/ojai/store/DocumentStore.java | Updated java-doc for DocumentStore.clearCommitContext() |
|
Java | apache-2.0 | c5fbfd97c4b7bd19ad75a88161b09d00d23b4b87 | 0 | cjduffett/synthea,synthetichealth/synthea,synthetichealth/synthea,cjduffett/synthea,synthetichealth/synthea | package org.mitre.synthea.world.agents;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.gson.internal.LinkedTreeMap;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;
public class Provider {
public static final String AMBULATORY = "ambulatory";
public static final String INPATIENT = "inpatient";
public static final String EMERGENCY = "emergency";
public static final String ENCOUNTERS = "encounters";
public static final String PROCEDURES = "procedures";
public static final String LABS = "labs";
public static final String PRESCRIPTIONS = "prescriptions";
// ArrayList of all providers imported
private static ArrayList<Provider> providerList = new ArrayList<Provider>();
// Hash of services to Providers that provide them
private static HashMap<String, ArrayList<Provider>> services = new HashMap<String, ArrayList<Provider>>();
public Map<String,Object> attributes;
private Point coordinates;
private ArrayList<String> services_provided;
private Table<Integer, String, AtomicInteger> utilization; // row: year, column: type, value: count
protected Provider()
{
// no-arg constructor provided for subclasses
attributes = new LinkedTreeMap<>();
utilization = HashBasedTable.create();
services_provided = new ArrayList<String>();
}
public Provider(LinkedTreeMap p) {
this();
attributes = (LinkedTreeMap) p.get("properties");
String resourceID = (String) p.get("resourceID");
attributes.put("resourceID", resourceID);
ArrayList<Double> coorList = (ArrayList<Double>) p.get("coordinates");
Point coor = new GeometryFactory().createPoint(new Coordinate(coorList.get(0), coorList.get(1)));
coordinates = coor;
String[] servicesList = ( (String) attributes.get("services_provided") ).split(" ");
for(String s : servicesList){
services_provided.add(s);
// add provider to hash of services
if (services.containsKey(s)){
ArrayList<Provider> l = services.get(s);
l.add(this);
} else{
ArrayList<Provider> l = new ArrayList<Provider>();
l.add(this);
services.put(s, l);
}
}
}
public static void clear(){
providerList.clear();
services.clear();
}
public String getResourceID(){
return attributes.get("resourceID").toString();
}
public Map<String,Object> getAttributes(){
return attributes;
}
public Point getCoordinates(){
return coordinates;
}
public boolean hasService(String service){
return services_provided.contains(service);
}
public void incrementEncounters(String encounterType, int year)
{
increment(year, ENCOUNTERS);
increment(year, ENCOUNTERS + "-" + encounterType);
}
public void incrementProcedures(int year)
{
increment(year, PROCEDURES);
}
// TODO: increment labs when there are reports
public void incrementLabs(int year)
{
increment(year, LABS);
}
public void incrementPrescriptions(int year)
{
increment(year, PRESCRIPTIONS);
}
private synchronized void increment(Integer year, String key)
{
if (!utilization.contains(year, key))
{
utilization.put(year, key, new AtomicInteger(0));
}
utilization.get(year, key).incrementAndGet();
}
public Table<Integer, String, AtomicInteger> getUtilization(){
return utilization;
}
public Integer getBedCount(){
if(attributes.containsKey("bed_count")){
return Integer.parseInt(attributes.get("bed_count").toString());
} else {
return null;
}
}
public static Provider findClosestService(Person person, String service){
if( service.equals("outpatient") || service.equals("wellness")){
service = AMBULATORY;
}
switch(service) {
case AMBULATORY :
if( person.getAmbulatoryProvider() == null ){
person.setAmbulatoryProvider();
}
return person.getAmbulatoryProvider();
case INPATIENT :
if( person.getInpatientProvider() == null ){
person.setInpatientProvider();
}
return person.getInpatientProvider();
case EMERGENCY :
if( person.getEmergencyProvider() == null ){
person.setEmergencyProvider();
}
return person.getEmergencyProvider();
}
// if service is null or not supported by simulation, patient goes to ambulatory hospital
return person.getAmbulatoryProvider();
}
public static HashMap<String, ArrayList<Provider>> getServices(){
return services;
}
} | src/main/java/org/mitre/synthea/world/agents/Provider.java | package org.mitre.synthea.world.agents;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.gson.internal.LinkedTreeMap;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;
public class Provider {
public static final String AMBULATORY = "ambulatory";
public static final String INPATIENT = "inpatient";
public static final String EMERGENCY = "emergency";
public static final String ENCOUNTERS = "encounters";
public static final String PROCEDURES = "procedures";
public static final String LABS = "labs";
public static final String PRESCRIPTIONS = "prescriptions";
// ArrayList of all providers imported
private static ArrayList<Provider> providerList = new ArrayList<Provider>();
// Hash of services to Providers that provide them
private static HashMap<String, ArrayList<Provider>> services = new HashMap<String, ArrayList<Provider>>();
public Map<String,Object> attributes;
private Point coordinates;
private ArrayList<String> services_provided;
private Table<Integer, String, AtomicInteger> utilization; // row: year, column: type, value: count
protected Provider()
{
// no-arg constructor provided for subclasses
attributes = new LinkedTreeMap<>();
utilization = HashBasedTable.create();
services_provided = new ArrayList<String>();
}
public Provider(LinkedTreeMap p) {
this();
attributes = (LinkedTreeMap) p.get("properties");
String resourceID = (String) p.get("resourceID");
attributes.put("resourceID", resourceID);
ArrayList<Double> coorList = (ArrayList<Double>) p.get("coordinates");
Point coor = new GeometryFactory().createPoint(new Coordinate(coorList.get(0), coorList.get(1)));
coordinates = coor;
String[] servicesList = ( (String) attributes.get("services_provided") ).split(" ");
for(String s : servicesList){
services_provided.add(s);
// add provider to hash of services
if (services.containsKey(s)){
ArrayList<Provider> l = services.get(s);
l.add(this);
} else{
ArrayList<Provider> l = new ArrayList<Provider>();
l.add(this);
services.put(s, l);
}
}
}
public static void clear(){
providerList.clear();
services.clear();
}
public String getResourceID(){
return attributes.get("resourceID").toString();
}
public Map<String,Object> getAttributes(){
return attributes;
}
public Point getCoordinates(){
return coordinates;
}
public boolean hasService(String service){
return services_provided.contains(service);
}
public void incrementEncounters(String encounterType, int year)
{
increment(year, ENCOUNTERS);
increment(year, ENCOUNTERS + "-" + encounterType);
}
public void incrementProcedures(int year)
{
increment(year, PROCEDURES);
}
// TODO: increment labs when there are reports
public void incrementLabs(int year)
{
increment(year, LABS);
}
public void incrementPrescriptions(int year)
{
increment(year, PRESCRIPTIONS);
}
private void increment(Integer year, String key)
{
if (!utilization.contains(year, key))
{
utilization.put(year, key, new AtomicInteger(0));
}
utilization.get(year, key).incrementAndGet();
}
public Table<Integer, String, AtomicInteger> getUtilization(){
return utilization;
}
public Integer getBedCount(){
if(attributes.containsKey("bed_count")){
return Integer.parseInt(attributes.get("bed_count").toString());
} else {
return null;
}
}
public static Provider findClosestService(Person person, String service){
if( service.equals("outpatient") || service.equals("wellness")){
service = AMBULATORY;
}
switch(service) {
case AMBULATORY :
if( person.getAmbulatoryProvider() == null ){
person.setAmbulatoryProvider();
}
return person.getAmbulatoryProvider();
case INPATIENT :
if( person.getInpatientProvider() == null ){
person.setInpatientProvider();
}
return person.getInpatientProvider();
case EMERGENCY :
if( person.getEmergencyProvider() == null ){
person.setEmergencyProvider();
}
return person.getEmergencyProvider();
}
// if service is null or not supported by simulation, patient goes to ambulatory hospital
return person.getAmbulatoryProvider();
}
public static HashMap<String, ArrayList<Provider>> getServices(){
return services;
}
} | Fix transient null pointer exception.
| src/main/java/org/mitre/synthea/world/agents/Provider.java | Fix transient null pointer exception. |
|
Java | apache-2.0 | cd72cfedae1087053ff19fd0b0b06b89869e832f | 0 | KernelHaven/KernelHaven,KernelHaven/KernelHaven | package net.ssehub.kernel_haven.analysis;
import java.io.File;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import net.ssehub.kernel_haven.SetUpException;
import net.ssehub.kernel_haven.build_model.BuildModel;
import net.ssehub.kernel_haven.code_model.SourceFile;
import net.ssehub.kernel_haven.config.Configuration;
import net.ssehub.kernel_haven.config.DefaultSettings;
import net.ssehub.kernel_haven.provider.AbstractProvider;
import net.ssehub.kernel_haven.util.ExtractorException;
import net.ssehub.kernel_haven.util.Timestamp;
import net.ssehub.kernel_haven.util.io.ITableCollection;
import net.ssehub.kernel_haven.util.io.ITableWriter;
import net.ssehub.kernel_haven.util.io.TableCollectionWriterFactory;
import net.ssehub.kernel_haven.util.io.csv.CsvFileCollection;
import net.ssehub.kernel_haven.util.null_checks.NonNull;
import net.ssehub.kernel_haven.variability_model.VariabilityModel;
/**
* An analysis that is a pipeline consisting of {@link AnalysisComponent}s.
*
* @author Adam
*/
public abstract class PipelineAnalysis extends AbstractAnalysis {
private static PipelineAnalysis instance;
private ITableCollection resultCollection;
private ExtractorDataDuplicator<VariabilityModel> vmStarter;
private ExtractorDataDuplicator<BuildModel> bmStarter;
private ExtractorDataDuplicator<SourceFile> cmStarter;
/**
* Creates a new {@link PipelineAnalysis}.
*
* @param config The global configuration.
*/
public PipelineAnalysis(@NonNull Configuration config) {
super(config);
}
/**
* The {@link PipelineAnalysis} that is the current main analysis in this execution. May be null if no
* {@link PipelineAnalysis} is the main analysis component.
*
* @return The current {@link PipelineAnalysis} instance.
*/
static PipelineAnalysis getInstance() {
return instance;
}
/**
* Returns the {@link AnalysisComponent} that provides the variability model from the extractors.
*
* @return The {@link AnalysisComponent} that provides the variability model.
*/
protected @NonNull AnalysisComponent<VariabilityModel> getVmComponent() {
return vmStarter.createNewStartingComponent(config);
}
/**
* Returns the {@link AnalysisComponent} that provides the build model from the extractors.
*
* @return The {@link AnalysisComponent} that provides the build model.
*/
protected @NonNull AnalysisComponent<BuildModel> getBmComponent() {
return bmStarter.createNewStartingComponent(config);
}
/**
* Returns the {@link AnalysisComponent} that provides the code model from the extractors.
*
* @return The {@link AnalysisComponent} that provides the code model.
*/
protected @NonNull AnalysisComponent<SourceFile> getCmComponent() {
return cmStarter.createNewStartingComponent(config);
}
/**
* The collection that {@link AnalysisComponent}s should write their intermediate output to.
*
* @return The {@link ITableCollection} to write output to.
*/
ITableCollection getResultCollection() {
return resultCollection;
}
/**
* Creates the result collection from the user settings.
*
* @return The result collection to store files in.
*
* @throws SetUpException If creating the result collection fails.
*/
private ITableCollection createResultCollection() throws SetUpException {
String outputSuffix = config.getValue(DefaultSettings.ANALYSIS_RESULT);
File outputFile = new File(getOutputDir(), Timestamp.INSTANCE.getFilename("Analysis", outputSuffix));
try {
return TableCollectionWriterFactory.INSTANCE.createCollection(outputFile);
} catch (IOException e) {
throw new SetUpException("Can't create output for suffix " + outputSuffix, e);
}
}
/**
* Creates the pipeline.
*
* @return The "main" (i.e. the last) component of the pipeline.
*
* @throws SetUpException If setting up the pipeline fails.
*/
protected abstract @NonNull AnalysisComponent<?> createPipeline() throws SetUpException;
@Override
public void run() {
Thread.currentThread().setName("AnalysisPipelineController");
try {
vmStarter = new ExtractorDataDuplicator<>(vmProvider, false);
bmStarter = new ExtractorDataDuplicator<>(bmProvider, false);
cmStarter = new ExtractorDataDuplicator<>(cmProvider, true);
try {
resultCollection = createResultCollection();
} catch (SetUpException e) {
LOGGER.logException("Couldn't create output collection based on user configuration; "
+ "falling back to CSV", e);
resultCollection = new CsvFileCollection(new File(getOutputDir(),
"Analysis_" + Timestamp.INSTANCE.getFileTimestamp()));
}
instance = this;
AnalysisComponent<?> mainComponent = createPipeline();
if (config.getValue(DefaultSettings.ANALYSIS_PIPELINE_START_EXTRACTORS)) {
// start all extractors; this is needed here because the analysis components will most likely poll them
// in order, which means that the extractors would not run in parallel
vmStarter.start();
bmStarter.start();
cmStarter.start();
}
if (mainComponent instanceof JoinComponent) {
joinSpliComponent((JoinComponent) mainComponent);
} else {
pollAndWriteOutput(mainComponent);
}
LOGGER.logDebug("Analysis components done");
try {
LOGGER.logDebug("Closing result collection");
resultCollection.close();
for (File file : resultCollection.getFiles()) {
addOutputFile(file);
}
} catch (IOException e) {
LOGGER.logException("Exception while closing output file", e);
}
} catch (SetUpException e) {
LOGGER.logException("Exception while setting up", e);
}
}
/**
* Part of {@link #run()} to handle {@link JoinComponent}s.
* @param mainComponent The an analysis, which is joining results of multiple other components.
*/
private void joinSpliComponent(JoinComponent mainComponent) {
int maxThreads = config.getValue(DefaultSettings.ANALYSIS_SPLITCOMPONENT_MAX_THREADS);
ThreadRenamer thReanmer = new ThreadRenamer(mainComponent.getResultName());
// List<Thread> threads = new LinkedList<>();
ThreadPoolExecutor thPool = (ThreadPoolExecutor)
((maxThreads > 0) ? Executors.newFixedThreadPool(maxThreads) : Executors.newCachedThreadPool());
int totalNoOfThreads = 0;
AtomicInteger nThreadsProcessed = new AtomicInteger(0);
for (AnalysisComponent<?> component : ((JoinComponent) mainComponent).getInputs()) {
totalNoOfThreads++;
NamedRunnable run = new NamedRunnable() {
@Override
public void run() {
thReanmer.rename();
pollAndWriteOutput(component);
nThreadsProcessed.incrementAndGet();
}
@Override
public String getName() {
return component.getResultName();
}
};
thPool.execute(run);
}
LOGGER.logInfo2("Joining ", totalNoOfThreads, " analysis components; ", thPool.getActiveCount(),
" components already started");
thPool.shutdown();
final int submittedThreads = totalNoOfThreads;
Runnable monitor = () -> {
while (!thPool.isTerminated()) {
LOGGER.logInfo("Joining components:",
"Total: " + submittedThreads,
"Finished: " + nThreadsProcessed.get(),
"Processing: " + thPool.getActiveCount());
try {
Thread.sleep(3 * 60 * 1000);
} catch (InterruptedException exc) {
LOGGER.logException("", exc);
}
}
};
Thread th = new Thread(monitor, getClass().getSimpleName());
th.start();
try {
thPool.awaitTermination(96L, TimeUnit.HOURS);
} catch (InterruptedException e) {
LOGGER.logException("", e);
}
LOGGER.logInfo2("All analysis components joined.");
// for (AnalysisComponent<?> component : ((JoinComponent) mainComponent).getInputs()) {
// Thread th = new Thread(() -> {
// pollAndWriteOutput(component);
// }, "AnalysisPipelineControllerOutputThread");
// threads.add(th);
// th.start();
// }
// for (Thread th : threads) {
// try {
// th.join();
// } catch (InterruptedException e) {
// }
// }
}
/**
* Polls all output from the given component and writes it to the output file.
*
* @param component The component to read the output from.
*/
private void pollAndWriteOutput(@NonNull AnalysisComponent<?> component) {
LOGGER.logDebug("Starting and polling output of analysis component (" + component.getClass().getSimpleName()
+ ")...");
try (ITableWriter writer = resultCollection.getWriter(component.getResultName())) {
Object result;
while ((result = component.getNextResult()) != null) {
LOGGER.logDebug("Got analysis result: " + result.toString());
writer.writeObject(result);
}
} catch (IOException e) {
LOGGER.logException("Exception while writing output file", e);
}
}
/**
* A class for duplicating the extractor data. This way, multiple analysis components can have the same models
* as their input data.
*
* @param <T> The type of model to duplicate.
*/
private static class ExtractorDataDuplicator<T> implements Runnable {
private @NonNull AbstractProvider<T> provider;
private boolean multiple;
private @NonNull List<@NonNull StartingComponent<T>> startingComponents;
private boolean started;
/**
* Creates a new ExtractorDataDuplicator.
*
* @param provider The provider to get the data from.
* @param multiple Whether the provider should be polled multiple times or just once.
*/
public ExtractorDataDuplicator(@NonNull AbstractProvider<T> provider, boolean multiple) {
this.provider = provider;
this.multiple = multiple;
startingComponents = new LinkedList<>();
}
/**
* Creates a new starting component that will get its own copy of the data from us.
*
* @param config The configuration to create the component with.
*
* @return The starting component that can be used as input data for other analysis components.
*/
public @NonNull StartingComponent<T> createNewStartingComponent(@NonNull Configuration config) {
StartingComponent<T> component = new StartingComponent<>(config, this);
startingComponents.add(component);
return component;
}
/**
* Adds the given data element to all starting components.
*
* @param data The data to add.
*/
private void addToAllComponents(@NonNull T data) {
for (StartingComponent<T> component : startingComponents) {
component.addResult(data);
}
}
/**
* Starts a new thread that copies the extractor data to all stating components created up until now.
* This method ensures that this thread is only started once, no matter how often this method is called.
*/
public void start() {
synchronized (this) {
if (!started) {
new Thread(this, "ExtractorDataDuplicator").start();
started = true;
}
}
}
@Override
public void run() {
if (multiple) {
T data;
while ((data = provider.getNextResult()) != null) {
addToAllComponents(data);
}
ExtractorException exc;
while ((exc = provider.getNextException()) != null) {
LOGGER.logException("Got extractor exception", exc);
}
} else {
T data = provider.getResult();
if (data != null) {
addToAllComponents(data);
}
ExtractorException exc = provider.getException();
if (exc != null) {
LOGGER.logException("Got extractor exception", exc);
}
}
for (StartingComponent<T> component : startingComponents) {
synchronized (component) {
component.done = true;
component.notifyAll();
}
}
}
}
/**
* A starting component for the analysis pipeline. This is used to pass the extractor data to the analysis
* components. This class does nothing; it is only used by {@link ExtractorDataDuplicator}.
*
* @param <T> The type of result data that this produces.
*/
private static class StartingComponent<T> extends AnalysisComponent<T> {
private boolean done = false;
private @NonNull ExtractorDataDuplicator<T> duplicator;
/**
* Creates a new starting component.
*
* @param config The global configuration.
* @param duplicator The {@link ExtractorDataDuplicator} to start when this component is started
* (start on demand).
*/
public StartingComponent(@NonNull Configuration config, @NonNull ExtractorDataDuplicator<T> duplicator) {
super(config);
this.duplicator = duplicator;
}
@Override
protected void execute() {
duplicator.start();
// wait until the duplicator tells us that we are done
synchronized (this) {
while (!done) {
try {
wait();
} catch (InterruptedException e) {
}
}
}
}
@Override
public String getResultName() {
return "StartingComponent";
}
@Override
boolean isInternalHelperComponent() {
return true;
}
}
}
| src/net/ssehub/kernel_haven/analysis/PipelineAnalysis.java | package net.ssehub.kernel_haven.analysis;
import java.io.File;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import net.ssehub.kernel_haven.SetUpException;
import net.ssehub.kernel_haven.build_model.BuildModel;
import net.ssehub.kernel_haven.code_model.SourceFile;
import net.ssehub.kernel_haven.config.Configuration;
import net.ssehub.kernel_haven.config.DefaultSettings;
import net.ssehub.kernel_haven.provider.AbstractProvider;
import net.ssehub.kernel_haven.util.ExtractorException;
import net.ssehub.kernel_haven.util.Timestamp;
import net.ssehub.kernel_haven.util.io.ITableCollection;
import net.ssehub.kernel_haven.util.io.ITableWriter;
import net.ssehub.kernel_haven.util.io.TableCollectionWriterFactory;
import net.ssehub.kernel_haven.util.io.csv.CsvFileCollection;
import net.ssehub.kernel_haven.util.null_checks.NonNull;
import net.ssehub.kernel_haven.variability_model.VariabilityModel;
/**
* An analysis that is a pipeline consisting of {@link AnalysisComponent}s.
*
* @author Adam
*/
public abstract class PipelineAnalysis extends AbstractAnalysis {
private static PipelineAnalysis instance;
private ITableCollection resultCollection;
private ExtractorDataDuplicator<VariabilityModel> vmStarter;
private ExtractorDataDuplicator<BuildModel> bmStarter;
private ExtractorDataDuplicator<SourceFile> cmStarter;
/**
* Creates a new {@link PipelineAnalysis}.
*
* @param config The global configuration.
*/
public PipelineAnalysis(@NonNull Configuration config) {
super(config);
}
/**
* The {@link PipelineAnalysis} that is the current main analysis in this execution. May be null if no
* {@link PipelineAnalysis} is the main analysis component.
*
* @return The current {@link PipelineAnalysis} instance.
*/
static PipelineAnalysis getInstance() {
return instance;
}
/**
* Returns the {@link AnalysisComponent} that provides the variability model from the extractors.
*
* @return The {@link AnalysisComponent} that provides the variability model.
*/
protected @NonNull AnalysisComponent<VariabilityModel> getVmComponent() {
return vmStarter.createNewStartingComponent(config);
}
/**
* Returns the {@link AnalysisComponent} that provides the build model from the extractors.
*
* @return The {@link AnalysisComponent} that provides the build model.
*/
protected @NonNull AnalysisComponent<BuildModel> getBmComponent() {
return bmStarter.createNewStartingComponent(config);
}
/**
* Returns the {@link AnalysisComponent} that provides the code model from the extractors.
*
* @return The {@link AnalysisComponent} that provides the code model.
*/
protected @NonNull AnalysisComponent<SourceFile> getCmComponent() {
return cmStarter.createNewStartingComponent(config);
}
/**
* The collection that {@link AnalysisComponent}s should write their intermediate output to.
*
* @return The {@link ITableCollection} to write output to.
*/
ITableCollection getResultCollection() {
return resultCollection;
}
/**
* Creates the result collection from the user settings.
*
* @return The result collection to store files in.
*
* @throws SetUpException If creating the result collection fails.
*/
private ITableCollection createResultCollection() throws SetUpException {
String outputSuffix = config.getValue(DefaultSettings.ANALYSIS_RESULT);
File outputFile = new File(getOutputDir(), Timestamp.INSTANCE.getFilename("Analysis", outputSuffix));
try {
return TableCollectionWriterFactory.INSTANCE.createCollection(outputFile);
} catch (IOException e) {
throw new SetUpException("Can't create output for suffix " + outputSuffix, e);
}
}
/**
* Creates the pipeline.
*
* @return The "main" (i.e. the last) component of the pipeline.
*
* @throws SetUpException If setting up the pipeline fails.
*/
protected abstract @NonNull AnalysisComponent<?> createPipeline() throws SetUpException;
@Override
public void run() {
Thread.currentThread().setName("AnalysisPipelineController");
try {
vmStarter = new ExtractorDataDuplicator<>(vmProvider, false);
bmStarter = new ExtractorDataDuplicator<>(bmProvider, false);
cmStarter = new ExtractorDataDuplicator<>(cmProvider, true);
try {
resultCollection = createResultCollection();
} catch (SetUpException e) {
LOGGER.logException("Couldn't create output collection based on user configuration; "
+ "falling back to CSV", e);
resultCollection = new CsvFileCollection(new File(getOutputDir(),
"Analysis_" + Timestamp.INSTANCE.getFileTimestamp()));
}
instance = this;
AnalysisComponent<?> mainComponent = createPipeline();
if (config.getValue(DefaultSettings.ANALYSIS_PIPELINE_START_EXTRACTORS)) {
// start all extractors; this is needed here because the analysis components will most likely poll them
// in order, which means that the extractors would not run in parallel
vmStarter.start();
bmStarter.start();
cmStarter.start();
}
if (mainComponent instanceof JoinComponent) {
joinSpliComponent((JoinComponent) mainComponent);
} else {
pollAndWriteOutput(mainComponent);
}
LOGGER.logDebug("Analysis components done");
try {
LOGGER.logDebug("Closing result collection");
resultCollection.close();
for (File file : resultCollection.getFiles()) {
addOutputFile(file);
}
} catch (IOException e) {
LOGGER.logException("Exception while closing output file", e);
}
} catch (SetUpException e) {
LOGGER.logException("Exception while setting up", e);
}
}
/**
* Part of {@link #run()} to handle {@link JoinComponent}s.
* @param mainComponent The an analysis, which is joining results of multiple other components.
*/
private void joinSpliComponent(JoinComponent mainComponent) {
int maxThreads = config.getValue(DefaultSettings.ANALYSIS_SPLITCOMPONENT_MAX_THREADS);
ThreadRenamer thReanmer = new ThreadRenamer(mainComponent.getResultName());
// List<Thread> threads = new LinkedList<>();
ThreadPoolExecutor thPool = (ThreadPoolExecutor)
((maxThreads > 0) ? Executors.newFixedThreadPool(maxThreads) : Executors.newCachedThreadPool());
int totalNoOfThreads = 0;
for (AnalysisComponent<?> component : ((JoinComponent) mainComponent).getInputs()) {
totalNoOfThreads++;
NamedRunnable run = new NamedRunnable() {
@Override
public void run() {
thReanmer.rename();
pollAndWriteOutput(component);
}
@Override
public String getName() {
return component.getResultName();
}
};
thPool.execute(run);
}
LOGGER.logInfo2("Joining ", totalNoOfThreads, " analysis components; ", thPool.getActiveCount(),
" components already started");
thPool.shutdown();
Runnable monitor = () -> {
while (!thPool.isTerminated()) {
LOGGER.logInfo2("Currently there are ", thPool.getActiveCount(), " components in execution.");
try {
Thread.sleep(3 * 60 * 1000);
} catch (InterruptedException exc) {
LOGGER.logException("", exc);
}
}
};
Thread th = new Thread(monitor, getClass().getSimpleName());
th.start();
try {
thPool.awaitTermination(96L, TimeUnit.HOURS);
} catch (InterruptedException e) {
LOGGER.logException("", e);
}
LOGGER.logInfo2("All analysis components joined.");
// for (AnalysisComponent<?> component : ((JoinComponent) mainComponent).getInputs()) {
// Thread th = new Thread(() -> {
// pollAndWriteOutput(component);
// }, "AnalysisPipelineControllerOutputThread");
// threads.add(th);
// th.start();
// }
//
// for (Thread th : threads) {
// try {
// th.join();
// } catch (InterruptedException e) {
// }
// }
}
/**
* Polls all output from the given component and writes it to the output file.
*
* @param component The component to read the output from.
*/
private void pollAndWriteOutput(@NonNull AnalysisComponent<?> component) {
LOGGER.logDebug("Starting and polling output of analysis component (" + component.getClass().getSimpleName()
+ ")...");
try (ITableWriter writer = resultCollection.getWriter(component.getResultName())) {
Object result;
while ((result = component.getNextResult()) != null) {
LOGGER.logDebug("Got analysis result: " + result.toString());
writer.writeObject(result);
}
} catch (IOException e) {
LOGGER.logException("Exception while writing output file", e);
}
}
/**
* A class for duplicating the extractor data. This way, multiple analysis components can have the same models
* as their input data.
*
* @param <T> The type of model to duplicate.
*/
private static class ExtractorDataDuplicator<T> implements Runnable {
private @NonNull AbstractProvider<T> provider;
private boolean multiple;
private @NonNull List<@NonNull StartingComponent<T>> startingComponents;
private boolean started;
/**
* Creates a new ExtractorDataDuplicator.
*
* @param provider The provider to get the data from.
* @param multiple Whether the provider should be polled multiple times or just once.
*/
public ExtractorDataDuplicator(@NonNull AbstractProvider<T> provider, boolean multiple) {
this.provider = provider;
this.multiple = multiple;
startingComponents = new LinkedList<>();
}
/**
* Creates a new starting component that will get its own copy of the data from us.
*
* @param config The configuration to create the component with.
*
* @return The starting component that can be used as input data for other analysis components.
*/
public @NonNull StartingComponent<T> createNewStartingComponent(@NonNull Configuration config) {
StartingComponent<T> component = new StartingComponent<>(config, this);
startingComponents.add(component);
return component;
}
/**
* Adds the given data element to all starting components.
*
* @param data The data to add.
*/
private void addToAllComponents(@NonNull T data) {
for (StartingComponent<T> component : startingComponents) {
component.addResult(data);
}
}
/**
* Starts a new thread that copies the extractor data to all stating components created up until now.
* This method ensures that this thread is only started once, no matter how often this method is called.
*/
public void start() {
synchronized (this) {
if (!started) {
new Thread(this, "ExtractorDataDuplicator").start();
started = true;
}
}
}
@Override
public void run() {
if (multiple) {
T data;
while ((data = provider.getNextResult()) != null) {
addToAllComponents(data);
}
ExtractorException exc;
while ((exc = provider.getNextException()) != null) {
LOGGER.logException("Got extractor exception", exc);
}
} else {
T data = provider.getResult();
if (data != null) {
addToAllComponents(data);
}
ExtractorException exc = provider.getException();
if (exc != null) {
LOGGER.logException("Got extractor exception", exc);
}
}
for (StartingComponent<T> component : startingComponents) {
synchronized (component) {
component.done = true;
component.notifyAll();
}
}
}
}
/**
* A starting component for the analysis pipeline. This is used to pass the extractor data to the analysis
* components. This class does nothing; it is only used by {@link ExtractorDataDuplicator}.
*
* @param <T> The type of result data that this produces.
*/
private static class StartingComponent<T> extends AnalysisComponent<T> {
private boolean done = false;
private @NonNull ExtractorDataDuplicator<T> duplicator;
/**
* Creates a new starting component.
*
* @param config The global configuration.
* @param duplicator The {@link ExtractorDataDuplicator} to start when this component is started
* (start on demand).
*/
public StartingComponent(@NonNull Configuration config, @NonNull ExtractorDataDuplicator<T> duplicator) {
super(config);
this.duplicator = duplicator;
}
@Override
protected void execute() {
duplicator.start();
// wait until the duplicator tells us that we are done
synchronized (this) {
while (!done) {
try {
wait();
} catch (InterruptedException e) {
}
}
}
}
@Override
public String getResultName() {
return "StartingComponent";
}
@Override
boolean isInternalHelperComponent() {
return true;
}
}
}
| Detailed log status about joining threads. | src/net/ssehub/kernel_haven/analysis/PipelineAnalysis.java | Detailed log status about joining threads. |
|
Java | apache-2.0 | 6a44229683d6f25c1d120cdb43266cef9ddc34f2 | 0 | cs3343Yr2015Gp14/chess-game | package chessGame;
public class ChessMonitoringSystem {
private static ChessPiece[] allChessPieces;
private static ChessMonitoringSystem instance;
private ChessMonitoringSystem() {
// TODO - implement ChessMonitoringSystem.ChessMonitoringSystem
throw new UnsupportedOperationException();
}
public static ChessMonitoringSystem getInstance() {
return instance;
}
public void initializeChessboard() {
// TODO - implement ChessMonitoringSystem.initializeChessboard
throw new UnsupportedOperationException();
}
public void initializeChessPieces(ChessPlayer player1, ChessPlayer player2) {
// TODO - implement ChessMonitoringSystem.initializeChessPieces
allChessPieces = new ChessPiece[32];
ChessPlayer[] chessPlayerList = {player1, player2};
int initialPosition = 1;
int pawnPos = 2;
for (int i = 0; i<2; i++)
{
allChessPieces[i*16] = new Rook(chessPlayerList[i], "a"+initialPosition);
allChessPieces[i*16+1] = new Knight(chessPlayerList[i], "b"+initialPosition);
allChessPieces[i*16+2] = new Bishop(chessPlayerList[i], "c"+initialPosition);
allChessPieces[i*16+3] = new King(chessPlayerList[i], "d"+initialPosition);
allChessPieces[i*16+4] = new Queen(chessPlayerList[i], "e"+initialPosition);
allChessPieces[i*16+5] = new Bishop(chessPlayerList[i], "f"+initialPosition);
allChessPieces[i*16+6] = new Knight(chessPlayerList[i], "g"+initialPosition);
allChessPieces[i*16+7] = new Rook(chessPlayerList[i], "h"+initialPosition);
//8 pawns
int xPosCharCode = 97;
for (int j = 0; j<8; j++)
{
String xPawnPos= ""+(char)xPosCharCode; //x-coordinate of pawns' position
allChessPieces[i*16+8+j] = new Pawn(chessPlayerList[i], xPawnPos+pawnPos);
xPosCharCode++;
}
pawnPos = 7; //inverted y-coordinate for Pawns
initialPosition = 8; //invert y-coordinate for oponent's chess pieces
}
throw new UnsupportedOperationException();
}
public void showAllChessPiecesPosition() {
// TODO - implement ChessMonitoringSystem.showAllChessPiecesPosition
String [][] chessPiecesPosition = new String[8][8]; //virtual chess board
String tempPos = null;
int xPosInNum = 0;
int yPosInNum = 0;
//initialize chess board
for (int i = 0; i<8; i++)
for (int j = 0; j<8; j++)
chessPiecesPosition[i][j] = "O"; //or "\u25A1", a white square
for (ChessPiece c: allChessPieces)
{
tempPos = c.getPosition();
xPosInNum = (int)tempPos.charAt(0)-96;
yPosInNum = (int)tempPos.charAt(1);
chessPiecesPosition[yPosInNum][xPosInNum] = c.toString();
}
System.out.println(" a b c d e f g h");
for (int i = 0; i<8; i++)
{
System.out.print(i+1+" ");
for (int j = 0; j<8; j++)
{
System.out.print(chessPiecesPosition[i][j]);
}
System.out.println();
}
throw new UnsupportedOperationException();
}
/**
*
* @param move
* @param playerId
*/
public void moveChessPiece(String move, ChessPlayer player) {
// TODO - implement ChessMonitoringSystem.moveChessPiece
throw new UnsupportedOperationException();
}
private boolean chessPieceIsCaptured() {
// TODO - implement ChessMonitoringSystem.chessPieceIsCaptured
throw new UnsupportedOperationException();
}
/**
*
* @param position
*/
private void removeChessPiece(String position) {
// TODO - implement ChessMonitoringSystem.removeChessPiece
throw new UnsupportedOperationException();
}
public void checkGameResult() {
// TODO - implement ChessMonitoringSystem.checkGameResult
throw new UnsupportedOperationException();
}
}
| ChessGame/src/chessGame/ChessMonitoringSystem.java | package chessGame;
public class ChessMonitoringSystem {
private static ChessPiece[] allChessPieces;
private static ChessMonitoringSystem instance;
private ChessMonitoringSystem() {
// TODO - implement ChessMonitoringSystem.ChessMonitoringSystem
throw new UnsupportedOperationException();
}
public static ChessMonitoringSystem getInstance() {
return instance;
}
public void initializeChessboard() {
// TODO - implement ChessMonitoringSystem.initializeChessboard
throw new UnsupportedOperationException();
}
public void initializeChessPieces(ChessPlayer player1, ChessPlayer player2) {
// TODO - implement ChessMonitoringSystem.initializeChessPieces
allChessPieces = new ChessPiece[32];
ChessPlayer[] chessPlayerList = {player1, player2};
int initialPosition = 1;
int pawnPos = 2;
for (int i = 0; i<2; i++)
{
allChessPieces[i*16] = new Rook(chessPlayerList[i], "a"+initialPosition);
allChessPieces[i*16+1] = new Knight(chessPlayerList[i], "b"+initialPosition);
allChessPieces[i*16+2] = new Bishop(chessPlayerList[i], "c"+initialPosition);
allChessPieces[i*16+3] = new King(chessPlayerList[i], "d"+initialPosition);
allChessPieces[i*16+4] = new Queen(chessPlayerList[i], "e"+initialPosition);
allChessPieces[i*16+5] = new Bishop(chessPlayerList[i], "f"+initialPosition);
allChessPieces[i*16+6] = new Knight(chessPlayerList[i], "g"+initialPosition);
allChessPieces[i*16+7] = new Rook(chessPlayerList[i], "h"+initialPosition);
//8 pawns
int xPosCharCode = 97;
for (int j = 0; j<8; j++)
{
String xPawnPos= ""+(char)xPosCharCode; //x-coordinate of pawns' position
allChessPieces[i*16+8+j] = new Pawn(chessPlayerList[i], xPawnPos+pawnPos);
xPosCharCode++;
}
pawnPos = 7; //inverted y-coordinate for Pawns
initialPosition = 8; //invert y-coordinate for oponent's chess pieces
}
throw new UnsupportedOperationException();
}
public void showAllChessPiecesPosition() {
// TODO - implement ChessMonitoringSystem.showAllChessPiecesPosition
String [][] chessPiecesPosition = new String[8][8]; //virtual chess board
String tempPos = null;
int xPosInNum = 0;
int yPosInNum = 0;
//initialize chess board
for (int i = 0; i<8; i++)
for (int j = 0; j<8; j++)
chessPiecesPosition[i][j] = "O"; //or "\u25A1", a white square
for (ChessPiece c: allChessPieces)
{
tempPos = c.getPosition();
xPosInNum = (int)tempPos.charAt(0)-97;
yPosInNum = (int)tempPos.charAt(1);
chessPiecesPosition[yPosInNum][xPosInNum] = c.toString();
}
System.out.println(" a b c d e f g h");
for (int i = 0; i<8; i++)
{
System.out.print(i+1+" ");
for (int j = 0; j<8; j++)
{
System.out.print(chessPiecesPosition[i][j]);
}
System.out.println();
}
throw new UnsupportedOperationException();
}
/**
*
* @param move
* @param playerId
*/
public void moveChessPiece(String move, ChessPlayer player) {
// TODO - implement ChessMonitoringSystem.moveChessPiece
throw new UnsupportedOperationException();
}
private boolean chessPieceIsCaptured() {
// TODO - implement ChessMonitoringSystem.chessPieceIsCaptured
throw new UnsupportedOperationException();
}
/**
*
* @param position
*/
private void removeChessPiece(String position) {
// TODO - implement ChessMonitoringSystem.removeChessPiece
throw new UnsupportedOperationException();
}
public void checkGameResult() {
// TODO - implement ChessMonitoringSystem.checkGameResult
throw new UnsupportedOperationException();
}
} | Update ChessMonitoringSystem.java | ChessGame/src/chessGame/ChessMonitoringSystem.java | Update ChessMonitoringSystem.java |
|
Java | apache-2.0 | 1919185d7d898878d3cb5feb65454261d73094f4 | 0 | infotexture/dita-ot,shaneataylor/dita-ot,doctales/dita-ot,shaneataylor/dita-ot,dita-ot/dita-ot,dita-ot/dita-ot,robander/dita-ot,doctales/dita-ot,doctales/dita-ot,infotexture/dita-ot,infotexture/dita-ot,drmacro/dita-ot,drmacro/dita-ot,dita-ot/dita-ot,robander/dita-ot,doctales/dita-ot,drmacro/dita-ot,shaneataylor/dita-ot,shaneataylor/dita-ot,infotexture/dita-ot,drmacro/dita-ot,robander/dita-ot,drmacro/dita-ot,infotexture/dita-ot,shaneataylor/dita-ot,dita-ot/dita-ot,robander/dita-ot,robander/dita-ot,dita-ot/dita-ot | /*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
package org.dita.dost.util;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.URLUtils.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.dita.dost.util.Job.FileInfo.Filter;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
/**
* Definition of current job.
*
* <p>Instances are thread-safe.</p>
*
* @since 1.5.4
*/
public final class Job {
private static final String JOB_FILE = ".job.xml";
private static final String ELEMENT_JOB = "job";
private static final String ATTRIBUTE_KEY = "key";
private static final String ELEMENT_ENTRY = "entry";
private static final String ELEMENT_MAP = "map";
private static final String ELEMENT_SET = "set";
private static final String ELEMENT_STRING = "string";
private static final String ATTRIBUTE_NAME = "name";
private static final String ELEMENT_PROPERTY = "property";
private static final String ELEMENT_FILES = "files";
private static final String ELEMENT_FILE = "file";
private static final String ATTRIBUTE_SRC = "src";
private static final String ATTRIBUTE_URI = "uri";
private static final String ATTRIBUTE_PATH = "path";
private static final String ATTRIBUTE_RESULT = "result";
private static final String ATTRIBUTE_FORMAT = "format";
private static final String ATTRIBUTE_CHUNKED = "chunked";
private static final String ATTRIBUTE_HAS_CONREF = "has-conref";
private static final String ATTRIBUTE_HAS_KEYREF = "has-keyref";
private static final String ATTRIBUTE_HAS_CODEREF = "has-coderef";
private static final String ATTRIBUTE_RESOURCE_ONLY = "resource-only";
private static final String ATTRIBUTE_TARGET = "target";
private static final String ATTRIBUTE_CONREF_TARGET = "conref-target";
private static final String ATTRIBUTE_NON_CONREF_TARGET = "non-conref-target";
private static final String ATTRIBUTE_CONREF_PUSH = "conrefpush";
private static final String ATTRIBUTE_SUBJECT_SCHEME = "subjectscheme";
private static final String ATTRIBUTE_HAS_LINK = "has-link";
private static final String ATTRIBUTE_COPYTO_SOURCE_LIST = "copy-to-source";
private static final String ATTRIBUTE_OUT_DITA_FILES_LIST = "out-dita";
private static final String ATTRIBUTE_CHUNKED_DITAMAP_LIST = "chunked-ditamap";
private static final String ATTRIBUTE_FLAG_IMAGE_LIST = "flag-image";
private static final String ATTRIBUTE_SUBSIDIARY_TARGET_LIST = "subtarget";
private static final String ATTRIBUTE_CHUNK_TOPIC_LIST = "skip-chunk";
private static final String PROPERTY_OUTER_CONTROL = ANT_INVOKER_EXT_PARAM_OUTTERCONTROL;
private static final String PROPERTY_ONLY_TOPIC_IN_MAP = ANT_INVOKER_EXT_PARAM_ONLYTOPICINMAP;
private static final String PROPERTY_GENERATE_COPY_OUTER = ANT_INVOKER_EXT_PARAM_GENERATECOPYOUTTER;
private static final String PROPERTY_OUTPUT_DIR = ANT_INVOKER_EXT_PARAM_OUTPUTDIR;
/** Deprecated since 2.2 */
@Deprecated
private static final String PROPERTY_INPUT_MAP = "InputMapDir";
private static final String PROPERTY_INPUT_MAP_URI = "InputMapDir.uri";
/** File name for key definition file */
public static final String KEYDEF_LIST_FILE = "keydef.xml";
/** File name for key definition file */
public static final String SUBJECT_SCHEME_KEYDEF_LIST_FILE = "schemekeydef.xml";
/** File name for temporary input file list file */
public static final String USER_INPUT_FILE_LIST_FILE = "usr.input.file.list";
/** Map of serialization attributes to file info boolean fields. */
private static final Map<String, Field> attrToFieldMap= new HashMap<>();
static {
try {
attrToFieldMap.put(ATTRIBUTE_CHUNKED, FileInfo.class.getField("isChunked"));
attrToFieldMap.put(ATTRIBUTE_HAS_LINK, FileInfo.class.getField("hasLink"));
attrToFieldMap.put(ATTRIBUTE_HAS_CONREF, FileInfo.class.getField("hasConref"));
attrToFieldMap.put(ATTRIBUTE_HAS_KEYREF, FileInfo.class.getField("hasKeyref"));
attrToFieldMap.put(ATTRIBUTE_HAS_CODEREF, FileInfo.class.getField("hasCoderef"));
attrToFieldMap.put(ATTRIBUTE_RESOURCE_ONLY, FileInfo.class.getField("isResourceOnly"));
attrToFieldMap.put(ATTRIBUTE_TARGET, FileInfo.class.getField("isTarget"));
attrToFieldMap.put(ATTRIBUTE_NON_CONREF_TARGET, FileInfo.class.getField("isNonConrefTarget"));
attrToFieldMap.put(ATTRIBUTE_CONREF_PUSH, FileInfo.class.getField("isConrefPush"));
attrToFieldMap.put(ATTRIBUTE_SUBJECT_SCHEME, FileInfo.class.getField("isSubjectScheme"));
attrToFieldMap.put(ATTRIBUTE_OUT_DITA_FILES_LIST, FileInfo.class.getField("isOutDita"));
attrToFieldMap.put(ATTRIBUTE_FLAG_IMAGE_LIST, FileInfo.class.getField("isFlagImage"));
attrToFieldMap.put(ATTRIBUTE_SUBSIDIARY_TARGET_LIST, FileInfo.class.getField("isSubtarget"));
attrToFieldMap.put(ATTRIBUTE_CHUNK_TOPIC_LIST, FileInfo.class.getField("isSkipChunk"));
} catch (final NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
private final Map<String, Object> prop;
public final File tempDir;
private final File jobFile;
private final ConcurrentMap<URI, FileInfo> files = new ConcurrentHashMap<>();
private long lastModified;
/**
* Create new job configuration instance. Initialise by reading temporary configuration files.
*
* @param tempDir temporary directory
* @throws IOException if reading configuration files failed
* @throws IllegalStateException if configuration files are missing
*/
public Job(final File tempDir) throws IOException {
if (!tempDir.isAbsolute()) {
throw new IllegalArgumentException("Temporary directory " + tempDir + " must be absolute");
}
this.tempDir = tempDir;
jobFile = new File(tempDir, JOB_FILE);
prop = new HashMap<>();
read();
}
/**
* Test if serialized configuration file has been updated.
* @return {@code true} if configuration file has been update after this object has been created or serialized
*/
public boolean isStale() {
return jobFile.lastModified() > lastModified;
}
/**
* Read temporary configuration files. If configuration files are not found,
* assume an empty job object is being created.
*
* @throws IOException if reading configuration files failed
* @throws IllegalStateException if configuration files are missing
*/
private void read() throws IOException {
lastModified = jobFile.lastModified();
if (jobFile.exists()) {
try (final InputStream in = new FileInputStream(jobFile)) {
final XMLReader parser = XMLUtils.getXMLReader();
parser.setContentHandler(new JobHandler(prop, files));
parser.parse(new InputSource(in));
} catch (final SAXException e) {
throw new IOException("Failed to read job file: " + e.getMessage());
}
} else {
// defaults
prop.put(PROPERTY_GENERATE_COPY_OUTER, Generate.NOT_GENERATEOUTTER.toString());
prop.put(PROPERTY_ONLY_TOPIC_IN_MAP, Boolean.toString(false));
prop.put(PROPERTY_OUTER_CONTROL, OutterControl.WARN.toString());
}
}
private final static class JobHandler extends DefaultHandler {
private final Map<String, Object> prop;
private final Map<URI, FileInfo> files;
private StringBuilder buf;
private String name;
private String key;
private Set<String> set;
private Map<String, String> map;
JobHandler(final Map<String, Object> prop, final Map<URI, FileInfo> files) {
this.prop = prop;
this.files = files;
}
@Override
public void characters(final char[] ch, final int start, final int length) throws SAXException {
if (buf != null) {
buf.append(ch, start, length);
}
}
@Override
public void ignorableWhitespace(final char[] ch, final int start, final int length) throws SAXException {
if (buf != null) {
buf.append(ch, start, length);
}
}
@Override
public void startElement(final String ns, final String localName, final String qName, final Attributes atts) throws SAXException {
final String n = localName != null ? localName : qName;
switch (n) {
case ELEMENT_PROPERTY:
name = atts.getValue(ATTRIBUTE_NAME);
break;
case ELEMENT_STRING:
buf = new StringBuilder();
break;
case ELEMENT_SET:
set = new HashSet<>();
break;
case ELEMENT_MAP:
map = new HashMap<>();
break;
case ELEMENT_ENTRY:
key = atts.getValue(ATTRIBUTE_KEY);
break;
case ELEMENT_FILE:
final URI src = toURI(atts.getValue(ATTRIBUTE_SRC));
final URI uri = toURI(atts.getValue(ATTRIBUTE_URI));
final File path = toFile(atts.getValue(ATTRIBUTE_PATH));
FileInfo i;
if (uri != null) {
i = new FileInfo(src, uri, toFile(uri));
} else {
i = new FileInfo(src, toURI(path), path);
}
i.result = toURI(atts.getValue(ATTRIBUTE_RESULT));
if (i.result == null) {
i.result = src;
}
i.format = atts.getValue(ATTRIBUTE_FORMAT);
try {
for (Map.Entry<String, Field> e : attrToFieldMap.entrySet()) {
e.getValue().setBoolean(i, Boolean.parseBoolean(atts.getValue(e.getKey())));
}
} catch (final IllegalAccessException ex) {
throw new RuntimeException(ex);
}
files.put(i.uri, i);
break;
}
}
@Override
public void endElement(final String uri, final String localName, final String qName) throws SAXException {
final String n = localName != null ? localName : qName;
switch (n) {
case ELEMENT_PROPERTY:
name = null;
break;
case ELEMENT_STRING:
if (set != null) {
set.add(buf.toString());
} else if (map != null) {
map.put(key, buf.toString());
} else {
prop.put(name, buf.toString());
}
buf = null;
break;
case ELEMENT_SET:
prop.put(name, set);
set = null;
break;
case ELEMENT_MAP:
prop.put(name, map);
map = null;
break;
case ELEMENT_ENTRY:
key = null;
break;
}
}
}
/**
* Store job into temporary configuration files.
*
* @throws IOException if writing configuration files failed
*/
public void write() throws IOException {
OutputStream outStream = null;
XMLStreamWriter out = null;
try {
outStream = new FileOutputStream(jobFile);
out = XMLOutputFactory.newInstance().createXMLStreamWriter(outStream, "UTF-8");
out.writeStartDocument();
out.writeStartElement(ELEMENT_JOB);
for (final Map.Entry<String, Object> e: prop.entrySet()) {
out.writeStartElement(ELEMENT_PROPERTY);
out.writeAttribute(ATTRIBUTE_NAME, e.getKey());
if (e.getValue() instanceof String) {
out.writeStartElement(ELEMENT_STRING);
out.writeCharacters(e.getValue().toString());
out.writeEndElement(); //string
} else if (e.getValue() instanceof Set) {
out.writeStartElement(ELEMENT_SET);
final Set<?> s = (Set<?>) e.getValue();
for (final Object o: s) {
out.writeStartElement(ELEMENT_STRING);
out.writeCharacters(o.toString());
out.writeEndElement(); //string
}
out.writeEndElement(); //set
} else if (e.getValue() instanceof Map) {
out.writeStartElement(ELEMENT_MAP);
final Map<?, ?> s = (Map<?, ?>) e.getValue();
for (final Map.Entry<?, ?> o: s.entrySet()) {
out.writeStartElement(ELEMENT_ENTRY);
out.writeAttribute(ATTRIBUTE_KEY, o.getKey().toString());
out.writeStartElement(ELEMENT_STRING);
out.writeCharacters(o.getValue().toString());
out.writeEndElement(); //string
out.writeEndElement(); //entry
}
out.writeEndElement(); //string
} else {
out.writeStartElement(e.getValue().getClass().getName());
out.writeCharacters(e.getValue().toString());
out.writeEndElement(); //string
}
out.writeEndElement(); //property
}
out.writeStartElement(ELEMENT_FILES);
for (final FileInfo i: files.values()) {
out.writeStartElement(ELEMENT_FILE);
if (i.src != null) {
out.writeAttribute(ATTRIBUTE_SRC, i.src.toString());
}
out.writeAttribute(ATTRIBUTE_URI, i.uri.toString());
out.writeAttribute(ATTRIBUTE_PATH, i.file.getPath());
if (i.result != null) {
out.writeAttribute(ATTRIBUTE_RESULT, i.result.toString());
}
if (i.format != null) {
out.writeAttribute(ATTRIBUTE_FORMAT, i.format);
}
try {
for (Map.Entry<String, Field> e: attrToFieldMap.entrySet()) {
final boolean v = e.getValue().getBoolean(i);
if (v) {
out.writeAttribute(e.getKey(), Boolean.TRUE.toString());
}
}
} catch (final IllegalAccessException ex) {
throw new RuntimeException(ex);
}
out.writeEndElement(); //file
}
out.writeEndElement(); //files
out.writeEndElement(); //job
out.writeEndDocument();
} catch (final IOException e) {
throw new IOException("Failed to write file: " + e.getMessage());
} catch (final XMLStreamException e) {
throw new IOException("Failed to serialize job file: " + e.getMessage());
} finally {
if (out != null) {
try {
out.close();
} catch (final XMLStreamException e) {
throw new IOException("Failed to close file: " + e.getMessage());
}
}
if (outStream != null) {
try {
outStream.close();
} catch (final IOException e) {
throw new IOException("Failed to close file: " + e.getMessage());
}
}
}
lastModified = jobFile.lastModified();
}
/**
* Add file info. If file info with the same file already exists, it will be replaced.
*/
public void add(final FileInfo fileInfo) {
files.put(fileInfo.uri, fileInfo);
}
/**
* Remove file info.
*
* @return removed file info, {@code null} if not found
*/
public FileInfo remove(final FileInfo fileInfo) {
return files.remove(fileInfo.uri);
}
/**
* Searches for the property with the specified key in this property list.
*
* @param key property key
* @return the value in this property list with the specified key value, {@code null} if not found
*/
public String getProperty(final String key) {
return (String) prop.get(key);
}
/**
* Get a map of string properties.
*
* @return map of properties, may be an empty map
*/
public Map<String, String> getProperties() {
final Map<String, String> res = new HashMap<>();
for (final Map.Entry<String, Object> e: prop.entrySet()) {
if (e.getValue() instanceof String) {
res.put(e.getKey(), (String) e.getValue());
}
}
return Collections.unmodifiableMap(res);
}
/**
* Set property value.
*
* @param key property key
* @param value property value
* @return the previous value of the specified key in this property list, or {@code null} if it did not have one
*/
public Object setProperty(final String key, final String value) {
return prop.put(key, value);
}
/**
* Get input file
*
* @return input file path relative to input directory
*/
public URI getInputMap() {
return toURI(getProperty(INPUT_DITAMAP_URI));
}
/**
* Get input directory.
*
* @return absolute input directory path
*/
public URI getInputDir() {
return toURI(getProperty(INPUT_DIR_URI));
}
/**
* Get all file info objects as a map
*
* @return map of file info objects, where the key is the {@link FileInfo#file} value. May be empty
*/
public Map<File, FileInfo> getFileInfoMap() {
final Map<File, FileInfo> ret = new HashMap<>();
for (final Map.Entry<URI, FileInfo> e: files.entrySet()) {
ret.put(e.getValue().file, e.getValue());
}
return Collections.unmodifiableMap(ret);
}
/**
* Get all file info objects
*
* @return collection of file info objects, may be empty
*/
public Collection<FileInfo> getFileInfo() {
return Collections.unmodifiableCollection(new ArrayList<>(files.values()));
}
/**
* Get file info objects that pass the filter
*
* @param filter filter file info object must pass
* @return collection of file info objects that pass the filter, may be empty
*/
public Collection<FileInfo> getFileInfo(final Filter filter) {
final Collection<FileInfo> ret = new ArrayList<>();
for (final FileInfo f: files.values()) {
if (filter.accept(f)) {
ret.add(f);
}
}
return ret;
}
/**
* Get file info object
*
* @param file file URI
* @return file info object, {@code null} if not found
*/
public FileInfo getFileInfo(final URI file) {
if (file == null) {
return null;
} else if (files.containsKey(file)) {
return files.get(file);
} else if (file.isAbsolute() && file.toString().startsWith(tempDir.toURI().toString())) {
final URI relative = getRelativePath(jobFile.toURI(), file);
return files.get(relative);
} else {
return files.values().stream()
.filter(fileInfo -> file.equals(fileInfo.src) || file.equals(fileInfo.result))
.findFirst()
.orElse(null);
}
}
/**
* Get or create FileInfo for given path.
* @param file relative URI to temporary directory
* @return created or existing file info object
*/
public FileInfo getOrCreateFileInfo(final URI file) {
assert file.getFragment() == null;
URI f = file.normalize();
if (f.isAbsolute()) {
f = tempDir.toURI().relativize(f);
}
FileInfo i = getFileInfo(file);
if (i == null) {
i = new FileInfo(f);
files.put(i.uri, i);
}
return i;
}
/**
* Add a collection of file info objects
*
* @param fs file info objects
*/
public void addAll(final Collection<FileInfo> fs) {
for (final FileInfo f: fs) {
files.put(f.uri, f);
}
}
/**
* File info object.
*/
public static final class FileInfo {
/** Absolute source URI. */
public URI src;
/** File URI. */
public final URI uri;
/** File path. */
public final File file;
/** Absolute result URI. */
public URI result;
/** File format. */
public String format;
/** File has a conref. */
public boolean hasConref;
/** File is part of chunk. */
public boolean isChunked;
/** File has links. Only applies to topics. */
public boolean hasLink;
/** File is resource only. */
public boolean isResourceOnly;
/** File is a link target. */
public boolean isTarget;
/** File is a target in non-conref link. */
public boolean isNonConrefTarget;
/** File is a push conref source. */
public boolean isConrefPush;
/** File has a keyref. */
public boolean hasKeyref;
/** File has coderef. */
public boolean hasCoderef;
/** File is a subject scheme. */
public boolean isSubjectScheme;
/** File is a target in conref link. Opposite of {@link #isNonConrefTarget}. */
public boolean isSkipChunk;
/** File is a coderef target. */
public boolean isSubtarget;
/** File is a flagging image. */
public boolean isFlagImage;
/** Source file is outside base directory. */
public boolean isOutDita;
FileInfo(final URI src, final URI uri, final File file) {
if (uri == null && file == null) throw new IllegalArgumentException(new NullPointerException());
this.src = src;
this.uri = uri != null ? uri : toURI(file);
this.file = uri != null ? toFile(uri) : file;
this.result = src;
}
FileInfo(final URI uri) {
if (uri == null) throw new IllegalArgumentException(new NullPointerException());
this.src = null;
this.uri = uri;
this.file = toFile(uri);
this.result = src;
}
@Deprecated
FileInfo(final File file) {
if (file == null) throw new IllegalArgumentException(new NullPointerException());
this.src = null;
this.uri = toURI(file);
this.file = file;
this.result = src;
}
@Override
public String toString() {
return "FileInfo{" +
"src=" + src +
", result=" + result +
", uri=" + uri +
", file=" + file +
", format='" + format + '\'' +
", hasConref=" + hasConref +
", isChunked=" + isChunked +
", hasLink=" + hasLink +
", isResourceOnly=" + isResourceOnly +
", isTarget=" + isTarget +
", isNonConrefTarget=" + isNonConrefTarget +
", isConrefPush=" + isConrefPush +
", hasKeyref=" + hasKeyref +
", hasCoderef=" + hasCoderef +
", isSubjectScheme=" + isSubjectScheme +
", isSkipChunk=" + isSkipChunk +
", isSubtarget=" + isSubtarget +
", isFlagImage=" + isFlagImage +
", isOutDita=" + isOutDita +
'}';
}
public interface Filter<T> {
boolean accept(T f);
}
public static class Builder {
private URI src;
private URI uri;
private File file;
private URI result;
private String format;
private boolean hasConref;
private boolean isChunked;
private boolean hasLink;
private boolean isResourceOnly;
private boolean isTarget;
private boolean isNonConrefTarget;
private boolean isConrefPush;
private boolean hasKeyref;
private boolean hasCoderef;
private boolean isSubjectScheme;
private boolean isSkipChunk;
private boolean isSubtarget;
private boolean isFlagImage;
private boolean isOutDita;
public Builder() {}
public Builder(final FileInfo orig) {
src = orig.src;
uri = orig.uri;
file = orig.file;
result = orig.result;
format = orig.format;
hasConref = orig.hasConref;
isChunked = orig.isChunked;
hasLink = orig.hasLink;
isResourceOnly = orig.isResourceOnly;
isTarget = orig.isTarget;
isNonConrefTarget = orig.isNonConrefTarget;
isConrefPush = orig.isConrefPush;
hasKeyref = orig.hasKeyref;
hasCoderef = orig.hasCoderef;
isSubjectScheme = orig.isSubjectScheme;
isSkipChunk = orig.isSkipChunk;
isSubtarget = orig.isSubtarget;
isFlagImage = orig.isFlagImage;
isOutDita = orig.isOutDita;
}
/**
* Add file info to this builder. Only non-null and true values will be added.
*/
public Builder add(final FileInfo orig) {
if (orig.src != null) src = orig.src;
if (orig.uri != null) uri = orig.uri;
if (orig.file != null) file = orig.file;
if (orig.result != null) result = orig.result;
if (orig.format != null) format = orig.format;
if (orig.hasConref) hasConref = orig.hasConref;
if (orig.isChunked) isChunked = orig.isChunked;
if (orig.hasLink) hasLink = orig.hasLink;
if (orig.isResourceOnly) isResourceOnly = orig.isResourceOnly;
if (orig.isTarget) isTarget = orig.isTarget;
if (orig.isNonConrefTarget) isNonConrefTarget = orig.isNonConrefTarget;
if (orig.isConrefPush) isConrefPush = orig.isConrefPush;
if (orig.hasKeyref) hasKeyref = orig.hasKeyref;
if (orig.hasCoderef) hasCoderef = orig.hasCoderef;
if (orig.isSubjectScheme) isSubjectScheme = orig.isSubjectScheme;
if (orig.isSkipChunk) isSkipChunk = orig.isSkipChunk;
if (orig.isSubtarget) isSubtarget = orig.isSubtarget;
if (orig.isFlagImage) isFlagImage = orig.isFlagImage;
if (orig.isOutDita) isOutDita = orig.isOutDita;
return this;
}
public Builder src(final URI src) { assert src.isAbsolute(); this.src = src; return this; }
public Builder uri(final URI uri) { this.uri = uri; this.file = null; return this; }
public Builder file(final File file) { this.file = file; this.uri = null; return this; }
public Builder result(final URI result) { assert result.isAbsolute(); this.result = result; return this; }
public Builder format(final String format) { this.format = format; return this; }
public Builder hasConref(final boolean hasConref) { this.hasConref = hasConref; return this; }
public Builder isChunked(final boolean isChunked) { this.isChunked = isChunked; return this; }
public Builder hasLink(final boolean hasLink) { this.hasLink = hasLink; return this; }
public Builder isResourceOnly(final boolean isResourceOnly) { this.isResourceOnly = isResourceOnly; return this; }
public Builder isTarget(final boolean isTarget) { this.isTarget = isTarget; return this; }
public Builder isNonConrefTarget(final boolean isNonConrefTarget) { this.isNonConrefTarget = isNonConrefTarget; return this; }
public Builder isConrefPush(final boolean isConrefPush) { this.isConrefPush = isConrefPush; return this; }
public Builder hasKeyref(final boolean hasKeyref) { this.hasKeyref = hasKeyref; return this; }
public Builder hasCoderef(final boolean hasCoderef) { this.hasCoderef = hasCoderef; return this; }
public Builder isSubjectScheme(final boolean isSubjectScheme) { this.isSubjectScheme = isSubjectScheme; return this; }
public Builder isSkipChunk(final boolean isSkipChunk) { this.isSkipChunk = isSkipChunk; return this; }
public Builder isSubtarget(final boolean isSubtarget) { this.isSubtarget = isSubtarget; return this; }
public Builder isFlagImage(final boolean isFlagImage) { this.isFlagImage = isFlagImage; return this; }
public Builder isOutDita(final boolean isOutDita) { this.isOutDita = isOutDita; return this; }
public FileInfo build() {
if (uri == null && file == null) {
throw new IllegalStateException("uri and file may not be null");
}
final FileInfo fi = new FileInfo(src, uri, file);
if (result != null) {
fi.result = result;
}
fi.format = format;
fi.hasConref = hasConref;
fi.isChunked = isChunked;
fi.hasLink = hasLink;
fi.isResourceOnly = isResourceOnly;
fi.isTarget = isTarget;
fi.isNonConrefTarget = isNonConrefTarget;
fi.isConrefPush = isConrefPush;
fi.hasKeyref = hasKeyref;
fi.hasCoderef = hasCoderef;
fi.isSubjectScheme = isSubjectScheme;
fi.isSkipChunk = isSkipChunk;
fi.isSubtarget = isSubtarget;
fi.isFlagImage = isFlagImage;
fi.isOutDita = isOutDita;
return fi;
}
}
}
public enum OutterControl {
/** Fail behavior. */
FAIL,
/** Warn behavior. */
WARN,
/** Quiet behavior. */
QUIET
}
public enum Generate {
/** Not generate outer files. */
NOT_GENERATEOUTTER(1),
/** Old solution. */
OLDSOLUTION(3);
public final int type;
Generate(final int type) {
this.type = type;
}
public static Generate get(final int type) {
for (final Generate g: Generate.values()) {
if (g.type == type) {
return g;
}
}
throw new IllegalArgumentException();
}
}
/**
* Retrieve the outercontrol.
* @return String outercontrol behavior
*
*/
public OutterControl getOutterControl(){
return OutterControl.valueOf(prop.get(PROPERTY_OUTER_CONTROL).toString());
}
/**
* Set the outercontrol.
* @param control control
*/
public void setOutterControl(final String control){
prop.put(PROPERTY_OUTER_CONTROL, OutterControl.valueOf(control.toUpperCase()).toString());
}
/**
* Retrieve the flag of onlytopicinmap.
* @return boolean if only topic in map
*/
public boolean getOnlyTopicInMap(){
return Boolean.parseBoolean(prop.get(PROPERTY_ONLY_TOPIC_IN_MAP).toString());
}
/**
* Set the onlytopicinmap.
* @param flag onlytopicinmap flag
*/
public void setOnlyTopicInMap(final boolean flag){
prop.put(PROPERTY_ONLY_TOPIC_IN_MAP, Boolean.toString(flag));
}
public Generate getGeneratecopyouter(){
return Generate.valueOf(prop.get(PROPERTY_GENERATE_COPY_OUTER).toString());
}
/**
* Set the generatecopyouter.
* @param flag generatecopyouter flag
*/
public void setGeneratecopyouter(final String flag){
setGeneratecopyouter(Generate.get(Integer.parseInt(flag)));
}
/**
* Set the generatecopyouter.
* @param flag generatecopyouter flag
*/
public void setGeneratecopyouter(final Generate flag){
prop.put(PROPERTY_GENERATE_COPY_OUTER, flag.toString());
}
/**
* Get output dir.
* @return absolute output dir
*/
public File getOutputDir(){
return new File(prop.get(PROPERTY_OUTPUT_DIR).toString());
}
/**
* Set output dir.
* @param outputDir absolute output dir
*/
public void setOutputDir(final File outputDir){
prop.put(PROPERTY_OUTPUT_DIR, outputDir.getAbsolutePath());
}
/**
* Get input file path.
* @return absolute input file path
*/
public URI getInputFile() {
return toURI(prop.get(PROPERTY_INPUT_MAP_URI).toString());
}
/**
* Set input map path.
* @param inputFile absolute input map path
*/
public void setInputFile(final URI inputFile) {
assert inputFile.isAbsolute();
prop.put(PROPERTY_INPUT_MAP_URI, inputFile.toString());
// Deprecated since 2.1
if (inputFile.getScheme().equals("file")) {
prop.put(PROPERTY_INPUT_MAP, new File(inputFile).getAbsolutePath());
}
}
}
| src/main/java/org/dita/dost/util/Job.java | /*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
package org.dita.dost.util;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.URLUtils.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.dita.dost.util.Job.FileInfo.Filter;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
/**
* Definition of current job.
*
* <p>Instances are thread-safe.</p>
*
* @since 1.5.4
*/
public final class Job {
private static final String JOB_FILE = ".job.xml";
private static final String ELEMENT_JOB = "job";
private static final String ATTRIBUTE_KEY = "key";
private static final String ELEMENT_ENTRY = "entry";
private static final String ELEMENT_MAP = "map";
private static final String ELEMENT_SET = "set";
private static final String ELEMENT_STRING = "string";
private static final String ATTRIBUTE_NAME = "name";
private static final String ELEMENT_PROPERTY = "property";
private static final String ELEMENT_FILES = "files";
private static final String ELEMENT_FILE = "file";
private static final String ATTRIBUTE_SRC = "src";
private static final String ATTRIBUTE_URI = "uri";
private static final String ATTRIBUTE_PATH = "path";
private static final String ATTRIBUTE_RESULT = "result";
private static final String ATTRIBUTE_FORMAT = "format";
private static final String ATTRIBUTE_CHUNKED = "chunked";
private static final String ATTRIBUTE_HAS_CONREF = "has-conref";
private static final String ATTRIBUTE_HAS_KEYREF = "has-keyref";
private static final String ATTRIBUTE_HAS_CODEREF = "has-coderef";
private static final String ATTRIBUTE_RESOURCE_ONLY = "resource-only";
private static final String ATTRIBUTE_TARGET = "target";
private static final String ATTRIBUTE_CONREF_TARGET = "conref-target";
private static final String ATTRIBUTE_NON_CONREF_TARGET = "non-conref-target";
private static final String ATTRIBUTE_CONREF_PUSH = "conrefpush";
private static final String ATTRIBUTE_SUBJECT_SCHEME = "subjectscheme";
private static final String ATTRIBUTE_HAS_LINK = "has-link";
private static final String ATTRIBUTE_COPYTO_SOURCE_LIST = "copy-to-source";
private static final String ATTRIBUTE_OUT_DITA_FILES_LIST = "out-dita";
private static final String ATTRIBUTE_CHUNKED_DITAMAP_LIST = "chunked-ditamap";
private static final String ATTRIBUTE_FLAG_IMAGE_LIST = "flag-image";
private static final String ATTRIBUTE_SUBSIDIARY_TARGET_LIST = "subtarget";
private static final String ATTRIBUTE_CHUNK_TOPIC_LIST = "skip-chunk";
private static final String PROPERTY_OUTER_CONTROL = ANT_INVOKER_EXT_PARAM_OUTTERCONTROL;
private static final String PROPERTY_ONLY_TOPIC_IN_MAP = ANT_INVOKER_EXT_PARAM_ONLYTOPICINMAP;
private static final String PROPERTY_GENERATE_COPY_OUTER = ANT_INVOKER_EXT_PARAM_GENERATECOPYOUTTER;
private static final String PROPERTY_OUTPUT_DIR = ANT_INVOKER_EXT_PARAM_OUTPUTDIR;
/** Deprecated since 2.2 */
@Deprecated
private static final String PROPERTY_INPUT_MAP = "InputMapDir";
private static final String PROPERTY_INPUT_MAP_URI = "InputMapDir.uri";
/** File name for key definition file */
public static final String KEYDEF_LIST_FILE = "keydef.xml";
/** File name for key definition file */
public static final String SUBJECT_SCHEME_KEYDEF_LIST_FILE = "schemekeydef.xml";
/** File name for temporary input file list file */
public static final String USER_INPUT_FILE_LIST_FILE = "usr.input.file.list";
/** Map of serialization attributes to file info boolean fields. */
private static final Map<String, Field> attrToFieldMap= new HashMap<>();
static {
try {
attrToFieldMap.put(ATTRIBUTE_CHUNKED, FileInfo.class.getField("isChunked"));
attrToFieldMap.put(ATTRIBUTE_HAS_LINK, FileInfo.class.getField("hasLink"));
attrToFieldMap.put(ATTRIBUTE_HAS_CONREF, FileInfo.class.getField("hasConref"));
attrToFieldMap.put(ATTRIBUTE_HAS_KEYREF, FileInfo.class.getField("hasKeyref"));
attrToFieldMap.put(ATTRIBUTE_HAS_CODEREF, FileInfo.class.getField("hasCoderef"));
attrToFieldMap.put(ATTRIBUTE_RESOURCE_ONLY, FileInfo.class.getField("isResourceOnly"));
attrToFieldMap.put(ATTRIBUTE_TARGET, FileInfo.class.getField("isTarget"));
attrToFieldMap.put(ATTRIBUTE_NON_CONREF_TARGET, FileInfo.class.getField("isNonConrefTarget"));
attrToFieldMap.put(ATTRIBUTE_CONREF_PUSH, FileInfo.class.getField("isConrefPush"));
attrToFieldMap.put(ATTRIBUTE_SUBJECT_SCHEME, FileInfo.class.getField("isSubjectScheme"));
attrToFieldMap.put(ATTRIBUTE_OUT_DITA_FILES_LIST, FileInfo.class.getField("isOutDita"));
attrToFieldMap.put(ATTRIBUTE_FLAG_IMAGE_LIST, FileInfo.class.getField("isFlagImage"));
attrToFieldMap.put(ATTRIBUTE_SUBSIDIARY_TARGET_LIST, FileInfo.class.getField("isSubtarget"));
attrToFieldMap.put(ATTRIBUTE_CHUNK_TOPIC_LIST, FileInfo.class.getField("isSkipChunk"));
} catch (final NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
private final Map<String, Object> prop;
public final File tempDir;
private final File jobFile;
private final ConcurrentMap<URI, FileInfo> files = new ConcurrentHashMap<>();
private long lastModified;
/**
* Create new job configuration instance. Initialise by reading temporary configuration files.
*
* @param tempDir temporary directory
* @throws IOException if reading configuration files failed
* @throws IllegalStateException if configuration files are missing
*/
public Job(final File tempDir) throws IOException {
if (!tempDir.isAbsolute()) {
throw new IllegalArgumentException("Temporary directory " + tempDir + " must be absolute");
}
this.tempDir = tempDir;
jobFile = new File(tempDir, JOB_FILE);
prop = new HashMap<>();
read();
}
/**
* Test if serialized configuration file has been updated.
* @return {@code true} if configuration file has been update after this object has been created or serialized
*/
public boolean isStale() {
return jobFile.lastModified() > lastModified;
}
/**
* Read temporary configuration files. If configuration files are not found,
* assume an empty job object is being created.
*
* @throws IOException if reading configuration files failed
* @throws IllegalStateException if configuration files are missing
*/
private void read() throws IOException {
lastModified = jobFile.lastModified();
if (jobFile.exists()) {
try (final InputStream in = new FileInputStream(jobFile)) {
final XMLReader parser = XMLUtils.getXMLReader();
parser.setContentHandler(new JobHandler(prop, files));
parser.parse(new InputSource(in));
} catch (final SAXException e) {
throw new IOException("Failed to read job file: " + e.getMessage());
}
} else {
// defaults
prop.put(PROPERTY_GENERATE_COPY_OUTER, Generate.NOT_GENERATEOUTTER.toString());
prop.put(PROPERTY_ONLY_TOPIC_IN_MAP, Boolean.toString(false));
prop.put(PROPERTY_OUTER_CONTROL, OutterControl.WARN.toString());
}
}
private final static class JobHandler extends DefaultHandler {
private final Map<String, Object> prop;
private final Map<URI, FileInfo> files;
private StringBuilder buf;
private String name;
private String key;
private Set<String> set;
private Map<String, String> map;
JobHandler(final Map<String, Object> prop, final Map<URI, FileInfo> files) {
this.prop = prop;
this.files = files;
}
@Override
public void characters(final char[] ch, final int start, final int length) throws SAXException {
if (buf != null) {
buf.append(ch, start, length);
}
}
@Override
public void ignorableWhitespace(final char[] ch, final int start, final int length) throws SAXException {
if (buf != null) {
buf.append(ch, start, length);
}
}
@Override
public void startElement(final String ns, final String localName, final String qName, final Attributes atts) throws SAXException {
final String n = localName != null ? localName : qName;
switch (n) {
case ELEMENT_PROPERTY:
name = atts.getValue(ATTRIBUTE_NAME);
break;
case ELEMENT_STRING:
buf = new StringBuilder();
break;
case ELEMENT_SET:
set = new HashSet<>();
break;
case ELEMENT_MAP:
map = new HashMap<>();
break;
case ELEMENT_ENTRY:
key = atts.getValue(ATTRIBUTE_KEY);
break;
case ELEMENT_FILE:
final URI src = toURI(atts.getValue(ATTRIBUTE_SRC));
final URI uri = toURI(atts.getValue(ATTRIBUTE_URI));
final File path = toFile(atts.getValue(ATTRIBUTE_PATH));
FileInfo i;
if (uri != null) {
i = new FileInfo(src, uri, toFile(uri));
} else {
i = new FileInfo(src, toURI(path), path);
}
i.result = toURI(atts.getValue(ATTRIBUTE_RESULT));
if (i.result == null) {
i.result = src;
}
i.format = atts.getValue(ATTRIBUTE_FORMAT);
try {
for (Map.Entry<String, Field> e : attrToFieldMap.entrySet()) {
e.getValue().setBoolean(i, Boolean.parseBoolean(atts.getValue(e.getKey())));
}
} catch (final IllegalAccessException ex) {
throw new RuntimeException(ex);
}
files.put(i.uri, i);
break;
}
}
@Override
public void endElement(final String uri, final String localName, final String qName) throws SAXException {
final String n = localName != null ? localName : qName;
switch (n) {
case ELEMENT_PROPERTY:
name = null;
break;
case ELEMENT_STRING:
if (set != null) {
set.add(buf.toString());
} else if (map != null) {
map.put(key, buf.toString());
} else {
prop.put(name, buf.toString());
}
buf = null;
break;
case ELEMENT_SET:
prop.put(name, set);
set = null;
break;
case ELEMENT_MAP:
prop.put(name, map);
map = null;
break;
case ELEMENT_ENTRY:
key = null;
break;
}
}
}
/**
* Store job into temporary configuration files.
*
* @throws IOException if writing configuration files failed
*/
public void write() throws IOException {
OutputStream outStream = null;
XMLStreamWriter out = null;
try {
outStream = new FileOutputStream(jobFile);
out = XMLOutputFactory.newInstance().createXMLStreamWriter(outStream, "UTF-8");
out.writeStartDocument();
out.writeStartElement(ELEMENT_JOB);
for (final Map.Entry<String, Object> e: prop.entrySet()) {
out.writeStartElement(ELEMENT_PROPERTY);
out.writeAttribute(ATTRIBUTE_NAME, e.getKey());
if (e.getValue() instanceof String) {
out.writeStartElement(ELEMENT_STRING);
out.writeCharacters(e.getValue().toString());
out.writeEndElement(); //string
} else if (e.getValue() instanceof Set) {
out.writeStartElement(ELEMENT_SET);
final Set<?> s = (Set<?>) e.getValue();
for (final Object o: s) {
out.writeStartElement(ELEMENT_STRING);
out.writeCharacters(o.toString());
out.writeEndElement(); //string
}
out.writeEndElement(); //set
} else if (e.getValue() instanceof Map) {
out.writeStartElement(ELEMENT_MAP);
final Map<?, ?> s = (Map<?, ?>) e.getValue();
for (final Map.Entry<?, ?> o: s.entrySet()) {
out.writeStartElement(ELEMENT_ENTRY);
out.writeAttribute(ATTRIBUTE_KEY, o.getKey().toString());
out.writeStartElement(ELEMENT_STRING);
out.writeCharacters(o.getValue().toString());
out.writeEndElement(); //string
out.writeEndElement(); //entry
}
out.writeEndElement(); //string
} else {
out.writeStartElement(e.getValue().getClass().getName());
out.writeCharacters(e.getValue().toString());
out.writeEndElement(); //string
}
out.writeEndElement(); //property
}
out.writeStartElement(ELEMENT_FILES);
for (final FileInfo i: files.values()) {
out.writeStartElement(ELEMENT_FILE);
if (i.src != null) {
out.writeAttribute(ATTRIBUTE_SRC, i.src.toString());
}
out.writeAttribute(ATTRIBUTE_URI, i.uri.toString());
out.writeAttribute(ATTRIBUTE_PATH, i.file.getPath());
if (i.result != null) {
out.writeAttribute(ATTRIBUTE_RESULT, i.result.toString());
}
if (i.format != null) {
out.writeAttribute(ATTRIBUTE_FORMAT, i.format);
}
try {
for (Map.Entry<String, Field> e: attrToFieldMap.entrySet()) {
final boolean v = e.getValue().getBoolean(i);
if (v) {
out.writeAttribute(e.getKey(), Boolean.TRUE.toString());
}
}
} catch (final IllegalAccessException ex) {
throw new RuntimeException(ex);
}
out.writeEndElement(); //file
}
out.writeEndElement(); //files
out.writeEndElement(); //job
out.writeEndDocument();
} catch (final IOException e) {
throw new IOException("Failed to write file: " + e.getMessage());
} catch (final XMLStreamException e) {
throw new IOException("Failed to serialize job file: " + e.getMessage());
} finally {
if (out != null) {
try {
out.close();
} catch (final XMLStreamException e) {
throw new IOException("Failed to close file: " + e.getMessage());
}
}
if (outStream != null) {
try {
outStream.close();
} catch (final IOException e) {
throw new IOException("Failed to close file: " + e.getMessage());
}
}
}
lastModified = jobFile.lastModified();
}
/**
* Add file info. If file info with the same file already exists, it will be replaced.
*/
public void add(final FileInfo fileInfo) {
files.put(fileInfo.uri, fileInfo);
}
/**
* Remove file info.
*
* @return removed file info, {@code null} if not found
*/
public FileInfo remove(final FileInfo fileInfo) {
return files.remove(fileInfo.uri);
}
/**
* Searches for the property with the specified key in this property list.
*
* @param key property key
* @return the value in this property list with the specified key value, {@code null} if not found
*/
public String getProperty(final String key) {
return (String) prop.get(key);
}
/**
* Get a map of string properties.
*
* @return map of properties, may be an empty map
*/
public Map<String, String> getProperties() {
final Map<String, String> res = new HashMap<>();
for (final Map.Entry<String, Object> e: prop.entrySet()) {
if (e.getValue() instanceof String) {
res.put(e.getKey(), (String) e.getValue());
}
}
return Collections.unmodifiableMap(res);
}
/**
* Set property value.
*
* @param key property key
* @param value property value
* @return the previous value of the specified key in this property list, or {@code null} if it did not have one
*/
public Object setProperty(final String key, final String value) {
return prop.put(key, value);
}
/**
* Get input file
*
* @return input file path relative to input directory
*/
public URI getInputMap() {
return toURI(getProperty(INPUT_DITAMAP_URI));
}
/**
* Get input directory.
*
* @return absolute input directory path
*/
public URI getInputDir() {
return toURI(getProperty(INPUT_DIR_URI));
}
/**
* Get all file info objects as a map
*
* @return map of file info objects, where the key is the {@link FileInfo#file} value. May be empty
*/
public Map<File, FileInfo> getFileInfoMap() {
final Map<File, FileInfo> ret = new HashMap<>();
for (final Map.Entry<URI, FileInfo> e: files.entrySet()) {
ret.put(e.getValue().file, e.getValue());
}
return Collections.unmodifiableMap(ret);
}
/**
* Get all file info objects
*
* @return collection of file info objects, may be empty
*/
public Collection<FileInfo> getFileInfo() {
return Collections.unmodifiableCollection(new ArrayList<>(files.values()));
}
/**
* Get file info objects that pass the filter
*
* @param filter filter file info object must pass
* @return collection of file info objects that pass the filter, may be empty
*/
public Collection<FileInfo> getFileInfo(final Filter filter) {
final Collection<FileInfo> ret = new ArrayList<>();
for (final FileInfo f: files.values()) {
if (filter.accept(f)) {
ret.add(f);
}
}
return ret;
}
/**
* Get file info object
*
* @param file file URI
* @return file info object, {@code null} if not found
*/
public FileInfo getFileInfo(final URI file) {
if (file == null) {
return null;
} else if (files.containsKey(file)) {
return files.get(file);
} else if (file.isAbsolute()) {
final URI relative = getRelativePath(jobFile.toURI(), file);
return files.get(relative);
} else {
return files.values().stream()
.filter(fileInfo -> file.equals(fileInfo.src))
.findFirst()
.orElse(null);
}
}
/**
* Get or create FileInfo for given path.
* @param file relative URI to temporary directory
* @return created or existing file info object
*/
public FileInfo getOrCreateFileInfo(final URI file) {
assert file.getFragment() == null;
final URI f = file.normalize();
FileInfo i = files.get(f);
if (i == null) {
i = new FileInfo(f);
files.put(i.uri, i);
}
return i;
}
/**
* Add a collection of file info objects
*
* @param fs file info objects
*/
public void addAll(final Collection<FileInfo> fs) {
for (final FileInfo f: fs) {
files.put(f.uri, f);
}
}
/**
* File info object.
*/
public static final class FileInfo {
/** Absolute source URI. */
public URI src;
/** File URI. */
public final URI uri;
/** File path. */
public final File file;
/** Absolute result URI. */
public URI result;
/** File format. */
public String format;
/** File has a conref. */
public boolean hasConref;
/** File is part of chunk. */
public boolean isChunked;
/** File has links. Only applies to topics. */
public boolean hasLink;
/** File is resource only. */
public boolean isResourceOnly;
/** File is a link target. */
public boolean isTarget;
/** File is a target in non-conref link. */
public boolean isNonConrefTarget;
/** File is a push conref source. */
public boolean isConrefPush;
/** File has a keyref. */
public boolean hasKeyref;
/** File has coderef. */
public boolean hasCoderef;
/** File is a subject scheme. */
public boolean isSubjectScheme;
/** File is a target in conref link. Opposite of {@link #isNonConrefTarget}. */
public boolean isSkipChunk;
/** File is a coderef target. */
public boolean isSubtarget;
/** File is a flagging image. */
public boolean isFlagImage;
/** Source file is outside base directory. */
public boolean isOutDita;
FileInfo(final URI src, final URI uri, final File file) {
if (uri == null && file == null) throw new IllegalArgumentException(new NullPointerException());
this.src = src;
this.uri = uri != null ? uri : toURI(file);
this.file = uri != null ? toFile(uri) : file;
this.result = src;
}
FileInfo(final URI uri) {
if (uri == null) throw new IllegalArgumentException(new NullPointerException());
this.src = null;
this.uri = uri;
this.file = toFile(uri);
this.result = src;
}
@Deprecated
FileInfo(final File file) {
if (file == null) throw new IllegalArgumentException(new NullPointerException());
this.src = null;
this.uri = toURI(file);
this.file = file;
this.result = src;
}
@Override
public String toString() {
return "FileInfo{" +
"src=" + src +
", result=" + result +
", uri=" + uri +
", file=" + file +
", format='" + format + '\'' +
", hasConref=" + hasConref +
", isChunked=" + isChunked +
", hasLink=" + hasLink +
", isResourceOnly=" + isResourceOnly +
", isTarget=" + isTarget +
", isNonConrefTarget=" + isNonConrefTarget +
", isConrefPush=" + isConrefPush +
", hasKeyref=" + hasKeyref +
", hasCoderef=" + hasCoderef +
", isSubjectScheme=" + isSubjectScheme +
", isSkipChunk=" + isSkipChunk +
", isSubtarget=" + isSubtarget +
", isFlagImage=" + isFlagImage +
", isOutDita=" + isOutDita +
'}';
}
public interface Filter<T> {
boolean accept(T f);
}
public static class Builder {
private URI src;
private URI uri;
private File file;
private URI result;
private String format;
private boolean hasConref;
private boolean isChunked;
private boolean hasLink;
private boolean isResourceOnly;
private boolean isTarget;
private boolean isNonConrefTarget;
private boolean isConrefPush;
private boolean hasKeyref;
private boolean hasCoderef;
private boolean isSubjectScheme;
private boolean isSkipChunk;
private boolean isSubtarget;
private boolean isFlagImage;
private boolean isOutDita;
public Builder() {}
public Builder(final FileInfo orig) {
src = orig.src;
uri = orig.uri;
file = orig.file;
result = orig.result;
format = orig.format;
hasConref = orig.hasConref;
isChunked = orig.isChunked;
hasLink = orig.hasLink;
isResourceOnly = orig.isResourceOnly;
isTarget = orig.isTarget;
isNonConrefTarget = orig.isNonConrefTarget;
isConrefPush = orig.isConrefPush;
hasKeyref = orig.hasKeyref;
hasCoderef = orig.hasCoderef;
isSubjectScheme = orig.isSubjectScheme;
isSkipChunk = orig.isSkipChunk;
isSubtarget = orig.isSubtarget;
isFlagImage = orig.isFlagImage;
isOutDita = orig.isOutDita;
}
/**
* Add file info to this builder. Only non-null and true values will be added.
*/
public Builder add(final FileInfo orig) {
if (orig.src != null) src = orig.src;
if (orig.uri != null) uri = orig.uri;
if (orig.file != null) file = orig.file;
if (orig.result != null) result = orig.result;
if (orig.format != null) format = orig.format;
if (orig.hasConref) hasConref = orig.hasConref;
if (orig.isChunked) isChunked = orig.isChunked;
if (orig.hasLink) hasLink = orig.hasLink;
if (orig.isResourceOnly) isResourceOnly = orig.isResourceOnly;
if (orig.isTarget) isTarget = orig.isTarget;
if (orig.isNonConrefTarget) isNonConrefTarget = orig.isNonConrefTarget;
if (orig.isConrefPush) isConrefPush = orig.isConrefPush;
if (orig.hasKeyref) hasKeyref = orig.hasKeyref;
if (orig.hasCoderef) hasCoderef = orig.hasCoderef;
if (orig.isSubjectScheme) isSubjectScheme = orig.isSubjectScheme;
if (orig.isSkipChunk) isSkipChunk = orig.isSkipChunk;
if (orig.isSubtarget) isSubtarget = orig.isSubtarget;
if (orig.isFlagImage) isFlagImage = orig.isFlagImage;
if (orig.isOutDita) isOutDita = orig.isOutDita;
return this;
}
public Builder src(final URI src) { assert src.isAbsolute(); this.src = src; return this; }
public Builder uri(final URI uri) { this.uri = uri; this.file = null; return this; }
public Builder file(final File file) { this.file = file; this.uri = null; return this; }
public Builder result(final URI result) { assert result.isAbsolute(); this.result = result; return this; }
public Builder format(final String format) { this.format = format; return this; }
public Builder hasConref(final boolean hasConref) { this.hasConref = hasConref; return this; }
public Builder isChunked(final boolean isChunked) { this.isChunked = isChunked; return this; }
public Builder hasLink(final boolean hasLink) { this.hasLink = hasLink; return this; }
public Builder isResourceOnly(final boolean isResourceOnly) { this.isResourceOnly = isResourceOnly; return this; }
public Builder isTarget(final boolean isTarget) { this.isTarget = isTarget; return this; }
public Builder isNonConrefTarget(final boolean isNonConrefTarget) { this.isNonConrefTarget = isNonConrefTarget; return this; }
public Builder isConrefPush(final boolean isConrefPush) { this.isConrefPush = isConrefPush; return this; }
public Builder hasKeyref(final boolean hasKeyref) { this.hasKeyref = hasKeyref; return this; }
public Builder hasCoderef(final boolean hasCoderef) { this.hasCoderef = hasCoderef; return this; }
public Builder isSubjectScheme(final boolean isSubjectScheme) { this.isSubjectScheme = isSubjectScheme; return this; }
public Builder isSkipChunk(final boolean isSkipChunk) { this.isSkipChunk = isSkipChunk; return this; }
public Builder isSubtarget(final boolean isSubtarget) { this.isSubtarget = isSubtarget; return this; }
public Builder isFlagImage(final boolean isFlagImage) { this.isFlagImage = isFlagImage; return this; }
public Builder isOutDita(final boolean isOutDita) { this.isOutDita = isOutDita; return this; }
public FileInfo build() {
if (uri == null && file == null) {
throw new IllegalStateException("uri and file may not be null");
}
final FileInfo fi = new FileInfo(src, uri, file);
if (result != null) {
fi.result = result;
}
fi.format = format;
fi.hasConref = hasConref;
fi.isChunked = isChunked;
fi.hasLink = hasLink;
fi.isResourceOnly = isResourceOnly;
fi.isTarget = isTarget;
fi.isNonConrefTarget = isNonConrefTarget;
fi.isConrefPush = isConrefPush;
fi.hasKeyref = hasKeyref;
fi.hasCoderef = hasCoderef;
fi.isSubjectScheme = isSubjectScheme;
fi.isSkipChunk = isSkipChunk;
fi.isSubtarget = isSubtarget;
fi.isFlagImage = isFlagImage;
fi.isOutDita = isOutDita;
return fi;
}
}
}
public enum OutterControl {
/** Fail behavior. */
FAIL,
/** Warn behavior. */
WARN,
/** Quiet behavior. */
QUIET
}
public enum Generate {
/** Not generate outer files. */
NOT_GENERATEOUTTER(1),
/** Old solution. */
OLDSOLUTION(3);
public final int type;
Generate(final int type) {
this.type = type;
}
public static Generate get(final int type) {
for (final Generate g: Generate.values()) {
if (g.type == type) {
return g;
}
}
throw new IllegalArgumentException();
}
}
/**
* Retrieve the outercontrol.
* @return String outercontrol behavior
*
*/
public OutterControl getOutterControl(){
return OutterControl.valueOf(prop.get(PROPERTY_OUTER_CONTROL).toString());
}
/**
* Set the outercontrol.
* @param control control
*/
public void setOutterControl(final String control){
prop.put(PROPERTY_OUTER_CONTROL, OutterControl.valueOf(control.toUpperCase()).toString());
}
/**
* Retrieve the flag of onlytopicinmap.
* @return boolean if only topic in map
*/
public boolean getOnlyTopicInMap(){
return Boolean.parseBoolean(prop.get(PROPERTY_ONLY_TOPIC_IN_MAP).toString());
}
/**
* Set the onlytopicinmap.
* @param flag onlytopicinmap flag
*/
public void setOnlyTopicInMap(final boolean flag){
prop.put(PROPERTY_ONLY_TOPIC_IN_MAP, Boolean.toString(flag));
}
public Generate getGeneratecopyouter(){
return Generate.valueOf(prop.get(PROPERTY_GENERATE_COPY_OUTER).toString());
}
/**
* Set the generatecopyouter.
* @param flag generatecopyouter flag
*/
public void setGeneratecopyouter(final String flag){
setGeneratecopyouter(Generate.get(Integer.parseInt(flag)));
}
/**
* Set the generatecopyouter.
* @param flag generatecopyouter flag
*/
public void setGeneratecopyouter(final Generate flag){
prop.put(PROPERTY_GENERATE_COPY_OUTER, flag.toString());
}
/**
* Get output dir.
* @return absolute output dir
*/
public File getOutputDir(){
return new File(prop.get(PROPERTY_OUTPUT_DIR).toString());
}
/**
* Set output dir.
* @param outputDir absolute output dir
*/
public void setOutputDir(final File outputDir){
prop.put(PROPERTY_OUTPUT_DIR, outputDir.getAbsolutePath());
}
/**
* Get input file path.
* @return absolute input file path
*/
public URI getInputFile() {
return toURI(prop.get(PROPERTY_INPUT_MAP_URI).toString());
}
/**
* Set input map path.
* @param inputFile absolute input map path
*/
public void setInputFile(final URI inputFile) {
assert inputFile.isAbsolute();
prop.put(PROPERTY_INPUT_MAP_URI, inputFile.toString());
// Deprecated since 2.1
if (inputFile.getScheme().equals("file")) {
prop.put(PROPERTY_INPUT_MAP, new File(inputFile).getAbsolutePath());
}
}
}
| Allow using absolute temp file URI to get file info
| src/main/java/org/dita/dost/util/Job.java | Allow using absolute temp file URI to get file info |
|
Java | apache-2.0 | c8a42a9c02d3be62c160a32aa5a0060d70cf4b63 | 0 | max3163/jmeter,ubikloadpack/jmeter,ubikloadpack/jmeter,hemikak/jmeter,etnetera/jmeter,max3163/jmeter,d0k1/jmeter,vherilier/jmeter,ubikloadpack/jmeter,hemikak/jmeter,vherilier/jmeter,ra0077/jmeter,vherilier/jmeter,ubikloadpack/jmeter,ra0077/jmeter,ubikfsabbe/jmeter,ubikfsabbe/jmeter,hemikak/jmeter,etnetera/jmeter,hemikak/jmeter,etnetera/jmeter,ra0077/jmeter,max3163/jmeter,vherilier/jmeter,d0k1/jmeter,ubikfsabbe/jmeter,ubikfsabbe/jmeter,d0k1/jmeter,etnetera/jmeter,ra0077/jmeter,etnetera/jmeter,d0k1/jmeter,max3163/jmeter | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.report.core;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import org.apache.jmeter.samplers.SampleSaveConfiguration;
//import org.apache.jmeter.samplers.SampleResult;
//import org.apache.jmeter.samplers.SampleSaveConfiguration;
import org.apache.jmeter.save.CSVSaveService;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.util.JOrphanUtils;
/**
* Reader class for reading CSV files.<reader>
* <p>
* Handles {@link SampleMetadata} reading and sample extraction.
* </p>
*
* @since 2.14
*/
public class CsvSampleReader implements Closeable{
private static final int BUF_SIZE = 10000;
private static final String CHARSET = "ISO8859-1";
private static final char DEFAULT_SEPARATOR =
JMeterUtils.getPropDefault("jmeter.save.saveservice.default_delimiter", ",").charAt(0); //$NON-NLS-1$ //$NON-NLS-2$
private File file;
private BufferedReader reader;
private char separator;
private long row;
private SampleMetadata metadata;
private int columnCount;
private Sample lastSampleRead;
/**
* Instantiates a new csv sample reader.
*
* @param inputFile
* the input file (must not be {@code null})
* @param separator
* the separator
* @param useSaveSampleCfg
* indicates whether the reader uses jmeter
* SampleSaveConfiguration to define metadata
*/
public CsvSampleReader(File inputFile, char separator, boolean useSaveSampleCfg) {
this(inputFile, null, separator, useSaveSampleCfg);
}
/**
* Instantiates a new csv sample reader.
*
* @param inputFile
* the input file (must not be {@code null})
* @param metadata
* the metadata
*/
public CsvSampleReader(File inputFile, SampleMetadata metadata) {
this(inputFile, metadata, DEFAULT_SEPARATOR, false);
}
private CsvSampleReader(File inputFile, SampleMetadata metadata,
char separator, boolean useSaveSampleCfg) {
if (!(inputFile.isFile() && inputFile.canRead())) {
throw new IllegalArgumentException(inputFile.getAbsolutePath()
+ "does not exist or is not readable");
}
this.file = inputFile;
try {
this.reader = new BufferedReader(new InputStreamReader(
new FileInputStream(file), CHARSET), BUF_SIZE);
} catch (FileNotFoundException | UnsupportedEncodingException ex) {
throw new SampleException("Could not create file reader !", ex);
}
if (metadata == null) {
this.metadata = readMetadata(separator, useSaveSampleCfg);
} else {
this.metadata = metadata;
}
this.columnCount = this.metadata.getColumnCount();
this.separator = this.metadata.getSeparator();
this.row = 0;
this.lastSampleRead = nextSample();
}
private SampleMetadata readMetadata(char separator, boolean useSaveSampleCfg) {
try {
SampleMetadata result;
// Read first line
String line = reader.readLine();
if(line == null) {
throw new IllegalArgumentException("File is empty");
}
// When we can use sample save config and there is no header in csv
// file
if (useSaveSampleCfg
&& CSVSaveService.getSampleSaveConfiguration(line,
file.getAbsolutePath()) == null) {
// Build metadata from default save config
result = new SampleMetadata(
SampleSaveConfiguration.staticConfig());
} else {
// Build metadata from headers
result = new SampleMetaDataParser(separator).parse(line);
}
return result;
} catch (Exception e) {
throw new SampleException("Could not read metadata !", e);
}
}
/**
* Gets the metadata.
*
* @return the metadata
*/
public SampleMetadata getMetadata() {
return metadata;
}
private Sample nextSample() {
String[] data;
try {
data = CSVSaveService.csvReadFile(reader, separator);
Sample sample = null;
if (data.length > 0) {
// TODO is it correct to use a filler ?
if (data.length < columnCount) {
String[] filler = new String[columnCount];
System.arraycopy(data, 0, filler, 0, data.length);
for (int i = data.length; i < columnCount; i++) {
filler[i] = "";
}
data = filler;
}
sample = new Sample(row, metadata, data);
}
return sample;
} catch (IOException e) {
throw new SampleException("Could not read sample <" + row + ">", e);
}
}
/**
* Gets next sample from the file.
*
* @return the sample
*/
public Sample readSample() {
Sample out = lastSampleRead;
lastSampleRead = nextSample();
return out;
}
/**
* Gets next sample from file but keep the reading file position.
*
* @return the sample
*/
public Sample peek() {
return lastSampleRead;
}
/**
* Indicates whether the file contains more samples
*
* @return true, if the file contains more samples
*/
public boolean hasNext() {
return lastSampleRead != null;
}
/**
* Close the reader.
*/
@Override
public void close() {
JOrphanUtils.closeQuietly(reader);
}
}
| src/core/org/apache/jmeter/report/core/CsvSampleReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.report.core;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import org.apache.jmeter.samplers.SampleSaveConfiguration;
//import org.apache.jmeter.samplers.SampleResult;
//import org.apache.jmeter.samplers.SampleSaveConfiguration;
import org.apache.jmeter.save.CSVSaveService;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.util.JOrphanUtils;
/**
* Reader class for reading CSV files.<reader>
* <p>
* Handles {@link SampleMetadata} reading and sample extraction.
* </p>
*
* @since 2.14
*/
public class CsvSampleReader implements Closeable{
private static final int BUF_SIZE = 10000;
private static final String CHARSET = "ISO8859-1";
private static final char DEFAULT_SEPARATOR =
JMeterUtils.getPropDefault("jmeter.save.saveservice.default_delimiter", ",").charAt(0); //$NON-NLS-1$ //$NON-NLS-2$
private File file;
private BufferedReader reader;
private char separator;
private long row;
private SampleMetadata metadata;
private int columnCount;
private Sample lastSampleRead;
/**
* Instantiates a new csv sample reader.
*
* @param inputFile
* the input file (must not be {@code null})
* @param separator
* the separator
* @param useSaveSampleCfg
* indicates whether the reader uses jmeter
* SampleSaveConfiguration to define metadata
*/
public CsvSampleReader(File inputFile, char separator, boolean useSaveSampleCfg) {
this(inputFile, null, separator, useSaveSampleCfg);
}
/**
* Instantiates a new csv sample reader.
*
* @param inputFile
* the input file (must not be {@code null})
* @param metadata
* the metadata
*/
public CsvSampleReader(File inputFile, SampleMetadata metadata) {
this(inputFile, metadata, DEFAULT_SEPARATOR, false);
}
private CsvSampleReader(File inputFile, SampleMetadata metadata,
char separator, boolean useSaveSampleCfg) {
if (!(inputFile.isFile() && inputFile.canRead())) {
throw new IllegalArgumentException(inputFile.getAbsolutePath()
+ "does not exist or is not readable");
}
this.file = inputFile;
try {
this.reader = new BufferedReader(new InputStreamReader(
new FileInputStream(file), CHARSET), BUF_SIZE);
} catch (FileNotFoundException | UnsupportedEncodingException ex) {
throw new SampleException("Could not create file reader !", ex);
}
if (metadata == null) {
metadata = readMetadata(separator, useSaveSampleCfg);
}
this.metadata = metadata;
this.columnCount = metadata.getColumnCount();
this.separator = metadata.getSeparator();
this.row = 0;
this.lastSampleRead = nextSample();
}
private SampleMetadata readMetadata(char separator, boolean useSaveSampleCfg) {
try {
SampleMetadata result;
// Read first line
String line = reader.readLine();
if(line == null) {
throw new IllegalArgumentException("File is empty");
}
// When we can use sample save config and there is no header in csv
// file
if (useSaveSampleCfg
&& CSVSaveService.getSampleSaveConfiguration(line,
file.getAbsolutePath()) == null) {
// Build metadata from default save config
result = new SampleMetadata(
SampleSaveConfiguration.staticConfig());
} else {
// Build metadata from headers
result = new SampleMetaDataParser(separator).parse(line);
}
return result;
} catch (Exception e) {
throw new SampleException("Could not read metadata !", e);
}
}
/**
* Gets the metadata.
*
* @return the metadata
*/
public SampleMetadata getMetadata() {
return metadata;
}
private Sample nextSample() {
String[] data;
try {
data = CSVSaveService.csvReadFile(reader, separator);
Sample sample = null;
if (data.length > 0) {
// TODO is it correct to use a filler ?
if (data.length < columnCount) {
String[] filler = new String[columnCount];
System.arraycopy(data, 0, filler, 0, data.length);
for (int i = data.length; i < columnCount; i++) {
filler[i] = "";
}
data = filler;
}
sample = new Sample(row, metadata, data);
}
return sample;
} catch (IOException e) {
throw new SampleException("Could not read sample <" + row + ">", e);
}
}
/**
* Gets next sample from the file.
*
* @return the sample
*/
public Sample readSample() {
Sample out = lastSampleRead;
lastSampleRead = nextSample();
return out;
}
/**
* Gets next sample from file but keep the reading file position.
*
* @return the sample
*/
public Sample peek() {
return lastSampleRead;
}
/**
* Indicates whether the file contains more samples
*
* @return true, if the file contains more samples
*/
public boolean hasNext() {
return lastSampleRead != null;
}
/**
* Close the reader.
*/
@Override
public void close() {
JOrphanUtils.closeQuietly(reader);
}
}
| Don't reassign parameter metadata.
git-svn-id: 5ccfe34f605a6c2f9041ff2965ab60012c62539a@1721356 13f79535-47bb-0310-9956-ffa450edef68
| src/core/org/apache/jmeter/report/core/CsvSampleReader.java | Don't reassign parameter metadata. |
|
Java | apache-2.0 | 325b00a36f51f4ed02225c58401dc8bd406e1bfe | 0 | igapyon/blancoApexFormatter,igapyon/blancoApexFormatter | /*
* Copyright 2016 Toshiki Iga
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package blanco.apex.formatter.syntax;
import java.util.List;
import blanco.apex.parser.token.BlancoApexLiteralToken;
import blanco.apex.parser.token.BlancoApexSpecialCharToken;
import blanco.apex.parser.token.BlancoApexToken;
import blanco.apex.parser.token.BlancoApexWhitespaceToken;
import blanco.apex.parser.token.BlancoApexWordToken;
import blanco.apex.syntaxparser.BlancoApexSyntaxUtil;
import blanco.apex.syntaxparser.token.AbstractBlancoApexSyntaxToken;
import blanco.apex.syntaxparser.token.BlancoApexSyntaxBlockToken;
import blanco.apex.syntaxparser.token.BlancoApexSyntaxBoxBracketsToken;
import blanco.apex.syntaxparser.token.BlancoApexSyntaxParenthesisToken;
/**
* Format indent.
*
* @author Toshiki Iga
*/
public class BlancoApexSyntaxSpecialCharFormatter {
/**
* main format method.
*
* @param tokenList
*/
public void format(final List<BlancoApexToken> tokenList) {
// process relative normalize.
internalFormat(tokenList, new BlancoApexSyntaxBlockToken());
}
protected void internalFormat(final List<BlancoApexToken> tokenList, final AbstractBlancoApexSyntaxToken parent) {
for (int index = 0; index < tokenList.size(); index++) {
if (tokenList.get(index) instanceof AbstractBlancoApexSyntaxToken) {
internalFormat(((AbstractBlancoApexSyntaxToken) tokenList.get(index)).getTokenList(),
((AbstractBlancoApexSyntaxToken) tokenList.get(index)));
} else if (tokenList.get(index) instanceof BlancoApexSpecialCharToken) {
final BlancoApexSpecialCharToken specialChar = (BlancoApexSpecialCharToken) tokenList.get(index);
if (BlancoApexSyntaxUtil.isIncludedIgnoreCase(specialChar.getValue(),
new String[] { "=", "==", "<=", ">=", "!=", "||", "&&", "+", "-", "*", "/",
"?"/* , ":" care about label */ , "=>" })) {
if (index < tokenList.size() - 1) {
final BlancoApexToken rightToken = tokenList.get(index + 1);
if (rightToken instanceof BlancoApexWordToken //
|| rightToken instanceof BlancoApexLiteralToken //
|| rightToken instanceof BlancoApexSyntaxParenthesisToken) {
tokenList.add(index + 1, new BlancoApexWhitespaceToken(" ", -1));
}
}
if (index > 0) {
final BlancoApexToken leftToken = tokenList.get(index - 1);
if (leftToken instanceof BlancoApexWordToken //
|| leftToken instanceof BlancoApexLiteralToken //
|| leftToken instanceof BlancoApexSyntaxParenthesisToken
|| leftToken instanceof BlancoApexSyntaxBoxBracketsToken) {
tokenList.add(index, new BlancoApexWhitespaceToken(" ", -1));
}
}
}
// workaround for non supportig generics.
if (BlancoApexSyntaxUtil.isIncludedIgnoreCase(specialChar.getValue(), new String[] { ">" })) {
if (index < tokenList.size() - 1) {
final BlancoApexToken rightToken = tokenList.get(index + 1);
if (rightToken instanceof BlancoApexWordToken //
|| rightToken instanceof BlancoApexLiteralToken //
/*
* || rightToken instanceof
* BlancoApexSyntaxParenthesisToken workaround for non
* generics support.
*/) {
tokenList.add(index + 1, new BlancoApexWhitespaceToken(" ", -1));
}
}
}
// workaround for non supportig generics.
if (BlancoApexSyntaxUtil.isIncludedIgnoreCase(specialChar.getValue(), new String[] { "<" })) {
if (index > 0) {
final BlancoApexToken leftToken = tokenList.get(index - 1);
if (/*
* leftToken instanceof BlancoApexWordToken // ||
* workaround for non support generics.
*/
leftToken instanceof BlancoApexLiteralToken //
|| leftToken instanceof BlancoApexSyntaxParenthesisToken) {
tokenList.add(index, new BlancoApexWhitespaceToken(" ", -1));
}
}
}
}
}
}
}
| src/blanco/apex/formatter/syntax/BlancoApexSyntaxSpecialCharFormatter.java | /*
* Copyright 2016 Toshiki Iga
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package blanco.apex.formatter.syntax;
import java.util.List;
import blanco.apex.parser.token.BlancoApexLiteralToken;
import blanco.apex.parser.token.BlancoApexSpecialCharToken;
import blanco.apex.parser.token.BlancoApexToken;
import blanco.apex.parser.token.BlancoApexWhitespaceToken;
import blanco.apex.parser.token.BlancoApexWordToken;
import blanco.apex.syntaxparser.BlancoApexSyntaxUtil;
import blanco.apex.syntaxparser.token.AbstractBlancoApexSyntaxToken;
import blanco.apex.syntaxparser.token.BlancoApexSyntaxBlockToken;
import blanco.apex.syntaxparser.token.BlancoApexSyntaxParenthesisToken;
/**
* Format indent.
*
* @author Toshiki Iga
*/
public class BlancoApexSyntaxSpecialCharFormatter {
/**
* main format method.
*
* @param tokenList
*/
public void format(final List<BlancoApexToken> tokenList) {
// process relative normalize.
internalFormat(tokenList, new BlancoApexSyntaxBlockToken());
}
protected void internalFormat(final List<BlancoApexToken> tokenList, final AbstractBlancoApexSyntaxToken parent) {
for (int index = 0; index < tokenList.size(); index++) {
if (tokenList.get(index) instanceof AbstractBlancoApexSyntaxToken) {
internalFormat(((AbstractBlancoApexSyntaxToken) tokenList.get(index)).getTokenList(),
((AbstractBlancoApexSyntaxToken) tokenList.get(index)));
} else if (tokenList.get(index) instanceof BlancoApexSpecialCharToken) {
final BlancoApexSpecialCharToken specialChar = (BlancoApexSpecialCharToken) tokenList.get(index);
if (BlancoApexSyntaxUtil.isIncludedIgnoreCase(specialChar.getValue(),
new String[] { "=", "==", "<=", ">=", "!=", "||", "&&", "+", "-", "*", "/",
"?"/* , ":" care about label */ , "=>" })) {
if (index < tokenList.size() - 1) {
final BlancoApexToken rightToken = tokenList.get(index + 1);
if (rightToken instanceof BlancoApexWordToken //
|| rightToken instanceof BlancoApexLiteralToken //
|| rightToken instanceof BlancoApexSyntaxParenthesisToken) {
tokenList.add(index + 1, new BlancoApexWhitespaceToken(" ", -1));
}
}
if (index > 0) {
final BlancoApexToken leftToken = tokenList.get(index - 1);
if (leftToken instanceof BlancoApexWordToken //
|| leftToken instanceof BlancoApexLiteralToken //
|| leftToken instanceof BlancoApexSyntaxParenthesisToken) {
tokenList.add(index, new BlancoApexWhitespaceToken(" ", -1));
}
}
}
// workaround for non supportig generics.
if (BlancoApexSyntaxUtil.isIncludedIgnoreCase(specialChar.getValue(), new String[] { ">" })) {
if (index < tokenList.size() - 1) {
final BlancoApexToken rightToken = tokenList.get(index + 1);
if (rightToken instanceof BlancoApexWordToken //
|| rightToken instanceof BlancoApexLiteralToken //
/*
* || rightToken instanceof
* BlancoApexSyntaxParenthesisToken workaround for non
* generics support.
*/) {
tokenList.add(index + 1, new BlancoApexWhitespaceToken(" ", -1));
}
}
}
// workaround for non supportig generics.
if (BlancoApexSyntaxUtil.isIncludedIgnoreCase(specialChar.getValue(), new String[] { "<" })) {
if (index > 0) {
final BlancoApexToken leftToken = tokenList.get(index - 1);
if (/*
* leftToken instanceof BlancoApexWordToken // ||
* workaround for non support generics.
*/
leftToken instanceof BlancoApexLiteralToken //
|| leftToken instanceof BlancoApexSyntaxParenthesisToken) {
tokenList.add(index, new BlancoApexWhitespaceToken(" ", -1));
}
}
}
}
}
}
}
| update formatter. | src/blanco/apex/formatter/syntax/BlancoApexSyntaxSpecialCharFormatter.java | update formatter. |
|
Java | apache-2.0 | 6650fdef0015571ccaf32c0c01b9d991fad7ccf1 | 0 | chriscm2006/WebSocketServer | package com.moba11y.websocketserver;
import com.chriscm.clog.CLog;
import com.google.gson.JsonObject;
import java.util.Map;
import java.util.TreeMap;
/**
* Created by chrismcmeeking on 2/1/17.
*/
@SuppressWarnings("unused")
public class WebSocket {
private Map<String, WebSocketEventListener> mWebSocketEventListeners = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
org.java_websocket.WebSocket mWebSocket;
WebSocket(org.java_websocket.WebSocket webSocket) {
mWebSocket = webSocket;
}
public void send(Message message) {
mWebSocket.send(message.toString());
}
public interface WebSocketEventListener {
void onEvent(WebSocket socket, JsonObject jsonObject);
}
public void addEventListener(final String eventName, WebSocketEventListener webSocketEventListener) {
CLog.d("Adding Event Listener: " + eventName);
mWebSocketEventListeners.put(eventName, webSocketEventListener);
}
public void onMessage(final Message message) {
final String type = message.getType();
final JsonObject data = message.getData();
CLog.d("Incoming message Type: " + type + " Data: " + data.toString());
mWebSocketEventListeners.get(type).onEvent(this, data);
}
}
| app/src/main/java/com/moba11y/websocketserver/WebSocket.java | package com.moba11y.websocketserver;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import java.util.Map;
import java.util.TreeMap;
/**
* Created by chrismcmeeking on 2/1/17.
*/
@SuppressWarnings("unused")
public class WebSocket {
private Map<String, WebSocketEventListener> mWebSocketEventListeners = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
org.java_websocket.WebSocket mWebSocket;
WebSocket(org.java_websocket.WebSocket webSocket) {
mWebSocket = webSocket;
}
public void send(Message message) {
mWebSocket.send(message.toString());
}
public interface WebSocketEventListener {
void onEvent(WebSocket socket, JsonObject jsonObject);
}
public void addEventListener(final String eventName, WebSocketEventListener webSocketEventListener) {
mWebSocketEventListeners.put(eventName, webSocketEventListener);
}
public void onMessage(final Message message) {
final String type = message.getType();
final JsonObject data = message.getData();
mWebSocketEventListeners.get(type).onEvent(this, data);
}
}
| Adding logging commands for event listeners.
| app/src/main/java/com/moba11y/websocketserver/WebSocket.java | Adding logging commands for event listeners. |
|
Java | apache-2.0 | 5eff676240b3f85187836c7b909f33febb9969cd | 0 | querydsl/querydsl,pkcool/querydsl,johnktims/querydsl,Log10Solutions/querydsl,lpandzic/querydsl,gordski/querydsl,Log10Solutions/querydsl,lpandzic/querydsl,attila-kiss-it/querydsl,attila-kiss-it/querydsl,vveloso/querydsl,robertandrewbain/querydsl,johnktims/querydsl,pkcool/querydsl,kevinleturc/querydsl,vveloso/querydsl,gordski/querydsl,dharaburda/querydsl,vveloso/querydsl,querydsl/querydsl,attila-kiss-it/querydsl,mdiazf/querydsl,izeye/querydsl,balazs-zsoldos/querydsl,johnktims/querydsl,kevinleturc/querydsl,robertandrewbain/querydsl,mdiazf/querydsl,tomforster/querydsl,balazs-zsoldos/querydsl,pkcool/querydsl,Log10Solutions/querydsl,izeye/querydsl,dharaburda/querydsl,querydsl/querydsl,robertandrewbain/querydsl,tomforster/querydsl,izeye/querydsl,mdiazf/querydsl,querydsl/querydsl,dharaburda/querydsl,balazs-zsoldos/querydsl,lpandzic/querydsl,mosoft521/querydsl,lpandzic/querydsl,tomforster/querydsl,gordski/querydsl,mosoft521/querydsl,kevinleturc/querydsl,mosoft521/querydsl | /*
* Copyright 2011, Mysema Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mysema.query;
import java.sql.*;
import java.util.Map;
import java.util.Properties;
import com.google.common.collect.Maps;
import com.mysema.query.ddl.CreateTableClause;
import com.mysema.query.ddl.DropTableClause;
import com.mysema.query.sql.*;
import org.hsqldb.types.Types;
/**
* @author tiwe
*
*/
public final class Connections {
public static final int TEST_ROW_COUNT = 100;
private static ThreadLocal<Connection> connHolder = new ThreadLocal<Connection>();
private static ThreadLocal<Target> targetHolder = new ThreadLocal<Target>();
private static ThreadLocal<SQLTemplates> templatesHolder = new ThreadLocal<SQLTemplates>();
// datetest
private static final String CREATE_TABLE_DATETEST = "create table DATE_TEST(DATE_TEST date)";
// survey
private static final String CREATE_TABLE_SURVEY =
"create table SURVEY(ID int auto_increment, NAME varchar(30), NAME2 varchar(30))";
// test
private static final String CREATE_TABLE_TEST = "create table TEST(NAME varchar(255))";
// timetest
private static final String CREATE_TABLE_TIMETEST = "create table TIME_TEST(TIME_TEST time)";
// employee
private static final String INSERT_INTO_EMPLOYEE = "insert into EMPLOYEE " +
"(ID, FIRSTNAME, LASTNAME, SALARY, DATEFIELD, TIMEFIELD, SUPERIOR_ID) " +
"values (?,?,?,?,?,?,?)";
private static final String INSERT_INTO_TEST_VALUES = "insert into TEST values(?)";
private static ThreadLocal<Statement> stmtHolder = new ThreadLocal<Statement>();
private static boolean derbyInited, sqlServerInited, h2Inited, hsqlInited, mysqlInited, cubridInited, oracleInited, postgresInited, sqliteInited, teradataInited;
public static void close() throws SQLException{
if (stmtHolder.get() != null) {
stmtHolder.get().close();
}
if (connHolder.get() != null) {
connHolder.get().close();
}
}
public static Connection getConnection() {
return connHolder.get();
}
public static Target getTarget() {
return targetHolder.get();
}
public static SQLTemplates getTemplates() {
return templatesHolder.get();
}
public static void setTemplates(SQLTemplates templates) {
templatesHolder.set(templates);
}
private static Connection getDerby() throws SQLException, ClassNotFoundException {
Class.forName("org.apache.derby.jdbc.EmbeddedDriver");
String url = "jdbc:derby:target/demoDB;create=true";
return DriverManager.getConnection(url, "", "");
}
private static Connection getHSQL() throws SQLException, ClassNotFoundException {
Class.forName("org.hsqldb.jdbcDriver");
String url = "jdbc:hsqldb:target/tutorial";
return DriverManager.getConnection(url, "sa", "");
}
private static Connection getH2() throws SQLException, ClassNotFoundException{
Class.forName("org.h2.Driver");
String url = "jdbc:h2:target/h2;LOCK_MODE=0";
return DriverManager.getConnection(url, "sa", "");
}
private static Connection getMySQL() throws SQLException, ClassNotFoundException {
Class.forName("com.mysql.jdbc.Driver");
String url = "jdbc:mysql://localhost:3306/querydsl";
return DriverManager.getConnection(url, "querydsl", "querydsl");
}
private static Connection getOracle() throws SQLException, ClassNotFoundException{
Class.forName("oracle.jdbc.driver.OracleDriver");
String url = "jdbc:oracle:thin:@localhost:1521:xe";
return DriverManager.getConnection(url, "querydsl", "querydsl");
}
private static Connection getPostgres() throws ClassNotFoundException, SQLException{
Class.forName("org.postgresql.Driver");
String url = "jdbc:postgresql://localhost:5432/querydsl";
return DriverManager.getConnection(url, "querydsl", "querydsl");
}
private static Connection getSQLServer() throws ClassNotFoundException, SQLException{
Class.forName("net.sourceforge.jtds.jdbc.Driver");
Properties props = new Properties();
props.put("user", "querydsl");
props.put("password", "querydsl");
props.put("sendTimeAsDatetime", "false");
String url = "jdbc:jtds:sqlserver://localhost:1433/querydsl";
// return DriverManager.getConnection(url, "querydsl", "querydsl");
return DriverManager.getConnection(url, props);
}
private static Connection getCubrid() throws ClassNotFoundException, SQLException {
Class.forName("cubrid.jdbc.driver.CUBRIDDriver");
String url = "jdbc:cubrid:localhost:30000:demodb:public::";
return DriverManager.getConnection(url);
}
private static Connection getSQLite() throws SQLException, ClassNotFoundException {
//System.setProperty("sqlite.purejava", "true");
Class.forName("org.sqlite.JDBC");
return DriverManager.getConnection("jdbc:sqlite:target/sample.db");
}
private static Connection getTeradata() throws SQLException, ClassNotFoundException {
Class.forName("com.teradata.jdbc.TeraDriver");
return DriverManager.getConnection("jdbc:teradata://teradata/dbc", "querydsl", "querydsl");
}
private static CreateTableClause createTable(SQLTemplates templates, String table) {
return new CreateTableClause(connHolder.get(), templates, table);
}
public static void dropTable(SQLTemplates templates, String table) throws SQLException{
new DropTableClause(connHolder.get(), templates, table).execute();
}
public static void dropType(Statement stmt, String type) throws SQLException {
try {
stmt.execute("drop type " + type);
} catch (SQLException e) {
if (!e.getMessage().contains("does not exist")) {
throw e;
}
}
}
public static Statement getStatement() {
return stmtHolder.get();
}
private static void createEmployeeTable(SQLTemplates templates) {
createTable(templates, "EMPLOYEE")
.column("ID", Integer.class).notNull()
.column("FIRSTNAME", String.class).size(50)
.column("LASTNAME", String.class).size(50)
.column("SALARY", Double.class)
.column("DATEFIELD", Date.class)
.column("TIMEFIELD", Time.class)
.column("SUPERIOR_ID", Integer.class)
.primaryKey("PK_EMPLOYEE", "ID")
.foreignKey("FK_SUPERIOR","SUPERIOR_ID").references("EMPLOYEE","ID")
.execute();
}
public static Map<Integer, String> getSpatialData() {
Map<Integer, String> m = Maps.newHashMap();
// point
m.put(1, "POINT (2 2)");
m.put(2, "POINT (8 7)");
m.put(3, "POINT (1 9)");
m.put(4, "POINT (9 2)");
m.put(5, "POINT (4 4)");
// linestring
m.put(6, "LINESTRING (30 10, 10 30)");
m.put(7, "LINESTRING (30 10, 10 30, 40 40)");
// polygon
m.put(8, "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10), (20 30, 35 35, 30 20, 20 30))");
m.put(9, "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))");
// multipoint
m.put(11, "MULTIPOINT (10 40, 40 30)");
m.put(11, "MULTIPOINT (10 40, 40 30, 20 20, 30 10)");
// multilinestring
m.put(12, "MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))");
m.put(13, "MULTILINESTRING ((10 10, 20 20, 10 40))");
// multipolygon
m.put(14, "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))");
m.put(15, "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)), " +
"((20 35, 10 30, 10 10, 30 5, 45 20, 20 35), " +
"(30 20, 20 15, 20 25, 30 20)))");
// XXX POLYHEDRALSURFACE not supported
/* GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(4 6,7 10))
CIRCULARSTRING(1 5, 6 2, 7 3)
COMPOUNDCURVE(CIRCULARSTRING(0 0,1 1,1 0),(1 0,0 1))
CURVEPOLYGON(CIRCULARSTRING(-2 0,-1 -1,0 0,1 -1,2 0,0 2,-2 0),(-1 0,0 0.5,1 0,0 1,-1 0))
MULTICURVE((5 5,3 5,3 3,0 3),CIRCULARSTRING(0 0,2 1,2 2))
TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0))
TIN (((0 0 0, 0 0 1, 0 1 0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0)))
*/
return m;
}
public static void initCubrid() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.CUBRID);
//SQLTemplates templates = new MySQLTemplates();
Connection c = getCubrid();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (cubridInited) {
return;
}
// survey
stmt.execute("drop table if exists SURVEY");
stmt.execute("create table SURVEY(ID int auto_increment(16693,2), " +
"NAME varchar(30)," +
"NAME2 varchar(30)," +
"constraint suryey_pk primary key(ID))");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello');");
// test
stmt.execute("drop table if exists \"TEST\"");
stmt.execute("create table \"TEST\"(NAME varchar(255))");
PreparedStatement pstmt = c.prepareStatement("insert into \"TEST\" values(?)");
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table if exists EMPLOYEE");
//createEmployeeTable(templates);
stmt.execute("create table EMPLOYEE ( " +
"ID INT PRIMARY KEY AUTO_INCREMENT, " +
"FIRSTNAME VARCHAR(50), " +
"LASTNAME VARCHAR(50), " +
"SALARY DECIMAL, " +
"DATEFIELD DATE, " +
"TIMEFIELD TIME, " +
"SUPERIOR_ID INT, " +
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID) " +
")");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table if exists TIME_TEST");
stmt.execute("drop table if exists DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
cubridInited = true;
}
public static void initDerby() throws SQLException, ClassNotFoundException {
targetHolder.set(Target.DERBY);
SQLTemplates templates = new DerbyTemplates();
Connection c = getDerby();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (derbyInited) {
return;
}
// types
dropType(stmt, "price restrict");
stmt.execute("create type price external name 'com.example.Price' language java");
// survey
dropTable(templates, "SURVEY");
stmt.execute("create table SURVEY(" +
"ID int generated by default as identity(start with 1, increment by 1), " +
"NAME varchar(30)," +
"NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello')");
// test
dropTable(templates, "TEST");
stmt.execute(CREATE_TABLE_TEST);
stmt.execute("create index test_name on test(name)");
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
dropTable(templates, "EMPLOYEE");
createEmployeeTable(templates);
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
dropTable(templates, "TIME_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
dropTable(templates, "DATE_TEST");
stmt.execute(CREATE_TABLE_DATETEST);
derbyInited = true;
}
public static void initH2() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.H2);
SQLTemplates templates = new H2Templates();
Connection c = getH2();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (h2Inited) {
return;
}
stmt.execute("DROP ALIAS IF EXISTS InitGeoDB");
stmt.execute("CREATE ALIAS InitGeoDB for \"geodb.GeoDB.InitGeoDB\"");
stmt.execute("CALL InitGeoDB()");
// shapes
dropTable(templates, "SHAPES");
stmt.execute("create table SHAPES (ID int not null primary key, GEOMETRY blob)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into SHAPES values(" + entry.getKey()
+", ST_GeomFromText('" + entry.getValue() + "', 4326))");
}
// qtest
stmt.execute("drop table QTEST if exists");
stmt.execute("create table QTEST (ID int IDENTITY(1,1) NOT NULL, C1 int NULL)");
// survey
stmt.execute("drop table SURVEY if exists");
stmt.execute(CREATE_TABLE_SURVEY);
stmt.execute("insert into SURVEY values (1, 'Hello World', 'Hello');");
stmt.execute("alter table SURVEY alter column id int auto_increment");
// test
stmt.execute("drop table TEST if exists");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table EMPLOYEE if exists");
createEmployeeTable(templates);
stmt.execute("alter table EMPLOYEE alter column id int auto_increment");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table TIME_TEST if exists");
stmt.execute("drop table DATE_TEST if exists");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
h2Inited = true;
}
public static void initHSQL() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.HSQLDB);
SQLTemplates templates = new HSQLDBTemplates();
Connection c = getHSQL();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (hsqlInited) {
return;
}
// dual
stmt.execute("drop table DUAL if exists");
stmt.execute("create table DUAL ( DUMMY varchar(1) )");
stmt.execute("insert into DUAL (DUMMY) values ('X')");
// survey
stmt.execute("drop table SURVEY if exists");
//stmt.execute(CREATE_TABLE_SURVEY);
stmt.execute("create table SURVEY(" +
"ID int generated by default as identity, " +
"NAME varchar(30)," +
"NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1, 'Hello World', 'Hello')");
// test
stmt.execute("drop table TEST if exists");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table EMPLOYEE if exists");
createEmployeeTable(templates);
stmt.execute("alter table EMPLOYEE alter column id int generated by default as identity");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table TIME_TEST if exists");
stmt.execute("drop table DATE_TEST if exists");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
hsqlInited = true;
}
public static void initMySQL() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.MYSQL);
//SQLTemplates templates = new MySQLTemplates();
Connection c = getMySQL();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (mysqlInited) {
return;
}
// shapes
stmt.execute("drop table if exists SHAPES");
stmt.execute("create table SHAPES (ID int not null primary key, GEOMETRY geometry)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into SHAPES values(" + entry.getKey()
+", GeomFromText('" + entry.getValue() + "'))");
}
// survey
stmt.execute("drop table if exists SURVEY");
stmt.execute("create table SURVEY(ID int primary key auto_increment, " +
"NAME varchar(30)," +
"NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello');");
// test
stmt.execute("drop table if exists TEST");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table if exists EMPLOYEE");
//createEmployeeTable(templates);
stmt.execute("create table EMPLOYEE ( " +
"ID INT PRIMARY KEY AUTO_INCREMENT, " +
"FIRSTNAME VARCHAR(50), " +
"LASTNAME VARCHAR(50), " +
"SALARY DECIMAL, " +
"DATEFIELD DATE, " +
"TIMEFIELD TIME, " +
"SUPERIOR_ID INT, " +
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID) " +
")");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table if exists TIME_TEST");
stmt.execute("drop table if exists DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
mysqlInited = true;
}
public static void initOracle() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.ORACLE);
SQLTemplates templates = new OracleTemplates();
Connection c = getOracle();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (oracleInited) {
return;
}
// types
stmt.execute("create or replace type ssn_t as object (ssn_type char(11))");
// survey
dropTable(templates, "SURVEY");
stmt.execute("create table SURVEY (ID number(10,0), " +
"NAME varchar(30 char)," +
"NAME2 varchar(30 char))");
try {
stmt.execute("drop sequence survey_seq");
} catch(SQLException e) {
if (!e.getMessage().contains("sequence does not exist")) {
throw e;
}
}
stmt.execute("create sequence survey_seq");
stmt.execute("create or replace trigger survey_trigger\n"+
"before insert on survey\n"+
"for each row\n" +
"when (new.id is null)\n"+
"begin\n"+
" select survey_seq.nextval into :new.id from dual;\n"+
"end;\n");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello')");
// test
dropTable(templates, "TEST");
stmt.execute("create table TEST(name varchar(255))");
String sql = "insert into TEST values(?)";
PreparedStatement pstmt = c.prepareStatement(sql);
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
// employee
dropTable(templates, "EMPLOYEE");
stmt.execute("create table EMPLOYEE ( " +
"ID NUMBER(10,0), " +
"FIRSTNAME VARCHAR2(50 CHAR), " +
"LASTNAME VARCHAR2(50 CHAR), " +
"SALARY DOUBLE PRECISION, " +
"DATEFIELD DATE, " +
"TIMEFIELD TIMESTAMP, " +
"SUPERIOR_ID NUMBER(10,0), " +
"CONSTRAINT PK_EMPLOYEE PRIMARY KEY(ID), " +
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID)" +
")");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
dropTable(templates, "DATE_TEST");
stmt.execute("create table date_test(date_test date)");
oracleInited = true;
}
public static void initPostgres() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.POSTGRES);
SQLTemplates templates = new PostgresTemplates(true);
// NOTE : unquoted identifiers are converted to lower case in Postgres
Connection c = getPostgres();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (postgresInited) {
return;
}
// shapes
dropTable(templates, "SHAPES");
// stmt.execute("create table \"SHAPES\" (\"ID\" int not null primary key, \"GEOMETRY\" geography(POINT,4326))");
stmt.execute("create table \"SHAPES\" (\"ID\" int not null primary key)");
stmt.execute("select AddGeometryColumn('SHAPES', 'GEOMETRY', -1, 'GEOMETRY', 2)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into \"SHAPES\" values(" + entry.getKey()
+", '" + entry.getValue() + "')");
}
// types
dropType(stmt, "u_country");
stmt.execute("create type u_country as enum ('Brazil', 'England', 'Germany')");
dropType(stmt, "u_street_type");
stmt.execute("create type u_street_type as (street VARCHAR(100), number VARCHAR(30))");
// arrays
dropTable(templates, "ARRAYTEST");
stmt.execute("create table \"ARRAYTEST\" (\n" +
"\"ID\" bigint primary key,\n" +
"\"MYARRAY\" varchar(8)[])");
// survey
dropTable(templates, "SURVEY");
try {
stmt.execute("drop sequence SURVEY_SEQ");
} catch(SQLException e) {
if (!e.getMessage().contains("does not exist")) {
throw e;
}
}
stmt.execute("create sequence SURVEY_SEQ");
stmt.execute("create table \"SURVEY\"(" +
"\"ID\" int DEFAULT NEXTVAL('SURVEY_SEQ'), " +
"\"NAME\" varchar(30), \"NAME2\" varchar(30))");
stmt.execute("insert into \"SURVEY\" values (1, 'Hello World', 'Hello')");
// test
dropTable(templates, "TEST");
stmt.execute(quote(CREATE_TABLE_TEST,"TEST","NAME"));
String sql = quote(INSERT_INTO_TEST_VALUES,"TEST");
PreparedStatement pstmt = c.prepareStatement(sql);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
// stmt.execute("drop table employee if exists");
dropTable(templates, "EMPLOYEE");
createEmployeeTable(templates);
addEmployees("insert into \"EMPLOYEE\" " +
"(\"ID\", \"FIRSTNAME\", \"LASTNAME\", \"SALARY\", \"DATEFIELD\", \"TIMEFIELD\", \"SUPERIOR_ID\") " +
"values (?,?,?,?,?,?,?)");
// date_test and time_test
dropTable(templates, "TIME_TEST");
dropTable(templates, "DATE_TEST");
stmt.execute(quote(CREATE_TABLE_TIMETEST, "TIME_TEST"));
stmt.execute(quote(CREATE_TABLE_DATETEST, "DATE_TEST"));
postgresInited = true;
}
public static void initSQLite() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.SQLITE);
// SQLTemplates templates = new SQLiteTemplates();
Connection c = getSQLite();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (sqliteInited) {
return;
}
// qtest
stmt.execute("drop table if exists QTEST");
stmt.execute("create table QTEST (ID int IDENTITY(1,1) NOT NULL, C1 int NULL)");
// survey
stmt.execute("drop table if exists SURVEY");
stmt.execute("create table SURVEY(ID int auto_increment, " +
"NAME varchar(30)," +
"NAME2 varchar(30)," +
"constraint suryey_pk primary key(ID))");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello');");
// test
stmt.execute("drop table if exists TEST");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table if exists EMPLOYEE");
stmt.execute("create table EMPLOYEE ( " +
"ID INT AUTO_INCREMENT, " +
"FIRSTNAME VARCHAR(50), " +
"LASTNAME VARCHAR(50), " +
"SALARY DECIMAL, " +
"DATEFIELD DATE, " +
"TIMEFIELD TIME, " +
"SUPERIOR_ID INT, " +
"CONSTRAINT PK_EMPLOYEE PRIMARY KEY(ID),"+
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID) " +
")");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table if exists TIME_TEST");
stmt.execute("drop table if exists DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
sqliteInited = true;
}
public static void initSQLServer() throws SQLException, ClassNotFoundException {
targetHolder.set(Target.SQLSERVER);
SQLTemplates templates = new SQLServerTemplates();
Connection c = getSQLServer();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (sqlServerInited) {
return;
}
dropTable(templates, "SHAPES");
stmt.execute("create table SHAPES (ID int not null primary key, GEOMETRY geometry)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into SHAPES values(" + entry.getKey()
+", geometry::STGeomFromText('" + entry.getValue() + "', 0))");
}
// survey
dropTable(templates, "SURVEY");
stmt.execute("create table SURVEY(ID int, NAME varchar(30), NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1, 'Hello World', 'Hello')");
// test
dropTable(templates, "TEST");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
dropTable(templates, "EMPLOYEE");
createEmployeeTable(templates);
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
dropTable(templates, "TIME_TEST");
dropTable(templates, "DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
sqlServerInited = true;
}
public static void initTeradata() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.TERADATA);
SQLTemplates templates = new TeradataTemplates();
Connection c = getTeradata();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (teradataInited) {
return;
}
String identity = "GENERATED ALWAYS AS IDENTITY(START WITH 1 INCREMENT BY 1)";
// shapes
dropTable(templates, "SHAPES");
stmt.execute("create table SHAPES (ID int not null primary key, GEOMETRY ST_GEOMETRY)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into SHAPES values(" + entry.getKey()
+", '" + entry.getValue() + "')");
}
// qtest
dropTable(templates, "QTEST");
stmt.execute("create table QTEST (ID int " + identity + " NOT NULL, C1 int NULL)");
// survey
dropTable(templates, "SURVEY");
stmt.execute("create table SURVEY(ID int " + identity + ", NAME varchar(30), NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1, 'Hello World', 'Hello');");
// test
dropTable(templates, "TEST");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
dropTable(templates, "EMPLOYEE");
stmt.execute("create table EMPLOYEE (\n" +
"ID INTEGER NOT NULL PRIMARY KEY, \n" +
"FIRSTNAME VARCHAR(100),\n" +
"LASTNAME VARCHAR(100),\n" +
"SALARY DOUBLE PRECISION,\n" +
"DATEFIELD DATE,\n" +
"TIMEFIELD TIME,\n" +
"SUPERIOR_ID INTEGER,\n" +
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID))");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
dropTable(templates, "TIME_TEST");
dropTable(templates, "DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
teradataInited = true;
}
static void addEmployee(String sql, int id, String firstName, String lastName,
double salary, int superiorId) throws SQLException {
PreparedStatement stmt = connHolder.get().prepareStatement(sql);
stmt.setInt(1, id);
stmt.setString(2, firstName);
stmt.setString(3,lastName);
stmt.setDouble(4, salary);
stmt.setDate(5, Constants.date);
stmt.setTime(6, Constants.time);
if (superiorId <= 0) {
stmt.setNull(7, Types.INTEGER);
} else {
stmt.setInt(7, superiorId);
}
stmt.execute();
stmt.close();
}
private static void addEmployees(String sql) throws SQLException {
addEmployee(sql, 1, "Mike", "Smith", 160000, -1);
addEmployee(sql, 2, "Mary", "Smith", 140000, -1);
// Employee under Mike
addEmployee(sql, 10, "Joe", "Divis", 50000, 1);
addEmployee(sql, 11, "Peter", "Mason", 45000, 1);
addEmployee(sql, 12, "Steve", "Johnson", 40000, 1);
addEmployee(sql, 13, "Jim", "Hood", 35000, 1);
// Employee under Mike
addEmployee(sql, 20, "Jennifer", "Divis", 60000, 2);
addEmployee(sql, 21, "Helen", "Mason", 50000, 2);
addEmployee(sql, 22, "Daisy", "Johnson", 40000, 2);
addEmployee(sql, 23, "Barbara", "Hood", 30000, 2);
}
private static String quote(String sql, String... identifiers) {
String rv = sql;
for (String id : identifiers) {
rv = rv.replace(id, "\"" + id + "\"");
}
return rv;
}
private Connections() {}
}
| querydsl-sql/src/test/java/com/mysema/query/Connections.java | /*
* Copyright 2011, Mysema Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mysema.query;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Time;
import java.util.Map;
import java.util.Properties;
import org.hsqldb.types.Types;
import com.google.common.collect.Maps;
import com.mysema.query.ddl.CreateTableClause;
import com.mysema.query.ddl.DropTableClause;
import com.mysema.query.sql.DerbyTemplates;
import com.mysema.query.sql.H2Templates;
import com.mysema.query.sql.HSQLDBTemplates;
import com.mysema.query.sql.OracleTemplates;
import com.mysema.query.sql.PostgresTemplates;
import com.mysema.query.sql.SQLServerTemplates;
import com.mysema.query.sql.SQLTemplates;
import com.mysema.query.sql.TeradataTemplates;
/**
* @author tiwe
*
*/
public final class Connections {
public static final int TEST_ROW_COUNT = 100;
private static ThreadLocal<Connection> connHolder = new ThreadLocal<Connection>();
private static ThreadLocal<Target> targetHolder = new ThreadLocal<Target>();
private static ThreadLocal<SQLTemplates> templatesHolder = new ThreadLocal<SQLTemplates>();
// datetest
private static final String CREATE_TABLE_DATETEST = "create table DATE_TEST(DATE_TEST date)";
// survey
private static final String CREATE_TABLE_SURVEY =
"create table SURVEY(ID int auto_increment, NAME varchar(30), NAME2 varchar(30))";
// test
private static final String CREATE_TABLE_TEST = "create table TEST(NAME varchar(255))";
// timetest
private static final String CREATE_TABLE_TIMETEST = "create table TIME_TEST(TIME_TEST time)";
// employee
private static final String INSERT_INTO_EMPLOYEE = "insert into EMPLOYEE " +
"(ID, FIRSTNAME, LASTNAME, SALARY, DATEFIELD, TIMEFIELD, SUPERIOR_ID) " +
"values (?,?,?,?,?,?,?)";
private static final String INSERT_INTO_TEST_VALUES = "insert into TEST values(?)";
private static ThreadLocal<Statement> stmtHolder = new ThreadLocal<Statement>();
private static boolean derbyInited, sqlServerInited, h2Inited, hsqlInited, mysqlInited, cubridInited, oracleInited, postgresInited, sqliteInited, teradataInited;
public static void close() throws SQLException{
if (stmtHolder.get() != null) {
stmtHolder.get().close();
}
if (connHolder.get() != null) {
connHolder.get().close();
}
}
public static Connection getConnection() {
return connHolder.get();
}
public static Target getTarget() {
return targetHolder.get();
}
public static SQLTemplates getTemplates() {
return templatesHolder.get();
}
public static void setTemplates(SQLTemplates templates) {
templatesHolder.set(templates);
}
private static Connection getDerby() throws SQLException, ClassNotFoundException {
Class.forName("org.apache.derby.jdbc.EmbeddedDriver");
String url = "jdbc:derby:target/demoDB;create=true";
return DriverManager.getConnection(url, "", "");
}
private static Connection getHSQL() throws SQLException, ClassNotFoundException {
Class.forName("org.hsqldb.jdbcDriver");
String url = "jdbc:hsqldb:target/tutorial";
return DriverManager.getConnection(url, "sa", "");
}
private static Connection getH2() throws SQLException, ClassNotFoundException{
Class.forName("org.h2.Driver");
String url = "jdbc:h2:target/h2;LOCK_MODE=0";
return DriverManager.getConnection(url, "sa", "");
}
private static Connection getMySQL() throws SQLException, ClassNotFoundException {
Class.forName("com.mysql.jdbc.Driver");
String url = "jdbc:mysql://localhost:3306/querydsl";
return DriverManager.getConnection(url, "querydsl", "querydsl");
}
private static Connection getOracle() throws SQLException, ClassNotFoundException{
Class.forName("oracle.jdbc.driver.OracleDriver");
String url = "jdbc:oracle:thin:@localhost:1521:xe";
return DriverManager.getConnection(url, "querydsl", "querydsl");
}
private static Connection getPostgres() throws ClassNotFoundException, SQLException{
Class.forName("org.postgresql.Driver");
String url = "jdbc:postgresql://localhost:5432/querydsl";
return DriverManager.getConnection(url, "querydsl", "querydsl");
}
private static Connection getSQLServer() throws ClassNotFoundException, SQLException{
Class.forName("net.sourceforge.jtds.jdbc.Driver");
Properties props = new Properties();
props.put("user", "querydsl");
props.put("password", "querydsl");
props.put("sendTimeAsDatetime", "false");
String url = "jdbc:jtds:sqlserver://localhost:1433/querydsl";
// return DriverManager.getConnection(url, "querydsl", "querydsl");
return DriverManager.getConnection(url, props);
}
private static Connection getCubrid() throws ClassNotFoundException, SQLException {
Class.forName("cubrid.jdbc.driver.CUBRIDDriver");
String url = "jdbc:cubrid:localhost:30000:demodb:public::";
return DriverManager.getConnection(url);
}
private static Connection getSQLite() throws SQLException, ClassNotFoundException {
//System.setProperty("sqlite.purejava", "true");
Class.forName("org.sqlite.JDBC");
return DriverManager.getConnection("jdbc:sqlite:target/sample.db");
}
private static Connection getTeradata() throws SQLException, ClassNotFoundException {
Class.forName("com.teradata.jdbc.TeraDriver");
return DriverManager.getConnection("jdbc:teradata://teradata/dbc", "querydsl", "querydsl");
}
private static CreateTableClause createTable(SQLTemplates templates, String table) {
return new CreateTableClause(connHolder.get(), templates, table);
}
public static void dropTable(SQLTemplates templates, String table) throws SQLException{
new DropTableClause(connHolder.get(), templates, table).execute();
}
public static void dropType(Statement stmt, String type) throws SQLException {
try {
stmt.execute("drop type " + type);
} catch (SQLException e) {
if (!e.getMessage().contains("does not exist")) {
throw e;
}
}
}
public static Statement getStatement() {
return stmtHolder.get();
}
private static void createEmployeeTable(SQLTemplates templates) {
createTable(templates, "EMPLOYEE")
.column("ID", Integer.class).notNull()
.column("FIRSTNAME", String.class).size(50)
.column("LASTNAME", String.class).size(50)
.column("SALARY", Double.class)
.column("DATEFIELD", Date.class)
.column("TIMEFIELD", Time.class)
.column("SUPERIOR_ID", Integer.class)
.primaryKey("PK_EMPLOYEE", "ID")
.foreignKey("FK_SUPERIOR","SUPERIOR_ID").references("EMPLOYEE","ID")
.execute();
}
public static Map<Integer, String> getSpatialData() {
Map<Integer, String> m = Maps.newHashMap();
// point
m.put(1, "POINT (2 2)");
m.put(2, "POINT (8 7)");
m.put(3, "POINT (1 9)");
m.put(4, "POINT (9 2)");
m.put(5, "POINT (4 4)");
// linestring
m.put(6, "LINESTRING (30 10, 10 30)");
m.put(7, "LINESTRING (30 10, 10 30, 40 40)");
// polygon
m.put(8, "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10), (20 30, 35 35, 30 20, 20 30))");
m.put(9, "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))");
// multipoint
m.put(11, "MULTIPOINT (10 40, 40 30)");
m.put(11, "MULTIPOINT (10 40, 40 30, 20 20, 30 10)");
// multilinestring
m.put(12, "MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))");
m.put(13, "MULTILINESTRING ((10 10, 20 20, 10 40))");
// multipolygon
m.put(14, "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))");
m.put(15, "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)), " +
"((20 35, 10 30, 10 10, 30 5, 45 20, 20 35), " +
"(30 20, 20 15, 20 25, 30 20)))");
// XXX POLYHEDRALSURFACE not supported
/* GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(4 6,7 10))
CIRCULARSTRING(1 5, 6 2, 7 3)
COMPOUNDCURVE(CIRCULARSTRING(0 0,1 1,1 0),(1 0,0 1))
CURVEPOLYGON(CIRCULARSTRING(-2 0,-1 -1,0 0,1 -1,2 0,0 2,-2 0),(-1 0,0 0.5,1 0,0 1,-1 0))
MULTICURVE((5 5,3 5,3 3,0 3),CIRCULARSTRING(0 0,2 1,2 2))
TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0))
TIN (((0 0 0, 0 0 1, 0 1 0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0)))
*/
return m;
}
public static void initDerby() throws SQLException, ClassNotFoundException {
targetHolder.set(Target.DERBY);
SQLTemplates templates = new DerbyTemplates();
Connection c = getDerby();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (derbyInited) {
return;
}
// types
dropType(stmt, "price restrict");
stmt.execute("create type price external name 'com.example.Price' language java");
// survey
dropTable(templates, "SURVEY");
stmt.execute("create table SURVEY(" +
"ID int generated by default as identity(start with 1, increment by 1), " +
"NAME varchar(30)," +
"NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello')");
// test
dropTable(templates, "TEST");
stmt.execute(CREATE_TABLE_TEST);
stmt.execute("create index test_name on test(name)");
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
dropTable(templates, "EMPLOYEE");
createEmployeeTable(templates);
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
dropTable(templates, "TIME_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
dropTable(templates, "DATE_TEST");
stmt.execute(CREATE_TABLE_DATETEST);
derbyInited = true;
}
public static void initSQLServer() throws SQLException, ClassNotFoundException {
targetHolder.set(Target.SQLSERVER);
SQLTemplates templates = new SQLServerTemplates();
Connection c = getSQLServer();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (sqlServerInited) {
return;
}
dropTable(templates, "SHAPES");
stmt.execute("create table SHAPES (ID int not null primary key, GEOMETRY geometry)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into SHAPES values(" + entry.getKey()
+", geometry::STGeomFromText('" + entry.getValue() + "', 0))");
}
// survey
dropTable(templates, "SURVEY");
stmt.execute("create table SURVEY(ID int, NAME varchar(30), NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1, 'Hello World', 'Hello')");
// test
dropTable(templates, "TEST");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
dropTable(templates, "EMPLOYEE");
createEmployeeTable(templates);
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
dropTable(templates, "TIME_TEST");
dropTable(templates, "DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
sqlServerInited = true;
}
public static void initH2() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.H2);
SQLTemplates templates = new H2Templates();
Connection c = getH2();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (h2Inited) {
return;
}
stmt.execute("DROP ALIAS IF EXISTS InitGeoDB");
stmt.execute("CREATE ALIAS InitGeoDB for \"geodb.GeoDB.InitGeoDB\"");
stmt.execute("CALL InitGeoDB()");
// shapes
dropTable(templates, "SHAPES");
stmt.execute("create table SHAPES (ID int not null primary key, GEOMETRY blob)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into SHAPES values(" + entry.getKey()
+", ST_GeomFromText('" + entry.getValue() + "', 4326))");
}
// qtest
stmt.execute("drop table QTEST if exists");
stmt.execute("create table QTEST (ID int IDENTITY(1,1) NOT NULL, C1 int NULL)");
// survey
stmt.execute("drop table SURVEY if exists");
stmt.execute(CREATE_TABLE_SURVEY);
stmt.execute("insert into SURVEY values (1, 'Hello World', 'Hello');");
stmt.execute("alter table SURVEY alter column id int auto_increment");
// test
stmt.execute("drop table TEST if exists");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table EMPLOYEE if exists");
createEmployeeTable(templates);
stmt.execute("alter table EMPLOYEE alter column id int auto_increment");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table TIME_TEST if exists");
stmt.execute("drop table DATE_TEST if exists");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
h2Inited = true;
}
public static void initSQLite() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.SQLITE);
// SQLTemplates templates = new SQLiteTemplates();
Connection c = getSQLite();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (sqliteInited) {
return;
}
// qtest
stmt.execute("drop table if exists QTEST");
stmt.execute("create table QTEST (ID int IDENTITY(1,1) NOT NULL, C1 int NULL)");
// survey
stmt.execute("drop table if exists SURVEY");
stmt.execute("create table SURVEY(ID int auto_increment, " +
"NAME varchar(30)," +
"NAME2 varchar(30)," +
"constraint suryey_pk primary key(ID))");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello');");
// test
stmt.execute("drop table if exists TEST");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table if exists EMPLOYEE");
stmt.execute("create table EMPLOYEE ( " +
"ID INT AUTO_INCREMENT, " +
"FIRSTNAME VARCHAR(50), " +
"LASTNAME VARCHAR(50), " +
"SALARY DECIMAL, " +
"DATEFIELD DATE, " +
"TIMEFIELD TIME, " +
"SUPERIOR_ID INT, " +
"CONSTRAINT PK_EMPLOYEE PRIMARY KEY(ID),"+
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID) " +
")");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table if exists TIME_TEST");
stmt.execute("drop table if exists DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
sqliteInited = true;
}
public static void initHSQL() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.HSQLDB);
SQLTemplates templates = new HSQLDBTemplates();
Connection c = getHSQL();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (hsqlInited) {
return;
}
// dual
stmt.execute("drop table DUAL if exists");
stmt.execute("create table DUAL ( DUMMY varchar(1) )");
stmt.execute("insert into DUAL (DUMMY) values ('X')");
// survey
stmt.execute("drop table SURVEY if exists");
//stmt.execute(CREATE_TABLE_SURVEY);
stmt.execute("create table SURVEY(" +
"ID int generated by default as identity, " +
"NAME varchar(30)," +
"NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1, 'Hello World', 'Hello')");
// test
stmt.execute("drop table TEST if exists");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table EMPLOYEE if exists");
createEmployeeTable(templates);
stmt.execute("alter table EMPLOYEE alter column id int generated by default as identity");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table TIME_TEST if exists");
stmt.execute("drop table DATE_TEST if exists");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
hsqlInited = true;
}
public static void initCubrid() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.CUBRID);
//SQLTemplates templates = new MySQLTemplates();
Connection c = getCubrid();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (cubridInited) {
return;
}
// survey
stmt.execute("drop table if exists SURVEY");
stmt.execute("create table SURVEY(ID int auto_increment(16693,2), " +
"NAME varchar(30)," +
"NAME2 varchar(30)," +
"constraint suryey_pk primary key(ID))");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello');");
// test
stmt.execute("drop table if exists \"TEST\"");
stmt.execute("create table \"TEST\"(NAME varchar(255))");
PreparedStatement pstmt = c.prepareStatement("insert into \"TEST\" values(?)");
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table if exists EMPLOYEE");
//createEmployeeTable(templates);
stmt.execute("create table EMPLOYEE ( " +
"ID INT PRIMARY KEY AUTO_INCREMENT, " +
"FIRSTNAME VARCHAR(50), " +
"LASTNAME VARCHAR(50), " +
"SALARY DECIMAL, " +
"DATEFIELD DATE, " +
"TIMEFIELD TIME, " +
"SUPERIOR_ID INT, " +
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID) " +
")");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table if exists TIME_TEST");
stmt.execute("drop table if exists DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
cubridInited = true;
}
public static void initMySQL() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.MYSQL);
//SQLTemplates templates = new MySQLTemplates();
Connection c = getMySQL();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (mysqlInited) {
return;
}
// shapes
stmt.execute("drop table if exists SHAPES");
stmt.execute("create table SHAPES (ID int not null primary key, GEOMETRY geometry)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into SHAPES values(" + entry.getKey()
+", GeomFromText('" + entry.getValue() + "'))");
}
// survey
stmt.execute("drop table if exists SURVEY");
stmt.execute("create table SURVEY(ID int primary key auto_increment, " +
"NAME varchar(30)," +
"NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello');");
// test
stmt.execute("drop table if exists TEST");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
stmt.execute("drop table if exists EMPLOYEE");
//createEmployeeTable(templates);
stmt.execute("create table EMPLOYEE ( " +
"ID INT PRIMARY KEY AUTO_INCREMENT, " +
"FIRSTNAME VARCHAR(50), " +
"LASTNAME VARCHAR(50), " +
"SALARY DECIMAL, " +
"DATEFIELD DATE, " +
"TIMEFIELD TIME, " +
"SUPERIOR_ID INT, " +
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID) " +
")");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
stmt.execute("drop table if exists TIME_TEST");
stmt.execute("drop table if exists DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
mysqlInited = true;
}
public static void initOracle() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.ORACLE);
SQLTemplates templates = new OracleTemplates();
Connection c = getOracle();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (oracleInited) {
return;
}
// types
stmt.execute("create or replace type ssn_t as object (ssn_type char(11))");
// survey
dropTable(templates, "SURVEY");
stmt.execute("create table SURVEY (ID number(10,0), " +
"NAME varchar(30 char)," +
"NAME2 varchar(30 char))");
try {
stmt.execute("drop sequence survey_seq");
} catch(SQLException e) {
if (!e.getMessage().contains("sequence does not exist")) {
throw e;
}
}
stmt.execute("create sequence survey_seq");
stmt.execute("create or replace trigger survey_trigger\n"+
"before insert on survey\n"+
"for each row\n" +
"when (new.id is null)\n"+
"begin\n"+
" select survey_seq.nextval into :new.id from dual;\n"+
"end;\n");
stmt.execute("insert into SURVEY values (1,'Hello World','Hello')");
// test
dropTable(templates, "TEST");
stmt.execute("create table TEST(name varchar(255))");
String sql = "insert into TEST values(?)";
PreparedStatement pstmt = c.prepareStatement(sql);
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
// employee
dropTable(templates, "EMPLOYEE");
stmt.execute("create table EMPLOYEE ( " +
"ID NUMBER(10,0), " +
"FIRSTNAME VARCHAR2(50 CHAR), " +
"LASTNAME VARCHAR2(50 CHAR), " +
"SALARY DOUBLE PRECISION, " +
"DATEFIELD DATE, " +
"TIMEFIELD TIMESTAMP, " +
"SUPERIOR_ID NUMBER(10,0), " +
"CONSTRAINT PK_EMPLOYEE PRIMARY KEY(ID), " +
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID)" +
")");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
dropTable(templates, "DATE_TEST");
stmt.execute("create table date_test(date_test date)");
oracleInited = true;
}
public static void initPostgres() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.POSTGRES);
SQLTemplates templates = new PostgresTemplates(true);
// NOTE : unquoted identifiers are converted to lower case in Postgres
Connection c = getPostgres();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (postgresInited) {
return;
}
// shapes
dropTable(templates, "SHAPES");
// stmt.execute("create table \"SHAPES\" (\"ID\" int not null primary key, \"GEOMETRY\" geography(POINT,4326))");
stmt.execute("create table \"SHAPES\" (\"ID\" int not null primary key)");
stmt.execute("select AddGeometryColumn('SHAPES', 'GEOMETRY', -1, 'GEOMETRY', 2)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into \"SHAPES\" values(" + entry.getKey()
+", '" + entry.getValue() + "')");
}
// types
dropType(stmt, "u_country");
stmt.execute("create type u_country as enum ('Brazil', 'England', 'Germany')");
dropType(stmt, "u_street_type");
stmt.execute("create type u_street_type as (street VARCHAR(100), number VARCHAR(30))");
// arrays
dropTable(templates, "ARRAYTEST");
stmt.execute("create table \"ARRAYTEST\" (\n" +
"\"ID\" bigint primary key,\n" +
"\"MYARRAY\" varchar(8)[])");
// survey
dropTable(templates, "SURVEY");
try {
stmt.execute("drop sequence SURVEY_SEQ");
} catch(SQLException e) {
if (!e.getMessage().contains("does not exist")) {
throw e;
}
}
stmt.execute("create sequence SURVEY_SEQ");
stmt.execute("create table \"SURVEY\"(" +
"\"ID\" int DEFAULT NEXTVAL('SURVEY_SEQ'), " +
"\"NAME\" varchar(30), \"NAME2\" varchar(30))");
stmt.execute("insert into \"SURVEY\" values (1, 'Hello World', 'Hello')");
// test
dropTable(templates, "TEST");
stmt.execute(quote(CREATE_TABLE_TEST,"TEST","NAME"));
String sql = quote(INSERT_INTO_TEST_VALUES,"TEST");
PreparedStatement pstmt = c.prepareStatement(sql);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
// stmt.execute("drop table employee if exists");
dropTable(templates, "EMPLOYEE");
createEmployeeTable(templates);
addEmployees("insert into \"EMPLOYEE\" " +
"(\"ID\", \"FIRSTNAME\", \"LASTNAME\", \"SALARY\", \"DATEFIELD\", \"TIMEFIELD\", \"SUPERIOR_ID\") " +
"values (?,?,?,?,?,?,?)");
// date_test and time_test
dropTable(templates, "TIME_TEST");
dropTable(templates, "DATE_TEST");
stmt.execute(quote(CREATE_TABLE_TIMETEST, "TIME_TEST"));
stmt.execute(quote(CREATE_TABLE_DATETEST, "DATE_TEST"));
postgresInited = true;
}
public static void initTeradata() throws SQLException, ClassNotFoundException{
targetHolder.set(Target.TERADATA);
SQLTemplates templates = new TeradataTemplates();
Connection c = getTeradata();
connHolder.set(c);
Statement stmt = c.createStatement();
stmtHolder.set(stmt);
if (teradataInited) {
return;
}
String identity = "GENERATED ALWAYS AS IDENTITY(START WITH 1 INCREMENT BY 1)";
// shapes
dropTable(templates, "SHAPES");
stmt.execute("create table SHAPES (ID int not null primary key, GEOMETRY ST_GEOMETRY)");
for (Map.Entry<Integer, String> entry : getSpatialData().entrySet()) {
stmt.execute("insert into SHAPES values(" + entry.getKey()
+", '" + entry.getValue() + "')");
}
// qtest
dropTable(templates, "QTEST");
stmt.execute("create table QTEST (ID int " + identity + " NOT NULL, C1 int NULL)");
// survey
dropTable(templates, "SURVEY");
stmt.execute("create table SURVEY(ID int " + identity + ", NAME varchar(30), NAME2 varchar(30))");
stmt.execute("insert into SURVEY values (1, 'Hello World', 'Hello');");
// test
dropTable(templates, "TEST");
stmt.execute(CREATE_TABLE_TEST);
PreparedStatement pstmt = c.prepareStatement(INSERT_INTO_TEST_VALUES);
try{
for (int i = 0; i < TEST_ROW_COUNT; i++) {
pstmt.setString(1, "name" + i);
pstmt.addBatch();
}
pstmt.executeBatch();
}finally{
pstmt.close();
}
// employee
dropTable(templates, "EMPLOYEE");
stmt.execute("create table EMPLOYEE (\n" +
"ID INTEGER NOT NULL PRIMARY KEY, \n" +
"FIRSTNAME VARCHAR(100),\n" +
"LASTNAME VARCHAR(100),\n" +
"SALARY DOUBLE PRECISION,\n" +
"DATEFIELD DATE,\n" +
"TIMEFIELD TIME,\n" +
"SUPERIOR_ID INTEGER,\n" +
"CONSTRAINT FK_SUPERIOR FOREIGN KEY(SUPERIOR_ID) REFERENCES EMPLOYEE(ID))");
addEmployees(INSERT_INTO_EMPLOYEE);
// date_test and time_test
dropTable(templates, "TIME_TEST");
dropTable(templates, "DATE_TEST");
stmt.execute(CREATE_TABLE_TIMETEST);
stmt.execute(CREATE_TABLE_DATETEST);
teradataInited = true;
}
static void addEmployee(String sql, int id, String firstName, String lastName,
double salary, int superiorId) throws SQLException {
PreparedStatement stmt = connHolder.get().prepareStatement(sql);
stmt.setInt(1, id);
stmt.setString(2, firstName);
stmt.setString(3,lastName);
stmt.setDouble(4, salary);
stmt.setDate(5, Constants.date);
stmt.setTime(6, Constants.time);
if (superiorId <= 0) {
stmt.setNull(7, Types.INTEGER);
} else {
stmt.setInt(7, superiorId);
}
stmt.execute();
stmt.close();
}
private static void addEmployees(String sql) throws SQLException {
addEmployee(sql, 1, "Mike", "Smith", 160000, -1);
addEmployee(sql, 2, "Mary", "Smith", 140000, -1);
// Employee under Mike
addEmployee(sql, 10, "Joe", "Divis", 50000, 1);
addEmployee(sql, 11, "Peter", "Mason", 45000, 1);
addEmployee(sql, 12, "Steve", "Johnson", 40000, 1);
addEmployee(sql, 13, "Jim", "Hood", 35000, 1);
// Employee under Mike
addEmployee(sql, 20, "Jennifer", "Divis", 60000, 2);
addEmployee(sql, 21, "Helen", "Mason", 50000, 2);
addEmployee(sql, 22, "Daisy", "Johnson", 40000, 2);
addEmployee(sql, 23, "Barbara", "Hood", 30000, 2);
}
private static String quote(String sql, String... identifiers) {
String rv = sql;
for (String id : identifiers) {
rv = rv.replace(id, "\"" + id + "\"");
}
return rv;
}
private Connections() {}
}
| Reorder inits
| querydsl-sql/src/test/java/com/mysema/query/Connections.java | Reorder inits |
|
Java | apache-2.0 | 69d6b2ebd001d71220139521e648c717db6c9003 | 0 | osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi | /*
* ============================================================================
* (c) Copyright 2004 Nokia
* This material, including documentation and any related computer programs,
* is protected by copyright controlled by Nokia and its licensors.
* All rights are reserved.
*
* These materials have been contributed to the Open Services Gateway
* Initiative (OSGi)as "MEMBER LICENSED MATERIALS" as defined in, and subject
* to the terms of, the OSGi Member Agreement specifically including, but not
* limited to, the license rights and warranty disclaimers as set forth in
* Sections 3.2 and 12.1 thereof, and the applicable Statement of Work.
* All company, brand and product names contained within this document may be
* trademarks that are the sole property of the respective owners.
* The above notice must be included on all copies of this document.
* ============================================================================
*/
package org.osgi.impl.service.dmt;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.security.*;
import java.util.*;
import org.osgi.service.dmt.*;
import org.osgi.service.dmt.security.*;
import org.osgi.service.dmt.spi.*;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.osgi.service.log.LogService;
import org.osgi.service.permissionadmin.PermissionInfo;
// OPTIMIZE node handling (e.g. retrieve plugin from dispatcher only once per API call)
// OPTIMIZE only retrieve meta-data once per API call
// OPTIMIZE only call commit/rollback for plugins that were actually modified since the last transaction boundary
public class DmtSessionImpl implements DmtSession {
private static final int SHOULD_NOT_EXIST = 0;
private static final int SHOULD_EXIST = 1;
private static final int SHOULD_BE_LEAF = 2; // implies SHOULD_EXIST
private static final int SHOULD_BE_INTERIOR = 3; // implies SHOULD_EXIST
private static final Class[] PERMISSION_CONSTRUCTOR_SIG =
new Class[] { String.class, String.class };
private static Hashtable acls;
// Stores the ACL table at the start of each transaction in an atomic
// session. Can be static because atomic session cannot run in parallel.
private static Hashtable savedAcls;
static {
init_acls();
}
private final AccessControlContext securityContext;
private final DmtAdminImpl dmtAdmin;
private final Context context;
private final String principal;
private final Node subtreeNode;
private final int lockMode;
private final int sessionId;
private EventList eventList;
private Vector dataPlugins;
private int state;
// Session creation is done in two phases:
// - DmtAdmin creates a new DmtSessionImpl instance (this should indicate
// as many errors as possible, but must not call any plugins)
// - when all conflicting sessions have been closed, DmtAdmin calls "open()"
// to actually open the session for external use
DmtSessionImpl(String principal, String subtreeUri, int lockMode,
PermissionInfo[] permissions, Context context,
DmtAdminImpl dmtAdmin) throws DmtException {
Node node = Node.validateAndNormalizeUri(subtreeUri);
subtreeNode = node.isAbsolute() ?
node : Node.ROOT_NODE.appendRelativeNode(node);
this.principal = principal;
this.lockMode = lockMode;
this.dmtAdmin = dmtAdmin;
this.context = context;
if(principal != null) { // remote session
SecurityManager sm = System.getSecurityManager();
if(sm != null)
sm.checkPermission(new DmtPrincipalPermission(principal));
try {
securityContext = getSecurityContext(permissions);
} catch(Exception e) {
throw new DmtException(subtreeNode.getUri(),
DmtException.COMMAND_FAILED,
"Unable to create Protection Domain for remote server.",
e);
}
} else
securityContext = null;
if(lockMode == LOCK_TYPE_ATOMIC)
eventList = new EventList();
sessionId =
(new Long(System.currentTimeMillis())).hashCode() ^ hashCode();
dataPlugins = new Vector();
state = STATE_CLOSED;
}
// called directly before returning the session object in getSession()
// throws NODE_NOT_FOUND if the previously specified root does not exist
void open() throws DmtException {
if(lockMode == LOCK_TYPE_ATOMIC)
// shallow copy is enough, Nodes and Acls are immutable
savedAcls = (Hashtable) acls.clone();
state = STATE_OPEN;
// after everything is initialized, check with the plugins whether the
// given node really exists
checkNode(subtreeNode, SHOULD_EXIST);
}
// called by Dmt Admin when checking session conflicts
Node getRootNode() {
return subtreeNode;
}
// called by the Session Wrapper, rollback parameter is:
// - true if a fatal exception has been thrown in a DMT access method
// - false if any exception has been thrown in the commit/rollback methods
protected void invalidateSession(boolean rollback, boolean timeout) {
state = STATE_INVALID;
context.log(LogService.LOG_WARNING, "Invalidating session '" +
sessionId + "' because of " + (timeout ? "timeout." : "error."),
null);
if(lockMode == LOCK_TYPE_ATOMIC && rollback) {
try {
rollbackPlugins();
} catch(DmtException e) {
context.log(LogService.LOG_WARNING, "Error rolling back " +
"plugin while invalidating session.", e);
}
}
try {
closeAndRelease(false);
} catch(DmtException e) {
context.log(LogService.LOG_WARNING, "Error closing plugin while " +
"invalidating session.", e);
}
}
/* These methods can be called even before the session has been opened, and
* also after the session has been closed. */
public synchronized int getState() {
return state;
}
public String getPrincipal() {
return principal;
}
public int getSessionId() {
return sessionId;
}
public String getRootUri() {
return subtreeNode.getUri();
}
public int getLockType() {
return lockMode;
}
public String mangle(String nodeName) {
return dmtAdmin.mangle(nodeName);
}
/* These methods are only meaningful in the context of an open session. */
// no other API methods can be called while this method is executed
public synchronized void close() throws DmtException {
checkSession();
// changed to CLOSED if this method finishes without error
state = STATE_INVALID;
closeAndRelease(lockMode == LOCK_TYPE_ATOMIC);
state = STATE_CLOSED;
}
private void closeAndRelease(boolean commit) throws DmtException {
try {
if(commit)
commitPlugins();
closePlugins();
} finally {
// DmtAdmin must be notified that this session has ended, otherwise
// other sessions might never be allowed to run
dmtAdmin.releaseSession(this);
}
}
private void closePlugins() throws DmtException {
Vector closeExceptions = new Vector();
// this block requires synchronization
ListIterator i = dataPlugins.listIterator(dataPlugins.size());
while (i.hasPrevious()) {
try {
((PluginSessionWrapper) i.previous()).close();
} catch(Exception e) {
closeExceptions.add(e);
}
}
dataPlugins.clear();
if (closeExceptions.size() != 0)
throw new DmtException((String) null, DmtException.COMMAND_FAILED,
"Some plugins failed to close.", closeExceptions, false);
}
// no other API methods can be called while this method is executed
public synchronized void commit() throws DmtException {
checkSession();
if (lockMode != LOCK_TYPE_ATOMIC)
throw new IllegalStateException("Commit can only be requested " +
"for atomic sessions.");
// changed back to OPEN if this method finishes without error
state = STATE_INVALID;
commitPlugins();
savedAcls = (Hashtable) acls.clone();
state = STATE_OPEN;
}
// precondition: lockMode == LOCK_TYPE_ATOMIC
private void commitPlugins() throws DmtException {
Vector commitExceptions = new Vector();
ListIterator i = dataPlugins.listIterator(dataPlugins.size());
// this block requires synchronization
while (i.hasPrevious()) {
PluginSessionWrapper wrappedPlugin = (PluginSessionWrapper) i.previous();
try {
// checks transaction support before calling commit on the plugin
wrappedPlugin.commit();
} catch(Exception e) {
purgeEvents(wrappedPlugin.getSessionRoot());
commitExceptions.add(e);
}
}
sendEvent(EventList.ADD);
sendEvent(EventList.DELETE);
sendEvent(EventList.REPLACE);
sendEvent(EventList.RENAME);
sendEvent(EventList.COPY);
eventList.clear();
if (commitExceptions.size() != 0)
throw new DmtException((String) null,
DmtException.TRANSACTION_ERROR,
"Some plugins failed to commit.",
commitExceptions, false);
}
// no other API methods can be called while this method is executed
public synchronized void rollback() throws DmtException {
checkSession();
if (lockMode != LOCK_TYPE_ATOMIC)
throw new IllegalStateException("Rollback can only be requested " +
"for atomic sessions.");
// changed back to OPEN if this method finishes without error
state = STATE_INVALID;
acls = (Hashtable) savedAcls.clone();
rollbackPlugins();
state = STATE_OPEN;
}
private void rollbackPlugins() throws DmtException {
eventList.clear();
Vector rollbackExceptions = new Vector();
// this block requires synchronization
ListIterator i = dataPlugins.listIterator(dataPlugins.size());
while (i.hasPrevious()) {
try {
// checks transaction support before calling rollback on the plugin
((PluginSessionWrapper) i.previous()).rollback();
} catch(Exception e) {
rollbackExceptions.add(e);
}
}
if (rollbackExceptions.size() != 0)
throw new DmtException((String) null, DmtException.ROLLBACK_FAILED,
"Some plugins failed to roll back or close.",
rollbackExceptions, false);
}
public synchronized void execute(String nodeUri, String data)
throws DmtException {
internalExecute(nodeUri, null, data);
}
public synchronized void execute(String nodeUri, String correlator,
String data) throws DmtException {
internalExecute(nodeUri, correlator, data);
}
// same as execute/3 but can be called internally, because it is not wrapped
private void internalExecute(String nodeUri, final String correlator,
final String data) throws DmtException {
checkSession();
// not allowing to execute non-existent nodes, all Management Objects
// defined in the spec have data plugins backing them
final Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.EXEC, MetaNode.CMD_EXECUTE);
final ExecPlugin plugin =
context.getPluginDispatcher().getExecPlugin(node);
final DmtSession session = this;
if (plugin == null)
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"No exec plugin registered for given node.");
try {
AccessController.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws DmtException {
plugin.execute(session, node.getPath(), correlator, data);
return null;
}
}, securityContext);
} catch(PrivilegedActionException e) {
throw (DmtException) e.getException();
}
}
// requires DmtPermission with GET action, no ACL check done because there
// are no ACLs stored for non-existing nodes (in theory)
public synchronized boolean isNodeUri(String nodeUri) {
checkSession();
try {
Node node = makeAbsoluteUri(nodeUri);
checkLocalPermission(node, writeAclCommands(Acl.GET));
checkNode(node, SHOULD_EXIST);
// not checking meta-data for the GET capability, the plugin must be
// prepared to answer isNodeUri() even if the node is not "gettable"
} catch (DmtException e) {
return false; // invalid node URI or error opening plugin
}
return true;
}
public synchronized boolean isLeafNode(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return isLeafNodeNoCheck(node);
}
// GET property op
public synchronized Acl getNodeAcl(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
Acl acl = (Acl) acls.get(node);
return acl == null ? null : acl;
}
// GET property op
public synchronized Acl getEffectiveNodeAcl(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getEffectiveNodeAclNoCheck(node);
}
// REPLACE property op
public synchronized void setNodeAcl(String nodeUri, Acl acl)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
// check for REPLACE permission:
if (isLeafNodeNoCheck(node)) // on the parent node for leaf nodes
checkNodePermission(node.getParent(), Acl.REPLACE);
else // on the node itself or the parent for interior nodes (parent will
// be ignored in case of the root node)
checkNodeOrParentPermission(node, Acl.REPLACE);
// Not checking REPLACE capability, node does not have to be modifiable
// to have an ACL associated with it. It should be possible to set
// ACLs everywhere, and the "Replace" Access Type seems to be given
// only for modifiable nodes.
// check that the new ACL is valid
if(node.isRoot() && (acl == null || !acl.isPermitted("*", Acl.ADD)))
// should be 405 "Forbidden" according to DMTND 7.7.1.2
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED, "Root ACL must allow " +
"the Add operation for all principals.");
if (acl == null || isEmptyAcl(acl))
acls.remove(node);
else
acls.put(node, acl);
getReadableDataSession(node).nodeChanged(node.getPath());
enqueueEvent(EventList.REPLACE, node);
}
public synchronized MetaNode getMetaNode(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
checkNodePermission(node, Acl.GET);
// not checking meta-data for the GET capability, meta-data should
// always be publicly available
return getMetaNodeNoCheck(node);
}
public synchronized DmtData getNodeValue(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
return internalGetNodeValue(node);
}
// also used by copy() to pass an already validated Node instead of a URI
private DmtData internalGetNodeValue(Node node) throws DmtException {
checkNode(node, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
ReadableDataSession pluginSession = getReadableDataSession(node);
DmtData data = pluginSession.getNodeValue(node.getPath());
boolean isLeafNode = pluginSession.isLeafNode(node.getPath());
boolean isLeafData = data.getFormat() != DmtData.FORMAT_NODE;
if(isLeafNode != isLeafData)
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"Error retrieving node value, the type of the data " +
"returned by the plugin does not match the node type.");
return data;
}
public synchronized String[] getChildNodeNames(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
return internalGetChildNodeNames(node);
}
// also used by copy() to pass an already validated Node instead of a URI
private String[] internalGetChildNodeNames(Node node) throws DmtException {
checkNode(node, SHOULD_BE_INTERIOR);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
String[] pluginChildNodes =
getReadableDataSession(node).getChildNodeNames(node.getPath());
List processedChildNodes = normalizeChildNodeNames(pluginChildNodes);
String[] processedChildNodeArray = (String[])
processedChildNodes.toArray(new String[processedChildNodes.size()]);
// ordering is not a requirement, but allows easier testing of plugins
Arrays.sort(processedChildNodeArray);
return processedChildNodeArray;
}
// GET property op
public synchronized String getNodeTitle(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
return internalGetNodeTitle(node);
}
// also used by copy() to pass an already validated Node instead of a URI
private String internalGetNodeTitle(Node node) throws DmtException {
checkNode(node, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeTitle(node.getPath());
}
// GET property op
public synchronized int getNodeVersion(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeVersion(node.getPath());
}
// GET property op
public synchronized Date getNodeTimestamp(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeTimestamp(node.getPath());
}
// GET property op
public synchronized int getNodeSize(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_BE_LEAF);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeSize(node.getPath());
}
// GET property op
public synchronized String getNodeType(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
return internalGetNodeType(node);
}
// also used by copy() to pass an already validated Node instead of a URI
private String internalGetNodeType(Node node) throws DmtException {
checkNode(node, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeType(node.getPath());
}
// REPLACE property op
public synchronized void setNodeTitle(String nodeUri, String title)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
internalSetNodeTitle(node, title, true); // send event if successful
}
// also used by copy() to pass an already validated Node instead of a URI
// and to set the node title without triggering an event
private void internalSetNodeTitle(Node node, String title,
boolean sendEvent) throws DmtException {
checkNode(node, SHOULD_EXIST);
checkOperation(node, Acl.REPLACE, MetaNode.CMD_REPLACE);
try {
if (title != null && title.getBytes("UTF-8").length > 255)
throw new DmtException(node.getUri(),
DmtException.COMMAND_FAILED,
"Length of Title property exceeds 255 bytes (UTF-8).");
} catch (UnsupportedEncodingException e) {
// never happens
}
getReadWriteDataSession(node).setNodeTitle(node.getPath(), title);
if(sendEvent)
enqueueEvent(EventList.REPLACE, node);
}
public synchronized void setNodeValue(String nodeUri, DmtData data)
throws DmtException {
commonSetNodeValue(nodeUri, data);
}
public synchronized void setDefaultNodeValue(String nodeUri)
throws DmtException {
commonSetNodeValue(nodeUri, null);
}
private void commonSetNodeValue(String nodeUri, DmtData data)
throws DmtException {
checkWriteSession();
int nodeConstraint =
data == null ? SHOULD_EXIST :
data.getFormat() == DmtData.FORMAT_NODE ?
SHOULD_BE_INTERIOR : SHOULD_BE_LEAF;
Node node = makeAbsoluteUriAndCheck(nodeUri, nodeConstraint);
checkOperation(node, Acl.REPLACE, MetaNode.CMD_REPLACE);
// check data against meta-data in case of leaf nodes (meta-data does
// not contain constraints for interior node values)
if(isLeafNodeNoCheck(node))
checkValue(node, data);
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null && metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(), DmtException.METADATA_MISMATCH,
"Cannot set the value of a permanent node.");
getReadWriteDataSession(node).setNodeValue(node.getPath(), data);
traverseEvents(EventList.REPLACE, node);
}
private void traverseEvents(int mode, Node node) throws DmtException {
if(isLeafNodeNoCheck(node))
enqueueEvent(mode, node);
else {
String children[] = internalGetChildNodeNames(node);
Arrays.sort(children);
for (int i = 0; i < children.length; i++)
traverseEvents(mode, node.appendSegment(children[i]));
}
}
// SyncML DMTND 7.5 (p16) Type: only the Get command is applicable!
public synchronized void setNodeType(String nodeUri, String type)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.REPLACE, MetaNode.CMD_REPLACE);
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null && metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(), DmtException.METADATA_MISMATCH,
"Cannot set type property of permanent node.");
if(isLeafNodeNoCheck(node))
checkMimeType(node, type);
// could check type string for interior nodes, but this impl. does not
// handle it anyway, so we leave it to the plugins if they need it
// (same in createInteriorNode/2)
getReadWriteDataSession(node).setNodeType(node.getPath(), type);
enqueueEvent(EventList.REPLACE, node);
}
public synchronized void deleteNode(String nodeUri) throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
if(node.isRoot())
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED,
"Cannot delete root node.");
checkOperation(node, Acl.DELETE, MetaNode.CMD_DELETE);
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null) {
if(metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot delete permanent node.");
if(!metaNode.isZeroOccurrenceAllowed()) {
// maxOccurrence == 1 means that there cannot be other instances
// of this node, so it cannot be deleted. If maxOccurrence > 1
// then we have to check whether this is the last one.
if(metaNode.getMaxOccurrence() == 1)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Metadata does not allow deleting the only " +
"instance of this node.");
checkNodeIsInSession(node.getParent(), "(needed to determine" +
"the number of siblings of the given node) ");
if(getNodeCardinality(node) == 1)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Metadata does not allow deleting the last " +
"instance of this node.");
}
}
getReadWriteDataSession(node).deleteNode(node.getPath());
moveAclEntries(node, null);
enqueueEvent(EventList.DELETE, node);
}
public synchronized void createInteriorNode(String nodeUri)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateInteriorNode(node, null, true, false);
}
public synchronized void createInteriorNode(String nodeUri, String type)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateInteriorNode(node, type, true, false);
}
// - used by the other createInteriorNode variants
// - also used by copy() to pass an already validated Node instead of a URI
// and to create interior nodes without triggering an event
// - also used by ensureInteriorAncestors, to create missing nodes while
// skipping automatically created nodes
private void commonCreateInteriorNode(Node node, String type,
boolean sendEvent, boolean skipAutomatic) throws DmtException {
checkNode(node, SHOULD_NOT_EXIST);
Node parent = node.getParent();
if(parent == null) // this should never happen, root must always exist
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"Cannot create root node.");
// Return silently if all of the following conditions are met:
// - the parent node has been created while ensuring that the ancestor
// nodes all exist
// - this call is part of creating the ancestors for some sub-node (as
// indicated by 'skipAutomatic')
// - this current node was created automatically, triggered by the
// creation of the parent (i.e. it has AUTOMATIC scope)
if(ensureInteriorAncestors(parent, sendEvent) && skipAutomatic &&
getReadableDataSession(node).isNodeUri(node.getPath()))
return;
checkNodePermission(parent, Acl.ADD);
checkNodeCapability(node, MetaNode.CMD_ADD);
MetaNode metaNode = getMetaNodeNoCheck(node);
if(metaNode != null && metaNode.isLeaf())
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot create the specified interior node, " +
"meta-data defines it as a leaf node.");
// could check type string, but this impl. does not handle it anyway
// so we leave it to the plugins if they need it (same in setNodeType)
checkNewNode(node);
checkMaxOccurrence(node);
// it is not really useful to allow creating automatic nodes, but this
// is not a hard requirement, and should be enforced by the (lack of
// the) ADD access type instead
getReadWriteDataSession(node).createInteriorNode(node.getPath(), type);
assignNewNodePermissions(node, parent);
if(sendEvent)
enqueueEvent(EventList.ADD, node);
}
public synchronized void createLeafNode(String nodeUri) throws DmtException {
// not calling createLeafNode/3, because it is wrapped
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateLeafNode(node, null, null, true);
}
public synchronized void createLeafNode(String nodeUri, DmtData value)
throws DmtException {
// not calling createLeafNode/3, because it is wrapped
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateLeafNode(node, value, null, true);
}
public synchronized void createLeafNode(String nodeUri, DmtData value,
String mimeType) throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateLeafNode(node, value, mimeType, true);
}
// - used by the other createLeafNode variants
// - also used by copy() to pass an already validated Node instead of a URI
// and to create leaf nodes without triggering an event
private void commonCreateLeafNode(Node node, DmtData value,
String mimeType, boolean sendEvent) throws DmtException {
checkNode(node, SHOULD_NOT_EXIST);
Node parent = node.getParent();
if(parent == null) // this should never happen, root must always exist
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"Cannot create root node.");
ensureInteriorAncestors(parent, sendEvent);
checkNodePermission(parent, Acl.ADD);
checkNodeCapability(node, MetaNode.CMD_ADD);
MetaNode metaNode = getMetaNodeNoCheck(node);
if(metaNode != null && !metaNode.isLeaf())
throw new DmtException(node.getUri(), DmtException.METADATA_MISMATCH,
"Cannot create the specified leaf node, meta-data " +
"defines it as an interior node.");
checkNewNode(node);
checkValue(node, value);
checkMimeType(node, mimeType);
checkMaxOccurrence(node);
// it is not really useful to allow creating automatic nodes, but this
// is not a hard requirement, and should be enforced by the (lack of
// the) ADD access type instead
getReadWriteDataSession(node).createLeafNode(node.getPath(), value,
mimeType);
if(sendEvent)
enqueueEvent(EventList.ADD, node);
}
// Tree may be left in an inconsistent state if there is an error when only
// part of the tree has been copied.
public synchronized void copy(String nodeUri, String newNodeUri,
boolean recursive) throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
Node newNode = makeAbsoluteUriAndCheck(newNodeUri, SHOULD_NOT_EXIST);
if (node.isAncestorOf(newNode))
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED,
"Cannot copy node to its descendant, '" + newNode + "'.");
if (context.getPluginDispatcher()
.handledBySameDataPlugin(node, newNode)) {
Node newParentNode = newNode.getParent();
// newParentNode cannot be null, because newNode is a valid absolute
// nonexisting node, so it cannot be the root
ensureInteriorAncestors(newParentNode, false);
// DMTND 7.7.1.5: "needs correct access rights for the equivalent
// Add, Delete, Get, and Replace commands"
copyPermissionCheck(node, newParentNode, newNode, recursive);
checkNodeCapability(node, MetaNode.CMD_GET);
checkNodeCapability(newNode, MetaNode.CMD_ADD);
checkNewNode(newNode);
checkMaxOccurrence(newNode);
// for leaf nodes: since we are not passing a data object to the
// plugin, checking the value and mime-type against the new
// meta-data is the responsibility of the plugin itself
try {
getReadWriteDataSession(newNode).copy(node.getPath(),
newNode.getPath(), recursive);
assignNewNodePermissions(newNode, newParentNode);
} catch(DmtException e) {
// fall back to generic algorithm if plugin doesn't support copy
if(e.getCode() != DmtException.FEATURE_NOT_SUPPORTED)
throw e;
// the above checks will be performed again, but we cannot even
// attempt to call the plugin without them
copyNoCheck(node, newNode, recursive);
}
}
else
copyNoCheck(node, newNode, recursive); // does not trigger events
enqueueEvent(EventList.COPY, node, newNode);
}
public synchronized void renameNode(String nodeUri, String newNodeName)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.REPLACE, MetaNode.CMD_REPLACE);
Node parent = node.getParent();
if (parent == null)
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED,
"Cannot rename root node.");
String newName = Node.validateAndNormalizeNodeName(newNodeName);
Node newNode = parent.appendSegment(newName);
checkNode(newNode, SHOULD_NOT_EXIST);
checkNewNode(newNode);
MetaNode metaNode = getMetaNodeNoCheck(node);
MetaNode newMetaNode = getMetaNodeNoCheck(newNode);
if (metaNode != null) {
if(metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot rename permanent node.");
int maxOcc = metaNode.getMaxOccurrence();
// sanity check: all siblings of a node must either have a
// cardinality of 1, or they must be part of the same multi-node
if(newMetaNode != null && maxOcc != newMetaNode.getMaxOccurrence())
throw new DmtException(node.getUri(),
DmtException.COMMAND_FAILED,
"Cannot rename node, illegal meta-data found (a " +
"member of a multi-node has a sibling with different " +
"meta-data).");
// if this is a multi-node (maxOcc > 1), renaming does not affect
// the cardinality
if(maxOcc == 1 && !metaNode.isZeroOccurrenceAllowed())
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Metadata does not allow deleting last instance of " +
"this node.");
}
// the new node must be the same (leaf/interior) as the original
if(newMetaNode != null && newMetaNode.isLeaf() != isLeafNodeNoCheck(node))
throw new DmtException(newNode.getUri(),
DmtException.METADATA_MISMATCH,
"The destination of the rename operation is " +
(newMetaNode.isLeaf() ? "a leaf" : "an interior") +
" node according to the meta-data, which does not match " +
"the source node.");
// for leaf nodes: since we are not passing a data object to the
// plugin, checking the value and mime-type against the new
// meta-data is the responsibility of the plugin itself
getReadWriteDataSession(node).renameNode(node.getPath(), newName);
moveAclEntries(node, newNode);
enqueueEvent(EventList.RENAME, node, newNode);
}
/**
* Create an Access Control Context based on the given permissions. The
* Permission objects are first created from the PermissionInfo objects,
* then added to a permission collection, which is added to a protection
* domain with no code source, which is used to create the access control
* context. If the <code>null</code> argument is given, an empty access
* control context is created.
*
* @throws Exception if there is an error creating one of the permission
* objects (can be one of ClassNotFoundException, SecurityException,
* NoSuchMethodException, ClassCastException,
* IllegalArgumentException, InstantiationException,
* IllegalAccessException or InvocationTargetException)
*/
private AccessControlContext getSecurityContext(PermissionInfo[] permissions)
throws Exception {
PermissionCollection permissionCollection = new Permissions();
if(permissions != null)
for (int i = 0; i < permissions.length; i++) {
PermissionInfo info = permissions[i];
Class permissionClass = Class.forName(info.getType());
Constructor constructor = permissionClass
.getConstructor(PERMISSION_CONSTRUCTOR_SIG);
Permission permission = (Permission) constructor.newInstance(
new Object[] {info.getName(), info.getActions()});
permissionCollection.add(permission);
}
return new AccessControlContext(new ProtectionDomain[] {
new ProtectionDomain(null, permissionCollection)});
}
private void checkSession() {
if(state != STATE_OPEN)
throw new IllegalStateException(
"Session is not open, cannot perform DMT operations.");
}
private void checkWriteSession() {
checkSession();
if(lockMode == LOCK_TYPE_SHARED)
throw new IllegalStateException(
"Session is not open for writing, cannot perform " +
"requested write operation.");
}
private void purgeEvents(Node root) {
eventList.excludeRoot(root);
}
private void sendEvent(int type) {
Node[] nodes = eventList.getNodes(type);
Node[] newNodes = eventList.getNewNodes(type);
if(nodes.length != 0)
sendEvent(type, nodes, newNodes);
}
private void enqueueEvent(int type, Node node) {
if(lockMode == LOCK_TYPE_ATOMIC)
eventList.add(type, node);
else
sendEvent(type, new Node[] { node }, null);
}
private void enqueueEvent(int type, Node node, Node newNode) {
if(lockMode == LOCK_TYPE_ATOMIC)
eventList.add(type, node, newNode);
else
sendEvent(type, new Node[] { node }, new Node[] { newNode });
}
private void sendEvent(int type, Node[] nodes, Node[] newNodes) {
String topic = EventList.getTopic(type);
Hashtable properties = new Hashtable();
properties.put("session.id", new Integer(sessionId));
properties.put("nodes", Node.getUriArray(nodes));
if(newNodes != null)
properties.put("newnodes", Node.getUriArray(newNodes));
final Event event = new Event(topic, properties);
// it's an error if Event Admin is missing, but it could also be ignored
final EventAdmin eventChannel =
(EventAdmin) context.getTracker(EventAdmin.class).getService();
if(eventChannel == null)
throw new MissingResourceException("Event Admin not found.",
EventAdmin.class.getName(), null);
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
eventChannel.postEvent(event);
return null;
}
});
}
private boolean isLeafNodeNoCheck(Node node) throws DmtException {
return getReadableDataSession(node).isLeafNode(node.getPath());
}
private MetaNode getMetaNodeNoCheck(Node node) throws DmtException {
return getReadableDataSession(node).getMetaNode(node.getPath());
}
// precondition: 'uri' must point be valid (checked with isNodeUri or
// returned by getChildNodeNames)
private void copyNoCheck(Node node, Node newNode, boolean recursive)
throws DmtException {
boolean isLeaf = isLeafNodeNoCheck(node);
String type = internalGetNodeType(node);
// create new node (without sending a separate event about it)
if (isLeaf)
// if getNodeValue() returns null, we attempt to set the default
commonCreateLeafNode(newNode, internalGetNodeValue(node), type,
false);
else
commonCreateInteriorNode(newNode, type, false, false);
// copy Title property (without sending event) if it is supported by
// both source and target plugins
try {
String title = internalGetNodeTitle(node);
// It could be valid to copy "null" Titles as well, if the
// implementation has default values for the Title property.
if(title != null)
internalSetNodeTitle(newNode, title, false);
} catch (DmtException e) {
if (e.getCode() != DmtException.FEATURE_NOT_SUPPORTED)
throw new DmtException(node.getUri(),
DmtException.COMMAND_FAILED, "Error copying node to '" +
newNode + "', cannot copy title.", e);
}
// Format, Name, Size, TStamp and VerNo properties do not need to be
// expicitly copied
// copy children if mode is recursive and node is interior
if (recursive && !isLeaf) {
// 'children' is [] if there are no child nodes
String[] children = internalGetChildNodeNames(node);
for (int i = 0; i < children.length; i++)
copyNoCheck(node.appendSegment(children[i]),
newNode.appendSegment(children[i]), true);
}
}
// precondition: path must be absolute, and the parent of the given node
// must be within the subtree of the session
private int getNodeCardinality(Node node) throws DmtException {
Node parent = node.getParent();
String[] neighbours =
getReadableDataSession(parent).getChildNodeNames(parent.getPath());
return normalizeChildNodeNames(neighbours).size();
}
private void assignNewNodePermissions(Node node, Node parent)
throws DmtException {
// DMTND 7.7.1.3: if parent does not have Replace permissions, give Add,
// Delete and Replace permissions to child. (This rule cannot be
// applied to Java permissions, only to ACLs.)
if(principal != null) {
try {
checkNodePermission(parent, Acl.REPLACE);
} catch (DmtException e) {
if (e.getCode() != DmtException.PERMISSION_DENIED)
throw e; // should not happen
Acl parentAcl = getEffectiveNodeAclNoCheck(parent);
Acl newAcl = parentAcl.addPermission(principal, Acl.ADD
| Acl.DELETE | Acl.REPLACE);
acls.put(node, newAcl);
}
}
}
private void checkOperation(Node node, int actions, int capability)
throws DmtException {
checkNodePermission(node, actions);
checkNodeCapability(node, capability);
}
// throws SecurityException if principal is local user, and sufficient
// privileges are missing
private void checkNodePermission(Node node, int actions)
throws DmtException {
checkNodeOrParentPermission(principal, node, actions, false);
}
// throws SecurityException if principal is local user, and sufficient
// privileges are missing
private void checkNodeOrParentPermission(Node node, int actions)
throws DmtException {
checkNodeOrParentPermission(principal, node, actions, true);
}
// Performs the necessary permission checks for a copy operation:
// - checks that the caller has GET rights (ACL or Java permission) for all
// source nodes
// - in case of local sessions, checks that the caller has REPLACE Java
// permissions on all nodes where a title needs to be set, and ADD Java
// permissions for the parents of all added nodes
// - in case of remote sessions, only the ACL of the parent of the target
// node needs to be checked, because ACLs cannot be set for nonexitent
// nodes; in this case the ADD ACL is always required, while REPLACE is
// checked only if any of the copied nodes has a non-null Title string
//
// Precondition: 'node' must point be valid (checked with isNodeUri or
// returned by getChildNodeNames)
private void copyPermissionCheck(Node node, Node newParentNode,
Node newNode, boolean recursive) throws DmtException {
boolean hasTitle = copyPermissionCheckRecursive(node, newParentNode,
newNode, recursive);
// ACL not copied, so the parent of the target node only needs
// REPLACE permission if the copied node (or any node in the copied
// subtree) has a title
// remote access permissions for the target only need to be checked once
if(principal != null) {
checkNodePermission(newParentNode, Acl.ADD);
if(hasTitle)
checkNodePermission(newNode, Acl.REPLACE);
}
}
private boolean copyPermissionCheckRecursive(Node node,
Node newParentNode, Node newNode, boolean recursive)
throws DmtException {
// check that the caller has GET rights for the current node
checkNodePermission(node, Acl.GET);
// check whether the node has a non-null title
boolean hasTitle = nodeHasTitle(node);
// local access permissions need to be checked for each target node
if(principal == null) {
checkLocalPermission(newParentNode, writeAclCommands(Acl.ADD));
if(hasTitle)
checkLocalPermission(newNode, writeAclCommands(Acl.REPLACE));
}
// perform the checks recursively for the subtree if requested
if (recursive && !isLeafNodeNoCheck(node)) {
// 'children' is [] if there are no child nodes
String[] children = internalGetChildNodeNames(node);
for (int i = 0; i < children.length; i++)
if(copyPermissionCheckRecursive(node.appendSegment(children[i]),
newNode, newNode.appendSegment(children[i]), true))
hasTitle = true;
}
return hasTitle;
}
// Returns true if the plugin handling the given node supports the Title
// property and value of the property is non-null. This is used for
// determining whether the caller needs to have REPLACE rights for the
// target node of the enclosing copy operation.
private boolean nodeHasTitle(Node node) throws DmtException {
try {
return internalGetNodeTitle(node) != null;
} catch (DmtException e) {
// FEATURE_NOT_SUPPORTED means that Title is not supported
if (e.getCode() != DmtException.FEATURE_NOT_SUPPORTED)
throw e;
}
return false;
}
private Node makeAbsoluteUriAndCheck(String nodeUri, int check)
throws DmtException {
Node node = makeAbsoluteUri(nodeUri);
checkNode(node, check);
return node;
}
// returns a plugin for read-only use
private ReadableDataSession getReadableDataSession(Node node)
throws DmtException {
return getPluginSession(node, false);
}
// returns a plugin for writing
private ReadWriteDataSession getReadWriteDataSession(Node node)
throws DmtException {
return getPluginSession(node, true);
}
// precondition: if 'writable' is true, session lock type must not be shared
// 'synchronized' is just indication, all entry points are synch'd anyway
private synchronized PluginSessionWrapper getPluginSession(Node node,
boolean writeOperation) throws DmtException {
PluginSessionWrapper wrappedPlugin = null;
Node wrappedPluginRoot = null;
// Look through the open plugin sessions, and find the session with the
// lowest root that handles the given node.
Iterator i = dataPlugins.iterator();
while (i.hasNext()) {
PluginSessionWrapper plugin = (PluginSessionWrapper) i.next();
Node pluginRoot = plugin.getSessionRoot();
if(pluginRoot.isAncestorOf(node) && (wrappedPluginRoot == null ||
wrappedPluginRoot.isAncestorOf(pluginRoot))) {
wrappedPlugin = plugin;
wrappedPluginRoot = pluginRoot;
}
}
// Find the plugin that would/will handle the given node, and the root
// of the (potential) session opened on it.
PluginRegistration pluginRegistration =
context.getPluginDispatcher().getDataPlugin(node);
Node root = getRootForPlugin(pluginRegistration, node);
// If we found a plugin session handling the node, and the potential
// new plugin session root (defined by 'root') is not in its subtree,
// then use the open session. If there is no session yet, or if a new
// session could be opened with a deeper root, then a new session is
// opened. (This guarantees that the proper plugin is used instead of
// the root plugin for nodes below the "root tree".)
if(wrappedPlugin != null &&
!wrappedPluginRoot.isAncestorOf(root, true)) {
if(writeOperation &&
wrappedPlugin.getSessionType() == LOCK_TYPE_SHARED)
throw getWriteException(lockMode, node);
return wrappedPlugin;
}
// No previously opened session found, attempting to open session with
// correct lock type.
DataPlugin plugin = pluginRegistration.getDataPlugin();
ReadableDataSession pluginSession = null;
int pluginSessionType = lockMode;
if(lockMode != LOCK_TYPE_SHARED) {
pluginSession = openPluginSession(plugin, root, pluginSessionType);
if(pluginSession == null && writeOperation)
throw getWriteException(lockMode, node);
}
// read-only session if lockMode is LOCK_TYPE_SHARED, or if the
// plugin did not support the writing lock mode, and the current
// operation is for reading
if(pluginSession == null) {
pluginSessionType = LOCK_TYPE_SHARED;
pluginSession = openPluginSession(plugin, root, pluginSessionType);
}
wrappedPlugin = new PluginSessionWrapper(pluginRegistration,
pluginSession, pluginSessionType, root, securityContext);
// this requires synchronized access
dataPlugins.add(wrappedPlugin);
return wrappedPlugin;
}
private Node getRootForPlugin(PluginRegistration plugin, Node node) {
Node[] roots = plugin.getDataRoots();
for(int i = 0; i < roots.length; i++)
if(roots[i].isAncestorOf(node))
return roots[i].isAncestorOf(subtreeNode)
? subtreeNode : roots[i];
throw new IllegalStateException("Internal error, plugin root not " +
"found for a URI handled by the plugin.");
}
private ReadableDataSession openPluginSession(
final DataPlugin plugin, Node root,
final int pluginSessionType) throws DmtException {
final DmtSession session = this;
final String[] rootPath = root.getPath();
ReadableDataSession pluginSession;
try {
pluginSession = (ReadableDataSession)
AccessController.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws DmtException {
switch(pluginSessionType) {
case LOCK_TYPE_EXCLUSIVE:
return plugin.openReadWriteSession(rootPath, session);
case LOCK_TYPE_ATOMIC:
return plugin.openAtomicSession(rootPath, session);
default: // LOCK_TYPE_SHARED
return plugin.openReadOnlySession(rootPath, session);
}
}
}, securityContext);
} catch(PrivilegedActionException e) {
throw (DmtException) e.getException();
}
return pluginSession;
}
// precondition: path must be absolute
private void checkNode(Node node, int check) throws DmtException {
boolean shouldExist = (check != SHOULD_NOT_EXIST);
if (getReadableDataSession(node).isNodeUri(node.getPath()) != shouldExist)
throw new DmtException(node.getUri(),
shouldExist ? DmtException.NODE_NOT_FOUND
: DmtException.NODE_ALREADY_EXISTS,
"The specified URI should point to "
+ (shouldExist ? "an existing" : "a non-existent")
+ " node to perform the requested operation.");
boolean shouldBeLeaf = (check == SHOULD_BE_LEAF);
boolean shouldBeInterior = (check == SHOULD_BE_INTERIOR);
if ((shouldBeLeaf || shouldBeInterior)
&& isLeafNodeNoCheck(node) != shouldBeLeaf)
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED,
"The specified URI should point to "
+ (shouldBeLeaf ? "a leaf" : "an internal")
+ " node to perform the requested operation.");
}
// precondition: checkNode() must have been called for the given uri
private void checkNodeCapability(Node node, int capability)
throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if(metaNode != null && !metaNode.can(capability))
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Node meta-data does not allow the " +
capabilityName(capability) + " operation for this node.");
// default for all capabilities is 'true', if no meta-data is provided
}
private void checkValue(Node node, DmtData data) throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if(metaNode == null)
return;
// if default data was requested, only check that there is a default
if(data == null) {
if(metaNode.getDefault() == null)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"This node has no default value in the meta-data.");
return;
}
if(!metaNode.isValidValue(data))
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"The specified node value is not valid according to " +
"the meta-data.");
// not checking value meta-data constraints individually, but leaving
// this to the isValidValue method of the meta-node
/*
if((metaNode.getFormat() & data.getFormat()) == 0)
throw new DmtException(uri, DmtException.METADATA_MISMATCH,
"The format of the specified value is not in the list of " +
"valid formats given in the node meta-data.");
if(data.getFormat() == DmtData.FORMAT_INTEGER) {
if(metaNode.getMax() < data.getInt())
throw new DmtException(uri, DmtException.METADATA_MISMATCH,
"Attempting to set too large integer, meta-data " +
"specifies the maximum value of " + metaNode.getMax());
if(metaNode.getMin() > data.getInt())
throw new DmtException(uri, DmtException.METADATA_MISMATCH,
"Attempting to set too small integer, meta-data " +
"specifies the minimum value of " + metaNode.getMin());
}
DmtData[] validValues = metaNode.getValidValues();
if(validValues != null && !Arrays.asList(validValues).contains(data))
throw new DmtException(uri, DmtException.METADATA_MISMATCH,
"Specified value is not in the list of valid values " +
"given in the node meta-data.");
*/
}
private void checkNewNode(Node node) throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if(metaNode == null)
return;
if(metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot create permanent node.");
if(!metaNode.isValidName(node.getLastSegment()))
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"The specified node name is not valid according to " +
"the meta-data.");
// not checking valid name list from meta-data, but leaving this to the
// isValidName method of the meta-node
/*
String[] validNames = metaNode.getValidNames();
if(validNames != null && !Arrays.asList(validNames).contains(name))
throw new DmtException(uri, DmtException.METADATA_MISMATCH,
"The specified node name is not in the list of valid " +
"names specified in the node meta-data.");
*/
}
private void checkMimeType(Node node, String type) throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if(metaNode == null)
return;
if(type == null) // default MIME type was requested
return;
int sep = type.indexOf('/');
if(sep == -1 || sep == 0 || sep == type.length()-1)
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"The given type string does not contain a MIME type.");
String[] validMimeTypes = metaNode.getMimeTypes();
if(validMimeTypes != null && !Arrays.asList(validMimeTypes).contains(type))
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"The specified MIME type is not in the list of valid " +
"types in the node meta-data.");
}
private void checkMaxOccurrence(Node node) throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if(metaNode == null)
return;
// If maxOccurrence == 1 then it is not a multi-node, so it can be
// created if it did not exist before. If maxOccurrence > 1, it can
// only be created if the number of existing nodes does not reach it.
int maxOccurrence = metaNode.getMaxOccurrence();
if(maxOccurrence != Integer.MAX_VALUE && maxOccurrence > 1
&& getNodeCardinality(node) >= maxOccurrence)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot create the specified node, meta-data maximizes " +
"the number of instances of this node to " + maxOccurrence + ".");
}
private Node makeAbsoluteUri(String nodeUri) throws DmtException {
Node node = Node.validateAndNormalizeUri(nodeUri);
if (node.isAbsolute()) {
checkNodeIsInSession(node, "");
return node;
}
return subtreeNode.appendRelativeNode(node);
}
private void checkNodeIsInSession(Node node, String uriExplanation)
throws DmtException {
if (!subtreeNode.isAncestorOf(node))
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"Specified URI " + uriExplanation + "points outside the " +
"subtree of this session.");
}
private boolean ensureInteriorAncestors(Node node, boolean sendEvent)
throws DmtException {
checkNodeIsInSession(node, "(needed to ensure " +
"a proper creation point for the new node) ");
if (!getReadableDataSession(node).isNodeUri(node.getPath())) {
commonCreateInteriorNode(node, null, sendEvent, true);
return true;
}
checkNode(node, SHOULD_BE_INTERIOR);
return false;
}
private static DmtException getWriteException(int lockMode, Node node) {
boolean atomic = (lockMode == LOCK_TYPE_ATOMIC);
return new DmtException(node.getUri(),
atomic ? DmtException.TRANSACTION_ERROR : DmtException.COMMAND_NOT_ALLOWED,
"The plugin handling the requested node does not support " +
(atomic ? "" : "non-") + "atomic writing.");
}
// remove null entries from the returned array (if it is non-null)
private static List normalizeChildNodeNames(String[] pluginChildNodes) {
List processedChildNodes = new Vector();
if (pluginChildNodes != null)
for (int i = 0; i < pluginChildNodes.length; i++)
if (pluginChildNodes[i] != null)
processedChildNodes.add(pluginChildNodes[i]);
return processedChildNodes;
}
// Move ACL entries from 'node' to 'newNode'.
// If 'newNode' is 'null', the ACL entries are removed (moved to nowhere).
private static void moveAclEntries(Node node, Node newNode) {
synchronized (acls) {
Hashtable newEntries = null;
if (newNode != null)
newEntries = new Hashtable();
Iterator i = acls.entrySet().iterator();
while (i.hasNext()) {
Map.Entry entry = (Map.Entry) i.next();
Node relativeNode =
node.getRelativeNode((Node) entry.getKey());
if (relativeNode != null) {
if (newNode != null)
newEntries.put(newNode.appendRelativeNode(relativeNode),
entry.getValue());
i.remove();
}
}
if (newNode != null)
acls.putAll(newEntries);
}
}
private static Acl getEffectiveNodeAclNoCheck(Node node) {
Acl acl;
synchronized (acls) {
acl = (Acl) acls.get(node);
// must finish whithout NullPointerException, because root ACL must
// not be empty
while (acl == null || isEmptyAcl(acl)) {
node = node.getParent();
acl = (Acl) acls.get(node);
}
}
return acl;
}
// precondition: node parameter must be an absolute node
// throws SecurityException if principal is local user, and sufficient
// privileges are missing
private static void checkNodeOrParentPermission(String name, Node node,
int actions, boolean checkParent) throws DmtException {
if(node.isRoot())
checkParent = false;
Node parent = null;
if(checkParent) // not null, as the uri is absolute but not "."
parent = node.getParent();
if (name != null) {
// succeed if the principal has the required permissions on the
// given uri, OR if the checkParent parameter is true and the
// principal has the required permissions for the parent uri
if (!(
hasAclPermission(node, name, actions) ||
checkParent && hasAclPermission(parent, name, actions)))
throw new DmtException(node.getUri(),
DmtException.PERMISSION_DENIED, "Principal '" + name
+ "' does not have the required permissions ("
+ writeAclCommands(actions) + ") on the node "
+ (checkParent ? "or its parent " : "")
+ "to perform this operation.");
}
else { // not doing local permission check if ACL check was done
String actionString = writeAclCommands(actions);
checkLocalPermission(node, actionString);
if(checkParent)
checkLocalPermission(parent, actionString);
}
}
private static boolean hasAclPermission(Node node, String name, int actions) {
return getEffectiveNodeAclNoCheck(node).isPermitted(name, actions);
}
private static void checkLocalPermission(Node node, String actions) {
SecurityManager sm = System.getSecurityManager();
if(sm != null)
sm.checkPermission(new DmtPermission(node.getUri(), actions));
}
private static String capabilityName(int capability) {
switch(capability) {
case MetaNode.CMD_ADD: return "Add";
case MetaNode.CMD_DELETE: return "Delete";
case MetaNode.CMD_EXECUTE: return "Execute";
case MetaNode.CMD_GET: return "Get";
case MetaNode.CMD_REPLACE: return "Replace";
}
// never reached
throw new IllegalArgumentException(
"Unknown meta-data capability constant " + capability + ".");
}
// ENHANCE define constants for the action names in the Acl class
private static String writeAclCommands(int actions) {
String commands = null;
commands = writeCommand(commands, actions, Acl.ADD, "Add");
commands = writeCommand(commands, actions, Acl.DELETE, "Delete");
commands = writeCommand(commands, actions, Acl.EXEC, "Exec");
commands = writeCommand(commands, actions, Acl.GET, "Get");
commands = writeCommand(commands, actions, Acl.REPLACE, "Replace");
return (commands != null) ? commands : "";
}
private static String writeCommand(String base, int actions, int action,
String entry) {
if ((actions & action) != 0)
return (base != null) ? base + ',' + entry : entry;
return base;
}
private static boolean isEmptyAcl(Acl acl) {
return acl.getPermissions("*") == 0 && acl.getPrincipals().length == 0;
}
static void init_acls() {
acls = new Hashtable();
acls.put(Node.ROOT_NODE, new Acl("Add=*&Get=*&Replace=*"));
}
public String toString() {
StringBuffer info = new StringBuffer();
info.append("DmtSessionImpl(");
info.append(principal).append(", ");
info.append(subtreeNode).append(", ");
if(lockMode == LOCK_TYPE_ATOMIC)
info.append("atomic");
else if(lockMode == LOCK_TYPE_EXCLUSIVE)
info.append("exclusive");
else
info.append("shared");
info.append(", ");
if(state == STATE_CLOSED)
info.append("closed");
else if(state == STATE_OPEN)
info.append("open");
else
info.append("invalid");
return info.append(')').toString();
}
}
// Sets of node URIs for the different types of changes.
// Only used in atomic transactions.
class EventList {
// two-parameter event types
static final int RENAME = 0;
static final int COPY = 1;
// single-parameter event types
static final int ADD = 2;
static final int DELETE = 3;
static final int REPLACE = 4;
private static final int TWO_PARAM_EVENT_TYPE_NUM = 2;
private static final int EVENT_TYPE_NUM = 5;
private List[] nodeLists = new List[EVENT_TYPE_NUM];
private List[] newNodeLists = new List[TWO_PARAM_EVENT_TYPE_NUM];
EventList() {
for(int i = 0; i < EVENT_TYPE_NUM; i++)
nodeLists[i] = new Vector();
for(int i = 0; i < TWO_PARAM_EVENT_TYPE_NUM; i++)
newNodeLists[i] = new Vector();
}
synchronized void clear() {
for(int i = 0; i < EVENT_TYPE_NUM; i++)
nodeLists[i].clear();
for(int i = 0; i < TWO_PARAM_EVENT_TYPE_NUM; i++)
newNodeLists[i].clear();
}
synchronized void excludeRoot(Node root) {
int i = 0;
for(; i < TWO_PARAM_EVENT_TYPE_NUM; i++)
// cannot use iterator here because if there is any match,
// items have to be removed from both lists
for(int k = 0; k < nodeLists[i].size(); k++)
if(root.isAncestorOf((Node)nodeLists[i].get(k)) ||
root.isAncestorOf((Node)newNodeLists[i].get(k))) {
nodeLists[i].remove(k);
newNodeLists[i].remove(k);
}
for(; i < EVENT_TYPE_NUM; i++) {
Iterator iterator = nodeLists[i].iterator();
while(iterator.hasNext())
if(root.isAncestorOf((Node) iterator.next()))
iterator.remove();
}
}
synchronized void add(int type, Node node) {
if(type < TWO_PARAM_EVENT_TYPE_NUM)
throw new IllegalArgumentException("Missing parameter for event.");
nodeLists[type].add(node);
}
synchronized void add(int type, Node node, Node newNode) {
if(type >= TWO_PARAM_EVENT_TYPE_NUM)
throw new IllegalArgumentException("Too many parameters for event.");
nodeLists[type].add(node);
newNodeLists[type].add(newNode);
}
synchronized Node[] getNodes(int type) {
return (Node[]) nodeLists[type].toArray(new Node[0]);
}
synchronized Node[] getNewNodes(int type) {
if(type >= TWO_PARAM_EVENT_TYPE_NUM)
return null;
return (Node[]) newNodeLists[type].toArray(new Node[0]);
}
static String getTopic(int type) {
switch(type) {
case ADD: return "org/osgi/service/dmt/ADDED";
case DELETE: return "org/osgi/service/dmt/DELETED";
case REPLACE: return "org/osgi/service/dmt/REPLACED";
case RENAME: return "org/osgi/service/dmt/RENAMED";
case COPY: return "org/osgi/service/dmt/COPIED";
}
throw new IllegalArgumentException("Unknown event type.");
}
}
| org.osgi.impl.service.dmt/src/org/osgi/impl/service/dmt/DmtSessionImpl.java | /*
* ============================================================================
* (c) Copyright 2004 Nokia
* This material, including documentation and any related computer programs,
* is protected by copyright controlled by Nokia and its licensors.
* All rights are reserved.
*
* These materials have been contributed to the Open Services Gateway
* Initiative (OSGi)as "MEMBER LICENSED MATERIALS" as defined in, and subject
* to the terms of, the OSGi Member Agreement specifically including, but not
* limited to, the license rights and warranty disclaimers as set forth in
* Sections 3.2 and 12.1 thereof, and the applicable Statement of Work.
* All company, brand and product names contained within this document may be
* trademarks that are the sole property of the respective owners.
* The above notice must be included on all copies of this document.
* ============================================================================
*/
package org.osgi.impl.service.dmt;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.security.*;
import java.util.*;
import org.osgi.service.dmt.*;
import org.osgi.service.dmt.security.*;
import org.osgi.service.dmt.spi.*;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.osgi.service.log.LogService;
import org.osgi.service.permissionadmin.PermissionInfo;
// OPTIMIZE node handling (e.g. retrieve plugin from dispatcher only once per
// API call)
// OPTIMIZE only retrieve meta-data once per API call
// OPTIMIZE only call commit/rollback for plugins that were actually modified
// since the last transaction boundary
public class DmtSessionImpl implements DmtSession {
private static final int SHOULD_NOT_EXIST = 0;
private static final int SHOULD_EXIST = 1;
private static final int SHOULD_BE_LEAF = 2; // implies
// SHOULD_EXIST
private static final int SHOULD_BE_INTERIOR = 3; // implies
// SHOULD_EXIST
private static final Class[] PERMISSION_CONSTRUCTOR_SIG = new Class[] {
String.class, String.class };
private static Hashtable acls;
// Stores the ACL table at the start of each transaction in an atomic
// session. Can be static because atomic session cannot run in parallel.
private static Hashtable savedAcls;
static {
init_acls();
}
private final AccessControlContext securityContext;
private final DmtAdminImpl dmtAdmin;
private final Context context;
private final String principal;
private final Node subtreeNode;
private final int lockMode;
private final int sessionId;
private EventList eventList;
private Vector dataPlugins;
private int state;
// Session creation is done in two phases:
// - DmtAdmin creates a new DmtSessionImpl instance (this should indicate
// as many errors as possible, but must not call any plugins)
// - when all conflicting sessions have been closed, DmtAdmin calls "open()"
// to actually open the session for external use
DmtSessionImpl(String principal, String subtreeUri, int lockMode,
PermissionInfo[] permissions, Context context, DmtAdminImpl dmtAdmin)
throws DmtException {
Node node = Node.validateAndNormalizeUri(subtreeUri);
subtreeNode = node.isAbsolute() ? node : Node.ROOT_NODE
.appendRelativeNode(node);
this.principal = principal;
this.lockMode = lockMode;
this.dmtAdmin = dmtAdmin;
this.context = context;
if (principal != null) { // remote session
SecurityManager sm = System.getSecurityManager();
if (sm != null)
sm.checkPermission(new DmtPrincipalPermission(principal));
try {
securityContext = getSecurityContext(permissions);
}
catch (Exception e) {
throw new DmtException(
subtreeNode.getUri(),
DmtException.COMMAND_FAILED,
"Unable to create Protection Domain for remote server.",
e);
}
}
else
securityContext = null;
if (lockMode == LOCK_TYPE_ATOMIC)
eventList = new EventList();
sessionId = (new Long(System.currentTimeMillis())).hashCode()
^ hashCode();
dataPlugins = new Vector();
state = STATE_CLOSED;
}
// called directly before returning the session object in getSession()
// throws NODE_NOT_FOUND if the previously specified root does not exist
void open() throws DmtException {
if (lockMode == LOCK_TYPE_ATOMIC)
// shallow copy is enough, Nodes and Acls are immutable
savedAcls = (Hashtable) acls.clone();
state = STATE_OPEN;
// after everything is initialized, check with the plugins whether the
// given node really exists
checkNode(subtreeNode, SHOULD_EXIST);
}
// called by Dmt Admin when checking session conflicts
Node getRootNode() {
return subtreeNode;
}
// called by the Session Wrapper, rollback parameter is:
// - true if a fatal exception has been thrown in a DMT access method
// - false if any exception has been thrown in the commit/rollback methods
protected void invalidateSession(boolean rollback, boolean timeout) {
state = STATE_INVALID;
context.log(LogService.LOG_WARNING, "Invalidating session '"
+ sessionId + "' because of "
+ (timeout ? "timeout." : "error."), null);
if (lockMode == LOCK_TYPE_ATOMIC && rollback) {
try {
rollbackPlugins();
}
catch (DmtException e) {
context.log(LogService.LOG_WARNING, "Error rolling back "
+ "plugin while invalidating session.", e);
}
}
try {
closeAndRelease(false);
}
catch (DmtException e) {
context.log(LogService.LOG_WARNING, "Error closing plugin while "
+ "invalidating session.", e);
}
}
/*
* These methods can be called even before the session has been opened, and
* also after the session has been closed.
*/
public synchronized int getState() {
return state;
}
public String getPrincipal() {
return principal;
}
public int getSessionId() {
return sessionId;
}
public String getRootUri() {
return subtreeNode.getUri();
}
public int getLockType() {
return lockMode;
}
public String mangle(String nodeName) {
return dmtAdmin.mangle(nodeName);
}
/* These methods are only meaningful in the context of an open session. */
// no other API methods can be called while this method is executed
public synchronized void close() throws DmtException {
checkSession();
// changed to CLOSED if this method finishes without error
state = STATE_INVALID;
closeAndRelease(lockMode == LOCK_TYPE_ATOMIC);
state = STATE_CLOSED;
}
private void closeAndRelease(boolean commit) throws DmtException {
try {
if (commit)
commitPlugins();
closePlugins();
}
finally {
// DmtAdmin must be notified that this session has ended, otherwise
// other sessions might never be allowed to run
dmtAdmin.releaseSession(this);
}
}
private void closePlugins() throws DmtException {
Vector closeExceptions = new Vector();
// this block requires synchronization
ListIterator i = dataPlugins.listIterator(dataPlugins.size());
while (i.hasPrevious()) {
try {
((PluginSessionWrapper) i.previous()).close();
}
catch (Exception e) {
closeExceptions.add(e);
}
}
dataPlugins.clear();
if (closeExceptions.size() != 0)
throw new DmtException((String) null, DmtException.COMMAND_FAILED,
"Some plugins failed to close.", closeExceptions, false);
}
// no other API methods can be called while this method is executed
public synchronized void commit() throws DmtException {
checkSession();
if (lockMode != LOCK_TYPE_ATOMIC)
throw new IllegalStateException("Commit can only be requested "
+ "for atomic sessions.");
// changed back to OPEN if this method finishes without error
state = STATE_INVALID;
commitPlugins();
savedAcls = (Hashtable) acls.clone();
state = STATE_OPEN;
}
// precondition: lockMode == LOCK_TYPE_ATOMIC
private void commitPlugins() throws DmtException {
Vector commitExceptions = new Vector();
ListIterator i = dataPlugins.listIterator(dataPlugins.size());
// this block requires synchronization
while (i.hasPrevious()) {
PluginSessionWrapper wrappedPlugin = (PluginSessionWrapper) i
.previous();
try {
// checks transaction support before calling commit on the
// plugin
wrappedPlugin.commit();
}
catch (Exception e) {
purgeEvents(wrappedPlugin.getSessionRoot());
commitExceptions.add(e);
}
}
sendEvent(EventList.ADD);
sendEvent(EventList.DELETE);
sendEvent(EventList.REPLACE);
sendEvent(EventList.RENAME);
sendEvent(EventList.COPY);
eventList.clear();
if (commitExceptions.size() != 0)
throw new DmtException((String) null,
DmtException.TRANSACTION_ERROR,
"Some plugins failed to commit.", commitExceptions, false);
}
// no other API methods can be called while this method is executed
public synchronized void rollback() throws DmtException {
checkSession();
if (lockMode != LOCK_TYPE_ATOMIC)
throw new IllegalStateException("Rollback can only be requested "
+ "for atomic sessions.");
// changed back to OPEN if this method finishes without error
state = STATE_INVALID;
acls = (Hashtable) savedAcls.clone();
rollbackPlugins();
state = STATE_OPEN;
}
private void rollbackPlugins() throws DmtException {
eventList.clear();
Vector rollbackExceptions = new Vector();
// this block requires synchronization
ListIterator i = dataPlugins.listIterator(dataPlugins.size());
while (i.hasPrevious()) {
try {
// checks transaction support before calling rollback on the
// plugin
((PluginSessionWrapper) i.previous()).rollback();
}
catch (Exception e) {
rollbackExceptions.add(e);
}
}
if (rollbackExceptions.size() != 0)
throw new DmtException((String) null, DmtException.ROLLBACK_FAILED,
"Some plugins failed to roll back or close.",
rollbackExceptions, false);
}
public synchronized void execute(String nodeUri, String data)
throws DmtException {
internalExecute(nodeUri, null, data);
}
public synchronized void execute(String nodeUri, String correlator,
String data) throws DmtException {
internalExecute(nodeUri, correlator, data);
}
// same as execute/3 but can be called internally, because it is not wrapped
private void internalExecute(String nodeUri, final String correlator,
final String data) throws DmtException {
checkSession();
// not allowing to execute non-existent nodes, all Management Objects
// defined in the spec have data plugins backing them
final Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.EXEC, MetaNode.CMD_EXECUTE);
final ExecPlugin plugin = context.getPluginDispatcher().getExecPlugin(
node);
final DmtSession session = this;
if (plugin == null)
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"No exec plugin registered for given node.");
try {
AccessController.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws DmtException {
plugin.execute(session, node.getPath(), correlator, data);
return null;
}
}, securityContext);
}
catch (PrivilegedActionException e) {
throw (DmtException) e.getException();
}
}
// requires DmtPermission with GET action, no ACL check done because there
// are no ACLs stored for non-existing nodes (in theory)
public synchronized boolean isNodeUri(String nodeUri) {
checkSession();
try {
Node node = makeAbsoluteUri(nodeUri);
checkLocalPermission(node, writeAclCommands(Acl.GET));
checkNode(node, SHOULD_EXIST);
// not checking meta-data for the GET capability, the plugin must be
// prepared to answer isNodeUri() even if the node is not "gettable"
}
catch (DmtException e) {
return false; // invalid node URI or error opening plugin
}
return true;
}
public synchronized boolean isLeafNode(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return isLeafNodeNoCheck(node);
}
// GET property op
public synchronized Acl getNodeAcl(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
Acl acl = (Acl) acls.get(node);
return acl == null ? null : acl;
}
// GET property op
public synchronized Acl getEffectiveNodeAcl(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getEffectiveNodeAclNoCheck(node);
}
// REPLACE property op
public synchronized void setNodeAcl(String nodeUri, Acl acl)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
// check for REPLACE permission:
if (isLeafNodeNoCheck(node)) // on the parent node for leaf nodes
checkNodePermission(node.getParent(), Acl.REPLACE);
else
// on the node itself or the parent for interior nodes (parent will
// be ignored in case of the root node)
checkNodeOrParentPermission(node, Acl.REPLACE);
// Not checking REPLACE capability, node does not have to be modifiable
// to have an ACL associated with it. It should be possible to set
// ACLs everywhere, and the "Replace" Access Type seems to be given
// only for modifiable nodes.
// check that the new ACL is valid
if (node.isRoot() && (acl == null || !acl.isPermitted("*", Acl.ADD)))
// should be 405 "Forbidden" according to DMTND 7.7.1.2
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED, "Root ACL must allow "
+ "the Add operation for all principals.");
if (acl == null || isEmptyAcl(acl))
acls.remove(node);
else
acls.put(node, acl);
getReadableDataSession(node).nodeChanged(node.getPath());
enqueueEvent(EventList.REPLACE, node);
}
public synchronized MetaNode getMetaNode(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
checkNodePermission(node, Acl.GET);
// not checking meta-data for the GET capability, meta-data should
// always be publicly available
return getMetaNodeNoCheck(node);
}
public synchronized DmtData getNodeValue(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
return internalGetNodeValue(node);
}
// also used by copy() to pass an already validated Node instead of a URI
private DmtData internalGetNodeValue(Node node) throws DmtException {
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
DmtData data = getReadableDataSession(node)
.getNodeValue(node.getPath());
checkNode(
node,
data.getFormat() == DmtData.FORMAT_NODE ? SHOULD_BE_INTERIOR
: SHOULD_BE_LEAF);
return data;
}
public synchronized String[] getChildNodeNames(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
return internalGetChildNodeNames(node);
}
// also used by copy() to pass an already validated Node instead of a URI
private String[] internalGetChildNodeNames(Node node) throws DmtException {
checkNode(node, SHOULD_BE_INTERIOR);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
String[] pluginChildNodes = getReadableDataSession(node)
.getChildNodeNames(node.getPath());
List processedChildNodes = normalizeChildNodeNames(pluginChildNodes);
String[] processedChildNodeArray = (String[]) processedChildNodes
.toArray(new String[processedChildNodes.size()]);
// ordering is not a requirement, but allows easier testing of plugins
Arrays.sort(processedChildNodeArray);
return processedChildNodeArray;
}
// GET property op
public synchronized String getNodeTitle(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
return internalGetNodeTitle(node);
}
// also used by copy() to pass an already validated Node instead of a URI
private String internalGetNodeTitle(Node node) throws DmtException {
checkNode(node, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeTitle(node.getPath());
}
// GET property op
public synchronized int getNodeVersion(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeVersion(node.getPath());
}
// GET property op
public synchronized Date getNodeTimestamp(String nodeUri)
throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeTimestamp(node.getPath());
}
// GET property op
public synchronized int getNodeSize(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_BE_LEAF);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeSize(node.getPath());
}
// GET property op
public synchronized String getNodeType(String nodeUri) throws DmtException {
checkSession();
Node node = makeAbsoluteUri(nodeUri);
return internalGetNodeType(node);
}
// also used by copy() to pass an already validated Node instead of a URI
private String internalGetNodeType(Node node) throws DmtException {
checkNode(node, SHOULD_EXIST);
checkOperation(node, Acl.GET, MetaNode.CMD_GET);
return getReadableDataSession(node).getNodeType(node.getPath());
}
// REPLACE property op
public synchronized void setNodeTitle(String nodeUri, String title)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
internalSetNodeTitle(node, title, true); // send event if successful
}
// also used by copy() to pass an already validated Node instead of a URI
// and to set the node title without triggering an event
private void internalSetNodeTitle(Node node, String title, boolean sendEvent)
throws DmtException {
checkNode(node, SHOULD_EXIST);
checkOperation(node, Acl.REPLACE, MetaNode.CMD_REPLACE);
try {
if (title != null && title.getBytes("UTF-8").length > 255)
throw new DmtException(node.getUri(),
DmtException.COMMAND_FAILED,
"Length of Title property exceeds 255 bytes (UTF-8).");
}
catch (UnsupportedEncodingException e) {
// never happens
}
getReadWriteDataSession(node).setNodeTitle(node.getPath(), title);
if (sendEvent)
enqueueEvent(EventList.REPLACE, node);
}
public synchronized void setNodeValue(String nodeUri, DmtData data)
throws DmtException {
commonSetNodeValue(nodeUri, data);
}
public synchronized void setDefaultNodeValue(String nodeUri)
throws DmtException {
commonSetNodeValue(nodeUri, null);
}
private void commonSetNodeValue(String nodeUri, DmtData data)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(
nodeUri,
data.getFormat() == DmtData.FORMAT_NODE ? SHOULD_BE_INTERIOR
: SHOULD_BE_LEAF);
checkOperation(node, Acl.REPLACE, MetaNode.CMD_REPLACE);
checkValue(node, data);
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null && metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot set the value of a permanent node.");
getReadWriteDataSession(node).setNodeValue(node.getPath(), data);
if (isLeafNode(nodeUri))
enqueueEvent(EventList.REPLACE, node);
else {
traverseEvents(EventList.REPLACE, nodeUri);
}
}
private void traverseEvents(int mode, String nodeUri) throws DmtException {
String children[] = getChildNodeNames(nodeUri);
Arrays.sort(children);
for (int i = 0; i < children.length; i++) {
String uri = nodeUri + "/" + children[i];
if (isLeafNode(uri)) {
Node node = makeAbsoluteUri(uri);
enqueueEvent(EventList.REPLACE, node);
} else {
traverseEvents(mode,uri);
}
}
}
// SyncML DMTND 7.5 (p16) Type: only the Get command is applicable!
public synchronized void setNodeType(String nodeUri, String type)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.REPLACE, MetaNode.CMD_REPLACE);
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null && metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot set type property of permanent node.");
if (isLeafNodeNoCheck(node))
checkMimeType(node, type);
// could check type string for interior nodes, but this impl. does not
// handle it anyway, so we leave it to the plugins if they need it
// (same in createInteriorNode/2)
getReadWriteDataSession(node).setNodeType(node.getPath(), type);
enqueueEvent(EventList.REPLACE, node);
}
public synchronized void deleteNode(String nodeUri) throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
if (node.isRoot())
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED,
"Cannot delete root node.");
checkOperation(node, Acl.DELETE, MetaNode.CMD_DELETE);
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null) {
if (metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot delete permanent node.");
if (!metaNode.isZeroOccurrenceAllowed()) {
// maxOccurrence == 1 means that there cannot be other instances
// of this node, so it cannot be deleted. If maxOccurrence > 1
// then we have to check whether this is the last one.
if (metaNode.getMaxOccurrence() == 1)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Metadata does not allow deleting the only "
+ "instance of this node.");
checkNodeIsInSession(node.getParent(), "(needed to determine"
+ "the number of siblings of the given node) ");
if (getNodeCardinality(node) == 1)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Metadata does not allow deleting the last "
+ "instance of this node.");
}
}
getReadWriteDataSession(node).deleteNode(node.getPath());
moveAclEntries(node, null);
enqueueEvent(EventList.DELETE, node);
}
public synchronized void createInteriorNode(String nodeUri)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateInteriorNode(node, null, true, false);
}
public synchronized void createInteriorNode(String nodeUri, String type)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateInteriorNode(node, type, true, false);
}
// - used by the other createInteriorNode variants
// - also used by copy() to pass an already validated Node instead of a URI
// and to create interior nodes without triggering an event
// - also used by ensureInteriorAncestors, to create missing nodes while
// skipping automatically created nodes
private void commonCreateInteriorNode(Node node, String type,
boolean sendEvent, boolean skipAutomatic) throws DmtException {
checkNode(node, SHOULD_NOT_EXIST);
Node parent = node.getParent();
if (parent == null) // this should never happen, root must always exist
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"Cannot create root node.");
// Return silently if all of the following conditions are met:
// - the parent node has been created while ensuring that the ancestor
// nodes all exist
// - this call is part of creating the ancestors for some sub-node (as
// indicated by 'skipAutomatic')
// - this current node was created automatically, triggered by the
// creation of the parent (i.e. it has AUTOMATIC scope)
if (ensureInteriorAncestors(parent, sendEvent) && skipAutomatic
&& getReadableDataSession(node).isNodeUri(node.getPath()))
return;
checkNodePermission(parent, Acl.ADD);
checkNodeCapability(node, MetaNode.CMD_ADD);
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null && metaNode.isLeaf())
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot create the specified interior node, "
+ "meta-data defines it as a leaf node.");
// could check type string, but this impl. does not handle it anyway
// so we leave it to the plugins if they need it (same in setNodeType)
checkNewNode(node);
checkMaxOccurrence(node);
// it is not really useful to allow creating automatic nodes, but this
// is not a hard requirement, and should be enforced by the (lack of
// the) ADD access type instead
getReadWriteDataSession(node).createInteriorNode(node.getPath(), type);
assignNewNodePermissions(node, parent);
if (sendEvent)
enqueueEvent(EventList.ADD, node);
}
public synchronized void createLeafNode(String nodeUri) throws DmtException {
// not calling createLeafNode/3, because it is wrapped
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateLeafNode(node, null, null, true);
}
public synchronized void createLeafNode(String nodeUri, DmtData value)
throws DmtException {
// not calling createLeafNode/3, because it is wrapped
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateLeafNode(node, value, null, true);
}
public synchronized void createLeafNode(String nodeUri, DmtData value,
String mimeType) throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUri(nodeUri);
commonCreateLeafNode(node, value, mimeType, true);
}
// - used by the other createLeafNode variants
// - also used by copy() to pass an already validated Node instead of a URI
// and to create leaf nodes without triggering an event
private void commonCreateLeafNode(Node node, DmtData value,
String mimeType, boolean sendEvent) throws DmtException {
checkNode(node, SHOULD_NOT_EXIST);
Node parent = node.getParent();
if (parent == null) // this should never happen, root must always exist
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"Cannot create root node.");
ensureInteriorAncestors(parent, sendEvent);
checkNodePermission(parent, Acl.ADD);
checkNodeCapability(node, MetaNode.CMD_ADD);
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null && !metaNode.isLeaf())
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot create the specified leaf node, meta-data "
+ "defines it as an interior node.");
checkNewNode(node);
checkValue(node, value);
checkMimeType(node, mimeType);
checkMaxOccurrence(node);
// it is not really useful to allow creating automatic nodes, but this
// is not a hard requirement, and should be enforced by the (lack of
// the) ADD access type instead
getReadWriteDataSession(node).createLeafNode(
node.getPath(),
value,
mimeType);
if (sendEvent)
enqueueEvent(EventList.ADD, node);
}
// Tree may be left in an inconsistent state if there is an error when only
// part of the tree has been copied.
public synchronized void copy(String nodeUri, String newNodeUri,
boolean recursive) throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
Node newNode = makeAbsoluteUriAndCheck(newNodeUri, SHOULD_NOT_EXIST);
if (node.isAncestorOf(newNode))
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED,
"Cannot copy node to its descendant, '" + newNode + "'.");
if (context.getPluginDispatcher()
.handledBySameDataPlugin(node, newNode)) {
Node newParentNode = newNode.getParent();
// newParentNode cannot be null, because newNode is a valid absolute
// nonexisting node, so it cannot be the root
ensureInteriorAncestors(newParentNode, false);
// DMTND 7.7.1.5: "needs correct access rights for the equivalent
// Add, Delete, Get, and Replace commands"
copyPermissionCheck(node, newParentNode, newNode, recursive);
checkNodeCapability(node, MetaNode.CMD_GET);
checkNodeCapability(newNode, MetaNode.CMD_ADD);
checkNewNode(newNode);
checkMaxOccurrence(newNode);
// for leaf nodes: since we are not passing a data object to the
// plugin, checking the value and mime-type against the new
// meta-data is the responsibility of the plugin itself
try {
getReadWriteDataSession(newNode).copy(
node.getPath(),
newNode.getPath(),
recursive);
assignNewNodePermissions(newNode, newParentNode);
}
catch (DmtException e) {
// fall back to generic algorithm if plugin doesn't support copy
if (e.getCode() != DmtException.FEATURE_NOT_SUPPORTED)
throw e;
// the above checks will be performed again, but we cannot even
// attempt to call the plugin without them
copyNoCheck(node, newNode, recursive);
}
}
else
copyNoCheck(node, newNode, recursive); // does not trigger events
enqueueEvent(EventList.COPY, node, newNode);
}
public synchronized void renameNode(String nodeUri, String newNodeName)
throws DmtException {
checkWriteSession();
Node node = makeAbsoluteUriAndCheck(nodeUri, SHOULD_EXIST);
checkOperation(node, Acl.REPLACE, MetaNode.CMD_REPLACE);
Node parent = node.getParent();
if (parent == null)
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED,
"Cannot rename root node.");
String newName = Node.validateAndNormalizeNodeName(newNodeName);
Node newNode = parent.appendSegment(newName);
checkNode(newNode, SHOULD_NOT_EXIST);
checkNewNode(newNode);
MetaNode metaNode = getMetaNodeNoCheck(node);
MetaNode newMetaNode = getMetaNodeNoCheck(newNode);
if (metaNode != null) {
if (metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot rename permanent node.");
int maxOcc = metaNode.getMaxOccurrence();
// sanity check: all siblings of a node must either have a
// cardinality of 1, or they must be part of the same multi-node
if (newMetaNode != null && maxOcc != newMetaNode.getMaxOccurrence())
throw new DmtException(
node.getUri(),
DmtException.COMMAND_FAILED,
"Cannot rename node, illegal meta-data found (a "
+ "member of a multi-node has a sibling with different "
+ "meta-data).");
// if this is a multi-node (maxOcc > 1), renaming does not affect
// the cardinality
if (maxOcc == 1 && !metaNode.isZeroOccurrenceAllowed())
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Metadata does not allow deleting last instance of "
+ "this node.");
}
// the new node must be the same (leaf/interior) as the original
if (newMetaNode != null
&& newMetaNode.isLeaf() != isLeafNodeNoCheck(node))
throw new DmtException(
newNode.getUri(),
DmtException.METADATA_MISMATCH,
"The destination of the rename operation is "
+ (newMetaNode.isLeaf() ? "a leaf" : "an interior")
+ " node according to the meta-data, which does not match "
+ "the source node.");
// for leaf nodes: since we are not passing a data object to the
// plugin, checking the value and mime-type against the new
// meta-data is the responsibility of the plugin itself
getReadWriteDataSession(node).renameNode(node.getPath(), newName);
moveAclEntries(node, newNode);
enqueueEvent(EventList.RENAME, node, newNode);
}
/**
* Create an Access Control Context based on the given permissions. The
* Permission objects are first created from the PermissionInfo objects,
* then added to a permission collection, which is added to a protection
* domain with no code source, which is used to create the access control
* context. If the <code>null</code> argument is given, an empty access
* control context is created.
*
* @throws Exception if there is an error creating one of the permission
* objects (can be one of ClassNotFoundException, SecurityException,
* NoSuchMethodException, ClassCastException,
* IllegalArgumentException, InstantiationException,
* IllegalAccessException or InvocationTargetException)
*/
private AccessControlContext getSecurityContext(PermissionInfo[] permissions)
throws Exception {
PermissionCollection permissionCollection = new Permissions();
if (permissions != null)
for (int i = 0; i < permissions.length; i++) {
PermissionInfo info = permissions[i];
Class permissionClass = Class.forName(info.getType());
Constructor constructor = permissionClass
.getConstructor(PERMISSION_CONSTRUCTOR_SIG);
Permission permission = (Permission) constructor
.newInstance(new Object[] {info.getName(),
info.getActions()});
permissionCollection.add(permission);
}
return new AccessControlContext(
new ProtectionDomain[] {new ProtectionDomain(null,
permissionCollection)});
}
private void checkSession() {
if (state != STATE_OPEN)
throw new IllegalStateException(
"Session is not open, cannot perform DMT operations.");
}
private void checkWriteSession() {
checkSession();
if (lockMode == LOCK_TYPE_SHARED)
throw new IllegalStateException(
"Session is not open for writing, cannot perform "
+ "requested write operation.");
}
private void purgeEvents(Node root) {
eventList.excludeRoot(root);
}
private void sendEvent(int type) {
Node[] nodes = eventList.getNodes(type);
Node[] newNodes = eventList.getNewNodes(type);
if (nodes.length != 0)
sendEvent(type, nodes, newNodes);
}
private void enqueueEvent(int type, Node node) {
if (lockMode == LOCK_TYPE_ATOMIC)
eventList.add(type, node);
else
sendEvent(type, new Node[] {node}, null);
}
private void enqueueEvent(int type, Node node, Node newNode) {
if (lockMode == LOCK_TYPE_ATOMIC)
eventList.add(type, node, newNode);
else
sendEvent(type, new Node[] {node}, new Node[] {newNode});
}
private void sendEvent(int type, Node[] nodes, Node[] newNodes) {
String topic = EventList.getTopic(type);
Hashtable properties = new Hashtable();
properties.put("session.id", new Integer(sessionId));
properties.put("nodes", Node.getUriArray(nodes));
if (newNodes != null)
properties.put("newnodes", Node.getUriArray(newNodes));
final Event event = new Event(topic, properties);
// it's an error if Event Admin is missing, but it could also be ignored
final EventAdmin eventChannel = (EventAdmin) context.getTracker(
EventAdmin.class).getService();
if (eventChannel == null)
throw new MissingResourceException("Event Admin not found.",
EventAdmin.class.getName(), null);
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
eventChannel.postEvent(event);
return null;
}
});
}
private boolean isLeafNodeNoCheck(Node node) throws DmtException {
return getReadableDataSession(node).isLeafNode(node.getPath());
}
private MetaNode getMetaNodeNoCheck(Node node) throws DmtException {
return getReadableDataSession(node).getMetaNode(node.getPath());
}
// precondition: 'uri' must point be valid (checked with isNodeUri or
// returned by getChildNodeNames)
private void copyNoCheck(Node node, Node newNode, boolean recursive)
throws DmtException {
boolean isLeaf = isLeafNodeNoCheck(node);
String type = internalGetNodeType(node);
// create new node (without sending a separate event about it)
if (isLeaf)
// if getNodeValue() returns null, we attempt to set the default
commonCreateLeafNode(
newNode,
internalGetNodeValue(node),
type,
false);
else
commonCreateInteriorNode(newNode, type, false, false);
// copy Title property (without sending event) if it is supported by
// both source and target plugins
try {
String title = internalGetNodeTitle(node);
// It could be valid to copy "null" Titles as well, if the
// implementation has default values for the Title property.
if (title != null)
internalSetNodeTitle(newNode, title, false);
}
catch (DmtException e) {
if (e.getCode() != DmtException.FEATURE_NOT_SUPPORTED)
throw new DmtException(node.getUri(),
DmtException.COMMAND_FAILED, "Error copying node to '"
+ newNode + "', cannot copy title.", e);
}
// Format, Name, Size, TStamp and VerNo properties do not need to be
// expicitly copied
// copy children if mode is recursive and node is interior
if (recursive && !isLeaf) {
// 'children' is [] if there are no child nodes
String[] children = internalGetChildNodeNames(node);
for (int i = 0; i < children.length; i++)
copyNoCheck(node.appendSegment(children[i]), newNode
.appendSegment(children[i]), true);
}
}
// precondition: path must be absolute, and the parent of the given node
// must be within the subtree of the session
private int getNodeCardinality(Node node) throws DmtException {
Node parent = node.getParent();
String[] neighbours = getReadableDataSession(parent).getChildNodeNames(
parent.getPath());
return normalizeChildNodeNames(neighbours).size();
}
private void assignNewNodePermissions(Node node, Node parent)
throws DmtException {
// DMTND 7.7.1.3: if parent does not have Replace permissions, give Add,
// Delete and Replace permissions to child. (This rule cannot be
// applied to Java permissions, only to ACLs.)
if (principal != null) {
try {
checkNodePermission(parent, Acl.REPLACE);
}
catch (DmtException e) {
if (e.getCode() != DmtException.PERMISSION_DENIED)
throw e; // should not happen
Acl parentAcl = getEffectiveNodeAclNoCheck(parent);
Acl newAcl = parentAcl.addPermission(principal, Acl.ADD
| Acl.DELETE | Acl.REPLACE);
acls.put(node, newAcl);
}
}
}
private void checkOperation(Node node, int actions, int capability)
throws DmtException {
checkNodePermission(node, actions);
checkNodeCapability(node, capability);
}
// throws SecurityException if principal is local user, and sufficient
// privileges are missing
private void checkNodePermission(Node node, int actions)
throws DmtException {
checkNodeOrParentPermission(principal, node, actions, false);
}
// throws SecurityException if principal is local user, and sufficient
// privileges are missing
private void checkNodeOrParentPermission(Node node, int actions)
throws DmtException {
checkNodeOrParentPermission(principal, node, actions, true);
}
// Performs the necessary permission checks for a copy operation:
// - checks that the caller has GET rights (ACL or Java permission) for all
// source nodes
// - in case of local sessions, checks that the caller has REPLACE Java
// permissions on all nodes where a title needs to be set, and ADD Java
// permissions for the parents of all added nodes
// - in case of remote sessions, only the ACL of the parent of the target
// node needs to be checked, because ACLs cannot be set for nonexitent
// nodes; in this case the ADD ACL is always required, while REPLACE is
// checked only if any of the copied nodes has a non-null Title string
//
// Precondition: 'node' must point be valid (checked with isNodeUri or
// returned by getChildNodeNames)
private void copyPermissionCheck(Node node, Node newParentNode,
Node newNode, boolean recursive) throws DmtException {
boolean hasTitle = copyPermissionCheckRecursive(
node,
newParentNode,
newNode,
recursive);
// ACL not copied, so the parent of the target node only needs
// REPLACE permission if the copied node (or any node in the copied
// subtree) has a title
// remote access permissions for the target only need to be checked once
if (principal != null) {
checkNodePermission(newParentNode, Acl.ADD);
if (hasTitle)
checkNodePermission(newNode, Acl.REPLACE);
}
}
private boolean copyPermissionCheckRecursive(Node node, Node newParentNode,
Node newNode, boolean recursive) throws DmtException {
// check that the caller has GET rights for the current node
checkNodePermission(node, Acl.GET);
// check whether the node has a non-null title
boolean hasTitle = nodeHasTitle(node);
// local access permissions need to be checked for each target node
if (principal == null) {
checkLocalPermission(newParentNode, writeAclCommands(Acl.ADD));
if (hasTitle)
checkLocalPermission(newNode, writeAclCommands(Acl.REPLACE));
}
// perform the checks recursively for the subtree if requested
if (recursive && !isLeafNodeNoCheck(node)) {
// 'children' is [] if there are no child nodes
String[] children = internalGetChildNodeNames(node);
for (int i = 0; i < children.length; i++)
if (copyPermissionCheckRecursive(node
.appendSegment(children[i]), newNode, newNode
.appendSegment(children[i]), true))
hasTitle = true;
}
return hasTitle;
}
// Returns true if the plugin handling the given node supports the Title
// property and value of the property is non-null. This is used for
// determining whether the caller needs to have REPLACE rights for the
// target node of the enclosing copy operation.
private boolean nodeHasTitle(Node node) throws DmtException {
try {
return internalGetNodeTitle(node) != null;
}
catch (DmtException e) {
// FEATURE_NOT_SUPPORTED means that Title is not supported
if (e.getCode() != DmtException.FEATURE_NOT_SUPPORTED)
throw e;
}
return false;
}
private Node makeAbsoluteUriAndCheck(String nodeUri, int check)
throws DmtException {
Node node = makeAbsoluteUri(nodeUri);
checkNode(node, check);
return node;
}
// returns a plugin for read-only use
private ReadableDataSession getReadableDataSession(Node node)
throws DmtException {
return getPluginSession(node, false);
}
// returns a plugin for writing
private ReadWriteDataSession getReadWriteDataSession(Node node)
throws DmtException {
return getPluginSession(node, true);
}
// precondition: if 'writable' is true, session lock type must not be shared
// 'synchronized' is just indication, all entry points are synch'd anyway
private synchronized PluginSessionWrapper getPluginSession(Node node,
boolean writeOperation) throws DmtException {
PluginSessionWrapper wrappedPlugin = null;
Node wrappedPluginRoot = null;
// Look through the open plugin sessions, and find the session with the
// lowest root that handles the given node.
Iterator i = dataPlugins.iterator();
while (i.hasNext()) {
PluginSessionWrapper plugin = (PluginSessionWrapper) i.next();
Node pluginRoot = plugin.getSessionRoot();
if (pluginRoot.isAncestorOf(node)
&& (wrappedPluginRoot == null || wrappedPluginRoot
.isAncestorOf(pluginRoot))) {
wrappedPlugin = plugin;
wrappedPluginRoot = pluginRoot;
}
}
// Find the plugin that would/will handle the given node, and the root
// of the (potential) session opened on it.
PluginRegistration pluginRegistration = context.getPluginDispatcher()
.getDataPlugin(node);
Node root = getRootForPlugin(pluginRegistration, node);
// If we found a plugin session handling the node, and the potential
// new plugin session root (defined by 'root') is not in its subtree,
// then use the open session. If there is no session yet, or if a new
// session could be opened with a deeper root, then a new session is
// opened. (This guarantees that the proper plugin is used instead of
// the root plugin for nodes below the "root tree".)
if (wrappedPlugin != null
&& !wrappedPluginRoot.isAncestorOf(root, true)) {
if (writeOperation
&& wrappedPlugin.getSessionType() == LOCK_TYPE_SHARED)
throw getWriteException(lockMode, node);
return wrappedPlugin;
}
// No previously opened session found, attempting to open session with
// correct lock type.
DataPlugin plugin = pluginRegistration.getDataPlugin();
ReadableDataSession pluginSession = null;
int pluginSessionType = lockMode;
if (lockMode != LOCK_TYPE_SHARED) {
pluginSession = openPluginSession(plugin, root, pluginSessionType);
if (pluginSession == null && writeOperation)
throw getWriteException(lockMode, node);
}
// read-only session if lockMode is LOCK_TYPE_SHARED, or if the
// plugin did not support the writing lock mode, and the current
// operation is for reading
if (pluginSession == null) {
pluginSessionType = LOCK_TYPE_SHARED;
pluginSession = openPluginSession(plugin, root, pluginSessionType);
}
wrappedPlugin = new PluginSessionWrapper(pluginRegistration,
pluginSession, pluginSessionType, root, securityContext);
// this requires synchronized access
dataPlugins.add(wrappedPlugin);
return wrappedPlugin;
}
private Node getRootForPlugin(PluginRegistration plugin, Node node) {
Node[] roots = plugin.getDataRoots();
for (int i = 0; i < roots.length; i++)
if (roots[i].isAncestorOf(node))
return roots[i].isAncestorOf(subtreeNode) ? subtreeNode
: roots[i];
throw new IllegalStateException("Internal error, plugin root not "
+ "found for a URI handled by the plugin.");
}
private ReadableDataSession openPluginSession(final DataPlugin plugin,
Node root, final int pluginSessionType) throws DmtException {
final DmtSession session = this;
final String[] rootPath = root.getPath();
ReadableDataSession pluginSession;
try {
pluginSession = (ReadableDataSession) AccessController
.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws DmtException {
switch (pluginSessionType) {
case LOCK_TYPE_EXCLUSIVE :
return plugin.openReadWriteSession(
rootPath,
session);
case LOCK_TYPE_ATOMIC :
return plugin.openAtomicSession(
rootPath,
session);
default : // LOCK_TYPE_SHARED
return plugin.openReadOnlySession(
rootPath,
session);
}
}
}, securityContext);
}
catch (PrivilegedActionException e) {
throw (DmtException) e.getException();
}
return pluginSession;
}
// precondition: path must be absolute
private void checkNode(Node node, int check) throws DmtException {
boolean shouldExist = (check != SHOULD_NOT_EXIST);
if (getReadableDataSession(node).isNodeUri(node.getPath()) != shouldExist)
throw new DmtException(node.getUri(),
shouldExist ? DmtException.NODE_NOT_FOUND
: DmtException.NODE_ALREADY_EXISTS,
"The specified URI should point to "
+ (shouldExist ? "an existing" : "a non-existent")
+ " node to perform the requested operation.");
boolean shouldBeLeaf = (check == SHOULD_BE_LEAF);
boolean shouldBeInterior = (check == SHOULD_BE_INTERIOR);
if ((shouldBeLeaf || shouldBeInterior)
&& isLeafNodeNoCheck(node) != shouldBeLeaf)
throw new DmtException(node.getUri(),
DmtException.COMMAND_NOT_ALLOWED,
"The specified URI should point to "
+ (shouldBeLeaf ? "a leaf" : "an internal")
+ " node to perform the requested operation.");
}
// precondition: checkNode() must have been called for the given uri
private void checkNodeCapability(Node node, int capability)
throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode != null && !metaNode.can(capability))
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Node meta-data does not allow the "
+ capabilityName(capability)
+ " operation for this node.");
// default for all capabilities is 'true', if no meta-data is provided
}
private void checkValue(Node node, DmtData data) throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode == null)
return;
// if default data was requested, only check that there is a default
if (data == null) {
if (metaNode.getDefault() == null)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"This node has no default value in the meta-data.");
return;
}
if (!metaNode.isValidValue(data))
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"The specified node value is not valid according to "
+ "the meta-data.");
// not checking value meta-data constraints individually, but leaving
// this to the isValidValue method of the meta-node
/*
* if((metaNode.getFormat() & data.getFormat()) == 0) throw new
* DmtException(uri, DmtException.METADATA_MISMATCH, "The format of the
* specified value is not in the list of " + "valid formats given in the
* node meta-data."); if(data.getFormat() == DmtData.FORMAT_INTEGER) {
* if(metaNode.getMax() < data.getInt()) throw new DmtException(uri,
* DmtException.METADATA_MISMATCH, "Attempting to set too large integer,
* meta-data " + "specifies the maximum value of " + metaNode.getMax());
* if(metaNode.getMin() > data.getInt()) throw new DmtException(uri,
* DmtException.METADATA_MISMATCH, "Attempting to set too small integer,
* meta-data " + "specifies the minimum value of " + metaNode.getMin()); }
*
* DmtData[] validValues = metaNode.getValidValues(); if(validValues !=
* null && !Arrays.asList(validValues).contains(data)) throw new
* DmtException(uri, DmtException.METADATA_MISMATCH, "Specified value is
* not in the list of valid values " + "given in the node meta-data.");
*/
}
private void checkNewNode(Node node) throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode == null)
return;
if (metaNode.getScope() == MetaNode.PERMANENT)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot create permanent node.");
if (!metaNode.isValidName(node.getLastSegment()))
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"The specified node name is not valid according to "
+ "the meta-data.");
// not checking valid name list from meta-data, but leaving this to the
// isValidName method of the meta-node
/*
* String[] validNames = metaNode.getValidNames(); if(validNames != null &&
* !Arrays.asList(validNames).contains(name)) throw new
* DmtException(uri, DmtException.METADATA_MISMATCH, "The specified node
* name is not in the list of valid " + "names specified in the node
* meta-data.");
*/
}
private void checkMimeType(Node node, String type) throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode == null)
return;
if (type == null) // default MIME type was requested
return;
int sep = type.indexOf('/');
if (sep == -1 || sep == 0 || sep == type.length() - 1)
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"The given type string does not contain a MIME type.");
String[] validMimeTypes = metaNode.getMimeTypes();
if (validMimeTypes != null
&& !Arrays.asList(validMimeTypes).contains(type))
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"The specified MIME type is not in the list of valid "
+ "types in the node meta-data.");
}
private void checkMaxOccurrence(Node node) throws DmtException {
MetaNode metaNode = getMetaNodeNoCheck(node);
if (metaNode == null)
return;
// If maxOccurrence == 1 then it is not a multi-node, so it can be
// created if it did not exist before. If maxOccurrence > 1, it can
// only be created if the number of existing nodes does not reach it.
int maxOccurrence = metaNode.getMaxOccurrence();
if (maxOccurrence != Integer.MAX_VALUE && maxOccurrence > 1
&& getNodeCardinality(node) >= maxOccurrence)
throw new DmtException(node.getUri(),
DmtException.METADATA_MISMATCH,
"Cannot create the specified node, meta-data maximizes "
+ "the number of instances of this node to "
+ maxOccurrence + ".");
}
private Node makeAbsoluteUri(String nodeUri) throws DmtException {
Node node = Node.validateAndNormalizeUri(nodeUri);
if (node.isAbsolute()) {
checkNodeIsInSession(node, "");
return node;
}
return subtreeNode.appendRelativeNode(node);
}
private void checkNodeIsInSession(Node node, String uriExplanation)
throws DmtException {
if (!subtreeNode.isAncestorOf(node))
throw new DmtException(node.getUri(), DmtException.COMMAND_FAILED,
"Specified URI " + uriExplanation + "points outside the "
+ "subtree of this session.");
}
private boolean ensureInteriorAncestors(Node node, boolean sendEvent)
throws DmtException {
checkNodeIsInSession(node, "(needed to ensure "
+ "a proper creation point for the new node) ");
if (!getReadableDataSession(node).isNodeUri(node.getPath())) {
commonCreateInteriorNode(node, null, sendEvent, true);
return true;
}
checkNode(node, SHOULD_BE_INTERIOR);
return false;
}
private static DmtException getWriteException(int lockMode, Node node) {
boolean atomic = (lockMode == LOCK_TYPE_ATOMIC);
return new DmtException(node.getUri(),
atomic ? DmtException.TRANSACTION_ERROR
: DmtException.COMMAND_NOT_ALLOWED,
"The plugin handling the requested node does not support "
+ (atomic ? "" : "non-") + "atomic writing.");
}
// remove null entries from the returned array (if it is non-null)
private static List normalizeChildNodeNames(String[] pluginChildNodes) {
List processedChildNodes = new Vector();
if (pluginChildNodes != null)
for (int i = 0; i < pluginChildNodes.length; i++)
if (pluginChildNodes[i] != null)
processedChildNodes.add(pluginChildNodes[i]);
return processedChildNodes;
}
// Move ACL entries from 'node' to 'newNode'.
// If 'newNode' is 'null', the ACL entries are removed (moved to nowhere).
private static void moveAclEntries(Node node, Node newNode) {
synchronized (acls) {
Hashtable newEntries = null;
if (newNode != null)
newEntries = new Hashtable();
Iterator i = acls.entrySet().iterator();
while (i.hasNext()) {
Map.Entry entry = (Map.Entry) i.next();
Node relativeNode = node.getRelativeNode((Node) entry.getKey());
if (relativeNode != null) {
if (newNode != null)
newEntries.put(
newNode.appendRelativeNode(relativeNode),
entry.getValue());
i.remove();
}
}
if (newNode != null)
acls.putAll(newEntries);
}
}
private static Acl getEffectiveNodeAclNoCheck(Node node) {
Acl acl;
synchronized (acls) {
acl = (Acl) acls.get(node);
// must finish whithout NullPointerException, because root ACL must
// not be empty
while (acl == null || isEmptyAcl(acl)) {
node = node.getParent();
acl = (Acl) acls.get(node);
}
}
return acl;
}
// precondition: node parameter must be an absolute node
// throws SecurityException if principal is local user, and sufficient
// privileges are missing
private static void checkNodeOrParentPermission(String name, Node node,
int actions, boolean checkParent) throws DmtException {
if (node.isRoot())
checkParent = false;
Node parent = null;
if (checkParent) // not null, as the uri is absolute but not "."
parent = node.getParent();
if (name != null) {
// succeed if the principal has the required permissions on the
// given uri, OR if the checkParent parameter is true and the
// principal has the required permissions for the parent uri
if (!(hasAclPermission(node, name, actions) || checkParent
&& hasAclPermission(parent, name, actions)))
throw new DmtException(node.getUri(),
DmtException.PERMISSION_DENIED, "Principal '" + name
+ "' does not have the required permissions ("
+ writeAclCommands(actions) + ") on the node "
+ (checkParent ? "or its parent " : "")
+ "to perform this operation.");
}
else { // not doing local permission check if ACL check was done
String actionString = writeAclCommands(actions);
checkLocalPermission(node, actionString);
if (checkParent)
checkLocalPermission(parent, actionString);
}
}
private static boolean hasAclPermission(Node node, String name, int actions) {
return getEffectiveNodeAclNoCheck(node).isPermitted(name, actions);
}
private static void checkLocalPermission(Node node, String actions) {
SecurityManager sm = System.getSecurityManager();
if (sm != null)
sm.checkPermission(new DmtPermission(node.getUri(), actions));
}
private static String capabilityName(int capability) {
switch (capability) {
case MetaNode.CMD_ADD :
return "Add";
case MetaNode.CMD_DELETE :
return "Delete";
case MetaNode.CMD_EXECUTE :
return "Execute";
case MetaNode.CMD_GET :
return "Get";
case MetaNode.CMD_REPLACE :
return "Replace";
}
// never reached
throw new IllegalArgumentException(
"Unknown meta-data capability constant " + capability + ".");
}
// ENHANCE define constants for the action names in the Acl class
private static String writeAclCommands(int actions) {
String commands = null;
commands = writeCommand(commands, actions, Acl.ADD, "Add");
commands = writeCommand(commands, actions, Acl.DELETE, "Delete");
commands = writeCommand(commands, actions, Acl.EXEC, "Exec");
commands = writeCommand(commands, actions, Acl.GET, "Get");
commands = writeCommand(commands, actions, Acl.REPLACE, "Replace");
return (commands != null) ? commands : "";
}
private static String writeCommand(String base, int actions, int action,
String entry) {
if ((actions & action) != 0)
return (base != null) ? base + ',' + entry : entry;
return base;
}
private static boolean isEmptyAcl(Acl acl) {
return acl.getPermissions("*") == 0 && acl.getPrincipals().length == 0;
}
static void init_acls() {
acls = new Hashtable();
acls.put(Node.ROOT_NODE, new Acl("Add=*&Get=*&Replace=*"));
}
public String toString() {
StringBuffer info = new StringBuffer();
info.append("DmtSessionImpl(");
info.append(principal).append(", ");
info.append(subtreeNode).append(", ");
if (lockMode == LOCK_TYPE_ATOMIC)
info.append("atomic");
else if (lockMode == LOCK_TYPE_EXCLUSIVE)
info.append("exclusive");
else
info.append("shared");
info.append(", ");
if (state == STATE_CLOSED)
info.append("closed");
else if (state == STATE_OPEN)
info.append("open");
else
info.append("invalid");
return info.append(')').toString();
}
}
// Sets of node URIs for the different types of changes.
// Only used in atomic transactions.
class EventList {
// two-parameter event types
static final int RENAME = 0;
static final int COPY = 1;
// single-parameter event types
static final int ADD = 2;
static final int DELETE = 3;
static final int REPLACE = 4;
private static final int TWO_PARAM_EVENT_TYPE_NUM = 2;
private static final int EVENT_TYPE_NUM = 5;
private List[] nodeLists = new List[EVENT_TYPE_NUM];
private List[] newNodeLists = new List[TWO_PARAM_EVENT_TYPE_NUM];
EventList() {
for (int i = 0; i < EVENT_TYPE_NUM; i++)
nodeLists[i] = new Vector();
for (int i = 0; i < TWO_PARAM_EVENT_TYPE_NUM; i++)
newNodeLists[i] = new Vector();
}
synchronized void clear() {
for (int i = 0; i < EVENT_TYPE_NUM; i++)
nodeLists[i].clear();
for (int i = 0; i < TWO_PARAM_EVENT_TYPE_NUM; i++)
newNodeLists[i].clear();
}
synchronized void excludeRoot(Node root) {
int i = 0;
for (; i < TWO_PARAM_EVENT_TYPE_NUM; i++)
// cannot use iterator here because if there is any match,
// items have to be removed from both lists
for (int k = 0; k < nodeLists[i].size(); k++)
if (root.isAncestorOf((Node) nodeLists[i].get(k))
|| root.isAncestorOf((Node) newNodeLists[i].get(k))) {
nodeLists[i].remove(k);
newNodeLists[i].remove(k);
}
for (; i < EVENT_TYPE_NUM; i++) {
Iterator iterator = nodeLists[i].iterator();
while (iterator.hasNext())
if (root.isAncestorOf((Node) iterator.next()))
iterator.remove();
}
}
synchronized void add(int type, Node node) {
if (type < TWO_PARAM_EVENT_TYPE_NUM)
throw new IllegalArgumentException("Missing parameter for event.");
nodeLists[type].add(node);
}
synchronized void add(int type, Node node, Node newNode) {
if (type >= TWO_PARAM_EVENT_TYPE_NUM)
throw new IllegalArgumentException("Too many parameters for event.");
nodeLists[type].add(node);
newNodeLists[type].add(newNode);
}
synchronized Node[] getNodes(int type) {
return (Node[]) nodeLists[type].toArray(new Node[0]);
}
synchronized Node[] getNewNodes(int type) {
if (type >= TWO_PARAM_EVENT_TYPE_NUM)
return null;
return (Node[]) newNodeLists[type].toArray(new Node[0]);
}
static String getTopic(int type) {
switch (type) {
case ADD :
return "org/osgi/service/dmt/ADDED";
case DELETE :
return "org/osgi/service/dmt/DELETED";
case REPLACE :
return "org/osgi/service/dmt/REPLACED";
case RENAME :
return "org/osgi/service/dmt/RENAMED";
case COPY :
return "org/osgi/service/dmt/COPIED";
}
throw new IllegalArgumentException("Unknown event type.");
}
}
| Minor fixes to the complex value handling (to be continued)
| org.osgi.impl.service.dmt/src/org/osgi/impl/service/dmt/DmtSessionImpl.java | Minor fixes to the complex value handling (to be continued) |
|
Java | apache-2.0 | a4cdc155d1f1285f2d71d9221e1a52a510b8cbe5 | 0 | msaperst/selenified,msaperst/selenified | /*
* Copyright 2019 Coveros, Inc.
*
* This file is part of Selenified.
*
* Selenified is licensed under the Apache License, Version
* 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.coveros.selenified.utilities;
import com.coveros.selenified.Browser;
import com.coveros.selenified.exceptions.InvalidBrowserOptionsException;
import com.coveros.selenified.exceptions.InvalidHTTPException;
import com.coveros.selenified.exceptions.InvalidHubException;
import com.coveros.selenified.exceptions.InvalidProxyException;
import org.testng.ITestContext;
import org.testng.log4testng.Logger;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Properties;
/**
* Reads in properties files provided by the user in order to execute tests. These
* files can be passed in via commandline, or set in a selenified.properties file.
* This file should reside in src/test/resources. If the property exists in the
* system properties, that is returned, overridding anything in the
* selenified.properties file.
*
* @author Max Saperstone
* @version 3.2.0
* @lastupdate 3/29/2019
*/
public class Property {
private static final String PROXY_ISNT_SET = "Proxy isn't set";
private static final double WAIT = 5;
private Property() {
}
private static final Logger log = Logger.getLogger(Property.class);
private static final String SELENIFIED = "src/test/resources/selenified.properties";
private static final String DEFAULT_WAIT = "defaultWait";
private static final String GENERATE_PDF = "generatePDF";
private static final String PACKAGE_RESULTS = "packageResults";
private static final String HUB = "hub";
private static final String PROXY = "proxy";
public static final String APP_URL = "appURL";
public static final String BROWSER = "browser";
public static final String HEADLESS = "headless";
public static final String OPTIONS = "options";
/**
* Retrieves the specified program property. if it exists from the system properties, that is returned, overridding
* all other values. Otherwise, if it exists from the properties file, that is returned, otherwise, null is returned
*
* @param property - what property value to return
* @return String: the property value, null if unset
*/
private static String getProgramProperty(String property) {
if (System.getProperty(property) != null) {
return System.getProperty(property).trim();
}
Properties prop = new Properties();
try (InputStream input = new FileInputStream(SELENIFIED)) {
prop.load(input);
} catch (IOException e) {
log.info(e);
}
String fullProperty = prop.getProperty(property);
if (fullProperty != null) {
fullProperty = fullProperty.trim();
}
return fullProperty;
}
/**
* Determines if we are supposed to generate a pdf of the results or not
*
* @return boolean: generate a pdf or not
*/
public static boolean generatePDF() {
String generatePDF = getProgramProperty(GENERATE_PDF);
if (generatePDF == null) {
return false;
}
if ("".equals(generatePDF)) {
return true;
}
return "true".equalsIgnoreCase(generatePDF);
}
/**
* Determines if we are supposed to zip up the results or not
*
* @return boolean: zip up the results or not
*/
public static boolean packageResults() {
String packageResults = getProgramProperty(PACKAGE_RESULTS);
if (packageResults == null) {
return false;
}
if ("".equals(packageResults)) {
return true;
}
return "true".equalsIgnoreCase(packageResults);
}
/**
* Determines if a hub property is set. This could be to sauce, grid, or any other cloud tool.
* This should be provided with the protocol and address, but leaving out the /wd/hub
*
* @return boolean: is a hub location set
*/
public static boolean isHubSet() {
String hub = getProgramProperty(HUB);
return hub != null && !"".equals(hub);
}
/**
* Retrieves the hub property if it is set. This could be to sauce, grid, or any other cloud tool.
* This should be provided with the protocol and address, but leaving out the /wd/hub
*
* @return String: the set hub address, null if none are set
*/
public static String getHub() throws InvalidHubException {
String hub = getProgramProperty(HUB);
if (hub == null || "".equals(hub)) {
throw new InvalidHubException("Hub isn't set");
}
return hub;
}
/**
* Determines if a proxy property is set. This could be to something local, or in the cloud.
* Provide the protocol, address, and port
*
* @return boolean: is a proxy set
*/
public static boolean isProxySet() {
String proxy = getProgramProperty(PROXY);
return proxy != null && !"".equals(proxy);
}
/**
* Retrieves the proxy property if it is set. This could be to something local, or in the cloud.
* Provide the protocol, address, and port
*
* @return String: the set proxy address, null if none are set
*/
public static String getProxy() throws InvalidProxyException {
String proxy = getProgramProperty(PROXY);
if (proxy == null) {
throw new InvalidProxyException(PROXY_ISNT_SET);
}
String[] proxyParts = proxy.split(":");
if (proxyParts.length != 2) {
throw new InvalidProxyException("Proxy '" + proxy + "' isn't valid. Must contain address and port, without protocol");
}
try {
Integer.parseInt(proxyParts[1]);
} catch (NumberFormatException e) {
throw new InvalidProxyException("Proxy '" + proxy + "' isn't valid. Must contain address and port, without protocol. Invalid port provided. " + e);
}
return proxy;
}
public static String getProxyHost() throws InvalidProxyException {
return getProxy().split(":")[0];
}
public static int getProxyPort() throws InvalidProxyException {
return Integer.parseInt(getProxy().split(":")[1]);
}
/**
* Obtains the application under test, as a URL. If the site was provided as
* a system property, that value will override whatever was set in the
* particular test suite. If no site was set, null will be returned, which
* will causes the tests to error out
*
* @param clazz - the test suite class, used for making threadsafe storage of
* application, allowing suites to have independent applications
* under test, run at the same time
* @param context - the TestNG context associated with the test suite, used for
* storing app url information
* @return String: the URL of the application under test
*/
public static String getAppURL(String clazz, ITestContext context) throws InvalidHTTPException {
String appURL = checkAppURL(null, (String) context.getAttribute(clazz + APP_URL), "The provided app via test case setup '");
Properties prop = new Properties();
try (InputStream input = new FileInputStream(SELENIFIED)) {
prop.load(input);
} catch (IOException e) {
log.info(e);
}
appURL = checkAppURL(appURL, prop.getProperty(APP_URL), "The provided app via Properties file '");
appURL = checkAppURL(appURL, System.getProperty(APP_URL), "The provided app via System Properties '");
if (appURL != null) {
return appURL;
}
throw new InvalidHTTPException("There was not a valid app provided to test. Please properly set the 'appURL'");
}
/**
* A helper method to getAppURL, which checks the provided URL, and if it is valid, overrides the initially
* provided one.
*
* @param originalAppURL - the original and currently set app url
* @param newAppURL - the new app url to check
* @param s - the location being checked (for reporting)
* @return String: the most valid URL, new if it is valid, original if not
*/
private static String checkAppURL(String originalAppURL, String newAppURL, String s) {
if (newAppURL != null && !"".equals(newAppURL)) {
if (!newAppURL.toLowerCase().startsWith("http") && !newAppURL.toLowerCase().startsWith("file")) {
newAppURL = "http://" + newAppURL;
}
try {
new URL(newAppURL);
return newAppURL;
} catch (MalformedURLException e) {
log.error(s + newAppURL + "' is not a valud URL.");
}
}
return originalAppURL;
}
/**
* Retrieves the browser property if it is set. This can be a single browser name, or browser details. If it is
* not set, HTMLUnit will be returned as the default browser to use
*
* @return String: the set browser
*/
public static String getBrowser() {
String browser = getProgramProperty(BROWSER);
if (browser == null || "".equals(browser)) {
browser = Browser.BrowserName.HTMLUNIT.toString();
}
return browser;
}
/**
* Determines if the headless parameter was set, to have the browser run in headless mode. This only
* can be used for Chrome and Firefox.
*
* @return boolean: is headless set or not
*/
public static boolean runHeadless() {
String headless = getProgramProperty(HEADLESS);
if (headless == null) {
return false;
}
if ("".equals(headless)) {
return true;
}
return "true".equalsIgnoreCase(headless);
}
/**
* Determines if options are set.
*
* @return boolean: are options set or not
*/
public static boolean areOptionsSet() {
String options = getProgramProperty(OPTIONS);
return options != null && !"".equals(options);
}
/**
* Retrieves the set options
*
* @return String: the options, null if none are set
*/
public static String getOptions() throws InvalidBrowserOptionsException {
String options = getProgramProperty(OPTIONS);
if (options == null || "".equals(options)) {
throw new InvalidBrowserOptionsException("Browser options aren't set");
}
return options;
}
public static double getDefaultWait() {
String defaultWait = getProgramProperty(DEFAULT_WAIT);
if (defaultWait == null || "".equals(defaultWait)) {
return WAIT;
}
try {
return Double.valueOf(defaultWait);
} catch (Exception e) {
log.error("Provided default wait needs to be a double. " + e);
return WAIT;
}
}
}
| src/main/java/com/coveros/selenified/utilities/Property.java | /*
* Copyright 2019 Coveros, Inc.
*
* This file is part of Selenified.
*
* Selenified is licensed under the Apache License, Version
* 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.coveros.selenified.utilities;
import com.coveros.selenified.Browser;
import com.coveros.selenified.exceptions.InvalidBrowserOptionsException;
import com.coveros.selenified.exceptions.InvalidHTTPException;
import com.coveros.selenified.exceptions.InvalidHubException;
import com.coveros.selenified.exceptions.InvalidProxyException;
import org.testng.ITestContext;
import org.testng.log4testng.Logger;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Properties;
/**
* Reads in properties files provided by the user in order to execute tests. These
* files can be passed in via commandline, or set in a selenified.properties file.
* This file should reside in src/test/resources. If the property exists in the
* system properties, that is returned, overridding anything in the
* selenified.properties file.
*
* @author Max Saperstone
* @version 3.2.0
* @lastupdate 3/29/2019
*/
public class Property {
private static final String PROXY_ISNT_SET = "Proxy isn't set";
private static final double wait = 5;
private Property() {
}
private static final Logger log = Logger.getLogger(Property.class);
private static final String SELENIFIED = "src/test/resources/selenified.properties";
private static final String DEFAULT_WAIT = "defaultWait";
private static final String GENERATE_PDF = "generatePDF";
private static final String PACKAGE_RESULTS = "packageResults";
private static final String HUB = "hub";
private static final String PROXY = "proxy";
public static final String APP_URL = "appURL";
public static final String BROWSER = "browser";
public static final String HEADLESS = "headless";
public static final String OPTIONS = "options";
/**
* Retrieves the specified program property. if it exists from the system properties, that is returned, overridding
* all other values. Otherwise, if it exists from the properties file, that is returned, otherwise, null is returned
*
* @param property - what property value to return
* @return String: the property value, null if unset
*/
private static String getProgramProperty(String property) {
if (System.getProperty(property) != null) {
return System.getProperty(property).trim();
}
Properties prop = new Properties();
try (InputStream input = new FileInputStream(SELENIFIED)) {
prop.load(input);
} catch (IOException e) {
log.info(e);
}
String fullProperty = prop.getProperty(property);
if (fullProperty != null) {
fullProperty = fullProperty.trim();
}
return fullProperty;
}
/**
* Determines if we are supposed to generate a pdf of the results or not
*
* @return boolean: generate a pdf or not
*/
public static boolean generatePDF() {
String generatePDF = getProgramProperty(GENERATE_PDF);
if (generatePDF == null) {
return false;
}
if ("".equals(generatePDF)) {
return true;
}
return "true".equalsIgnoreCase(generatePDF);
}
/**
* Determines if we are supposed to zip up the results or not
*
* @return boolean: zip up the results or not
*/
public static boolean packageResults() {
String packageResults = getProgramProperty(PACKAGE_RESULTS);
if (packageResults == null) {
return false;
}
if ("".equals(packageResults)) {
return true;
}
return "true".equalsIgnoreCase(packageResults);
}
/**
* Determines if a hub property is set. This could be to sauce, grid, or any other cloud tool.
* This should be provided with the protocol and address, but leaving out the /wd/hub
*
* @return boolean: is a hub location set
*/
public static boolean isHubSet() {
String hub = getProgramProperty(HUB);
return hub != null && !"".equals(hub);
}
/**
* Retrieves the hub property if it is set. This could be to sauce, grid, or any other cloud tool.
* This should be provided with the protocol and address, but leaving out the /wd/hub
*
* @return String: the set hub address, null if none are set
*/
public static String getHub() throws InvalidHubException {
String hub = getProgramProperty(HUB);
if (hub == null || "".equals(hub)) {
throw new InvalidHubException("Hub isn't set");
}
return hub;
}
/**
* Determines if a proxy property is set. This could be to something local, or in the cloud.
* Provide the protocol, address, and port
*
* @return boolean: is a proxy set
*/
public static boolean isProxySet() {
String proxy = getProgramProperty(PROXY);
return proxy != null && !"".equals(proxy);
}
/**
* Retrieves the proxy property if it is set. This could be to something local, or in the cloud.
* Provide the protocol, address, and port
*
* @return String: the set proxy address, null if none are set
*/
public static String getProxy() throws InvalidProxyException {
String proxy = getProgramProperty(PROXY);
if (proxy == null) {
throw new InvalidProxyException(PROXY_ISNT_SET);
}
String[] proxyParts = proxy.split(":");
if (proxyParts.length != 2) {
throw new InvalidProxyException("Proxy '" + proxy + "' isn't valid. Must contain address and port, without protocol");
}
try {
Integer.parseInt(proxyParts[1]);
} catch (NumberFormatException e) {
throw new InvalidProxyException("Proxy '" + proxy + "' isn't valid. Must contain address and port, without protocol. Invalid port provided. " + e);
}
return proxy;
}
public static String getProxyHost() throws InvalidProxyException {
return getProxy().split(":")[0];
}
public static int getProxyPort() throws InvalidProxyException {
return Integer.parseInt(getProxy().split(":")[1]);
}
/**
* Obtains the application under test, as a URL. If the site was provided as
* a system property, that value will override whatever was set in the
* particular test suite. If no site was set, null will be returned, which
* will causes the tests to error out
*
* @param clazz - the test suite class, used for making threadsafe storage of
* application, allowing suites to have independent applications
* under test, run at the same time
* @param context - the TestNG context associated with the test suite, used for
* storing app url information
* @return String: the URL of the application under test
*/
public static String getAppURL(String clazz, ITestContext context) throws InvalidHTTPException {
String appURL = checkAppURL(null, (String) context.getAttribute(clazz + APP_URL), "The provided app via test case setup '");
Properties prop = new Properties();
try (InputStream input = new FileInputStream(SELENIFIED)) {
prop.load(input);
} catch (IOException e) {
log.info(e);
}
appURL = checkAppURL(appURL, prop.getProperty(APP_URL), "The provided app via Properties file '");
appURL = checkAppURL(appURL, System.getProperty(APP_URL), "The provided app via System Properties '");
if (appURL != null) {
return appURL;
}
throw new InvalidHTTPException("There was not a valid app provided to test. Please properly set the 'appURL'");
}
/**
* A helper method to getAppURL, which checks the provided URL, and if it is valid, overrides the initially
* provided one.
*
* @param originalAppURL - the original and currently set app url
* @param newAppURL - the new app url to check
* @param s - the location being checked (for reporting)
* @return String: the most valid URL, new if it is valid, original if not
*/
private static String checkAppURL(String originalAppURL, String newAppURL, String s) {
if (newAppURL != null && !"".equals(newAppURL)) {
if (!newAppURL.toLowerCase().startsWith("http") && !newAppURL.toLowerCase().startsWith("file")) {
newAppURL = "http://" + newAppURL;
}
try {
new URL(newAppURL);
return newAppURL;
} catch (MalformedURLException e) {
log.error(s + newAppURL + "' is not a valud URL.");
}
}
return originalAppURL;
}
/**
* Retrieves the browser property if it is set. This can be a single browser name, or browser details. If it is
* not set, HTMLUnit will be returned as the default browser to use
*
* @return String: the set browser
*/
public static String getBrowser() {
String browser = getProgramProperty(BROWSER);
if (browser == null || "".equals(browser)) {
browser = Browser.BrowserName.HTMLUNIT.toString();
}
return browser;
}
/**
* Determines if the headless parameter was set, to have the browser run in headless mode. This only
* can be used for Chrome and Firefox.
*
* @return boolean: is headless set or not
*/
public static boolean runHeadless() {
String headless = getProgramProperty(HEADLESS);
if (headless == null) {
return false;
}
if ("".equals(headless)) {
return true;
}
return "true".equalsIgnoreCase(headless);
}
/**
* Determines if options are set.
*
* @return boolean: are options set or not
*/
public static boolean areOptionsSet() {
String options = getProgramProperty(OPTIONS);
return options != null && !"".equals(options);
}
/**
* Retrieves the set options
*
* @return String: the options, null if none are set
*/
public static String getOptions() throws InvalidBrowserOptionsException {
String options = getProgramProperty(OPTIONS);
if (options == null || "".equals(options)) {
throw new InvalidBrowserOptionsException("Browser options aren't set");
}
return options;
}
public static double getDefaultWait() {
String defaultWait = getProgramProperty(DEFAULT_WAIT);
if (defaultWait == null || "".equals(defaultWait)) {
return wait;
}
try {
return Double.valueOf(defaultWait);
} catch (Exception e) {
log.error("Provided default wait needs to be a double. " + e);
return wait;
}
}
}
| Renaming wait var
| src/main/java/com/coveros/selenified/utilities/Property.java | Renaming wait var |
|
Java | apache-2.0 | 260e93d6d3144256ae6418487e04dd5d4f916be7 | 0 | vam-google/google-cloud-java,vam-google/google-cloud-java,vam-google/google-cloud-java,vam-google/google-cloud-java,vam-google/google-cloud-java,vam-google/google-cloud-java | /*
* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spanner;
import static com.google.cloud.spanner.SpannerExceptionFactory.newSpannerException;
import com.google.cloud.Timestamp;
import com.google.cloud.grpc.GrpcTransportOptions;
import com.google.cloud.grpc.GrpcTransportOptions.ExecutorFactory;
import com.google.cloud.spanner.Options.QueryOption;
import com.google.cloud.spanner.Options.ReadOption;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import com.google.common.util.concurrent.Uninterruptibles;
import io.opencensus.trace.Annotation;
import io.opencensus.trace.AttributeValue;
import io.opencensus.trace.Span;
import io.opencensus.trace.Tracing;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import org.threeten.bp.Duration;
import org.threeten.bp.Instant;
/**
* Maintains a pool of sessions some of which might be prepared for write by invoking
* BeginTransaction rpc. It maintains two queues of sessions(read and write prepared) and two queues
* of waiters who are waiting for a session to become available. This class itself is thread safe
* and is meant to be used concurrently across multiple threads.
*/
final class SessionPool {
private static final Logger logger = Logger.getLogger(SessionPool.class.getName());
/**
* Wrapper around current time so that we can fake it in tests. TODO(user): Replace with Java 8
* Clock.
*/
static class Clock {
Instant instant() {
return Instant.now();
}
}
/**
* Wrapper around {@code ReadContext} that releases the session to the pool once the call is
* finished, if it is a single use context.
*/
private static class AutoClosingReadContext implements ReadContext {
private final ReadContext delegate;
private final PooledSession session;
private final boolean isSingleUse;
private boolean closed;
private AutoClosingReadContext(
ReadContext delegate, PooledSession session, boolean isSingleUse) {
this.delegate = delegate;
this.session = session;
this.isSingleUse = isSingleUse;
}
private ResultSet wrap(final ResultSet resultSet) {
session.markUsed();
if (!isSingleUse) {
return resultSet;
}
return new ForwardingResultSet(resultSet) {
@Override
public boolean next() throws SpannerException {
try {
boolean ret = super.next();
if (!ret) {
close();
}
return ret;
} catch (SpannerException e) {
if (!closed) {
session.lastException = e;
AutoClosingReadContext.this.close();
}
throw e;
}
}
@Override
public void close() {
super.close();
AutoClosingReadContext.this.close();
}
};
}
@Override
public ResultSet read(
String table, KeySet keys, Iterable<String> columns, ReadOption... options) {
return wrap(delegate.read(table, keys, columns, options));
}
@Override
public ResultSet readUsingIndex(
String table, String index, KeySet keys, Iterable<String> columns, ReadOption... options) {
return wrap(delegate.readUsingIndex(table, index, keys, columns, options));
}
@Override
@Nullable
public Struct readRow(String table, Key key, Iterable<String> columns) {
try {
session.markUsed();
return delegate.readRow(table, key, columns);
} finally {
if (isSingleUse) {
close();
}
}
}
@Override
@Nullable
public Struct readRowUsingIndex(String table, String index, Key key, Iterable<String> columns) {
try {
session.markUsed();
return delegate.readRowUsingIndex(table, index, key, columns);
} finally {
if (isSingleUse) {
close();
}
}
}
@Override
public ResultSet executeQuery(Statement statement, QueryOption... options) {
return wrap(delegate.executeQuery(statement, options));
}
@Override
public ResultSet analyzeQuery(Statement statement, QueryAnalyzeMode queryMode) {
return wrap(delegate.analyzeQuery(statement, queryMode));
}
@Override
public void close() {
if (closed) {
return;
}
closed = true;
delegate.close();
session.close();
}
}
private static class AutoClosingReadTransaction extends AutoClosingReadContext
implements ReadOnlyTransaction {
private final ReadOnlyTransaction txn;
AutoClosingReadTransaction(
ReadOnlyTransaction txn, PooledSession session, boolean isSingleUse) {
super(txn, session, isSingleUse);
this.txn = txn;
}
@Override
public Timestamp getReadTimestamp() {
return txn.getReadTimestamp();
}
}
private static class AutoClosingTransactionManager implements TransactionManager {
final TransactionManager delegate;
final PooledSession session;
private boolean closed;
AutoClosingTransactionManager(TransactionManager delegate, PooledSession session) {
this.delegate = delegate;
this.session = session;
}
@Override
public TransactionContext begin() {
return delegate.begin();
}
@Override
public void commit() {
try {
delegate.commit();
} finally {
if (getState() != TransactionState.ABORTED) {
close();
}
}
}
@Override
public void rollback() {
try {
delegate.rollback();
} finally {
close();
}
}
@Override
public TransactionContext resetForRetry() {
return delegate.resetForRetry();
}
@Override
public Timestamp getCommitTimestamp() {
return delegate.getCommitTimestamp();
}
@Override
public void close() {
if (closed) {
return;
}
closed = true;
try {
delegate.close();
} finally {
session.close();
}
}
@Override
public TransactionState getState() {
return delegate.getState();
}
}
// Exception class used just to track the stack trace at the point when a session was handed out
// from the pool.
private final class LeakedSessionException extends RuntimeException {
private static final long serialVersionUID = 1451131180314064914L;
private LeakedSessionException() {
super("Session was checked out from the pool at " + clock.instant());
}
}
private enum SessionState {
AVAILABLE,
BUSY,
CLOSING,
}
final class PooledSession implements Session {
@VisibleForTesting final Session delegate;
private volatile Instant lastUseTime;
private volatile SpannerException lastException;
private volatile LeakedSessionException leakedException;
@GuardedBy("lock")
private SessionState state;
private PooledSession(Session delegate) {
this.delegate = delegate;
this.state = SessionState.AVAILABLE;
markUsed();
}
private void markBusy() {
this.state = SessionState.BUSY;
this.leakedException = new LeakedSessionException();
}
private void markClosing() {
this.state = SessionState.CLOSING;
}
@Override
public Timestamp write(Iterable<Mutation> mutations) throws SpannerException {
try {
markUsed();
return delegate.write(mutations);
} catch (SpannerException e) {
throw lastException = e;
} finally {
close();
}
}
@Override
public Timestamp writeAtLeastOnce(Iterable<Mutation> mutations) throws SpannerException {
try {
markUsed();
return delegate.writeAtLeastOnce(mutations);
} catch (SpannerException e) {
throw lastException = e;
} finally {
close();
}
}
@Override
public ReadContext singleUse() {
try {
return new AutoClosingReadContext(delegate.singleUse(), this, true);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadContext singleUse(TimestampBound bound) {
try {
return new AutoClosingReadContext(delegate.singleUse(bound), this, true);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadOnlyTransaction singleUseReadOnlyTransaction() {
try {
return new AutoClosingReadTransaction(delegate.singleUseReadOnlyTransaction(), this, true);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadOnlyTransaction singleUseReadOnlyTransaction(TimestampBound bound) {
try {
return new AutoClosingReadTransaction(
delegate.singleUseReadOnlyTransaction(bound), this, true);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadOnlyTransaction readOnlyTransaction() {
try {
return new AutoClosingReadTransaction(delegate.readOnlyTransaction(), this, false);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadOnlyTransaction readOnlyTransaction(TimestampBound bound) {
try {
return new AutoClosingReadTransaction(delegate.readOnlyTransaction(bound), this, false);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public TransactionRunner readWriteTransaction() {
final TransactionRunner runner = delegate.readWriteTransaction();
return new TransactionRunner() {
@Override
@Nullable
public <T> T run(TransactionCallable<T> callable) {
try {
markUsed();
T result = runner.run(callable);
return result;
} catch (SpannerException e) {
throw lastException = e;
} finally {
close();
}
}
@Override
public Timestamp getCommitTimestamp() {
return runner.getCommitTimestamp();
}
};
}
@Override
public void close() {
synchronized (lock) {
numSessionsInUse--;
}
leakedException = null;
if (lastException != null && isSessionNotFound(lastException)) {
invalidateSession(this);
} else {
lastException = null;
if (state != SessionState.CLOSING) {
state = SessionState.AVAILABLE;
}
releaseSession(this);
}
}
@Override
public String getName() {
return delegate.getName();
}
@Override
public void prepareReadWriteTransaction() {
markUsed();
delegate.prepareReadWriteTransaction();
}
private void keepAlive() {
markUsed();
delegate
.singleUse(TimestampBound.ofMaxStaleness(60, TimeUnit.SECONDS))
.executeQuery(Statement.newBuilder("SELECT 1").build())
.next();
}
private void markUsed() {
lastUseTime = clock.instant();
}
@Override
public TransactionManager transactionManager() {
markUsed();
return new AutoClosingTransactionManager(delegate.transactionManager(), this);
}
}
private static final class SessionOrError {
private final PooledSession session;
private final SpannerException e;
SessionOrError(PooledSession session) {
this.session = session;
this.e = null;
}
SessionOrError(SpannerException e) {
this.session = null;
this.e = e;
}
}
private static final class Waiter {
private final SynchronousQueue<SessionOrError> waiter = new SynchronousQueue<>();
private void put(PooledSession session) {
Uninterruptibles.putUninterruptibly(waiter, new SessionOrError(session));
}
private void put(SpannerException e) {
Uninterruptibles.putUninterruptibly(waiter, new SessionOrError(e));
}
private PooledSession take() throws SpannerException {
SessionOrError s = Uninterruptibles.takeUninterruptibly(waiter);
if (s.e != null) {
throw newSpannerException(s.e);
}
return s.session;
}
}
// Background task to maintain the pool. It closes idle sessions, keeps alive sessions that have
// not been used for a user configured time and creates session if needed to bring pool up to
// minimum required sessions. We keep track of the number of concurrent sessions being used.
// The maximum value of that over a window (10 minutes) tells us how many sessions we need in the
// pool. We close the remaining sessions. To prevent bursty traffic, we smear this out over the
// window length. We also smear out the keep alive traffic over the keep alive period.
final class PoolMaintainer {
// Length of the window in millis over which we keep track of maximum number of concurrent
// sessions in use.
private final Duration windowLength = Duration.ofMillis(TimeUnit.MINUTES.toMillis(10));
// Frequency of the timer loop.
@VisibleForTesting static final long LOOP_FREQUENCY = 10 * 1000L;
// Number of loop iterations in which we need to to close all the sessions waiting for closure.
@VisibleForTesting final long numClosureCycles = windowLength.toMillis() / LOOP_FREQUENCY;
private final Duration keepAliveMilis =
Duration.ofMillis(TimeUnit.MINUTES.toMillis(options.getKeepAliveIntervalMinutes()));
// Number of loop iterations in which we need to keep alive all the sessions
@VisibleForTesting final long numKeepAliveCycles = keepAliveMilis.toMillis() / LOOP_FREQUENCY;
Instant lastResetTime = Instant.ofEpochMilli(0);
int numSessionsToClose = 0;
int sessionsToClosePerLoop = 0;
@GuardedBy("lock")
ScheduledFuture<?> scheduledFuture;
@GuardedBy("lock")
boolean running;
void init() {
// Scheduled pool maintenance worker.
synchronized (lock) {
scheduledFuture =
executor.scheduleAtFixedRate(
new Runnable() {
@Override
public void run() {
maintainPool();
}
},
LOOP_FREQUENCY,
LOOP_FREQUENCY,
TimeUnit.MILLISECONDS);
}
}
void close() {
synchronized (lock) {
scheduledFuture.cancel(false);
if (!running) {
decrementPendingClosures();
}
}
}
// Does various pool maintenance activities.
void maintainPool() {
synchronized (lock) {
if (isClosed()) {
return;
}
running = true;
}
Instant currTime = clock.instant();
closeIdleSessions(currTime);
// Now go over all the remaining sessions and see if they need to be kept alive explicitly.
keepAliveSessions(currTime);
replenishPool();
synchronized (lock) {
running = false;
if (isClosed()) {
decrementPendingClosures();
}
}
}
private void closeIdleSessions(Instant currTime) {
LinkedList<PooledSession> sessionsToClose = new LinkedList<>();
synchronized (lock) {
// Every ten minutes figure out how many sessions need to be closed then close them over
// next ten minutes.
if (currTime.isAfter(lastResetTime.plus(windowLength))) {
int sessionsToKeep =
Math.max(options.getMinSessions(), maxSessionsInUse + options.getMaxIdleSessions());
numSessionsToClose = totalSessions() - sessionsToKeep;
sessionsToClosePerLoop = (int) Math.ceil((double) numSessionsToClose / numClosureCycles);
maxSessionsInUse = 0;
lastResetTime = currTime;
}
if (numSessionsToClose > 0) {
while (sessionsToClose.size() < Math.min(numSessionsToClose, sessionsToClosePerLoop)) {
PooledSession sess =
readSessions.size() > 0 ? readSessions.poll() : writePreparedSessions.poll();
if (sess != null) {
if (sess.state != SessionState.CLOSING) {
sess.markClosing();
sessionsToClose.add(sess);
}
} else {
break;
}
}
numSessionsToClose -= sessionsToClose.size();
}
}
for (PooledSession sess : sessionsToClose) {
logger.log(Level.FINE, "Closing session {0}", sess.getName());
closeSession(sess);
}
}
private void keepAliveSessions(Instant currTime) {
long numSessionsToKeepAlive = 0;
synchronized (lock) {
// In each cycle only keep alive a subset of sessions to prevent burst of traffic.
numSessionsToKeepAlive = (long) Math.ceil((double) totalSessions() / numKeepAliveCycles);
}
// Now go over all the remaining sessions and see if they need to be kept alive explicitly.
Instant keepAliveThreshold = currTime.minus(keepAliveMilis);
// Keep chugging till there is no session that needs to be kept alive.
while (numSessionsToKeepAlive > 0) {
PooledSession sessionToKeepAlive = null;
synchronized (lock) {
sessionToKeepAlive = findSessionToKeepAlive(readSessions, keepAliveThreshold);
if (sessionToKeepAlive == null) {
sessionToKeepAlive = findSessionToKeepAlive(writePreparedSessions, keepAliveThreshold);
}
}
if (sessionToKeepAlive == null) {
break;
}
try {
logger.log(Level.FINE, "Keeping alive session " + sessionToKeepAlive.getName());
numSessionsToKeepAlive--;
sessionToKeepAlive.keepAlive();
releaseSession(sessionToKeepAlive);
} catch (SpannerException e) {
handleException(e, sessionToKeepAlive);
}
}
}
private void replenishPool() {
synchronized (lock) {
// If we have gone below min pool size, create that many sessions.
for (int i = 0;
i < options.getMinSessions() - (totalSessions() + numSessionsBeingCreated);
i++) {
createSession();
}
}
}
}
private final SessionPoolOptions options;
private final DatabaseId db;
private final SpannerImpl spanner;
private final ScheduledExecutorService executor;
private final ExecutorFactory<ScheduledExecutorService> executorFactory;
final PoolMaintainer poolMaintainer;
private final Clock clock;
private final Object lock = new Object();
@GuardedBy("lock")
private int pendingClosure;
@GuardedBy("lock")
private SettableFuture<Void> closureFuture;
@GuardedBy("lock")
private final Queue<PooledSession> readSessions = new LinkedList<>();
@GuardedBy("lock")
private final Queue<PooledSession> writePreparedSessions = new LinkedList<>();
@GuardedBy("lock")
private final Queue<Waiter> readWaiters = new LinkedList<>();
@GuardedBy("lock")
private final Queue<Waiter> readWriteWaiters = new LinkedList<>();
@GuardedBy("lock")
private int numSessionsBeingPrepared = 0;
@GuardedBy("lock")
private int numSessionsBeingCreated = 0;
@GuardedBy("lock")
private int numSessionsInUse = 0;
@GuardedBy("lock")
private int maxSessionsInUse = 0;
@GuardedBy("lock")
private final Set<PooledSession> allSessions = new HashSet<>();
/**
* Create a session pool with the given options and for the given database. It will also start
* eagerly creating sessions if {@link SessionPoolOptions#getMinSessions()} is greater than 0.
* Return pool is immediately ready for use, though getting a session might block for sessions to
* be created.
*/
static SessionPool createPool(SpannerOptions spannerOptions, DatabaseId db, SpannerImpl spanner) {
return createPool(
spannerOptions.getSessionPoolOptions(),
((GrpcTransportOptions) spannerOptions.getTransportOptions()).getExecutorFactory(),
db,
spanner);
}
static SessionPool createPool(
SessionPoolOptions poolOptions,
ExecutorFactory<ScheduledExecutorService> executorFactory,
DatabaseId db,
SpannerImpl spanner) {
return createPool(poolOptions, executorFactory, db, spanner, new Clock());
}
static SessionPool createPool(
SessionPoolOptions poolOptions,
ExecutorFactory<ScheduledExecutorService> executorFactory,
DatabaseId db,
SpannerImpl spanner,
Clock clock) {
SessionPool pool =
new SessionPool(poolOptions, executorFactory, executorFactory.get(), db, spanner, clock);
pool.initPool();
return pool;
}
private SessionPool(
SessionPoolOptions options,
ExecutorFactory<ScheduledExecutorService> executorFactory,
ScheduledExecutorService executor,
DatabaseId db,
SpannerImpl spanner,
Clock clock) {
this.options = options;
this.executorFactory = executorFactory;
this.executor = executor;
this.db = db;
this.spanner = spanner;
this.clock = clock;
this.poolMaintainer = new PoolMaintainer();
}
private void initPool() {
synchronized (lock) {
poolMaintainer.init();
for (int i = 0; i < options.getMinSessions(); i++) {
createSession();
}
}
}
private boolean isClosed() {
synchronized (lock) {
return closureFuture != null;
}
}
private void handleException(SpannerException e, PooledSession session) {
if (isSessionNotFound(e)) {
invalidateSession(session);
} else {
releaseSession(session);
}
}
private boolean isSessionNotFound(SpannerException e) {
return e.getErrorCode() == ErrorCode.NOT_FOUND && e.getMessage().contains("Session not found");
}
private void invalidateSession(PooledSession session) {
synchronized (lock) {
if (isClosed()) {
return;
}
allSessions.remove(session);
// replenish the pool.
createSession();
}
}
private PooledSession findSessionToKeepAlive(
Queue<PooledSession> queue, Instant keepAliveThreshold) {
Iterator<PooledSession> iterator = queue.iterator();
while (iterator.hasNext()) {
PooledSession session = iterator.next();
if (session.lastUseTime.isBefore(keepAliveThreshold)) {
iterator.remove();
return session;
}
}
return null;
}
/**
* Returns a session to be used for read requests to spanner. It will block if a session is not
* currently available. In case the pool is exhausted and {@link
* SessionPoolOptions#isFailIfPoolExhausted()} has been set, it will throw an exception. Returned
* session must be closed by calling {@link Session#close()}.
*
* <p>Implementation strategy:
*
* <ol>
* <li>If a read session is available, return that.
* <li>Otherwise if a writePreparedSession is available, return that.
* <li>Otherwise if a session can be created, fire a creation request.
* <li>Wait for a session to become available. Note that this can be unblocked either by a
* session being returned to the pool or a new session being created.
* </ol>
*/
Session getReadSession() throws SpannerException {
Span span = Tracing.getTracer().getCurrentSpan();
span.addAnnotation("Acquiring session");
Waiter waiter = null;
PooledSession sess = null;
synchronized (lock) {
if (closureFuture != null) {
span.addAnnotation("Pool has been closed");
throw new IllegalStateException("Pool has been closed");
}
sess = readSessions.poll();
if (sess == null) {
sess = writePreparedSessions.poll();
if (sess == null) {
span.addAnnotation("No session available");
maybeCreateSession();
waiter = new Waiter();
readWaiters.add(waiter);
} else {
span.addAnnotation("Acquired read write session");
}
} else {
span.addAnnotation("Acquired read only session");
}
}
if (waiter != null) {
logger.log(
Level.FINE,
"No session available in the pool. Blocking for one to become available/created");
span.addAnnotation("Waiting for read only session to be available");
sess = waiter.take();
}
sess.markBusy();
incrementNumSessionsInUse();
span.addAnnotation(sessionAnnotation(sess));
return sess;
}
/**
* Returns a session which has been prepared for writes by invoking BeginTransaction rpc. It will
* block if such a session is not currently available.In case the pool is exhausted and {@link
* SessionPoolOptions#isFailIfPoolExhausted()} has been set, it will throw an exception. Returned
* session must closed by invoking {@link Session#close()}.
*
* <p>Implementation strategy:
*
* <ol>
* <li>If a writePreparedSession is available, return that.
* <li>Otherwise if we have an extra session being prepared for write, wait for that.
* <li>Otherwise, if there is a read session available, start preparing that for write and wait.
* <li>Otherwise start creating a new session and wait.
* <li>Wait for write prepared session to become available. This can be unblocked either by the
* session create/prepare request we fired in above request or by a session being released
* to the pool which is then write prepared.
* </ol>
*/
Session getReadWriteSession() {
Span span = Tracing.getTracer().getCurrentSpan();
span.addAnnotation("Acquiring read write session");
Waiter waiter = null;
PooledSession sess = null;
synchronized (lock) {
if (closureFuture != null) {
throw new IllegalStateException("Pool has been closed");
}
sess = writePreparedSessions.poll();
if (sess == null) {
if (numSessionsBeingPrepared <= readWriteWaiters.size()) {
PooledSession readSession = readSessions.poll();
if (readSession != null) {
span.addAnnotation("Acquired read only session. Preparing for read write transaction");
prepareSession(readSession);
} else {
span.addAnnotation("No session available");
maybeCreateSession();
}
}
waiter = new Waiter();
readWriteWaiters.add(waiter);
} else {
span.addAnnotation("Acquired read write session");
}
}
if (waiter != null) {
logger.log(
Level.FINE,
"No session available in the pool. Blocking for one to become available/created");
span.addAnnotation("Waiting for read write session to be available");
sess = waiter.take();
}
sess.markBusy();
incrementNumSessionsInUse();
span.addAnnotation(sessionAnnotation(sess));
return sess;
}
private Annotation sessionAnnotation(Session session) {
AttributeValue sessionId = AttributeValue.stringAttributeValue(session.getName());
return Annotation.fromDescriptionAndAttributes("Using Session",
ImmutableMap.of("sessionId", sessionId));
}
private void incrementNumSessionsInUse() {
synchronized (lock) {
if (maxSessionsInUse < ++numSessionsInUse) {
maxSessionsInUse = numSessionsInUse;
}
}
}
private void maybeCreateSession() {
Span span = Tracing.getTracer().getCurrentSpan();
synchronized (lock) {
if (numWaiters() >= numSessionsBeingCreated) {
if (canCreateSession()) {
span.addAnnotation("Creating session");
createSession();
} else if (options.isFailIfPoolExhausted()) {
span.addAnnotation("Pool exhausted. Failing");
// throw specific exception
throw newSpannerException(
ErrorCode.RESOURCE_EXHAUSTED,
"No session available in the pool. Maximum number of sessions in the pool can be"
+ " overridden by invoking SessionPoolOptions#Builder#setMaxSessions. Client can be made to block"
+ " rather than fail by setting SessionPoolOptions#Builder#setBlockIfPoolExhausted.");
}
}
}
}
/**
* Releases a session back to the pool. This might cause one of the waiters to be unblocked.
*
* <p>Implementation note:
*
* <ol>
* <li>If there are no pending waiters, either add to the read sessions queue or start preparing
* for write depending on what fraction of sessions are already prepared for writes.
* <li>Otherwise either unblock a waiting reader or start preparing for a write. Exact strategy
* on which option we chose, in case there are both waiting readers and writers, is
* implemented in {@link #shouldUnblockReader}
* </ol>
*/
private void releaseSession(PooledSession session) {
Preconditions.checkNotNull(session);
synchronized (lock) {
if (closureFuture != null) {
return;
}
if (readWaiters.size() == 0 && numSessionsBeingPrepared >= readWriteWaiters.size()) {
// No pending waiters
if (shouldPrepareSession()) {
prepareSession(session);
} else {
readSessions.add(session);
}
} else if (shouldUnblockReader()) {
readWaiters.poll().put(session);
} else {
prepareSession(session);
}
}
}
private void handleCreateSessionFailure(SpannerException e) {
synchronized (lock) {
if (readWaiters.size() > 0) {
readWaiters.poll().put(e);
} else if (readWriteWaiters.size() > 0) {
readWriteWaiters.poll().put(e);
}
}
}
private void handlePrepareSessionFailure(SpannerException e, PooledSession session) {
synchronized (lock) {
if (isSessionNotFound(e)) {
invalidateSession(session);
} else if (readWriteWaiters.size() > 0) {
readWriteWaiters.poll().put(e);
} else {
releaseSession(session);
}
}
}
private void decrementPendingClosures() {
pendingClosure--;
if (pendingClosure == 0) {
closureFuture.set(null);
}
}
/**
* Close all the sessions. Once this method is invoked {@link #getReadSession()} and {@link
* #getReadWriteSession()} will start throwing {@code IllegalStateException}. The returned future
* blocks till all the sessions created in this pool have been closed.
*/
ListenableFuture<Void> closeAsync() {
ListenableFuture<Void> retFuture = null;
synchronized (lock) {
if (closureFuture != null) {
throw new IllegalStateException("Close has already been invoked");
}
// Fail all pending waiters.
Waiter waiter = readWaiters.poll();
while (waiter != null) {
waiter.put(newSpannerException(ErrorCode.INTERNAL, "Client has been closed"));
waiter = readWaiters.poll();
}
waiter = readWriteWaiters.poll();
while (waiter != null) {
waiter.put(newSpannerException(ErrorCode.INTERNAL, "Client has been closed"));
waiter = readWriteWaiters.poll();
}
closureFuture = SettableFuture.create();
retFuture = closureFuture;
pendingClosure =
totalSessions() + numSessionsBeingCreated + 1 /* For pool maintenance thread */;
poolMaintainer.close();
readSessions.clear();
writePreparedSessions.clear();
for (final PooledSession session : ImmutableList.copyOf(allSessions)) {
if (session.leakedException != null) {
logger.log(Level.WARNING, "Leaked session", session.leakedException);
}
if (session.state != SessionState.CLOSING) {
closeSessionAsync(session);
}
}
}
retFuture.addListener(
new Runnable() {
@Override
public void run() {
executorFactory.release(executor);
}
},
MoreExecutors.directExecutor());
return retFuture;
}
private boolean shouldUnblockReader() {
// This might not be the best strategy since a continuous burst of read requests can starve
// a write request. Maybe maintain a timestamp in the queue and unblock according to that
// or just flip a weighted coin.
synchronized (lock) {
int numWriteWaiters = readWriteWaiters.size() - numSessionsBeingPrepared;
return readWaiters.size() > numWriteWaiters;
}
}
private boolean shouldPrepareSession() {
synchronized (lock) {
int preparedSessions = writePreparedSessions.size() + numSessionsBeingPrepared;
return preparedSessions < Math.floor(options.getWriteSessionsFraction() * totalSessions());
}
}
private int numWaiters() {
synchronized (lock) {
return readWaiters.size() + readWriteWaiters.size();
}
}
private int totalSessions() {
synchronized (lock) {
return allSessions.size();
}
}
private void closeSessionAsync(final PooledSession sess) {
executor.submit(
new Runnable() {
@Override
public void run() {
closeSession(sess);
}
});
}
private void closeSession(PooledSession sess) {
try {
sess.delegate.close();
} catch (SpannerException e) {
// Backend will delete these sessions after a while even if we fail to close them.
if (logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE, "Failed to close session: " + sess.getName(), e);
}
} finally {
synchronized (lock) {
allSessions.remove(sess);
if (isClosed()) {
decrementPendingClosures();
return;
}
// Create a new session if needed to unblock some waiter.
if (numWaiters() > numSessionsBeingCreated) {
createSession();
}
}
}
}
private void prepareSession(final PooledSession sess) {
synchronized (lock) {
numSessionsBeingPrepared++;
}
executor.submit(
new Runnable() {
@Override
public void run() {
try {
logger.log(Level.FINE, "Preparing session");
sess.prepareReadWriteTransaction();
logger.log(Level.FINE, "Session prepared");
synchronized (lock) {
numSessionsBeingPrepared--;
if (!isClosed()) {
if (readWriteWaiters.size() > 0) {
readWriteWaiters.poll().put(sess);
} else if (readWaiters.size() > 0) {
readWaiters.poll().put(sess);
} else {
writePreparedSessions.add(sess);
}
}
}
} catch (Throwable t) {
synchronized (lock) {
numSessionsBeingPrepared--;
if (!isClosed()) {
handlePrepareSessionFailure(newSpannerException(t), sess);
}
}
}
}
});
}
private boolean canCreateSession() {
synchronized (lock) {
return totalSessions() + numSessionsBeingCreated < options.getMaxSessions();
}
}
private void createSession() {
logger.log(Level.FINE, "Creating session");
synchronized (lock) {
numSessionsBeingCreated++;
executor.submit(
new Runnable() {
@Override
public void run() {
Session session = null;
try {
session = spanner.createSession(db);
logger.log(Level.FINE, "Session created");
} catch (Throwable t) {
// Expose this to customer via a metric.
synchronized (lock) {
numSessionsBeingCreated--;
if (isClosed()) {
decrementPendingClosures();
}
handleCreateSessionFailure(newSpannerException(t));
}
return;
}
boolean closeSession = false;
PooledSession pooledSession = null;
synchronized (lock) {
pooledSession = new PooledSession(session);
numSessionsBeingCreated--;
if (closureFuture != null) {
closeSession = true;
} else {
Preconditions.checkState(totalSessions() <= options.getMaxSessions() - 1);
allSessions.add(pooledSession);
releaseSession(pooledSession);
}
}
if (closeSession) {
closeSession(pooledSession);
}
}
});
}
}
}
| google-cloud-spanner/src/main/java/com/google/cloud/spanner/SessionPool.java | /*
* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spanner;
import static com.google.cloud.spanner.SpannerExceptionFactory.newSpannerException;
import com.google.cloud.Timestamp;
import com.google.cloud.grpc.GrpcTransportOptions;
import com.google.cloud.grpc.GrpcTransportOptions.ExecutorFactory;
import com.google.cloud.spanner.Options.QueryOption;
import com.google.cloud.spanner.Options.ReadOption;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import com.google.common.util.concurrent.Uninterruptibles;
import io.opencensus.trace.Annotation;
import io.opencensus.trace.AttributeValue;
import io.opencensus.trace.Span;
import io.opencensus.trace.Tracing;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import org.threeten.bp.Duration;
import org.threeten.bp.Instant;
/**
* Maintains a pool of sessions some of which might be prepared for write by invoking
* BeginTransaction rpc. It maintains two queues of sessions(read and write prepared) and two queues
* of waiters who are waiting for a session to become available. This class itself is thread safe
* and is meant to be used concurrently across multiple threads.
*/
final class SessionPool {
private static final Logger logger = Logger.getLogger(SessionPool.class.getName());
/**
* Wrapper around current time so that we can fake it in tests. TODO(user): Replace with Java 8
* Clock.
*/
static class Clock {
Instant instant() {
return Instant.now();
}
}
/**
* Wrapper around {@code ReadContext} that releases the session to the pool once the call is
* finished, if it is a single use context.
*/
private static class AutoClosingReadContext implements ReadContext {
private final ReadContext delegate;
private final PooledSession session;
private final boolean isSingleUse;
private boolean closed;
private AutoClosingReadContext(
ReadContext delegate, PooledSession session, boolean isSingleUse) {
this.delegate = delegate;
this.session = session;
this.isSingleUse = isSingleUse;
}
private ResultSet wrap(final ResultSet resultSet) {
session.markUsed();
if (!isSingleUse) {
return resultSet;
}
return new ForwardingResultSet(resultSet) {
@Override
public boolean next() throws SpannerException {
try {
boolean ret = super.next();
if (!ret) {
close();
}
return ret;
} catch (SpannerException e) {
if (!closed) {
session.lastException = e;
AutoClosingReadContext.this.close();
}
throw e;
}
}
@Override
public void close() {
super.close();
AutoClosingReadContext.this.close();
}
};
}
@Override
public ResultSet read(
String table, KeySet keys, Iterable<String> columns, ReadOption... options) {
return wrap(delegate.read(table, keys, columns, options));
}
@Override
public ResultSet readUsingIndex(
String table, String index, KeySet keys, Iterable<String> columns, ReadOption... options) {
return wrap(delegate.readUsingIndex(table, index, keys, columns, options));
}
@Override
@Nullable
public Struct readRow(String table, Key key, Iterable<String> columns) {
try {
session.markUsed();
return delegate.readRow(table, key, columns);
} finally {
if (isSingleUse) {
close();
}
}
}
@Override
@Nullable
public Struct readRowUsingIndex(String table, String index, Key key, Iterable<String> columns) {
try {
session.markUsed();
return delegate.readRowUsingIndex(table, index, key, columns);
} finally {
if (isSingleUse) {
close();
}
}
}
@Override
public ResultSet executeQuery(Statement statement, QueryOption... options) {
return wrap(delegate.executeQuery(statement, options));
}
@Override
public ResultSet analyzeQuery(Statement statement, QueryAnalyzeMode queryMode) {
return wrap(delegate.analyzeQuery(statement, queryMode));
}
@Override
public void close() {
if (closed) {
return;
}
closed = true;
delegate.close();
session.close();
}
}
private static class AutoClosingReadTransaction extends AutoClosingReadContext
implements ReadOnlyTransaction {
private final ReadOnlyTransaction txn;
AutoClosingReadTransaction(
ReadOnlyTransaction txn, PooledSession session, boolean isSingleUse) {
super(txn, session, isSingleUse);
this.txn = txn;
}
@Override
public Timestamp getReadTimestamp() {
return txn.getReadTimestamp();
}
}
private static class AutoClosingTransactionManager implements TransactionManager {
final TransactionManager delegate;
final PooledSession session;
private boolean closed;
AutoClosingTransactionManager(TransactionManager delegate, PooledSession session) {
this.delegate = delegate;
this.session = session;
}
@Override
public TransactionContext begin() {
return delegate.begin();
}
@Override
public void commit() {
try {
delegate.commit();
} finally {
if (getState() != TransactionState.ABORTED) {
close();
}
}
}
@Override
public void rollback() {
try {
delegate.rollback();
} finally {
close();
}
}
@Override
public TransactionContext resetForRetry() {
return delegate.resetForRetry();
}
@Override
public Timestamp getCommitTimestamp() {
return delegate.getCommitTimestamp();
}
@Override
public void close() {
if (closed) {
return;
}
closed = true;
try {
delegate.close();
} finally {
session.close();
}
}
@Override
public TransactionState getState() {
return delegate.getState();
}
}
// Exception class used just to track the stack trace at the point when a session was handed out
// from the pool.
private final class LeakedSessionException extends RuntimeException {
private static final long serialVersionUID = 1451131180314064914L;
private LeakedSessionException() {
super("Session was checked out from the pool at " + clock.instant());
}
}
private enum SessionState {
AVAILABLE,
BUSY,
CLOSING,
}
final class PooledSession implements Session {
@VisibleForTesting final Session delegate;
private volatile Instant lastUseTime;
private volatile SpannerException lastException;
private volatile LeakedSessionException leakedException;
@GuardedBy("lock")
private SessionState state;
private PooledSession(Session delegate) {
this.delegate = delegate;
this.state = SessionState.AVAILABLE;
markUsed();
}
private void markBusy() {
this.state = SessionState.BUSY;
this.leakedException = new LeakedSessionException();
}
private void markClosing() {
this.state = SessionState.CLOSING;
}
@Override
public Timestamp write(Iterable<Mutation> mutations) throws SpannerException {
try {
markUsed();
return delegate.write(mutations);
} catch (SpannerException e) {
throw lastException = e;
} finally {
close();
}
}
@Override
public Timestamp writeAtLeastOnce(Iterable<Mutation> mutations) throws SpannerException {
try {
markUsed();
return delegate.writeAtLeastOnce(mutations);
} catch (SpannerException e) {
throw lastException = e;
} finally {
close();
}
}
@Override
public ReadContext singleUse() {
try {
return new AutoClosingReadContext(delegate.singleUse(), this, true);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadContext singleUse(TimestampBound bound) {
try {
return new AutoClosingReadContext(delegate.singleUse(bound), this, true);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadOnlyTransaction singleUseReadOnlyTransaction() {
try {
return new AutoClosingReadTransaction(delegate.singleUseReadOnlyTransaction(), this, true);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadOnlyTransaction singleUseReadOnlyTransaction(TimestampBound bound) {
try {
return new AutoClosingReadTransaction(
delegate.singleUseReadOnlyTransaction(bound), this, true);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadOnlyTransaction readOnlyTransaction() {
try {
return new AutoClosingReadTransaction(delegate.readOnlyTransaction(), this, false);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public ReadOnlyTransaction readOnlyTransaction(TimestampBound bound) {
try {
return new AutoClosingReadTransaction(delegate.readOnlyTransaction(bound), this, false);
} catch (Exception e) {
close();
throw e;
}
}
@Override
public TransactionRunner readWriteTransaction() {
final TransactionRunner runner = delegate.readWriteTransaction();
return new TransactionRunner() {
@Override
@Nullable
public <T> T run(TransactionCallable<T> callable) {
try {
markUsed();
T result = runner.run(callable);
return result;
} catch (SpannerException e) {
throw lastException = e;
} finally {
close();
}
}
@Override
public Timestamp getCommitTimestamp() {
return runner.getCommitTimestamp();
}
};
}
@Override
public void close() {
synchronized (lock) {
numSessionsInUse--;
}
leakedException = null;
if (lastException != null && isSessionNotFound(lastException)) {
invalidateSession(this);
} else {
lastException = null;
if (state != SessionState.CLOSING) {
state = SessionState.AVAILABLE;
}
releaseSession(this);
}
}
@Override
public String getName() {
return delegate.getName();
}
@Override
public void prepareReadWriteTransaction() {
markUsed();
delegate.prepareReadWriteTransaction();
}
private void keepAlive() {
markUsed();
delegate
.singleUse(TimestampBound.ofMaxStaleness(60, TimeUnit.SECONDS))
.executeQuery(Statement.newBuilder("SELECT 1").build())
.next();
}
private void markUsed() {
lastUseTime = clock.instant();
}
@Override
public TransactionManager transactionManager() {
markUsed();
return new AutoClosingTransactionManager(delegate.transactionManager(), this);
}
}
private static final class SessionOrError {
private final PooledSession session;
private final SpannerException e;
SessionOrError(PooledSession session) {
this.session = session;
this.e = null;
}
SessionOrError(SpannerException e) {
this.session = null;
this.e = e;
}
}
private static final class Waiter {
private final SynchronousQueue<SessionOrError> waiter = new SynchronousQueue<>();
private void put(PooledSession session) {
Uninterruptibles.putUninterruptibly(waiter, new SessionOrError(session));
}
private void put(SpannerException e) {
Uninterruptibles.putUninterruptibly(waiter, new SessionOrError(e));
}
private PooledSession take() throws SpannerException {
SessionOrError s = Uninterruptibles.takeUninterruptibly(waiter);
if (s.e != null) {
throw newSpannerException(s.e);
}
return s.session;
}
}
// Background task to maintain the pool. It closes idle sessions, keeps alive sessions that have
// not been used for a user configured time and creates session if needed to bring pool up to
// minimum required sessions. We keep track of the number of concurrent sessions being used.
// The maximum value of that over a window (10 minutes) tells us how many sessions we need in the
// pool. We close the remaining sessions. To prevent bursty traffic, we smear this out over the
// window length. We also smear out the keep alive traffic over the keep alive period.
final class PoolMaintainer {
// Length of the window in millis over which we keep track of maximum number of concurrent
// sessions in use.
private final Duration windowLength = Duration.ofMillis(TimeUnit.MINUTES.toMillis(10));
// Frequency of the timer loop.
@VisibleForTesting static final long LOOP_FREQUENCY = 10 * 1000L;
// Number of loop iterations in which we need to to close all the sessions waiting for closure.
@VisibleForTesting final long numClosureCycles = windowLength.toMillis() / LOOP_FREQUENCY;
private final Duration keepAliveMilis =
Duration.ofMillis(TimeUnit.MINUTES.toMillis(options.getKeepAliveIntervalMinutes()));
// Number of loop iterations in which we need to keep alive all the sessions
@VisibleForTesting final long numKeepAliveCycles = keepAliveMilis.toMillis() / LOOP_FREQUENCY;
Instant lastResetTime = Instant.ofEpochMilli(0);
int numSessionsToClose = 0;
int sessionsToClosePerLoop = 0;
@GuardedBy("lock")
ScheduledFuture<?> scheduledFuture;
@GuardedBy("lock")
boolean running;
void init() {
// Scheduled pool maintenance worker.
synchronized (lock) {
scheduledFuture =
executor.scheduleAtFixedRate(
new Runnable() {
@Override
public void run() {
maintainPool();
}
},
LOOP_FREQUENCY,
LOOP_FREQUENCY,
TimeUnit.MILLISECONDS);
}
}
void close() {
synchronized (lock) {
scheduledFuture.cancel(false);
if (!running) {
decrementPendingClosures();
}
}
}
// Does various pool maintenance activities.
void maintainPool() {
synchronized (lock) {
if (isClosed()) {
return;
}
running = true;
}
Instant currTime = clock.instant();
closeIdleSessions(currTime);
// Now go over all the remaining sessions and see if they need to be kept alive explicitly.
keepAliveSessions(currTime);
replenishPool();
synchronized (lock) {
running = false;
if (isClosed()) {
decrementPendingClosures();
}
}
}
private void closeIdleSessions(Instant currTime) {
LinkedList<PooledSession> sessionsToClose = new LinkedList<>();
synchronized (lock) {
// Every ten minutes figure out how many sessions need to be closed then close them over
// next ten minutes.
if (currTime.isAfter(lastResetTime.plus(windowLength))) {
int sessionsToKeep =
Math.max(options.getMinSessions(), maxSessionsInUse + options.getMaxIdleSessions());
numSessionsToClose = totalSessions() - sessionsToKeep;
sessionsToClosePerLoop = (int) Math.ceil((double) numSessionsToClose / numClosureCycles);
maxSessionsInUse = 0;
lastResetTime = currTime;
}
if (numSessionsToClose > 0) {
while (sessionsToClose.size() < Math.min(numSessionsToClose, sessionsToClosePerLoop)) {
PooledSession sess =
readSessions.size() > 0 ? readSessions.poll() : writePreparedSessions.poll();
if (sess != null) {
if (sess.state != SessionState.CLOSING) {
sess.markClosing();
sessionsToClose.add(sess);
}
} else {
break;
}
}
numSessionsToClose -= sessionsToClose.size();
}
}
for (PooledSession sess : sessionsToClose) {
logger.log(Level.FINE, "Closing session %s", sess.getName());
closeSession(sess);
}
}
private void keepAliveSessions(Instant currTime) {
long numSessionsToKeepAlive = 0;
synchronized (lock) {
// In each cycle only keep alive a subset of sessions to prevent burst of traffic.
numSessionsToKeepAlive = (long) Math.ceil((double) totalSessions() / numKeepAliveCycles);
}
// Now go over all the remaining sessions and see if they need to be kept alive explicitly.
Instant keepAliveThreshold = currTime.minus(keepAliveMilis);
// Keep chugging till there is no session that needs to be kept alive.
while (numSessionsToKeepAlive > 0) {
PooledSession sessionToKeepAlive = null;
synchronized (lock) {
sessionToKeepAlive = findSessionToKeepAlive(readSessions, keepAliveThreshold);
if (sessionToKeepAlive == null) {
sessionToKeepAlive = findSessionToKeepAlive(writePreparedSessions, keepAliveThreshold);
}
}
if (sessionToKeepAlive == null) {
break;
}
try {
logger.log(Level.FINE, "Keeping alive session " + sessionToKeepAlive.getName());
numSessionsToKeepAlive--;
sessionToKeepAlive.keepAlive();
releaseSession(sessionToKeepAlive);
} catch (SpannerException e) {
handleException(e, sessionToKeepAlive);
}
}
}
private void replenishPool() {
synchronized (lock) {
// If we have gone below min pool size, create that many sessions.
for (int i = 0;
i < options.getMinSessions() - (totalSessions() + numSessionsBeingCreated);
i++) {
createSession();
}
}
}
}
private final SessionPoolOptions options;
private final DatabaseId db;
private final SpannerImpl spanner;
private final ScheduledExecutorService executor;
private final ExecutorFactory<ScheduledExecutorService> executorFactory;
final PoolMaintainer poolMaintainer;
private final Clock clock;
private final Object lock = new Object();
@GuardedBy("lock")
private int pendingClosure;
@GuardedBy("lock")
private SettableFuture<Void> closureFuture;
@GuardedBy("lock")
private final Queue<PooledSession> readSessions = new LinkedList<>();
@GuardedBy("lock")
private final Queue<PooledSession> writePreparedSessions = new LinkedList<>();
@GuardedBy("lock")
private final Queue<Waiter> readWaiters = new LinkedList<>();
@GuardedBy("lock")
private final Queue<Waiter> readWriteWaiters = new LinkedList<>();
@GuardedBy("lock")
private int numSessionsBeingPrepared = 0;
@GuardedBy("lock")
private int numSessionsBeingCreated = 0;
@GuardedBy("lock")
private int numSessionsInUse = 0;
@GuardedBy("lock")
private int maxSessionsInUse = 0;
@GuardedBy("lock")
private final Set<PooledSession> allSessions = new HashSet<>();
/**
* Create a session pool with the given options and for the given database. It will also start
* eagerly creating sessions if {@link SessionPoolOptions#getMinSessions()} is greater than 0.
* Return pool is immediately ready for use, though getting a session might block for sessions to
* be created.
*/
static SessionPool createPool(SpannerOptions spannerOptions, DatabaseId db, SpannerImpl spanner) {
return createPool(
spannerOptions.getSessionPoolOptions(),
((GrpcTransportOptions) spannerOptions.getTransportOptions()).getExecutorFactory(),
db,
spanner);
}
static SessionPool createPool(
SessionPoolOptions poolOptions,
ExecutorFactory<ScheduledExecutorService> executorFactory,
DatabaseId db,
SpannerImpl spanner) {
return createPool(poolOptions, executorFactory, db, spanner, new Clock());
}
static SessionPool createPool(
SessionPoolOptions poolOptions,
ExecutorFactory<ScheduledExecutorService> executorFactory,
DatabaseId db,
SpannerImpl spanner,
Clock clock) {
SessionPool pool =
new SessionPool(poolOptions, executorFactory, executorFactory.get(), db, spanner, clock);
pool.initPool();
return pool;
}
private SessionPool(
SessionPoolOptions options,
ExecutorFactory<ScheduledExecutorService> executorFactory,
ScheduledExecutorService executor,
DatabaseId db,
SpannerImpl spanner,
Clock clock) {
this.options = options;
this.executorFactory = executorFactory;
this.executor = executor;
this.db = db;
this.spanner = spanner;
this.clock = clock;
this.poolMaintainer = new PoolMaintainer();
}
private void initPool() {
synchronized (lock) {
poolMaintainer.init();
for (int i = 0; i < options.getMinSessions(); i++) {
createSession();
}
}
}
private boolean isClosed() {
synchronized (lock) {
return closureFuture != null;
}
}
private void handleException(SpannerException e, PooledSession session) {
if (isSessionNotFound(e)) {
invalidateSession(session);
} else {
releaseSession(session);
}
}
private boolean isSessionNotFound(SpannerException e) {
return e.getErrorCode() == ErrorCode.NOT_FOUND && e.getMessage().contains("Session not found");
}
private void invalidateSession(PooledSession session) {
synchronized (lock) {
if (isClosed()) {
return;
}
allSessions.remove(session);
// replenish the pool.
createSession();
}
}
private PooledSession findSessionToKeepAlive(
Queue<PooledSession> queue, Instant keepAliveThreshold) {
Iterator<PooledSession> iterator = queue.iterator();
while (iterator.hasNext()) {
PooledSession session = iterator.next();
if (session.lastUseTime.isBefore(keepAliveThreshold)) {
iterator.remove();
return session;
}
}
return null;
}
/**
* Returns a session to be used for read requests to spanner. It will block if a session is not
* currently available. In case the pool is exhausted and {@link
* SessionPoolOptions#isFailIfPoolExhausted()} has been set, it will throw an exception. Returned
* session must be closed by calling {@link Session#close()}.
*
* <p>Implementation strategy:
*
* <ol>
* <li>If a read session is available, return that.
* <li>Otherwise if a writePreparedSession is available, return that.
* <li>Otherwise if a session can be created, fire a creation request.
* <li>Wait for a session to become available. Note that this can be unblocked either by a
* session being returned to the pool or a new session being created.
* </ol>
*/
Session getReadSession() throws SpannerException {
Span span = Tracing.getTracer().getCurrentSpan();
span.addAnnotation("Acquiring session");
Waiter waiter = null;
PooledSession sess = null;
synchronized (lock) {
if (closureFuture != null) {
span.addAnnotation("Pool has been closed");
throw new IllegalStateException("Pool has been closed");
}
sess = readSessions.poll();
if (sess == null) {
sess = writePreparedSessions.poll();
if (sess == null) {
span.addAnnotation("No session available");
maybeCreateSession();
waiter = new Waiter();
readWaiters.add(waiter);
} else {
span.addAnnotation("Acquired read write session");
}
} else {
span.addAnnotation("Acquired read only session");
}
}
if (waiter != null) {
logger.log(
Level.FINE,
"No session available in the pool. Blocking for one to become available/created");
span.addAnnotation("Waiting for read only session to be available");
sess = waiter.take();
}
sess.markBusy();
incrementNumSessionsInUse();
span.addAnnotation(sessionAnnotation(sess));
return sess;
}
/**
* Returns a session which has been prepared for writes by invoking BeginTransaction rpc. It will
* block if such a session is not currently available.In case the pool is exhausted and {@link
* SessionPoolOptions#isFailIfPoolExhausted()} has been set, it will throw an exception. Returned
* session must closed by invoking {@link Session#close()}.
*
* <p>Implementation strategy:
*
* <ol>
* <li>If a writePreparedSession is available, return that.
* <li>Otherwise if we have an extra session being prepared for write, wait for that.
* <li>Otherwise, if there is a read session available, start preparing that for write and wait.
* <li>Otherwise start creating a new session and wait.
* <li>Wait for write prepared session to become available. This can be unblocked either by the
* session create/prepare request we fired in above request or by a session being released
* to the pool which is then write prepared.
* </ol>
*/
Session getReadWriteSession() {
Span span = Tracing.getTracer().getCurrentSpan();
span.addAnnotation("Acquiring read write session");
Waiter waiter = null;
PooledSession sess = null;
synchronized (lock) {
if (closureFuture != null) {
throw new IllegalStateException("Pool has been closed");
}
sess = writePreparedSessions.poll();
if (sess == null) {
if (numSessionsBeingPrepared <= readWriteWaiters.size()) {
PooledSession readSession = readSessions.poll();
if (readSession != null) {
span.addAnnotation("Acquired read only session. Preparing for read write transaction");
prepareSession(readSession);
} else {
span.addAnnotation("No session available");
maybeCreateSession();
}
}
waiter = new Waiter();
readWriteWaiters.add(waiter);
} else {
span.addAnnotation("Acquired read write session");
}
}
if (waiter != null) {
logger.log(
Level.FINE,
"No session available in the pool. Blocking for one to become available/created");
span.addAnnotation("Waiting for read write session to be available");
sess = waiter.take();
}
sess.markBusy();
incrementNumSessionsInUse();
span.addAnnotation(sessionAnnotation(sess));
return sess;
}
private Annotation sessionAnnotation(Session session) {
AttributeValue sessionId = AttributeValue.stringAttributeValue(session.getName());
return Annotation.fromDescriptionAndAttributes("Using Session",
ImmutableMap.of("sessionId", sessionId));
}
private void incrementNumSessionsInUse() {
synchronized (lock) {
if (maxSessionsInUse < ++numSessionsInUse) {
maxSessionsInUse = numSessionsInUse;
}
}
}
private void maybeCreateSession() {
Span span = Tracing.getTracer().getCurrentSpan();
synchronized (lock) {
if (numWaiters() >= numSessionsBeingCreated) {
if (canCreateSession()) {
span.addAnnotation("Creating session");
createSession();
} else if (options.isFailIfPoolExhausted()) {
span.addAnnotation("Pool exhausted. Failing");
// throw specific exception
throw newSpannerException(
ErrorCode.RESOURCE_EXHAUSTED,
"No session available in the pool. Maximum number of sessions in the pool can be"
+ " overridden by invoking SessionPoolOptions#Builder#setMaxSessions. Client can be made to block"
+ " rather than fail by setting SessionPoolOptions#Builder#setBlockIfPoolExhausted.");
}
}
}
}
/**
* Releases a session back to the pool. This might cause one of the waiters to be unblocked.
*
* <p>Implementation note:
*
* <ol>
* <li>If there are no pending waiters, either add to the read sessions queue or start preparing
* for write depending on what fraction of sessions are already prepared for writes.
* <li>Otherwise either unblock a waiting reader or start preparing for a write. Exact strategy
* on which option we chose, in case there are both waiting readers and writers, is
* implemented in {@link #shouldUnblockReader}
* </ol>
*/
private void releaseSession(PooledSession session) {
Preconditions.checkNotNull(session);
synchronized (lock) {
if (closureFuture != null) {
return;
}
if (readWaiters.size() == 0 && numSessionsBeingPrepared >= readWriteWaiters.size()) {
// No pending waiters
if (shouldPrepareSession()) {
prepareSession(session);
} else {
readSessions.add(session);
}
} else if (shouldUnblockReader()) {
readWaiters.poll().put(session);
} else {
prepareSession(session);
}
}
}
private void handleCreateSessionFailure(SpannerException e) {
synchronized (lock) {
if (readWaiters.size() > 0) {
readWaiters.poll().put(e);
} else if (readWriteWaiters.size() > 0) {
readWriteWaiters.poll().put(e);
}
}
}
private void handlePrepareSessionFailure(SpannerException e, PooledSession session) {
synchronized (lock) {
if (isSessionNotFound(e)) {
invalidateSession(session);
} else if (readWriteWaiters.size() > 0) {
readWriteWaiters.poll().put(e);
} else {
releaseSession(session);
}
}
}
private void decrementPendingClosures() {
pendingClosure--;
if (pendingClosure == 0) {
closureFuture.set(null);
}
}
/**
* Close all the sessions. Once this method is invoked {@link #getReadSession()} and {@link
* #getReadWriteSession()} will start throwing {@code IllegalStateException}. The returned future
* blocks till all the sessions created in this pool have been closed.
*/
ListenableFuture<Void> closeAsync() {
ListenableFuture<Void> retFuture = null;
synchronized (lock) {
if (closureFuture != null) {
throw new IllegalStateException("Close has already been invoked");
}
// Fail all pending waiters.
Waiter waiter = readWaiters.poll();
while (waiter != null) {
waiter.put(newSpannerException(ErrorCode.INTERNAL, "Client has been closed"));
waiter = readWaiters.poll();
}
waiter = readWriteWaiters.poll();
while (waiter != null) {
waiter.put(newSpannerException(ErrorCode.INTERNAL, "Client has been closed"));
waiter = readWriteWaiters.poll();
}
closureFuture = SettableFuture.create();
retFuture = closureFuture;
pendingClosure =
totalSessions() + numSessionsBeingCreated + 1 /* For pool maintenance thread */;
poolMaintainer.close();
readSessions.clear();
writePreparedSessions.clear();
for (final PooledSession session : ImmutableList.copyOf(allSessions)) {
if (session.leakedException != null) {
logger.log(Level.WARNING, "Leaked session", session.leakedException);
}
if (session.state != SessionState.CLOSING) {
closeSessionAsync(session);
}
}
}
retFuture.addListener(
new Runnable() {
@Override
public void run() {
executorFactory.release(executor);
}
},
MoreExecutors.directExecutor());
return retFuture;
}
private boolean shouldUnblockReader() {
// This might not be the best strategy since a continuous burst of read requests can starve
// a write request. Maybe maintain a timestamp in the queue and unblock according to that
// or just flip a weighted coin.
synchronized (lock) {
int numWriteWaiters = readWriteWaiters.size() - numSessionsBeingPrepared;
return readWaiters.size() > numWriteWaiters;
}
}
private boolean shouldPrepareSession() {
synchronized (lock) {
int preparedSessions = writePreparedSessions.size() + numSessionsBeingPrepared;
return preparedSessions < Math.floor(options.getWriteSessionsFraction() * totalSessions());
}
}
private int numWaiters() {
synchronized (lock) {
return readWaiters.size() + readWriteWaiters.size();
}
}
private int totalSessions() {
synchronized (lock) {
return allSessions.size();
}
}
private void closeSessionAsync(final PooledSession sess) {
executor.submit(
new Runnable() {
@Override
public void run() {
closeSession(sess);
}
});
}
private void closeSession(PooledSession sess) {
try {
sess.delegate.close();
} catch (SpannerException e) {
// Backend will delete these sessions after a while even if we fail to close them.
if (logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE, "Failed to close session: " + sess.getName(), e);
}
} finally {
synchronized (lock) {
allSessions.remove(sess);
if (isClosed()) {
decrementPendingClosures();
return;
}
// Create a new session if needed to unblock some waiter.
if (numWaiters() > numSessionsBeingCreated) {
createSession();
}
}
}
}
private void prepareSession(final PooledSession sess) {
synchronized (lock) {
numSessionsBeingPrepared++;
}
executor.submit(
new Runnable() {
@Override
public void run() {
try {
logger.log(Level.FINE, "Preparing session");
sess.prepareReadWriteTransaction();
logger.log(Level.FINE, "Session prepared");
synchronized (lock) {
numSessionsBeingPrepared--;
if (!isClosed()) {
if (readWriteWaiters.size() > 0) {
readWriteWaiters.poll().put(sess);
} else if (readWaiters.size() > 0) {
readWaiters.poll().put(sess);
} else {
writePreparedSessions.add(sess);
}
}
}
} catch (Throwable t) {
synchronized (lock) {
numSessionsBeingPrepared--;
if (!isClosed()) {
handlePrepareSessionFailure(newSpannerException(t), sess);
}
}
}
}
});
}
private boolean canCreateSession() {
synchronized (lock) {
return totalSessions() + numSessionsBeingCreated < options.getMaxSessions();
}
}
private void createSession() {
logger.log(Level.FINE, "Creating session");
synchronized (lock) {
numSessionsBeingCreated++;
executor.submit(
new Runnable() {
@Override
public void run() {
Session session = null;
try {
session = spanner.createSession(db);
logger.log(Level.FINE, "Session created");
} catch (Throwable t) {
// Expose this to customer via a metric.
synchronized (lock) {
numSessionsBeingCreated--;
if (isClosed()) {
decrementPendingClosures();
}
handleCreateSessionFailure(newSpannerException(t));
}
return;
}
boolean closeSession = false;
PooledSession pooledSession = null;
synchronized (lock) {
pooledSession = new PooledSession(session);
numSessionsBeingCreated--;
if (closureFuture != null) {
closeSession = true;
} else {
Preconditions.checkState(totalSessions() <= options.getMaxSessions() - 1);
allSessions.add(pooledSession);
releaseSession(pooledSession);
}
}
if (closeSession) {
closeSession(pooledSession);
}
}
});
}
}
}
| spanner: fix log syntax (#3241)
Fixes #3235. | google-cloud-spanner/src/main/java/com/google/cloud/spanner/SessionPool.java | spanner: fix log syntax (#3241) |
|
Java | apache-2.0 | 61a51b61522b210f38d617c09c1a80c20749b240 | 0 | nedrichards/Fuzzy-Time | /*
* Copyright (C) 2014 The Android Open Source Project
* Copyright (C) 2015 Nick Richards <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nedrichards.fuzzytime;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.wearable.watchface.CanvasWatchFaceService;
import android.support.wearable.watchface.WatchFaceStyle;
import android.text.DynamicLayout;
import android.text.Layout;
import android.text.TextPaint;
import android.text.format.Time;
import android.view.SurfaceHolder;
import android.view.WindowInsets;
//import android.widget.FrameLayout;
//import android.view.LayoutInflater;
//import android.content.res.AssetManager;
import java.lang.ref.WeakReference;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
public class FuzzyTime extends CanvasWatchFaceService {
private static final Typeface NORMAL_TYPEFACE =
Typeface.create(Typeface.SANS_SERIF, Typeface.NORMAL);
// the legacy type
// this currently crashes as it isn't called from an activity - afaik we don't have an activity here. considering.
//AssetManager am = getAssets();
//Typeface fontArvo = Typeface.createFromAsset(getAssets(), "fonts/Arvo-Regular.ttf");
/**
* Update rate in milliseconds for interactive mode.
*/
private static final long INTERACTIVE_UPDATE_RATE_MS = TimeUnit.MINUTES.toMillis(1);
/**
* Handler message id for updating the time periodically in interactive mode.
*/
private static final int MSG_UPDATE_TIME = 0;
@Override
public Engine onCreateEngine() {
return new Engine();
}
private static class EngineHandler extends Handler {
private final WeakReference<FuzzyTime.Engine> mWeakReference;
public EngineHandler(FuzzyTime.Engine reference) {
mWeakReference = new WeakReference<>(reference);
}
@Override
public void handleMessage(Message msg) {
FuzzyTime.Engine engine = mWeakReference.get();
if (engine != null) {
switch (msg.what) {
case MSG_UPDATE_TIME:
engine.handleUpdateTimeMessage();
break;
}
}
}
}
private class Engine extends CanvasWatchFaceService.Engine {
final Handler mUpdateTimeHandler = new EngineHandler(this);
boolean mRegisteredTimeZoneReceiver = false;
Paint mBackgroundPaint;
final TextPaint mTextPaint = new TextPaint();
boolean mAmbient;
Time mTime;
final BroadcastReceiver mTimeZoneReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
mTime.clear(intent.getStringExtra("time-zone"));
mTime.setToNow();
}
};
float mXOffset;
float mYOffset;
/** Width specified when {@link #mLayout} was created. */
int mLayoutWidth;
/** Layout to wrap text onto multiple lines. */
DynamicLayout mLayout;
/**
* Whether the display supports fewer bits for each color in ambient mode. When true, we
* disable anti-aliasing in ambient mode.
*/
boolean mLowBitAmbient;
@Override
public void onCreate(SurfaceHolder holder) {
super.onCreate(holder);
setWatchFaceStyle(new WatchFaceStyle.Builder(FuzzyTime.this)
.setCardPeekMode(WatchFaceStyle.PEEK_MODE_VARIABLE)
.setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
.setShowSystemUiTime(false)
.build());
Resources resources = FuzzyTime.this.getResources();
mYOffset = resources.getDimension(R.dimen.digital_y_offset);
mBackgroundPaint = new Paint();
mBackgroundPaint.setColor(resources.getColor(R.color.background));
//mTextPaint = new Paint();
mTextPaint.setColor(resources.getColor(R.color.digital_text));
mTextPaint.setTextAlign(Paint.Align.CENTER);
mTextPaint.setTextSize(18);
mTextPaint.setTypeface(NORMAL_TYPEFACE);
mTextPaint.setAntiAlias(true);
mTime = new Time();
}
@Override
public void onDestroy() {
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
super.onDestroy();
}
/*
private Paint createTextPaint(int textColor) {
Paint paint = new Paint();
paint.setColor(textColor);
paint.setTypeface(NORMAL_TYPEFACE);
paint.setAntiAlias(true);
return paint;
}
*/
@Override
public void onVisibilityChanged(boolean visible) {
super.onVisibilityChanged(visible);
if (visible) {
registerReceiver();
// Update time zone in case it changed while we weren't visible.
mTime.clear(TimeZone.getDefault().getID());
mTime.setToNow();
} else {
unregisterReceiver();
}
// Whether the timer should be running depends on whether we're visible (as well as
// whether we're in ambient mode), so we may need to start or stop the timer.
updateTimer();
}
private void registerReceiver() {
if (mRegisteredTimeZoneReceiver) {
return;
}
mRegisteredTimeZoneReceiver = true;
IntentFilter filter = new IntentFilter(Intent.ACTION_TIMEZONE_CHANGED);
FuzzyTime.this.registerReceiver(mTimeZoneReceiver, filter);
}
private void unregisterReceiver() {
if (!mRegisteredTimeZoneReceiver) {
return;
}
mRegisteredTimeZoneReceiver = false;
FuzzyTime.this.unregisterReceiver(mTimeZoneReceiver);
}
@Override
public void onApplyWindowInsets(WindowInsets insets) {
super.onApplyWindowInsets(insets);
// Load resources that have alternate values for round watches.
Resources resources = FuzzyTime.this.getResources();
boolean isRound = insets.isRound();
mXOffset = resources.getDimension(isRound
? R.dimen.digital_x_offset_round : R.dimen.digital_x_offset);
float textSize = resources.getDimension(isRound
? R.dimen.digital_text_size_round : R.dimen.digital_text_size);
mTextPaint.setTextSize(textSize);
}
@Override
public void onPropertiesChanged(Bundle properties) {
super.onPropertiesChanged(properties);
mLowBitAmbient = properties.getBoolean(PROPERTY_LOW_BIT_AMBIENT, false);
}
@Override
public void onTimeTick() {
super.onTimeTick();
invalidate();
}
@Override
public void onAmbientModeChanged(boolean inAmbientMode) {
super.onAmbientModeChanged(inAmbientMode);
if (mAmbient != inAmbientMode) {
mAmbient = inAmbientMode;
if (mLowBitAmbient) {
mTextPaint.setAntiAlias(!inAmbientMode);
}
invalidate();
}
// Whether the timer should be running depends on whether we're visible (as well as
// whether we're in ambient mode), so we may need to start or stop the timer.
updateTimer();
}
@Override
public void onDraw(Canvas canvas, Rect bounds) {
// Draw the background.
if (isInAmbientMode()) {
canvas.drawColor(Color.BLACK);
} else {
canvas.drawRect(0, 0, bounds.width(), bounds.height(), mBackgroundPaint);
}
mTime.setToNow();
// time text patterns, Java seems to make selecting these really annoying
String hourText = null;
switch (mTime.hour) {
case 0:
hourText = "midnight";
break;
case 1:
hourText = "one";
break;
case 2:
hourText = "two";
break;
case 3:
hourText = "three";
break;
case 4:
hourText = "four";
break;
case 5:
hourText = "five";
break;
case 6:
hourText = "six";
break;
case 7:
hourText = "seven";
break;
case 8:
hourText = "eight";
break;
case 9:
hourText = "nine";
break;
case 10:
hourText = "ten";
break;
case 11:
hourText = "eleven";
break;
case 12:
hourText = "noon";
break;
case 13:
hourText = "one";
break;
case 14:
hourText = "two";
break;
case 15:
hourText = "three";
break;
case 16:
hourText = "four";
break;
case 17:
hourText = "five";
break;
case 18:
hourText = "six";
break;
case 19:
hourText = "seven";
break;
case 20:
hourText = "eight";
break;
case 21:
hourText = "nine";
break;
case 22:
hourText = "ten";
break;
case 23:
hourText = "eleven";
break;
}
String minuteText = null;
switch (mTime.minute) {
case 58:
case 59:
case 0:
case 1:
case 2:
minuteText = " o'clock";
break;
case 3:
case 4:
case 5:
case 6:
case 7:
minuteText = "five past ";
break;
case 8:
case 9:
case 10:
case 11:
case 12:
minuteText = "ten past ";
break;
case 13:
case 14:
case 15:
case 16:
case 17:
minuteText = "quarter past ";
break;
case 18:
case 19:
case 20:
case 21:
case 22:
minuteText = "twenty past ";
break;
case 23:
case 24:
case 25:
case 26:
case 27:
minuteText = "twentyfive past ";
break;
case 28:
case 29:
case 30:
case 31:
case 32:
minuteText = "half past ";
break;
case 33:
case 34:
case 35:
case 36:
case 37:
minuteText = "twentyfive to ";
break;
case 38:
case 39:
case 40:
case 41:
case 42:
minuteText = "twenty to ";
break;
case 43:
case 44:
case 45:
case 46:
case 47:
minuteText = "quarter to ";
break;
case 48:
case 49:
case 50:
case 51:
case 52:
case 53:
minuteText = "ten to ";
break;
case 54:
case 55:
case 56:
case 57:
minuteText = "five to ";
break;
}
String timeText = null;
switch (mTime.minute) {
case 58:
case 59:
case 0:
case 1:
case 2:
// make sure to treat midday and midnight correctly
if (mTime.hour == 0) {
timeText = hourText;
}
if (mTime.hour == 12) {
timeText = hourText;
} else {
timeText = hourText + minuteText;
}
break;
default:
timeText = minuteText + hourText;
break;
}
/* I'm not treating ambient mode differently, everything is ambient
String text = mAmbient
canvas.drawText(timeText, mXOffset, mYOffset, mTextPaint);
*/
// find the center of the screen in order to translate from the middle of the screen
int width = bounds.width();
int height = bounds.height();
float textWidthFloat = width - mXOffset;
int textWidth;
textWidth = Math.round(textWidthFloat);
float centerX = width / 2f;
float centerY = height / 2f;
//@TODO actually center the text vertically, right margin may not be happening
//Create or update mLayout if necessary.
if (mLayout == null || mLayoutWidth != textWidth) {
mLayoutWidth = textWidth;
mLayout = new DynamicLayout(timeText, mTextPaint, mLayoutWidth,
Layout.Alignment.ALIGN_NORMAL, 1 /* spacingMult */, 0 /* spacingAdd */,
false /* includePad */);
}
canvas.save();
canvas.translate(mXOffset, mYOffset);
mLayout.draw(canvas);
canvas.restore();
/* working dynamic layout that's drawn off the top right edge
DynamicLayout dynamicLayout = new DynamicLayout(timeText, mTextPaint, bounds.width(),
Layout.Alignment.ALIGN_CENTER, 1, 1, true);
dynamicLayout.draw(canvas); */
}
/**
* Starts the {@link #mUpdateTimeHandler} timer if it should be running and isn't currently
* or stops it if it shouldn't be running but currently is.
*/
private void updateTimer() {
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
if (shouldTimerBeRunning()) {
mUpdateTimeHandler.sendEmptyMessage(MSG_UPDATE_TIME);
}
}
/**
* Returns whether the {@link #mUpdateTimeHandler} timer should be running. The timer should
* only run when we're visible and in interactive mode.
*/
private boolean shouldTimerBeRunning() {
return isVisible() && !isInAmbientMode();
}
/**
* Handle updating the time periodically in interactive mode.
* @TODO is this even happening, time is not updating
*/
private void handleUpdateTimeMessage() {
invalidate();
if (shouldTimerBeRunning()) {
long timeMs = System.currentTimeMillis();
long delayMs = INTERACTIVE_UPDATE_RATE_MS
- (timeMs % INTERACTIVE_UPDATE_RATE_MS);
mUpdateTimeHandler.sendEmptyMessageDelayed(MSG_UPDATE_TIME, delayMs);
}
}
}
}
| wear/src/main/java/com/nedrichards/fuzzytime/FuzzyTime.java | /*
* Copyright (C) 2014 The Android Open Source Project
* Copyright (C) 2015 Nick Richards <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nedrichards.fuzzytime;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.wearable.watchface.CanvasWatchFaceService;
import android.support.wearable.watchface.WatchFaceStyle;
import android.text.DynamicLayout;
import android.text.Layout;
import android.text.TextPaint;
import android.text.format.Time;
import android.view.SurfaceHolder;
import android.view.WindowInsets;
//import android.widget.FrameLayout;
//import android.view.LayoutInflater;
//import android.content.res.AssetManager;
import java.lang.ref.WeakReference;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
public class FuzzyTime extends CanvasWatchFaceService {
private static final Typeface NORMAL_TYPEFACE =
Typeface.create(Typeface.SANS_SERIF, Typeface.NORMAL);
// the legacy type
// this currently crashes as it isn't called from an activity - afaik we don't have an activity here. considering.
//AssetManager am = getAssets();
//Typeface fontArvo = Typeface.createFromAsset(getAssets(), "fonts/Arvo-Regular.ttf");
/**
* Update rate in milliseconds for interactive mode.
*/
private static final long INTERACTIVE_UPDATE_RATE_MS = TimeUnit.MINUTES.toMillis(1);
/**
* Handler message id for updating the time periodically in interactive mode.
*/
private static final int MSG_UPDATE_TIME = 0;
@Override
public Engine onCreateEngine() {
return new Engine();
}
private static class EngineHandler extends Handler {
private final WeakReference<FuzzyTime.Engine> mWeakReference;
public EngineHandler(FuzzyTime.Engine reference) {
mWeakReference = new WeakReference<>(reference);
}
@Override
public void handleMessage(Message msg) {
FuzzyTime.Engine engine = mWeakReference.get();
if (engine != null) {
switch (msg.what) {
case MSG_UPDATE_TIME:
engine.handleUpdateTimeMessage();
break;
}
}
}
}
private class Engine extends CanvasWatchFaceService.Engine {
final Handler mUpdateTimeHandler = new EngineHandler(this);
boolean mRegisteredTimeZoneReceiver = false;
Paint mBackgroundPaint;
final TextPaint mTextPaint = new TextPaint();
boolean mAmbient;
Time mTime;
final BroadcastReceiver mTimeZoneReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
mTime.clear(intent.getStringExtra("time-zone"));
mTime.setToNow();
}
};
float mXOffset;
float mYOffset;
/** Width specified when {@link #mLayout} was created. */
int mLayoutWidth;
/** Layout to wrap text onto multiple lines. */
DynamicLayout mLayout;
/**
* Whether the display supports fewer bits for each color in ambient mode. When true, we
* disable anti-aliasing in ambient mode.
*/
boolean mLowBitAmbient;
@Override
public void onCreate(SurfaceHolder holder) {
super.onCreate(holder);
setWatchFaceStyle(new WatchFaceStyle.Builder(FuzzyTime.this)
.setCardPeekMode(WatchFaceStyle.PEEK_MODE_VARIABLE)
.setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
.setShowSystemUiTime(false)
.build());
Resources resources = FuzzyTime.this.getResources();
mYOffset = resources.getDimension(R.dimen.digital_y_offset);
mBackgroundPaint = new Paint();
mBackgroundPaint.setColor(resources.getColor(R.color.background));
//mTextPaint = new Paint();
mTextPaint.setColor(resources.getColor(R.color.digital_text));
mTextPaint.setTextAlign(Paint.Align.CENTER);
mTextPaint.setTextSize(18);
mTextPaint.setTypeface(NORMAL_TYPEFACE);
mTextPaint.setAntiAlias(true);
mTime = new Time();
}
@Override
public void onDestroy() {
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
super.onDestroy();
}
/*
private Paint createTextPaint(int textColor) {
Paint paint = new Paint();
paint.setColor(textColor);
paint.setTypeface(NORMAL_TYPEFACE);
paint.setAntiAlias(true);
return paint;
}
*/
@Override
public void onVisibilityChanged(boolean visible) {
super.onVisibilityChanged(visible);
if (visible) {
registerReceiver();
// Update time zone in case it changed while we weren't visible.
mTime.clear(TimeZone.getDefault().getID());
mTime.setToNow();
} else {
unregisterReceiver();
}
// Whether the timer should be running depends on whether we're visible (as well as
// whether we're in ambient mode), so we may need to start or stop the timer.
updateTimer();
}
private void registerReceiver() {
if (mRegisteredTimeZoneReceiver) {
return;
}
mRegisteredTimeZoneReceiver = true;
IntentFilter filter = new IntentFilter(Intent.ACTION_TIMEZONE_CHANGED);
FuzzyTime.this.registerReceiver(mTimeZoneReceiver, filter);
}
private void unregisterReceiver() {
if (!mRegisteredTimeZoneReceiver) {
return;
}
mRegisteredTimeZoneReceiver = false;
FuzzyTime.this.unregisterReceiver(mTimeZoneReceiver);
}
@Override
public void onApplyWindowInsets(WindowInsets insets) {
super.onApplyWindowInsets(insets);
// Load resources that have alternate values for round watches.
Resources resources = FuzzyTime.this.getResources();
boolean isRound = insets.isRound();
mXOffset = resources.getDimension(isRound
? R.dimen.digital_x_offset_round : R.dimen.digital_x_offset);
float textSize = resources.getDimension(isRound
? R.dimen.digital_text_size_round : R.dimen.digital_text_size);
mTextPaint.setTextSize(textSize);
}
@Override
public void onPropertiesChanged(Bundle properties) {
super.onPropertiesChanged(properties);
mLowBitAmbient = properties.getBoolean(PROPERTY_LOW_BIT_AMBIENT, false);
}
@Override
public void onTimeTick() {
super.onTimeTick();
invalidate();
}
@Override
public void onAmbientModeChanged(boolean inAmbientMode) {
super.onAmbientModeChanged(inAmbientMode);
if (mAmbient != inAmbientMode) {
mAmbient = inAmbientMode;
if (mLowBitAmbient) {
mTextPaint.setAntiAlias(!inAmbientMode);
}
invalidate();
}
// Whether the timer should be running depends on whether we're visible (as well as
// whether we're in ambient mode), so we may need to start or stop the timer.
updateTimer();
}
@Override
public void onDraw(Canvas canvas, Rect bounds) {
// Draw the background.
if (isInAmbientMode()) {
canvas.drawColor(Color.BLACK);
} else {
canvas.drawRect(0, 0, bounds.width(), bounds.height(), mBackgroundPaint);
}
mTime.setToNow();
// time text patterns, Java seems to make selecting these really annoying
String hourText = null;
switch (mTime.hour) {
case 0:
hourText = "midnight";
break;
case 1:
hourText = "one";
break;
case 2:
hourText = "two";
break;
case 3:
hourText = "three";
break;
case 4:
hourText = "four";
break;
case 5:
hourText = "five";
break;
case 6:
hourText = "six";
break;
case 7:
hourText = "seven";
break;
case 8:
hourText = "eight";
break;
case 9:
hourText = "nine";
break;
case 10:
hourText = "ten";
break;
case 11:
hourText = "eleven";
break;
case 12:
hourText = "noon";
break;
case 13:
hourText = "one";
break;
case 14:
hourText = "two";
break;
case 15:
hourText = "three";
break;
case 16:
hourText = "four";
break;
case 17:
hourText = "five";
break;
case 18:
hourText = "six";
break;
case 19:
hourText = "seven";
break;
case 20:
hourText = "eight";
break;
case 21:
hourText = "nine";
break;
case 22:
hourText = "ten";
break;
case 23:
hourText = "eleven";
break;
}
String minuteText = null;
switch (mTime.minute) {
case 58:
case 59:
case 0:
case 1:
case 2:
minuteText = " o'clock";
break;
case 3:
case 4:
case 5:
case 6:
case 7:
minuteText = "five past ";
break;
case 8:
case 9:
case 10:
case 11:
case 12:
minuteText = "ten past ";
break;
case 13:
case 14:
case 15:
case 16:
case 17:
minuteText = "quarter past ";
break;
case 18:
case 19:
case 20:
case 21:
case 22:
minuteText = "twenty past ";
break;
case 23:
case 24:
case 25:
case 26:
case 27:
minuteText = "twentyfive past ";
break;
case 28:
case 29:
case 30:
case 31:
case 32:
minuteText = "half past ";
break;
case 33:
case 34:
case 35:
case 36:
case 37:
minuteText = "twentyfive to ";
break;
case 38:
case 39:
case 40:
case 41:
case 42:
minuteText = "twenty to ";
break;
case 43:
case 44:
case 45:
case 46:
case 47:
minuteText = "quarter to ";
break;
case 48:
case 49:
case 50:
case 51:
case 52:
case 53:
minuteText = "ten to ";
break;
case 54:
case 55:
case 56:
case 57:
minuteText = "five to ";
break;
}
String timeText = null;
switch (mTime.minute) {
case 58:
case 59:
case 0:
case 1:
case 2:
// make sure to treat midday and midnight correctly
if (mTime.hour == 0) {
timeText = hourText;
}
if (mTime.hour == 12) {
timeText = hourText;
} else {
timeText = hourText + minuteText;
}
break;
default:
timeText = minuteText + hourText;
break;
}
/* I'm not treating ambient mode differently, everything is ambient
String text = mAmbient
canvas.drawText(timeText, mXOffset, mYOffset, mTextPaint);
*/
// find the center of the screen in order to translate from the middle of the screen
int width = bounds.width();
int height = bounds.height();
float textWidthFloat = width - mXOffset;
int textWidth;
textWidth = Math.round(textWidthFloat);
float centerX = width / 2f;
float centerY = height / 2f;
//Create or update mLayout if necessary.
if (mLayout == null || mLayoutWidth != textWidth) {
mLayoutWidth = textWidth;
mLayout = new DynamicLayout(timeText, mTextPaint, mLayoutWidth,
Layout.Alignment.ALIGN_NORMAL, 1 /* spacingMult */, 0 /* spacingAdd */,
false /* includePad */);
}
canvas.save();
canvas.translate(mXOffset, mYOffset);
mLayout.draw(canvas);
canvas.restore();
/* working dynamic layout that's drawn off the top right edge
DynamicLayout dynamicLayout = new DynamicLayout(timeText, mTextPaint, bounds.width(),
Layout.Alignment.ALIGN_CENTER, 1, 1, true);
dynamicLayout.draw(canvas); */
}
/**
* Starts the {@link #mUpdateTimeHandler} timer if it should be running and isn't currently
* or stops it if it shouldn't be running but currently is.
*/
private void updateTimer() {
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
if (shouldTimerBeRunning()) {
mUpdateTimeHandler.sendEmptyMessage(MSG_UPDATE_TIME);
}
}
/**
* Returns whether the {@link #mUpdateTimeHandler} timer should be running. The timer should
* only run when we're visible and in interactive mode.
*/
private boolean shouldTimerBeRunning() {
return isVisible() && !isInAmbientMode();
}
/**
* Handle updating the time periodically in interactive mode.
*/
private void handleUpdateTimeMessage() {
invalidate();
if (shouldTimerBeRunning()) {
long timeMs = System.currentTimeMillis();
long delayMs = INTERACTIVE_UPDATE_RATE_MS
- (timeMs % INTERACTIVE_UPDATE_RATE_MS);
mUpdateTimeHandler.sendEmptyMessageDelayed(MSG_UPDATE_TIME, delayMs);
}
}
}
}
| add some TODO notes
| wear/src/main/java/com/nedrichards/fuzzytime/FuzzyTime.java | add some TODO notes |
|
Java | apache-2.0 | 063d746e7ec45c8593420b9247a20c311b3275ab | 0 | tkrajina/GraphAnything | package info.puzz.graphanything.services;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.ToneGenerator;
import android.os.SystemClock;
import android.util.Log;
import junit.framework.Assert;
import java.sql.Timestamp;
import java.util.concurrent.TimeUnit;
import info.puzz.graphanything.broadcast.TimerSoundPlayer;
import info.puzz.graphanything.dao.DAO;
import info.puzz.graphanything.models2.Graph;
import info.puzz.graphanything.models2.GraphColumn;
import info.puzz.graphanything.models2.enums.GraphUnitType;
import info.puzz.graphanything.utils.TimeUtils;
public final class GraphAlarms {
private static final String TAG = GraphAlarms.class.getSimpleName();
public static final String FINAL = "final";
private static final String GRAPH_ID = "gr_id";
private GraphAlarms() throws Exception {
throw new Exception();
}
public static void resetNextTimerAlarm(Context context, Graph graph) {
DAO dao = new DAO(context).open();
GraphColumn column = dao.getColumnsByColumnNo(graph._id).get(0);
if (!graph.isTimerActive()) {
Log.i(TAG, "Timer not active");
return;
}
if (graph.isPaused()) {
Log.i(TAG, "Graph paused");
return;
}
if (column.getGraphUnitType() != GraphUnitType.TIMER) {
return;
}
Log.i(TAG, "Started " + TimeUtils.YYYYMMDDHHMMSS_FORMATTER.format(new Timestamp(graph.timerStarted)));
long elapsedTimeOnTimerStart = SystemClock.elapsedRealtime() - (System.currentTimeMillis() - graph.timerStarted);
AlarmManager alarmMgr = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
if (graph.finalTimerSound > 0) {
long time = elapsedTimeOnTimerStart + TimeUnit.MINUTES.toMillis(graph.finalTimerSound);
if (time > SystemClock.elapsedRealtime()) {
Intent finalAlarmIntent = new Intent(context, TimerSoundPlayer.class)
.putExtra(GRAPH_ID, graph._id)
.putExtra(FINAL, true);
PendingIntent alarmIntent = PendingIntent.getBroadcast(context, 0, finalAlarmIntent, 0);
alarmMgr.cancel(alarmIntent);
alarmMgr.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, time, alarmIntent);
}
}
if (graph.reminderTimerSound > 0) {
for (int minutes = 0; minutes < Math.min(60, graph.finalTimerSound - 1); minutes += graph.reminderTimerSound) {
long time = elapsedTimeOnTimerStart + TimeUnit.MINUTES.toMillis(minutes);
if (time > SystemClock.elapsedRealtime()) {
Log.i(TAG, String.format("Alarm in %d minutes", TimeUnit.MILLISECONDS.toMinutes(time - SystemClock.elapsedRealtime())));
Intent intent = new Intent(context, TimerSoundPlayer.class)
.putExtra(GRAPH_ID, graph._id)
.putExtra(FINAL, false);
PendingIntent alarmIntent = PendingIntent.getBroadcast(context, 0, intent, 0);
alarmMgr.cancel(alarmIntent);
alarmMgr.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, time, alarmIntent);
return;
}
}
}
}
public static void alarm(Context context, Intent intent) {
DAO dao = new DAO(context).open();
boolean isFinal = intent.getBooleanExtra(FINAL, false);
Long graphID = intent.getLongExtra(GRAPH_ID, 0);
Assert.assertNotNull(graphID);
Assert.assertTrue(graphID.longValue() > 0);
Graph graph = dao.loadGraph(graphID);
Assert.assertNotNull(graph);
if (!graph.isTimerActive()) {
Log.i(TAG, "Timer not active");
return;
}
if (graph.isPaused()) {
Log.i(TAG, "Graph paused");
return;
}
Log.i(TAG, (isFinal ? "final" : "nonfinal") + " alarm " + TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - graph.timerStarted) + "s after start");
if (isFinal) {
finalAlarm();
} else {
intermediateAlarm();
}
resetNextTimerAlarm(context, graph);
}
public static void intermediateAlarm() {
new ToneGenerator(AudioManager.STREAM_ALARM, 50).startTone(ToneGenerator.TONE_CDMA_ALERT_CALL_GUARD, 50);
}
public static void finalAlarm() {
new ToneGenerator(AudioManager.STREAM_ALARM, 80).startTone(ToneGenerator.TONE_CDMA_ALERT_CALL_GUARD, 400);
}
}
| graphanything/src/main/java/info/puzz/graphanything/services/GraphAlarms.java | package info.puzz.graphanything.services;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.ToneGenerator;
import android.os.SystemClock;
import android.util.Log;
import junit.framework.Assert;
import java.util.concurrent.TimeUnit;
import info.puzz.graphanything.broadcast.TimerSoundPlayer;
import info.puzz.graphanything.dao.DAO;
import info.puzz.graphanything.models2.Graph;
import info.puzz.graphanything.models2.GraphColumn;
import info.puzz.graphanything.models2.enums.GraphUnitType;
public final class GraphAlarms {
private static final String TAG = GraphAlarms.class.getSimpleName();
public static final String FINAL = "final";
private static final String GRAPH_ID = "gr_id";
private GraphAlarms() throws Exception {
throw new Exception();
}
public static void resetNextTimerAlarm(Context context, Graph graph) {
DAO dao = new DAO(context).open();
GraphColumn column = dao.getColumnsByColumnNo(graph._id).get(0);
if (!graph.isTimerActive()) {
Log.i(TAG, "Timer not active");
return;
}
if (graph.isPaused()) {
Log.i(TAG, "Graph paused");
return;
}
if (column.getGraphUnitType() != GraphUnitType.TIMER) {
return;
}
Log.i(TAG, TimeUnit.MILLISECONDS.toMinutes(System.currentTimeMillis() - graph.timerStarted) + " from timer start");
long elapsedTimeOnTimerStart = SystemClock.elapsedRealtime() - (System.currentTimeMillis() - graph.timerStarted);
AlarmManager alarmMgr = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
if (graph.finalTimerSound > 0) {
long time = elapsedTimeOnTimerStart + TimeUnit.MINUTES.toMillis(graph.finalTimerSound);
if (time > SystemClock.elapsedRealtime()) {
Intent finalAlarmIntent = new Intent(context, TimerSoundPlayer.class);
finalAlarmIntent.putExtra(GRAPH_ID, graph._id);
finalAlarmIntent.putExtra(FINAL, true);
PendingIntent alarmIntent = PendingIntent.getBroadcast(context, 0, finalAlarmIntent, 0);
alarmMgr.cancel(alarmIntent);
alarmMgr.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, time, alarmIntent);
}
}
if (graph.reminderTimerSound > 0) {
for (int minutes = 1; minutes < Math.min(60, graph.finalTimerSound - 1); minutes += graph.reminderTimerSound) {
long time = elapsedTimeOnTimerStart + TimeUnit.MINUTES.toMillis(minutes);
if (time > SystemClock.elapsedRealtime()) {
Log.i(TAG, String.format("Alarm in %d minutes", TimeUnit.MILLISECONDS.toMinutes(time - SystemClock.elapsedRealtime())));
Intent intent = new Intent(context, TimerSoundPlayer.class);
intent.putExtra(GRAPH_ID, graph._id);
intent.putExtra(FINAL, false);
PendingIntent alarmIntent = PendingIntent.getBroadcast(context, 0, intent, 0);
alarmMgr.cancel(alarmIntent);
alarmMgr.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, time, alarmIntent);
return;
}
}
}
}
public static void alarm(Context context, Intent intent) {
DAO dao = new DAO(context).open();
boolean isFinal = intent.getBooleanExtra(FINAL, false);
Long graphID = intent.getLongExtra(GRAPH_ID, 0);
Assert.assertNotNull(graphID);
Assert.assertTrue(graphID.longValue() > 0);
Graph graph = dao.loadGraph(graphID);
Assert.assertNotNull(graph);
if (!graph.isTimerActive()) {
Log.i(TAG, "Timer not active");
return;
}
if (graph.isPaused()) {
Log.i(TAG, "Graph paused");
return;
}
if (isFinal) {
new ToneGenerator(AudioManager.STREAM_ALARM, 150).startTone(ToneGenerator.TONE_CDMA_ALERT_CALL_GUARD, 2000);
} else {
new ToneGenerator(AudioManager.STREAM_ALARM, 50).startTone(ToneGenerator.TONE_CDMA_ALERT_CALL_GUARD, 200);
}
resetNextTimerAlarm(context, graph);
}
}
| Timer sound volume
| graphanything/src/main/java/info/puzz/graphanything/services/GraphAlarms.java | Timer sound volume |
|
Java | apache-2.0 | 4ef28bdd5fca6ee6b4db6c6cd98b9c5c7c38a8c7 | 0 | Murdock01/izpack,tomas-forsman/izpack,rkrell/izpack,akuhtz/izpack,stenix71/izpack,izpack/izpack,Helpstone/izpack,Helpstone/izpack,codehaus/izpack,optotronic/izpack,yukron/izpack,Helpstone/izpack,maichler/izpack,izpack/izpack,yukron/izpack,akuhtz/izpack,rsharipov/izpack,tomas-forsman/izpack,Murdock01/izpack,Murdock01/izpack,akuhtz/izpack,optotronic/izpack,Helpstone/izpack,mtjandra/izpack,bradcfisher/izpack,akuhtz/izpack,optotronic/izpack,izpack/izpack,tomas-forsman/izpack,stenix71/izpack,rsharipov/izpack,bradcfisher/izpack,stenix71/izpack,codehaus/izpack,rkrell/izpack,akuhtz/izpack,Murdock01/izpack,codehaus/izpack,tomas-forsman/izpack,maichler/izpack,rkrell/izpack,maichler/izpack,codehaus/izpack,bradcfisher/izpack,stenix71/izpack,Helpstone/izpack,maichler/izpack,codehaus/izpack,maichler/izpack,yukron/izpack,optotronic/izpack,codehaus/izpack,rsharipov/izpack,Helpstone/izpack,yukron/izpack,stenix71/izpack,rkrell/izpack,izpack/izpack,mtjandra/izpack,yukron/izpack,rsharipov/izpack,codehaus/izpack,rsharipov/izpack,mtjandra/izpack,Murdock01/izpack,mtjandra/izpack,izpack/izpack,mtjandra/izpack,bradcfisher/izpack,bradcfisher/izpack,bradcfisher/izpack,optotronic/izpack,optotronic/izpack,Helpstone/izpack,rkrell/izpack,stenix71/izpack,rkrell/izpack,optotronic/izpack,rsharipov/izpack,izpack/izpack,mtjandra/izpack,Murdock01/izpack,tomas-forsman/izpack,rkrell/izpack,tomas-forsman/izpack,maichler/izpack,yukron/izpack,izpack/izpack,tomas-forsman/izpack,akuhtz/izpack,maichler/izpack,rsharipov/izpack,bradcfisher/izpack,yukron/izpack,Murdock01/izpack,mtjandra/izpack,stenix71/izpack,akuhtz/izpack | /*
* $Id$
* IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.installer;
import com.izforge.izpack.data.GUIPrefs;
import com.izforge.izpack.data.LocaleDatabase;
import com.izforge.izpack.data.ResourceManager;
import com.izforge.izpack.gui.ButtonFactory;
import com.izforge.izpack.gui.LabelFactory;
import com.izforge.izpack.util.Debug;
import com.izforge.izpack.util.FileExecutor;
import com.izforge.izpack.util.OsVersion;
import com.izforge.izpack.util.VariableSubstitutor;
import javax.swing.*;
import javax.swing.plaf.metal.MetalLookAndFeel;
import javax.swing.plaf.metal.MetalTheme;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.lang.reflect.Method;
import java.util.*;
import java.util.List;
/**
* The IzPack graphical installer class.
*
* @author Julien Ponge
*/
public class GUIInstaller extends InstallerBase {
/**
* The installation data.
*/
private InstallData installdata;
/**
* The L&F.
*/
protected String lnf;
/**
* defined modifier for language display type.
*/
private static final String[] LANGUAGE_DISPLAY_TYPES = {"iso3", "native", "default"};
private static final String[][] LANG_CODES = {{"cat", "ca"}, {"chn", "zh"}, {"cze", "cs"},
{"dan", "da"}, {"deu", "de"}, {"eng", "en"}, {"fin", "fi"}, {"fra", "fr"},
{"hun", "hu"}, {"ita", "it"}, {"jpn", "ja"}, {"mys", "ms"}, {"ned", "nl"},
{"nor", "no"}, {"pol", "pl"}, {"por", "pt"}, {"rom", "or"}, {"rus", "ru"},
{"spa", "es"}, {"svk", "sk"}, {"swe", "sv"}, {"tur", "tr"}, {"ukr", "uk"}};
/**
* holds language to ISO-3 language code translation
*/
private static HashMap isoTable;
/**
* The constructor.
*
* @throws Exception Description of the Exception
*/
public GUIInstaller() throws Exception {
try {
init();
} catch (Exception e) {
showFatalError(e);
throw e;
} catch (Error e) {
showFatalError(e);
throw e;
}
}
private void showFatalError(Throwable e) {
try {
JOptionPane.showMessageDialog(null, "Error: " + e.toString(), "Error", JOptionPane.ERROR_MESSAGE);
} catch (Exception e2) {
e2.printStackTrace();
}
}
private void init() throws Exception {
this.installdata = new InstallData();
// Loads the installation data
loadInstallData(installdata);
// add the GUI install data
loadGUIInstallData();
// Sets up the GUI L&F
loadLookAndFeel();
// Checks the Java version
checkJavaVersion();
checkJDKAvailable();
// Check for already running instance
checkLockFile();
// Loads the suitable langpack
SwingUtilities.invokeAndWait(new Runnable() {
public void run() {
try {
loadLangPack();
}
catch (Exception e) {
e.printStackTrace();
}
}
});
// create the resource manager (after the language selection!)
ResourceManager.create(this.installdata);
// load conditions
loadConditions(installdata);
// loads installer conditions
loadInstallerRequirements();
// load dynamic variables
loadDynamicVariables();
// check installer conditions
if (!checkInstallerRequirements(installdata)) {
Debug.log("not all installerconditions are fulfilled.");
System.exit(-1);
return;
}
// Load custom langpack if exist.
addCustomLangpack(installdata);
// We launch the installer GUI
SwingUtilities.invokeLater(new Runnable() {
public void run() {
try {
loadGUI();
}
catch (Exception e) {
e.printStackTrace();
}
}
});
}
public void showMissingRequirementMessage(String message) {
JOptionPane.showMessageDialog(null, message);
}
/**
* Load GUI preference information.
*
* @throws Exception
*/
public void loadGUIInstallData() throws Exception {
InputStream in = GUIInstaller.class.getResourceAsStream("/GUIPrefs");
ObjectInputStream objIn = new ObjectInputStream(in);
this.installdata.guiPrefs = (GUIPrefs) objIn.readObject();
objIn.close();
}
/**
* Sets a lock file. Not using java.nio.channels.FileLock to prevent
* the installer from accidentally keeping a lock on a file if the install
* fails or is killed.
*
* @throws Exception Description of the Exception
*/
private void checkLockFile() throws Exception {
String tempDir = System.getProperty("java.io.tmpdir");
String appName = this.installdata.getInfo().getAppName();
String fileName = "iz-" + appName + ".tmp";
Debug.trace("Making temp file: " + fileName);
Debug.trace("In temp directory: " + tempDir);
File file = new File(tempDir, fileName);
if (file.exists()) {
// Ask user if they want to proceed.
Debug.trace("Lock File Exists, asking user for permission to proceed.");
StringBuffer msg = new StringBuffer();
msg.append("<html>");
msg.append("The " + appName + " installer you are attempting to run seems to have a copy already running.<br><br>");
msg.append("This could be from a previous failed installation attempt or you may have accidentally launched <br>");
msg.append("the installer twice. <b>The recommended action is to select 'Exit'</b> and wait for the other copy of <br>");
msg.append("the installer to start. If you are sure there is no other copy of the installer running, click <br>");
msg.append("the 'Continue' button to allow this installer to run. <br><br>");
msg.append("Are you sure you want to continue with this installation?");
msg.append("</html>");
JLabel label = new JLabel(msg.toString());
label.setFont(new Font("Sans Serif", Font.PLAIN, 12));
Object[] optionValues = {"Continue", "Exit"};
int selectedOption = JOptionPane.showOptionDialog(null, label, "Warning",
JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE, null, optionValues,
optionValues[1]);
Debug.trace("Selected option: " + selectedOption);
if (selectedOption == 0) {
// Take control of the file so it gets deleted after this installer instance exits.
Debug.trace("Setting temp file to delete on exit");
file.deleteOnExit();
} else {
// Leave the file as it is.
Debug.trace("Leaving temp file alone and exiting");
System.exit(1);
}
} else {
try {
// Create the new lock file
if (file.createNewFile()) {
Debug.trace("Temp file created");
file.deleteOnExit();
} else {
Debug.trace("Temp file could not be created");
Debug.trace("*** Multiple instances of installer will be allowed ***");
}
}
catch (Exception e) {
Debug.trace("Temp file could not be created: " + e);
Debug.trace("*** Multiple instances of installer will be allowed ***");
}
}
}
/**
* Checks the Java version.
*
* @throws Exception Description of the Exception
*/
private void checkJavaVersion() throws Exception {
String version = System.getProperty("java.version");
String required = this.installdata.getInfo().getJavaVersion();
if (version.compareTo(required) < 0) {
StringBuffer msg = new StringBuffer();
msg.append("The application that you are trying to install requires a ");
msg.append(required);
msg.append(" version or later of the Java platform.\n");
msg.append("You are running a ");
msg.append(version);
msg.append(" version of the Java platform.\n");
msg.append("Please upgrade to a newer version.");
System.out.println(msg.toString());
JOptionPane.showMessageDialog(null, msg.toString(), "Error", JOptionPane.ERROR_MESSAGE);
System.exit(1);
}
}
/**
* Checks if a JDK is available.
*/
private void checkJDKAvailable() {
if (!this.installdata.getInfo().isJdkRequired()) {
return;
}
FileExecutor exec = new FileExecutor();
String[] output = new String[2];
String[] params = {"javac", "-help"};
if (exec.executeCommand(params, output) != 0) {
String[] message = {
"It looks like your system does not have a Java Development Kit (JDK) available.",
"The software that you plan to install requires a JDK for both its installation and execution.",
"\n",
"Do you still want to proceed with the installation process?"
};
int status = JOptionPane.showConfirmDialog(null, message, "Warning", JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE);
if (status == JOptionPane.NO_OPTION) {
System.exit(1);
}
}
}
/**
* Loads the suitable langpack.
*
* @throws Exception Description of the Exception
*/
private void loadLangPack() throws Exception {
// Initialisations
List availableLangPacks = getAvailableLangPacks();
int npacks = availableLangPacks.size();
if (npacks == 0) {
throw new Exception("no language pack available");
}
String selectedPack;
// Dummy Frame
JFrame frame = new JFrame();
frame.setIconImage(new ImageIcon(this.getClass().getResource("/img/JFrameIcon.png"))
.getImage());
Dimension frameSize = frame.getSize();
Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
frame.setLocation((screenSize.width - frameSize.width) / 2,
(screenSize.height - frameSize.height) / 2 - 10);
// We get the langpack name
if (npacks != 1) {
LanguageDialog picker = new LanguageDialog(frame, availableLangPacks.toArray());
picker.setSelection(Locale.getDefault().getISO3Language().toLowerCase());
picker.setModal(true);
picker.toFront();
// frame.setVisible(true);
frame.setVisible(false);
picker.setVisible(true);
selectedPack = (String) picker.getSelection();
if (selectedPack == null) {
throw new Exception("installation canceled");
}
} else {
selectedPack = (String) availableLangPacks.get(0);
}
// We add an xml data information
this.installdata.getXmlData().setAttribute("langpack", selectedPack);
// We load the langpack
installdata.setLocaleISO3(selectedPack);
installdata.setVariable(ScriptParser.ISO3_LANG, installdata.getLocaleISO3());
InputStream in = getClass().getResourceAsStream("/langpacks/" + selectedPack + ".xml");
this.installdata.setLangpack(new LocaleDatabase(in));
}
/**
* Loads the suitable L&F.
*
* @throws Exception Description of the Exception
*/
protected void loadLookAndFeel() throws Exception {
// Do we have any preference for this OS ?
String syskey = "unix";
if (OsVersion.IS_WINDOWS) {
syskey = "windows";
} else if (OsVersion.IS_OSX) {
syskey = "mac";
}
String laf = null;
if (installdata.guiPrefs.lookAndFeelMapping.containsKey(syskey)) {
laf = installdata.guiPrefs.lookAndFeelMapping.get(syskey);
}
// Let's use the system LAF
// Resolve whether button icons should be used or not.
boolean useButtonIcons = true;
if (installdata.guiPrefs.modifier.containsKey("useButtonIcons")
&& "no".equalsIgnoreCase(installdata.guiPrefs.modifier
.get("useButtonIcons"))) {
useButtonIcons = false;
}
ButtonFactory.useButtonIcons(useButtonIcons);
boolean useLabelIcons = true;
if (installdata.guiPrefs.modifier.containsKey("useLabelIcons")
&& "no".equalsIgnoreCase(installdata.guiPrefs.modifier
.get("useLabelIcons"))) {
useLabelIcons = false;
}
LabelFactory.setUseLabelIcons(useLabelIcons);
if (installdata.guiPrefs.modifier.containsKey("labelFontSize")) { //'labelFontSize' modifier found in 'guiprefs'
final String valStr =
installdata.guiPrefs.modifier.get("labelFontSize");
try { //parse value and enter as label-font-size multiplier:
LabelFactory.setLabelFontSize(Float.parseFloat(valStr));
}
catch (NumberFormatException ex) { //error parsing value; log message
Debug.log("Error parsing guiprefs 'labelFontSize' value (" +
valStr + ')');
}
}
if (laf == null) {
if (!"mac".equals(syskey)) {
// In Linux we will use the English locale, because of a bug in
// JRE6. In Korean, Persian, Chinese, japanese and some other
// locales the installer throws and exception and doesn't load
// at all. See http://jira.jboss.com/jira/browse/JBINSTALL-232.
// This is a workaround until this bug gets fixed.
if ("unix".equals(syskey)) {
Locale.setDefault(Locale.ENGLISH);
}
String syslaf = UIManager.getSystemLookAndFeelClassName();
UIManager.setLookAndFeel(syslaf);
if (UIManager.getLookAndFeel() instanceof MetalLookAndFeel) {
ButtonFactory.useButtonIcons(useButtonIcons);
}
}
lnf = "swing";
return;
}
// Kunststoff (http://www.incors.org/)
if ("kunststoff".equals(laf)) {
ButtonFactory.useHighlightButtons();
// Reset the use button icons state because useHighlightButtons
// make it always true.
ButtonFactory.useButtonIcons(useButtonIcons);
installdata.buttonsHColor = new Color(255, 255, 255);
Class<LookAndFeel> lafClass = (Class<LookAndFeel>) Class.forName("com.incors.plaf.kunststoff.KunststoffLookAndFeel");
Class mtheme = Class.forName("javax.swing.plaf.metal.MetalTheme");
Class[] params = {mtheme};
Class<MetalTheme> theme = (Class<MetalTheme>) Class.forName("com.izforge.izpack.gui.IzPackKMetalTheme");
Method setCurrentThemeMethod = lafClass.getMethod("setCurrentTheme", params);
// We invoke and place Kunststoff as our L&F
LookAndFeel kunststoff = lafClass.newInstance();
MetalTheme ktheme = theme.newInstance();
Object[] kparams = {ktheme};
UIManager.setLookAndFeel(kunststoff);
setCurrentThemeMethod.invoke(kunststoff, kparams);
lnf = "kunststoff";
return;
}
// Liquid (http://liquidlnf.sourceforge.net/)
if ("liquid".equals(laf)) {
UIManager.setLookAndFeel("com.birosoft.liquid.LiquidLookAndFeel");
lnf = "liquid";
Map<String, String> params = installdata.guiPrefs.lookAndFeelParams.get(laf);
if (params.containsKey("decorate.frames")) {
String value = params.get("decorate.frames");
if ("yes".equals(value)) {
JFrame.setDefaultLookAndFeelDecorated(true);
}
}
if (params.containsKey("decorate.dialogs")) {
String value = params.get("decorate.dialogs");
if ("yes".equals(value)) {
JDialog.setDefaultLookAndFeelDecorated(true);
}
}
return;
}
// Metouia (http://mlf.sourceforge.net/)
if ("metouia".equals(laf)) {
UIManager.setLookAndFeel("net.sourceforge.mlf.metouia.MetouiaLookAndFeel");
lnf = "metouia";
return;
}
// Nimbus (http://nimbus.dev.java.net/)
if ("nimbus".equals(laf)) {
UIManager.setLookAndFeel("org.jdesktop.swingx.plaf.nimbus.NimbusLookAndFeel");
return;
}
// JGoodies Looks (http://looks.dev.java.net/)
if ("looks".equals(laf)) {
Map<String, String> variants = new TreeMap<String, String>();
variants.put("windows", "com.jgoodies.looks.windows.WindowsLookAndFeel");
variants.put("plastic", "com.jgoodies.looks.plastic.PlasticLookAndFeel");
variants.put("plastic3D", "com.jgoodies.looks.plastic.Plastic3DLookAndFeel");
variants.put("plasticXP", "com.jgoodies.looks.plastic.Plastic3DLookAndFeel");
String variant = variants.get("plasticXP");
Map<String, String> params = installdata.guiPrefs.lookAndFeelParams.get(laf);
if (params.containsKey("variant")) {
String param = params.get("variant");
if (variants.containsKey(param)) {
variant = variants.get(param);
}
}
UIManager.setLookAndFeel(variant);
return;
}
// Substance (http://substance.dev.java.net/)
if ("substance".equals(laf)) {
Map<String, String> variants = new TreeMap<String, String>();
variants.put("default", "org.jvnet.substance.SubstanceLookAndFeel"); // Ugly!!!
variants.put("business", "org.jvnet.substance.skin.SubstanceBusinessLookAndFeel");
variants.put("business-blue", "org.jvnet.substance.skin.SubstanceBusinessBlueSteelLookAndFeel");
variants.put("business-black", "org.jvnet.substance.skin.SubstanceBusinessBlackSteelLookAndFeel");
variants.put("creme", "org.jvnet.substance.skin.SubstanceCremeLookAndFeel");
variants.put("sahara", "org.jvnet.substance.skin.SubstanceSaharaLookAndFeel");
variants.put("moderate", "org.jvnet.substance.skin.SubstanceModerateLookAndFeel");
variants.put("officesilver", "org.jvnet.substance.skin.SubstanceOfficeSilver2007LookAndFeel");
String variant = variants.get("default");
Map<String, String> params = installdata.guiPrefs.lookAndFeelParams.get(laf);
if (params.containsKey("variant")) {
String param = params.get("variant");
if (variants.containsKey(param)) {
variant = variants.get(param);
}
}
UIManager.setLookAndFeel(variant);
}
}
private String getTitle() {
// Use a alternate message if defined.
final String key = "installer.reversetitle";
String message = installdata.getLangpack().getString(key);
// message equal to key -> no message defined.
if (message.indexOf(key) > -1) {
return installdata.getLangpack().getString("installer.title")
+ installdata.getInfo().getAppName();
} else { // Attention! The alternate message has to contain the whole message including
// $APP_NAME and may be $APP_VER.
VariableSubstitutor vs = new VariableSubstitutor(installdata.getVariables());
return vs.substitute(message, null);
}
}
/**
* Loads the GUI.
*
* @throws Exception Description of the Exception
*/
private void loadGUI() throws Exception {
UIManager.put("OptionPane.yesButtonText", installdata.getLangpack().getString("installer.yes"));
UIManager.put("OptionPane.noButtonText", installdata.getLangpack().getString("installer.no"));
UIManager.put("OptionPane.cancelButtonText", installdata.getLangpack()
.getString("installer.cancel"));
String title = getTitle();
new InstallerFrame(title, this.installdata, this);
}
/**
* Returns whether flags should be used in the language selection dialog or not.
*
* @return whether flags should be used in the language selection dialog or not
*/
protected boolean useFlags() {
if (installdata.guiPrefs.modifier.containsKey("useFlags")
&& "no".equalsIgnoreCase(installdata.guiPrefs.modifier.get("useFlags"))) {
return (false);
}
return (true);
}
/**
* Returns the type in which the language should be displayed in the language selction dialog.
* Possible are "iso3", "native" and "usingDefault".
*
* @return language display type
*/
protected String getLangType() {
if (installdata.guiPrefs.modifier.containsKey("langDisplayType")) {
String val = installdata.guiPrefs.modifier.get("langDisplayType");
val = val.toLowerCase();
// Verify that the value is valid, else return the default.
for (String aLANGUAGE_DISPLAY_TYPES : LANGUAGE_DISPLAY_TYPES) {
if (val.equalsIgnoreCase(aLANGUAGE_DISPLAY_TYPES)) {
return (val);
}
}
Debug.trace("Value for language display type not valid; value: " + val);
}
return (LANGUAGE_DISPLAY_TYPES[0]);
}
/**
* Used to prompt the user for the language. Languages can be displayed in iso3 or the native
* notation or the notation of the default locale. Revising to native notation is based on code
* from Christian Murphy (patch #395).
*
* @author Julien Ponge
* @author Christian Murphy
* @author Klaus Bartz
*/
private final class LanguageDialog extends JDialog implements ActionListener {
private static final long serialVersionUID = 3256443616359887667L;
/**
* The combo box.
*/
private JComboBox comboBox;
/**
* The ISO3 to ISO2 HashMap
*/
private HashMap<String, String> iso3Toiso2 = null;
/**
* iso3Toiso2 expanded ?
*/
private boolean isoMapExpanded = false;
/**
* The constructor.
*
* @param items The items to display in the box.
*/
public LanguageDialog(JFrame frame, Object[] items) {
super(frame);
try {
loadLookAndFeel();
}
catch (Exception err) {
err.printStackTrace();
}
// We build the GUI
addWindowListener(new WindowHandler());
JPanel contentPane = (JPanel) getContentPane();
setTitle("Language Selection");
GridBagLayout layout = new GridBagLayout();
contentPane.setLayout(layout);
GridBagConstraints gbConstraints = new GridBagConstraints();
gbConstraints.anchor = GridBagConstraints.CENTER;
gbConstraints.insets = new Insets(5, 5, 5, 5);
gbConstraints.fill = GridBagConstraints.HORIZONTAL;
gbConstraints.gridx = 0;
gbConstraints.weightx = 1.0;
gbConstraints.weighty = 1.0;
gbConstraints.ipadx = 0;
gbConstraints.ipady = 6;
ImageIcon img = getImage();
JLabel imgLabel = new JLabel(img);
gbConstraints.gridy = 0;
contentPane.add(imgLabel);
String firstMessage = "Please select your language";
if (getLangType().equals(LANGUAGE_DISPLAY_TYPES[0]))
// iso3
{
firstMessage = "Please select your language below";
}
JLabel label1 = new JLabel(firstMessage, SwingConstants.LEADING);
gbConstraints.gridy = 1;
gbConstraints.insets = new Insets(15, 5, 5, 5);
layout.addLayoutComponent(label1, gbConstraints);
contentPane.add(label1);
gbConstraints.insets = new Insets(5, 5, 5, 5);
items = reviseItems(items);
comboBox = new JComboBox(items);
if (useFlags()) {
comboBox.setRenderer(new FlagRenderer());
}
gbConstraints.gridy = 3;
layout.addLayoutComponent(comboBox, gbConstraints);
contentPane.add(comboBox);
gbConstraints.insets = new Insets(15, 5, 15, 5);
JButton okButton = new JButton("OK");
okButton.addActionListener(this);
gbConstraints.fill = GridBagConstraints.NONE;
gbConstraints.gridy = 4;
gbConstraints.anchor = GridBagConstraints.CENTER;
layout.addLayoutComponent(okButton, gbConstraints);
contentPane.add(okButton);
getRootPane().setDefaultButton(okButton);
// Packs and centers
// Fix for bug "Installer won't show anything on OSX"
if (System.getProperty("mrj.version") == null) {
pack();
}
setSize(getPreferredSize());
Dimension frameSize = getSize();
Point center = GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint();
setLocation(center.x - frameSize.width / 2, center.y - frameSize.height / 2 - 10);
setResizable(true);
}
/**
* Revises iso3 language items depending on the language display type.
*
* @param items item array to be revised
* @return the revised array
*/
private Object[] reviseItems(Object[] items) {
String langType = getLangType();
// iso3: nothing todo.
if (langType.equals(LANGUAGE_DISPLAY_TYPES[0])) {
return (items);
}
// native: get the names as they are written in that language.
if (langType.equals(LANGUAGE_DISPLAY_TYPES[1])) {
return (expandItems(items, (new JComboBox()).getFont()));
}
// default: get the names as they are written in the default
// language.
if (langType.equals(LANGUAGE_DISPLAY_TYPES[2])) {
return (expandItems(items, null));
}
// Should never be.
return (items);
}
/**
* Expands the given iso3 codes to language names. If a testFont is given, the codes are
* tested whether they can displayed or not. If not, or no font given, the language name
* will be returned as written in the default language of this VM.
*
* @param items item array to be expanded to the language name
* @param testFont font to test wheter a name is displayable
* @return aray of expanded items
*/
private Object[] expandItems(Object[] items, Font testFont) {
int i;
if (iso3Toiso2 == null) { // Loasd predefined langs into HashMap.
iso3Toiso2 = new HashMap<String, String>(32);
isoTable = new HashMap();
for (i = 0; i < LANG_CODES.length; ++i) {
iso3Toiso2.put(LANG_CODES[i][0], LANG_CODES[i][1]);
}
}
for (i = 0; i < items.length; i++) {
Object it = expandItem(items[i], testFont);
isoTable.put(it, items[i]);
items[i] = it;
}
return items;
}
/**
* Expands the given iso3 code to a language name. If a testFont is given, the code will be
* tested whether it is displayable or not. If not, or no font given, the language name will
* be returned as written in the default language of this VM.
*
* @param item item to be expanded to the language name
* @param testFont font to test wheter the name is displayable
* @return expanded item
*/
private Object expandItem(Object item, Font testFont) {
Object iso2Str = iso3Toiso2.get(item);
int i;
if (iso2Str == null && !isoMapExpanded) { // Expand iso3toiso2 only if needed because it needs some time.
isoMapExpanded = true;
Locale[] loc = Locale.getAvailableLocales();
for (i = 0; i < loc.length; ++i) {
iso3Toiso2.put(loc[i].getISO3Language(), loc[i].getLanguage());
}
iso2Str = iso3Toiso2.get(item);
}
if (iso2Str == null)
// Unknown item, return it self.
{
return (item);
}
Locale locale = new Locale((String) iso2Str);
if (testFont == null)
// Return the language name in the spelling of the default locale.
{
return (locale.getDisplayLanguage());
}
// Get the language name in the spelling of that language.
String str = locale.getDisplayLanguage(locale);
int cdut = testFont.canDisplayUpTo(str);
if (cdut > -1)
// Test font cannot render it;
// use language name in the spelling of the default locale.
{
str = locale.getDisplayLanguage();
}
return (str);
}
/**
* Loads an image.
*
* @return The image icon.
*/
public ImageIcon getImage() {
ImageIcon img;
try {
img = new ImageIcon(LanguageDialog.class.getResource("/res/installer.langsel.img"));
}
catch (NullPointerException err) {
img = null;
}
return img;
}
/**
* Gets the selected object.
*
* @return The selected item.
*/
public Object getSelection() {
Object retval = null;
if (isoTable != null) {
retval = isoTable.get(comboBox.getSelectedItem());
}
return (retval != null) ? retval : comboBox.getSelectedItem();
}
/**
* Sets the selection.
*
* @param item The item to be selected.
*/
public void setSelection(Object item) {
Object mapped = null;
if (isoTable != null) {
Iterator iter = isoTable.keySet().iterator();
while (iter.hasNext()) {
Object key = iter.next();
if (isoTable.get(key).equals(item)) {
mapped = key;
break;
}
}
}
if (mapped == null) {
mapped = item;
}
comboBox.setSelectedItem(mapped);
}
/**
* Closer.
*
* @param e The event.
*/
public void actionPerformed(ActionEvent e) {
dispose();
}
/**
* The window events handler.
*
* @author Julien Ponge
*/
private class WindowHandler extends WindowAdapter {
/**
* We can't avoid the exit here, so don't call exit anywhere else.
*
* @param e the event.
*/
public void windowClosing(WindowEvent e) {
System.exit(0);
}
}
}
/**
* A list cell renderer that adds the flags on the display.
*
* @author Julien Ponge
*/
private static class FlagRenderer extends JLabel implements ListCellRenderer {
private static final long serialVersionUID = 3832899961942782769L;
/**
* Icons cache.
*/
private TreeMap<String, ImageIcon> icons = new TreeMap<String, ImageIcon>();
/**
* Grayed icons cache.
*/
private TreeMap<String, ImageIcon> grayIcons = new TreeMap<String, ImageIcon>();
public FlagRenderer() {
setOpaque(true);
}
/**
* Returns a suitable cell.
*
* @param list The list.
* @param value The object.
* @param index The index.
* @param isSelected true if it is selected.
* @param cellHasFocus Description of the Parameter
* @return The cell.
*/
public Component getListCellRendererComponent(JList list, Object value, int index,
boolean isSelected, boolean cellHasFocus) {
// We put the label
String iso3 = (String) value;
setText(iso3);
if (isoTable != null) {
iso3 = (String) isoTable.get(iso3);
}
if (isSelected) {
setForeground(list.getSelectionForeground());
setBackground(list.getSelectionBackground());
} else {
setForeground(list.getForeground());
setBackground(list.getBackground());
}
// We put the icon
if (!icons.containsKey(iso3)) {
ImageIcon icon;
icon = new ImageIcon(this.getClass().getResource("/res/flag." + iso3));
icons.put(iso3, icon);
icon = new ImageIcon(GrayFilter.createDisabledImage(icon.getImage()));
grayIcons.put(iso3, icon);
}
if (isSelected || index == -1) {
setIcon(icons.get(iso3));
} else {
setIcon(grayIcons.get(iso3));
}
// We return
return this;
}
}
}
| izpack-installer/src/main/java/com/izforge/izpack/installer/GUIInstaller.java | /*
* $Id$
* IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.installer;
import com.izforge.izpack.data.GUIPrefs;
import com.izforge.izpack.data.LocaleDatabase;
import com.izforge.izpack.data.ResourceManager;
import com.izforge.izpack.gui.ButtonFactory;
import com.izforge.izpack.gui.LabelFactory;
import com.izforge.izpack.util.Debug;
import com.izforge.izpack.util.FileExecutor;
import com.izforge.izpack.util.OsVersion;
import com.izforge.izpack.util.VariableSubstitutor;
import javax.swing.*;
import javax.swing.plaf.metal.MetalLookAndFeel;
import javax.swing.plaf.metal.MetalTheme;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.lang.reflect.Method;
import java.util.*;
import java.util.List;
/**
* The IzPack graphical installer class.
*
* @author Julien Ponge
*/
public class GUIInstaller extends InstallerBase {
/**
* The installation data.
*/
private InstallData installdata;
/**
* The L&F.
*/
protected String lnf;
/**
* defined modifier for language display type.
*/
private static final String[] LANGUAGE_DISPLAY_TYPES = {"iso3", "native", "default"};
private static final String[][] LANG_CODES = {{"cat", "ca"}, {"chn", "zh"}, {"cze", "cs"},
{"dan", "da"}, {"deu", "de"}, {"eng", "en"}, {"fin", "fi"}, {"fra", "fr"},
{"hun", "hu"}, {"ita", "it"}, {"jpn", "ja"}, {"mys", "ms"}, {"ned", "nl"},
{"nor", "no"}, {"pol", "pl"}, {"por", "pt"}, {"rom", "or"}, {"rus", "ru"},
{"spa", "es"}, {"svk", "sk"}, {"swe", "sv"}, {"tur", "tr"}, {"ukr", "uk"}};
/**
* holds language to ISO-3 language code translation
*/
private static HashMap isoTable;
/**
* The constructor.
*
* @throws Exception Description of the Exception
*/
public GUIInstaller() throws Exception {
try {
init();
} catch (Exception e) {
showFatalError(e);
throw e;
} catch (Error e) {
showFatalError(e);
throw e;
}
}
private void showFatalError(Throwable e) {
try {
JOptionPane.showMessageDialog(null, "Error: " + e.toString(), "Error", JOptionPane.ERROR_MESSAGE);
} catch (Exception e2) {
e2.printStackTrace();
}
}
private void init() throws Exception {
this.installdata = new InstallData();
// Loads the installation data
loadInstallData(installdata);
// add the GUI install data
loadGUIInstallData();
// Sets up the GUI L&F
loadLookAndFeel();
// Checks the Java version
checkJavaVersion();
checkJDKAvailable();
// Check for already running instance
checkLockFile();
// Loads the suitable langpack
SwingUtilities.invokeAndWait(new Runnable() {
public void run() {
try {
loadLangPack();
}
catch (Exception e) {
e.printStackTrace();
}
}
});
// create the resource manager (after the language selection!)
ResourceManager.create(this.installdata);
// load conditions
loadConditions(installdata);
// loads installer conditions
loadInstallerRequirements();
// load dynamic variables
loadDynamicVariables();
// check installer conditions
if (!checkInstallerRequirements(installdata)) {
Debug.log("not all installerconditions are fulfilled.");
System.exit(-1);
return;
}
// Load custom langpack if exist.
addCustomLangpack(installdata);
// We launch the installer GUI
SwingUtilities.invokeLater(new Runnable() {
public void run() {
try {
loadGUI();
}
catch (Exception e) {
e.printStackTrace();
}
}
});
}
public void showMissingRequirementMessage(String message) {
JOptionPane.showMessageDialog(null, message);
}
/**
* Load GUI preference information.
*
* @throws Exception
*/
public void loadGUIInstallData() throws Exception {
InputStream in = GUIInstaller.class.getResourceAsStream("/GUIPrefs");
ObjectInputStream objIn = new ObjectInputStream(in);
this.installdata.guiPrefs = (GUIPrefs) objIn.readObject();
objIn.close();
}
/**
* Sets a lock file. Not using java.nio.channels.FileLock to prevent
* the installer from accidentally keeping a lock on a file if the install
* fails or is killed.
*
* @throws Exception Description of the Exception
*/
private void checkLockFile() throws Exception {
String tempDir = System.getProperty("java.io.tmpdir");
String appName = this.installdata.getInfo().getAppName();
String fileName = "iz-" + appName + ".tmp";
Debug.trace("Making temp file: " + fileName);
Debug.trace("In temp directory: " + tempDir);
File file = new File(tempDir, fileName);
if (file.exists()) {
// Ask user if they want to proceed.
Debug.trace("Lock File Exists, asking user for permission to proceed.");
StringBuffer msg = new StringBuffer();
msg.append("<html>");
msg.append("The " + appName + " installer you are attempting to run seems to have a copy already running.<br><br>");
msg.append("This could be from a previous failed installation attempt or you may have accidentally launched <br>");
msg.append("the installer twice. <b>The recommended action is to select 'Exit'</b> and wait for the other copy of <br>");
msg.append("the installer to start. If you are sure there is no other copy of the installer running, click <br>");
msg.append("the 'Continue' button to allow this installer to run. <br><br>");
msg.append("Are you sure you want to continue with this installation?");
msg.append("</html>");
JLabel label = new JLabel(msg.toString());
label.setFont(new Font("Sans Serif", Font.PLAIN, 12));
Object[] optionValues = {"Continue", "Exit"};
int selectedOption = JOptionPane.showOptionDialog(null, label, "Warning",
JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE, null, optionValues,
optionValues[1]);
Debug.trace("Selected option: " + selectedOption);
if (selectedOption == 0) {
// Take control of the file so it gets deleted after this installer instance exits.
Debug.trace("Setting temp file to delete on exit");
file.deleteOnExit();
} else {
// Leave the file as it is.
Debug.trace("Leaving temp file alone and exiting");
System.exit(1);
}
} else {
try {
// Create the new lock file
if (file.createNewFile()) {
Debug.trace("Temp file created");
file.deleteOnExit();
} else {
Debug.trace("Temp file could not be created");
Debug.trace("*** Multiple instances of installer will be allowed ***");
}
}
catch (Exception e) {
Debug.trace("Temp file could not be created: " + e);
Debug.trace("*** Multiple instances of installer will be allowed ***");
}
}
}
/**
* Checks the Java version.
*
* @throws Exception Description of the Exception
*/
private void checkJavaVersion() throws Exception {
String version = System.getProperty("java.version");
String required = this.installdata.getInfo().getJavaVersion();
if (version.compareTo(required) < 0) {
StringBuffer msg = new StringBuffer();
msg.append("The application that you are trying to install requires a ");
msg.append(required);
msg.append(" version or later of the Java platform.\n");
msg.append("You are running a ");
msg.append(version);
msg.append(" version of the Java platform.\n");
msg.append("Please upgrade to a newer version.");
System.out.println(msg.toString());
JOptionPane.showMessageDialog(null, msg.toString(), "Error", JOptionPane.ERROR_MESSAGE);
System.exit(1);
}
}
/**
* Checks if a JDK is available.
*/
private void checkJDKAvailable() {
if (!this.installdata.getInfo().isJdkRequired()) {
return;
}
FileExecutor exec = new FileExecutor();
String[] output = new String[2];
String[] params = {"javac", "-help"};
if (exec.executeCommand(params, output) != 0) {
String[] message = {
"It looks like your system does not have a Java Development Kit (JDK) available.",
"The software that you plan to install requires a JDK for both its installation and execution.",
"\n",
"Do you still want to proceed with the installation process?"
};
int status = JOptionPane.showConfirmDialog(null, message, "Warning", JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE);
if (status == JOptionPane.NO_OPTION) {
System.exit(1);
}
}
}
/**
* Loads the suitable langpack.
*
* @throws Exception Description of the Exception
*/
private void loadLangPack() throws Exception {
// Initialisations
List availableLangPacks = getAvailableLangPacks();
int npacks = availableLangPacks.size();
if (npacks == 0) {
throw new Exception("no language pack available");
}
String selectedPack;
// Dummy Frame
JFrame frame = new JFrame();
frame.setIconImage(new ImageIcon(this.getClass().getResource("/img/JFrameIcon.png"))
.getImage());
Dimension frameSize = frame.getSize();
Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
frame.setLocation((screenSize.width - frameSize.width) / 2,
(screenSize.height - frameSize.height) / 2 - 10);
// We get the langpack name
if (npacks != 1) {
LanguageDialog picker = new LanguageDialog(frame, availableLangPacks.toArray());
picker.setSelection(Locale.getDefault().getISO3Language().toLowerCase());
picker.setModal(true);
picker.toFront();
// frame.setVisible(true);
frame.setVisible(false);
picker.setVisible(true);
selectedPack = (String) picker.getSelection();
if (selectedPack == null) {
throw new Exception("installation canceled");
}
} else {
selectedPack = (String) availableLangPacks.get(0);
}
// We add an xml data information
this.installdata.getXmlData().setAttribute("langpack", selectedPack);
// We load the langpack
installdata.setLocaleISO3(selectedPack);
installdata.setVariable(ScriptParser.ISO3_LANG, installdata.getLocaleISO3());
InputStream in = getClass().getResourceAsStream("/langpacks/" + selectedPack + ".xml");
this.installdata.setLangpack(new LocaleDatabase(in));
}
/**
* Loads the suitable L&F.
*
* @throws Exception Description of the Exception
*/
protected void loadLookAndFeel() throws Exception {
// Do we have any preference for this OS ?
String syskey = "unix";
if (OsVersion.IS_WINDOWS) {
syskey = "windows";
} else if (OsVersion.IS_OSX) {
syskey = "mac";
}
String laf = null;
if (installdata.guiPrefs.lookAndFeelMapping.containsKey(syskey)) {
laf = installdata.guiPrefs.lookAndFeelMapping.get(syskey);
}
// Let's use the system LAF
// Resolve whether button icons should be used or not.
boolean useButtonIcons = true;
if (installdata.guiPrefs.modifier.containsKey("useButtonIcons")
&& "no".equalsIgnoreCase(installdata.guiPrefs.modifier
.get("useButtonIcons"))) {
useButtonIcons = false;
}
ButtonFactory.useButtonIcons(useButtonIcons);
boolean useLabelIcons = true;
if (installdata.guiPrefs.modifier.containsKey("useLabelIcons")
&& "no".equalsIgnoreCase(installdata.guiPrefs.modifier
.get("useLabelIcons"))) {
useLabelIcons = false;
}
LabelFactory.setUseLabelIcons(useLabelIcons);
if (installdata.guiPrefs.modifier.containsKey("labelFontSize")) { //'labelFontSize' modifier found in 'guiprefs'
final String valStr =
installdata.guiPrefs.modifier.get("labelFontSize");
try { //parse value and enter as label-font-size multiplier:
LabelFactory.setLabelFontSize(Float.parseFloat(valStr));
}
catch (NumberFormatException ex) { //error parsing value; log message
Debug.log("Error parsing guiprefs 'labelFontSize' value (" +
valStr + ')');
}
}
if (laf == null) {
if (!"mac".equals(syskey)) {
// In Linux we will use the English locale, because of a bug in
// JRE6. In Korean, Persian, Chinese, japanese and some other
// locales the installer throws and exception and doesn't load
// at all. See http://jira.jboss.com/jira/browse/JBINSTALL-232.
// This is a workaround until this bug gets fixed.
if ("unix".equals(syskey)) {
Locale.setDefault(Locale.ENGLISH);
}
String syslaf = UIManager.getSystemLookAndFeelClassName();
UIManager.setLookAndFeel(syslaf);
if (UIManager.getLookAndFeel() instanceof MetalLookAndFeel) {
ButtonFactory.useButtonIcons(useButtonIcons);
}
}
lnf = "swing";
return;
}
// Kunststoff (http://www.incors.org/)
if ("kunststoff".equals(laf)) {
ButtonFactory.useHighlightButtons();
// Reset the use button icons state because useHighlightButtons
// make it always true.
ButtonFactory.useButtonIcons(useButtonIcons);
installdata.buttonsHColor = new Color(255, 255, 255);
Class<LookAndFeel> lafClass = (Class<LookAndFeel>) Class.forName("com.incors.plaf.kunststoff.KunststoffLookAndFeel");
Class mtheme = Class.forName("javax.swing.plaf.metal.MetalTheme");
Class[] params = {mtheme};
Class<MetalTheme> theme = (Class<MetalTheme>) Class.forName("com.izforge.izpack.gui.IzPackKMetalTheme");
Method setCurrentThemeMethod = lafClass.getMethod("setCurrentTheme", params);
// We invoke and place Kunststoff as our L&F
LookAndFeel kunststoff = lafClass.newInstance();
MetalTheme ktheme = theme.newInstance();
Object[] kparams = {ktheme};
UIManager.setLookAndFeel(kunststoff);
setCurrentThemeMethod.invoke(kunststoff, kparams);
lnf = "kunststoff";
return;
}
// Liquid (http://liquidlnf.sourceforge.net/)
if ("liquid".equals(laf)) {
UIManager.setLookAndFeel("com.birosoft.liquid.LiquidLookAndFeel");
lnf = "liquid";
Map<String, String> params = installdata.guiPrefs.lookAndFeelParams.get(laf);
if (params.containsKey("decorate.frames")) {
String value = params.get("decorate.frames");
if ("yes".equals(value)) {
JFrame.setDefaultLookAndFeelDecorated(true);
}
}
if (params.containsKey("decorate.dialogs")) {
String value = params.get("decorate.dialogs");
if ("yes".equals(value)) {
JDialog.setDefaultLookAndFeelDecorated(true);
}
}
return;
}
// Metouia (http://mlf.sourceforge.net/)
if ("metouia".equals(laf)) {
UIManager.setLookAndFeel("net.sourceforge.mlf.metouia.MetouiaLookAndFeel");
lnf = "metouia";
return;
}
// Nimbus (http://nimbus.dev.java.net/)
if ("nimbus".equals(laf)) {
UIManager.setLookAndFeel("org.jdesktop.swingx.plaf.nimbus.NimbusLookAndFeel");
return;
}
// JGoodies Looks (http://looks.dev.java.net/)
if ("looks".equals(laf)) {
Map<String, String> variants = new TreeMap<String, String>();
variants.put("windows", "com.jgoodies.looks.windows.WindowsLookAndFeel");
variants.put("plastic", "com.jgoodies.looks.plastic.PlasticLookAndFeel");
variants.put("plastic3D", "com.jgoodies.looks.plastic.Plastic3DLookAndFeel");
variants.put("plasticXP", "com.jgoodies.looks.plastic.Plastic3DLookAndFeel");
String variant = variants.get("plasticXP");
Map<String, String> params = installdata.guiPrefs.lookAndFeelParams.get(laf);
if (params.containsKey("variant")) {
String param = params.get("variant");
if (variants.containsKey(param)) {
variant = variants.get(param);
}
}
UIManager.setLookAndFeel(variant);
return;
}
// Substance (http://substance.dev.java.net/)
if ("substance".equals(laf)) {
Map<String, String> variants = new TreeMap<String, String>();
variants.put("default", "org.jvnet.substance.SubstanceLookAndFeel"); // Ugly!!!
variants.put("business", "org.jvnet.substance.skin.SubstanceBusinessLookAndFeel");
variants.put("business-blue", "org.jvnet.substance.skin.SubstanceBusinessBlueSteelLookAndFeel");
variants.put("business-black", "org.jvnet.substance.skin.SubstanceBusinessBlackSteelLookAndFeel");
variants.put("creme", "org.jvnet.substance.skin.SubstanceCremeLookAndFeel");
variants.put("sahara", "org.jvnet.substance.skin.SubstanceSaharaLookAndFeel");
variants.put("moderate", "org.jvnet.substance.skin.SubstanceModerateLookAndFeel");
variants.put("officesilver", "org.jvnet.substance.skin.SubstanceOfficeSilver2007LookAndFeel");
String variant = variants.get("default");
Map<String, String> params = installdata.guiPrefs.lookAndFeelParams.get(laf);
if (params.containsKey("variant")) {
String param = params.get("variant");
if (variants.containsKey(param)) {
variant = variants.get(param);
}
}
UIManager.setLookAndFeel(variant);
}
}
/**
* Loads the GUI.
*
* @throws Exception Description of the Exception
*/
private void loadGUI() throws Exception {
UIManager.put("OptionPane.yesButtonText", installdata.getLangpack().getString("installer.yes"));
UIManager.put("OptionPane.noButtonText", installdata.getLangpack().getString("installer.no"));
UIManager.put("OptionPane.cancelButtonText", installdata.getLangpack()
.getString("installer.cancel"));
String title;
// Use a alternate message if defined.
final String key = "installer.reversetitle";
String message = installdata.getLangpack().getString(key);
// message equal to key -> no message defined.
if (message.indexOf(key) > -1) {
title = installdata.getLangpack().getString("installer.title")
+ installdata.getInfo().getAppName();
} else { // Attention! The alternate message has to contain the whole message including
// $APP_NAME and may be $APP_VER.
VariableSubstitutor vs = new VariableSubstitutor(installdata.getVariables());
title = vs.substitute(message, null);
}
new InstallerFrame(title, this.installdata, this);
}
/**
* Returns whether flags should be used in the language selection dialog or not.
*
* @return whether flags should be used in the language selection dialog or not
*/
protected boolean useFlags() {
if (installdata.guiPrefs.modifier.containsKey("useFlags")
&& "no".equalsIgnoreCase(installdata.guiPrefs.modifier.get("useFlags"))) {
return (false);
}
return (true);
}
/**
* Returns the type in which the language should be displayed in the language selction dialog.
* Possible are "iso3", "native" and "usingDefault".
*
* @return language display type
*/
protected String getLangType() {
if (installdata.guiPrefs.modifier.containsKey("langDisplayType")) {
String val = installdata.guiPrefs.modifier.get("langDisplayType");
val = val.toLowerCase();
// Verify that the value is valid, else return the default.
for (String aLANGUAGE_DISPLAY_TYPES : LANGUAGE_DISPLAY_TYPES) {
if (val.equalsIgnoreCase(aLANGUAGE_DISPLAY_TYPES)) {
return (val);
}
}
Debug.trace("Value for language display type not valid; value: " + val);
}
return (LANGUAGE_DISPLAY_TYPES[0]);
}
/**
* Used to prompt the user for the language. Languages can be displayed in iso3 or the native
* notation or the notation of the default locale. Revising to native notation is based on code
* from Christian Murphy (patch #395).
*
* @author Julien Ponge
* @author Christian Murphy
* @author Klaus Bartz
*/
private final class LanguageDialog extends JDialog implements ActionListener {
private static final long serialVersionUID = 3256443616359887667L;
/**
* The combo box.
*/
private JComboBox comboBox;
/**
* The ISO3 to ISO2 HashMap
*/
private HashMap<String, String> iso3Toiso2 = null;
/**
* iso3Toiso2 expanded ?
*/
private boolean isoMapExpanded = false;
/**
* The constructor.
*
* @param items The items to display in the box.
*/
public LanguageDialog(JFrame frame, Object[] items) {
super(frame);
try {
loadLookAndFeel();
}
catch (Exception err) {
err.printStackTrace();
}
// We build the GUI
addWindowListener(new WindowHandler());
JPanel contentPane = (JPanel) getContentPane();
setTitle("Language Selection");
GridBagLayout layout = new GridBagLayout();
contentPane.setLayout(layout);
GridBagConstraints gbConstraints = new GridBagConstraints();
gbConstraints.anchor = GridBagConstraints.CENTER;
gbConstraints.insets = new Insets(5, 5, 5, 5);
gbConstraints.fill = GridBagConstraints.HORIZONTAL;
gbConstraints.gridx = 0;
gbConstraints.weightx = 1.0;
gbConstraints.weighty = 1.0;
gbConstraints.ipadx = 0;
gbConstraints.ipady = 6;
ImageIcon img = getImage();
JLabel imgLabel = new JLabel(img);
gbConstraints.gridy = 0;
contentPane.add(imgLabel);
String firstMessage = "Please select your language";
if (getLangType().equals(LANGUAGE_DISPLAY_TYPES[0]))
// iso3
{
firstMessage = "Please select your language below";
}
JLabel label1 = new JLabel(firstMessage, SwingConstants.LEADING);
gbConstraints.gridy = 1;
gbConstraints.insets = new Insets(15, 5, 5, 5);
layout.addLayoutComponent(label1, gbConstraints);
contentPane.add(label1);
gbConstraints.insets = new Insets(5, 5, 5, 5);
items = reviseItems(items);
comboBox = new JComboBox(items);
if (useFlags()) {
comboBox.setRenderer(new FlagRenderer());
}
gbConstraints.gridy = 3;
layout.addLayoutComponent(comboBox, gbConstraints);
contentPane.add(comboBox);
gbConstraints.insets = new Insets(15, 5, 15, 5);
JButton okButton = new JButton("OK");
okButton.addActionListener(this);
gbConstraints.fill = GridBagConstraints.NONE;
gbConstraints.gridy = 4;
gbConstraints.anchor = GridBagConstraints.CENTER;
layout.addLayoutComponent(okButton, gbConstraints);
contentPane.add(okButton);
getRootPane().setDefaultButton(okButton);
// Packs and centers
// Fix for bug "Installer won't show anything on OSX"
if (System.getProperty("mrj.version") == null) {
pack();
}
setSize(getPreferredSize());
Dimension frameSize = getSize();
Point center = GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint();
setLocation(center.x - frameSize.width / 2, center.y - frameSize.height / 2 - 10);
setResizable(true);
}
/**
* Revises iso3 language items depending on the language display type.
*
* @param items item array to be revised
* @return the revised array
*/
private Object[] reviseItems(Object[] items) {
String langType = getLangType();
// iso3: nothing todo.
if (langType.equals(LANGUAGE_DISPLAY_TYPES[0])) {
return (items);
}
// native: get the names as they are written in that language.
if (langType.equals(LANGUAGE_DISPLAY_TYPES[1])) {
return (expandItems(items, (new JComboBox()).getFont()));
}
// default: get the names as they are written in the default
// language.
if (langType.equals(LANGUAGE_DISPLAY_TYPES[2])) {
return (expandItems(items, null));
}
// Should never be.
return (items);
}
/**
* Expands the given iso3 codes to language names. If a testFont is given, the codes are
* tested whether they can displayed or not. If not, or no font given, the language name
* will be returned as written in the default language of this VM.
*
* @param items item array to be expanded to the language name
* @param testFont font to test wheter a name is displayable
* @return aray of expanded items
*/
private Object[] expandItems(Object[] items, Font testFont) {
int i;
if (iso3Toiso2 == null) { // Loasd predefined langs into HashMap.
iso3Toiso2 = new HashMap<String, String>(32);
isoTable = new HashMap();
for (i = 0; i < LANG_CODES.length; ++i) {
iso3Toiso2.put(LANG_CODES[i][0], LANG_CODES[i][1]);
}
}
for (i = 0; i < items.length; i++) {
Object it = expandItem(items[i], testFont);
isoTable.put(it, items[i]);
items[i] = it;
}
return items;
}
/**
* Expands the given iso3 code to a language name. If a testFont is given, the code will be
* tested whether it is displayable or not. If not, or no font given, the language name will
* be returned as written in the default language of this VM.
*
* @param item item to be expanded to the language name
* @param testFont font to test wheter the name is displayable
* @return expanded item
*/
private Object expandItem(Object item, Font testFont) {
Object iso2Str = iso3Toiso2.get(item);
int i;
if (iso2Str == null && !isoMapExpanded) { // Expand iso3toiso2 only if needed because it needs some time.
isoMapExpanded = true;
Locale[] loc = Locale.getAvailableLocales();
for (i = 0; i < loc.length; ++i) {
iso3Toiso2.put(loc[i].getISO3Language(), loc[i].getLanguage());
}
iso2Str = iso3Toiso2.get(item);
}
if (iso2Str == null)
// Unknown item, return it self.
{
return (item);
}
Locale locale = new Locale((String) iso2Str);
if (testFont == null)
// Return the language name in the spelling of the default locale.
{
return (locale.getDisplayLanguage());
}
// Get the language name in the spelling of that language.
String str = locale.getDisplayLanguage(locale);
int cdut = testFont.canDisplayUpTo(str);
if (cdut > -1)
// Test font cannot render it;
// use language name in the spelling of the default locale.
{
str = locale.getDisplayLanguage();
}
return (str);
}
/**
* Loads an image.
*
* @return The image icon.
*/
public ImageIcon getImage() {
ImageIcon img;
try {
img = new ImageIcon(LanguageDialog.class.getResource("/res/installer.langsel.img"));
}
catch (NullPointerException err) {
img = null;
}
return img;
}
/**
* Gets the selected object.
*
* @return The selected item.
*/
public Object getSelection() {
Object retval = null;
if (isoTable != null) {
retval = isoTable.get(comboBox.getSelectedItem());
}
return (retval != null) ? retval : comboBox.getSelectedItem();
}
/**
* Sets the selection.
*
* @param item The item to be selected.
*/
public void setSelection(Object item) {
Object mapped = null;
if (isoTable != null) {
Iterator iter = isoTable.keySet().iterator();
while (iter.hasNext()) {
Object key = iter.next();
if (isoTable.get(key).equals(item)) {
mapped = key;
break;
}
}
}
if (mapped == null) {
mapped = item;
}
comboBox.setSelectedItem(mapped);
}
/**
* Closer.
*
* @param e The event.
*/
public void actionPerformed(ActionEvent e) {
dispose();
}
/**
* The window events handler.
*
* @author Julien Ponge
*/
private class WindowHandler extends WindowAdapter {
/**
* We can't avoid the exit here, so don't call exit anywhere else.
*
* @param e the event.
*/
public void windowClosing(WindowEvent e) {
System.exit(0);
}
}
}
/**
* A list cell renderer that adds the flags on the display.
*
* @author Julien Ponge
*/
private static class FlagRenderer extends JLabel implements ListCellRenderer {
private static final long serialVersionUID = 3832899961942782769L;
/**
* Icons cache.
*/
private TreeMap<String, ImageIcon> icons = new TreeMap<String, ImageIcon>();
/**
* Grayed icons cache.
*/
private TreeMap<String, ImageIcon> grayIcons = new TreeMap<String, ImageIcon>();
public FlagRenderer() {
setOpaque(true);
}
/**
* Returns a suitable cell.
*
* @param list The list.
* @param value The object.
* @param index The index.
* @param isSelected true if it is selected.
* @param cellHasFocus Description of the Parameter
* @return The cell.
*/
public Component getListCellRendererComponent(JList list, Object value, int index,
boolean isSelected, boolean cellHasFocus) {
// We put the label
String iso3 = (String) value;
setText(iso3);
if (isoTable != null) {
iso3 = (String) isoTable.get(iso3);
}
if (isSelected) {
setForeground(list.getSelectionForeground());
setBackground(list.getSelectionBackground());
} else {
setForeground(list.getForeground());
setBackground(list.getBackground());
}
// We put the icon
if (!icons.containsKey(iso3)) {
ImageIcon icon;
icon = new ImageIcon(this.getClass().getResource("/res/flag." + iso3));
icons.put(iso3, icon);
icon = new ImageIcon(GrayFilter.createDisabledImage(icon.getImage()));
grayIcons.put(iso3, icon);
}
if (isSelected || index == -1) {
setIcon(icons.get(iso3));
} else {
setIcon(grayIcons.get(iso3));
}
// We return
return this;
}
}
}
| A bit of refactoring on GuiInstaller
| izpack-installer/src/main/java/com/izforge/izpack/installer/GUIInstaller.java | A bit of refactoring on GuiInstaller |
|
Java | apache-2.0 | 7fdd40d897694da924d151024379c0db4fb032c1 | 0 | youdonghai/intellij-community,caot/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,kool79/intellij-community,xfournet/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,ahb0327/intellij-community,kool79/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,vladmm/intellij-community,supersven/intellij-community,apixandru/intellij-community,izonder/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,allotria/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,signed/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,robovm/robovm-studio,robovm/robovm-studio,xfournet/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,blademainer/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,caot/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,supersven/intellij-community,blademainer/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,da1z/intellij-community,signed/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,kool79/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,Lekanich/intellij-community,semonte/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,signed/intellij-community,caot/intellij-community,wreckJ/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,ibinti/intellij-community,jagguli/intellij-community,vladmm/intellij-community,semonte/intellij-community,dslomov/intellij-community,hurricup/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,samthor/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,izonder/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,ryano144/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,holmes/intellij-community,signed/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,suncycheng/intellij-community,signed/intellij-community,vladmm/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,adedayo/intellij-community,holmes/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,adedayo/intellij-community,FHannes/intellij-community,clumsy/intellij-community,blademainer/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,fitermay/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,caot/intellij-community,izonder/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,fnouama/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,supersven/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,semonte/intellij-community,dslomov/intellij-community,caot/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,hurricup/intellij-community,diorcety/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,semonte/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,caot/intellij-community,vladmm/intellij-community,apixandru/intellij-community,asedunov/intellij-community,allotria/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,jagguli/intellij-community,apixandru/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,Lekanich/intellij-community,slisson/intellij-community,samthor/intellij-community,amith01994/intellij-community,fnouama/intellij-community,samthor/intellij-community,allotria/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,Distrotech/intellij-community,izonder/intellij-community,diorcety/intellij-community,allotria/intellij-community,ryano144/intellij-community,petteyg/intellij-community,robovm/robovm-studio,retomerz/intellij-community,allotria/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,holmes/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,retomerz/intellij-community,signed/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,da1z/intellij-community,semonte/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,semonte/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,retomerz/intellij-community,adedayo/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,semonte/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,semonte/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,kool79/intellij-community,da1z/intellij-community,clumsy/intellij-community,allotria/intellij-community,ibinti/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,fitermay/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,samthor/intellij-community,signed/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,jagguli/intellij-community,vladmm/intellij-community,kdwink/intellij-community,fnouama/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,petteyg/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,slisson/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,blademainer/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,da1z/intellij-community,clumsy/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,FHannes/intellij-community,apixandru/intellij-community,slisson/intellij-community,izonder/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,holmes/intellij-community,fnouama/intellij-community,robovm/robovm-studio,caot/intellij-community,signed/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,ibinti/intellij-community,asedunov/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,holmes/intellij-community,fnouama/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,nicolargo/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,diorcety/intellij-community,blademainer/intellij-community,ibinti/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,robovm/robovm-studio,izonder/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,signed/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,ahb0327/intellij-community,samthor/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,hurricup/intellij-community,supersven/intellij-community,amith01994/intellij-community,ibinti/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,kool79/intellij-community,akosyakov/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,FHannes/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,samthor/intellij-community,dslomov/intellij-community,adedayo/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,blademainer/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,da1z/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,caot/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,izonder/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,signed/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,allotria/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,caot/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,caot/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,supersven/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,apixandru/intellij-community,holmes/intellij-community,FHannes/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,blademainer/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,apixandru/intellij-community,allotria/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,signed/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,blademainer/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,hurricup/intellij-community,amith01994/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,retomerz/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,caot/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,clumsy/intellij-community,kool79/intellij-community,retomerz/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,wreckJ/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,xfournet/intellij-community,fitermay/intellij-community,xfournet/intellij-community,petteyg/intellij-community,FHannes/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,asedunov/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,kool79/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,samthor/intellij-community,signed/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,ryano144/intellij-community,suncycheng/intellij-community,holmes/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.deployment;
import com.intellij.openapi.compiler.CompileContext;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.StringBuilderSpinAllocator;
import com.intellij.util.descriptors.ConfigFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.Set;
public abstract class DeploymentUtil {
public static DeploymentUtil getInstance() {
return ServiceManager.getService(DeploymentUtil.class);
}
@Deprecated
public abstract void copyFile(@NotNull File fromFile,
@NotNull File toFile,
@NotNull CompileContext context,
@Nullable Set<String> writtenPaths,
@Nullable FileFilter fileFilter) throws IOException;
public static String trimForwardSlashes(@NotNull String path) {
while (path.length() != 0 && (path.charAt(0) == '/' || path.charAt(0) == File.separatorChar)) {
path = path.substring(1);
}
return path;
}
@Deprecated
public abstract void reportDeploymentDescriptorDoesNotExists(ConfigFile descriptor, CompileContext context, Module module);
public static String concatPaths(String... paths) {
final StringBuilder builder = new StringBuilder();
for (String path : paths) {
if (path.length() == 0) continue;
final int len = builder.length();
if (len > 0 && builder.charAt(len - 1) != '/' && builder.charAt(len - 1) != File.separatorChar) {
builder.append('/');
}
builder.append(len != 0 ? trimForwardSlashes(path) : path);
}
return builder.toString();
}
public static String appendToPath(@NotNull String basePath, @NotNull String relativePath) {
final boolean endsWithSlash = StringUtil.endsWithChar(basePath, '/') || StringUtil.endsWithChar(basePath, '\\');
final boolean startsWithSlash = StringUtil.startsWithChar(relativePath, '/') || StringUtil.startsWithChar(relativePath, '\\');
String tail;
if (endsWithSlash && startsWithSlash) {
tail = trimForwardSlashes(relativePath);
}
else if (!endsWithSlash && !startsWithSlash && basePath.length() > 0 && relativePath.length() > 0) {
tail = "/" + relativePath;
}
else {
tail = relativePath;
}
return basePath + tail;
}
@Nullable
public abstract String getConfigFileErrorMessage(ConfigFile configFile);
/**
* @deprecated use {@link com.intellij.openapi.util.io.FileUtil#getRelativePath}
*/
@Nullable
public static String getRelativePath(@NotNull String basePath, @NotNull final String filePath) {
if (basePath.equals(filePath)) return "";
if (!basePath.endsWith(File.separator)) basePath += File.separatorChar;
int len = 0;
int lastSeparatorIndex = 0; // need this for cases like this: base="/temp/abcde/baseDir" and file="/temp/ab"
while (len < filePath.length() && len < basePath.length() && filePath.charAt(len) == basePath.charAt(len)) {
if (basePath.charAt(len) == File.separatorChar) {
lastSeparatorIndex = len;
}
len++;
}
if (len == 0) {
return null;
}
final StringBuilder relativePath = StringBuilderSpinAllocator.alloc();
try {
for (int i=len; i < basePath.length(); i++) {
if (basePath.charAt(i) == File.separatorChar) {
relativePath.append("..");
relativePath.append(File.separatorChar);
}
}
relativePath.append(filePath.substring(lastSeparatorIndex + 1));
return relativePath.toString();
}
finally {
StringBuilderSpinAllocator.dispose(relativePath);
}
}
@Deprecated
public abstract void checkConfigFile(final ConfigFile descriptor, final CompileContext compileContext, final Module module);
}
| java/compiler/openapi/src/com/intellij/openapi/deployment/DeploymentUtil.java | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.deployment;
import com.intellij.openapi.compiler.CompileContext;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.StringBuilderSpinAllocator;
import com.intellij.util.descriptors.ConfigFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.Set;
public abstract class DeploymentUtil {
public static DeploymentUtil getInstance() {
return ServiceManager.getService(DeploymentUtil.class);
}
public abstract void copyFile(@NotNull File fromFile,
@NotNull File toFile,
@NotNull CompileContext context,
@Nullable Set<String> writtenPaths,
@Nullable FileFilter fileFilter) throws IOException;
public static String trimForwardSlashes(@NotNull String path) {
while (path.length() != 0 && (path.charAt(0) == '/' || path.charAt(0) == File.separatorChar)) {
path = path.substring(1);
}
return path;
}
public abstract void reportDeploymentDescriptorDoesNotExists(ConfigFile descriptor, CompileContext context, Module module);
public static String concatPaths(String... paths) {
final StringBuilder builder = new StringBuilder();
for (String path : paths) {
if (path.length() == 0) continue;
final int len = builder.length();
if (len > 0 && builder.charAt(len - 1) != '/' && builder.charAt(len - 1) != File.separatorChar) {
builder.append('/');
}
builder.append(len != 0 ? trimForwardSlashes(path) : path);
}
return builder.toString();
}
public static String appendToPath(@NotNull String basePath, @NotNull String relativePath) {
final boolean endsWithSlash = StringUtil.endsWithChar(basePath, '/') || StringUtil.endsWithChar(basePath, '\\');
final boolean startsWithSlash = StringUtil.startsWithChar(relativePath, '/') || StringUtil.startsWithChar(relativePath, '\\');
String tail;
if (endsWithSlash && startsWithSlash) {
tail = trimForwardSlashes(relativePath);
}
else if (!endsWithSlash && !startsWithSlash && basePath.length() > 0 && relativePath.length() > 0) {
tail = "/" + relativePath;
}
else {
tail = relativePath;
}
return basePath + tail;
}
@Nullable
public abstract String getConfigFileErrorMessage(ConfigFile configFile);
/**
* @deprecated use {@link com.intellij.openapi.util.io.FileUtil#getRelativePath}
*/
@Nullable
public static String getRelativePath(@NotNull String basePath, @NotNull final String filePath) {
if (basePath.equals(filePath)) return "";
if (!basePath.endsWith(File.separator)) basePath += File.separatorChar;
int len = 0;
int lastSeparatorIndex = 0; // need this for cases like this: base="/temp/abcde/baseDir" and file="/temp/ab"
while (len < filePath.length() && len < basePath.length() && filePath.charAt(len) == basePath.charAt(len)) {
if (basePath.charAt(len) == File.separatorChar) {
lastSeparatorIndex = len;
}
len++;
}
if (len == 0) {
return null;
}
final StringBuilder relativePath = StringBuilderSpinAllocator.alloc();
try {
for (int i=len; i < basePath.length(); i++) {
if (basePath.charAt(i) == File.separatorChar) {
relativePath.append("..");
relativePath.append(File.separatorChar);
}
}
relativePath.append(filePath.substring(lastSeparatorIndex + 1));
return relativePath.toString();
}
finally {
StringBuilderSpinAllocator.dispose(relativePath);
}
}
public abstract void checkConfigFile(final ConfigFile descriptor, final CompileContext compileContext, final Module module);
}
| obsolete unused methods deprecated
| java/compiler/openapi/src/com/intellij/openapi/deployment/DeploymentUtil.java | obsolete unused methods deprecated |
|
Java | apache-2.0 | 3831eaf0bb7a38becf4d45843da8310eeee32a46 | 0 | codenameupik/MaterialDrawer,yunarta/MaterialDrawer,maxi182/MaterialDrawer,Ribeiro/MaterialDrawer,JohnTsaiAndroid/MaterialDrawer,jiguoling/MaterialDrawer,fxyzj/MaterialDrawer,chenanze/MaterialDrawer,jiangzhonghui/MaterialDrawer,sandeepnegi/MaterialDrawer,WeRockStar/MaterialDrawer,lyxwll/MaterialDrawer,mikepenz/MaterialDrawer,java02014/MaterialDrawer,riezkykenzie/MaterialDrawer,Bloody-Badboy/MaterialDrawer,hongnguyenpro/MaterialDrawer,amithub/Material-Drawer-Sample,liqk2014/MaterialDrawer,RacZo/MaterialDrawer,yongjiliu/MaterialDrawer,jaohoang/MaterialDrawer,hanhailong/MaterialDrawer,FWest98/MaterialDrawer,Ornolfr/MaterialDrawer,untoms/MaterialDrawer,chaoyang805/MaterialDrawer,umitems/MaterialDrawer,webmasteraxe/MaterialDrawer,chteuchteu/MaterialDrawer,lstNull/MaterialDrawer,DrNadson/MaterialDrawer,flystaros/MaterialDrawer,maitho/MaterialDrawer,rodnois/MaterialDrawer,Sshah88/MaterialDrawer,mmazzarolo/MaterialDrawer,focus-forked-open-source-license/MaterialDrawer,guffyWave/MaterialDrawer,valokafor/MaterialDrawer,s8871404/MaterialDrawer,rayzone107/MaterialDrawer,McUsaVsUrss/MaterialDrawer,bendroid/MaterialDrawer,Papuh/MaterialDrawer,Kondasamy/MaterialDrawer,fairyzoro/MaterialDrawer,EnterPrayz/MaterialDrawer,generalzou/MaterialDrawer,rabyunghwa/MaterialDrawer,nousmotards/MaterialDrawer,mikepenz/MaterialDrawer,StNekroman/MaterialDrawer,yunarta/MaterialDrawer,irfankhoirul/MaterialDrawer,democedes/MaterialDrawer,honeyflyfish/MaterialDrawer,Ryan---Yang/MaterialDrawer,MaTriXy/MaterialDrawer,rameshvoltella/MaterialDrawer,hejunbinlan/MaterialDrawer,heriproj/MaterialDrawer,MaTriXy/MaterialDrawer,idrisfab/MaterialDrawer,jgabrielfreitas/MaterialDrawer,mikepenz/MaterialDrawer,mychaelgo/MaterialDrawer,Rowandjj/MaterialDrawer | package com.mikepenz.materialdrawer;
import android.app.Activity;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.FrameLayout;
import android.widget.ListView;
import android.widget.RelativeLayout;
import com.mikepenz.iconics.typeface.IIcon;
import com.mikepenz.materialdrawer.adapter.BaseDrawerAdapter;
import com.mikepenz.materialdrawer.model.interfaces.Badgeable;
import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem;
import com.mikepenz.materialdrawer.model.interfaces.Iconable;
import com.mikepenz.materialdrawer.model.interfaces.Nameable;
import com.mikepenz.materialdrawer.util.KeyboardUtil;
import com.mikepenz.materialdrawer.util.UIUtils;
import com.mikepenz.materialdrawer.view.ScrimInsetsFrameLayout;
import java.util.ArrayList;
import java.util.Collections;
/**
* Created by mikepenz on 03.02.15.
*/
public class Drawer {
/**
* BUNDLE param to store the selection
*/
protected static final String BUNDLE_SELECTION = "bundle_selection";
protected static final String BUNDLE_FOOTER_SELECTION = "bundle_footer_selection";
/**
* Per the design guidelines, you should show the drawer on launch until the user manually
* expands it. This shared preference tracks this.
*/
protected static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";
private final DrawerBuilder mDrawerBuilder;
private FrameLayout mContentView;
private KeyboardUtil mKeyboardUtil = null;
/**
* the protected Constructor for the result
*
* @param drawerBuilder
*/
protected Drawer(DrawerBuilder drawerBuilder) {
this.mDrawerBuilder = drawerBuilder;
}
/**
* Get the DrawerLayout of the current drawer
*
* @return
*/
public DrawerLayout getDrawerLayout() {
return this.mDrawerBuilder.mDrawerLayout;
}
/**
* Open the drawer
*/
public void openDrawer() {
if (mDrawerBuilder.mDrawerLayout != null && mDrawerBuilder.mSliderLayout != null) {
mDrawerBuilder.mDrawerLayout.openDrawer(mDrawerBuilder.mDrawerGravity);
}
}
/**
* close the drawer
*/
public void closeDrawer() {
if (mDrawerBuilder.mDrawerLayout != null) {
mDrawerBuilder.mDrawerLayout.closeDrawer(mDrawerBuilder.mDrawerGravity);
}
}
/**
* Get the current state of the drawer.
* True if the drawer is currently open.
*
* @return
*/
public boolean isDrawerOpen() {
if (mDrawerBuilder.mDrawerLayout != null && mDrawerBuilder.mSliderLayout != null) {
return mDrawerBuilder.mDrawerLayout.isDrawerOpen(mDrawerBuilder.mDrawerGravity);
}
return false;
}
/**
* set the insetsFrameLayout to display the content in fullscreen
* under the statusBar and navigationBar
*
* @param fullscreen
*/
public void setFullscreen(boolean fullscreen) {
if (mDrawerBuilder.mDrawerContentRoot != null) {
mDrawerBuilder.mDrawerContentRoot.setEnabled(!fullscreen);
}
}
/**
* Set the color for the statusBar
*
* @param statusBarColor
*/
public void setStatusBarColor(int statusBarColor) {
if (mDrawerBuilder.mDrawerContentRoot != null) {
mDrawerBuilder.mDrawerContentRoot.setInsetForeground(statusBarColor);
mDrawerBuilder.mDrawerContentRoot.invalidate();
}
}
/**
* get the drawerContentRoot Layout (ScrimInsetsFrameLayout)
*
* @return
*/
public ScrimInsetsFrameLayout getScrimInsetsFrameLayout() {
return mDrawerBuilder.mDrawerContentRoot;
}
/**
* a helper method to enable the keyboardUtil for a specific activity
* or disable it. note this will cause some frame drops because of the
* listener.
*
* @param activity
* @param enable
*/
public void keyboardSupportEnabled(Activity activity, boolean enable) {
if (getContent() != null && getContent().getChildCount() > 0) {
if (mKeyboardUtil == null) {
mKeyboardUtil = new KeyboardUtil(activity, getContent().getChildAt(0));
mKeyboardUtil.disable();
}
if (enable) {
mKeyboardUtil.enable();
} else {
mKeyboardUtil.disable();
}
}
}
/**
* get the slider layout of the current drawer.
* This is the layout containing the ListView
*
* @return
*/
public RelativeLayout getSlider() {
return mDrawerBuilder.mSliderLayout;
}
/**
* get the container frameLayout of the current drawer
*
* @return
*/
public FrameLayout getContent() {
if (mContentView == null && this.mDrawerBuilder.mDrawerLayout != null) {
mContentView = (FrameLayout) this.mDrawerBuilder.mDrawerLayout.findViewById(R.id.content_layout);
}
return mContentView;
}
/**
* get the listView of the current drawer
*
* @return
*/
public ListView getListView() {
return mDrawerBuilder.mListView;
}
/**
* get the BaseDrawerAdapter of the current drawer
*
* @return
*/
public BaseDrawerAdapter getAdapter() {
return mDrawerBuilder.mAdapter;
}
/**
* get all drawerItems of the current drawer
*
* @return
*/
public ArrayList<IDrawerItem> getDrawerItems() {
return mDrawerBuilder.mDrawerItems;
}
/**
* get the Header View if set else NULL
*
* @return
*/
public View getHeader() {
return mDrawerBuilder.mHeaderView;
}
/**
* get the StickyHeader View if set else NULL
*
* @return
*/
public View getStickyHeader() {
return mDrawerBuilder.mStickyHeaderView;
}
/**
* method to replace a previous set header
*
* @param view
*/
public void setHeader(View view) {
if (getListView() != null) {
BaseDrawerAdapter adapter = getAdapter();
getListView().setAdapter(null);
if (getHeader() != null) {
getListView().removeHeaderView(getHeader());
}
getListView().addHeaderView(view);
getListView().setAdapter(adapter);
mDrawerBuilder.mHeaderView = view;
mDrawerBuilder.mHeaderOffset = 1;
}
}
/**
* method to remove the header of the list
*/
public void removeHeader() {
if (getListView() != null && getHeader() != null) {
getListView().removeHeaderView(getHeader());
mDrawerBuilder.mHeaderView = null;
mDrawerBuilder.mHeaderOffset = 0;
}
}
/**
* get the Footer View if set else NULL
*
* @return
*/
public View getFooter() {
return mDrawerBuilder.mFooterView;
}
/**
* get the StickyFooter View if set else NULL
*
* @return
*/
public View getStickyFooter() {
return mDrawerBuilder.mStickyFooterView;
}
/**
* get the ActionBarDrawerToggle
*
* @return
*/
public ActionBarDrawerToggle getActionBarDrawerToggle() {
return mDrawerBuilder.mActionBarDrawerToggle;
}
/**
* calculates the position of an drawerItem. searching by it's identifier
*
* @param drawerItem
* @return
*/
public int getPositionFromIdentifier(IDrawerItem drawerItem) {
return getPositionFromIdentifier(drawerItem.getIdentifier());
}
/**
* calculates the position of an drawerItem. searching by it's identifier
*
* @param identifier
* @return
*/
public int getPositionFromIdentifier(int identifier) {
return DrawerUtils.getPositionFromIdentifier(mDrawerBuilder, identifier);
}
/**
* calculates the position of an drawerItem. searching by it's identifier
*
* @param drawerItem
* @return
*/
public int getFooterPositionFromIdentifier(IDrawerItem drawerItem) {
return getFooterPositionFromIdentifier(drawerItem.getIdentifier());
}
/**
* calculates the position of an drawerItem inside the footer. searching by it's identfier
*
* @param identifier
* @return
*/
public int getFooterPositionFromIdentifier(int identifier) {
return DrawerUtils.getFooterPositionFromIdentifier(mDrawerBuilder, identifier);
}
/**
* get the current selection
*
* @return
*/
public int getCurrentSelection() {
return mDrawerBuilder.mCurrentSelection;
}
/**
* get the current footer selection
*
* @return
*/
public int getCurrentFooterSelection() {
return mDrawerBuilder.mCurrentFooterSelection;
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view!
*
* @param identifier
*/
public boolean setSelectionByIdentifier(int identifier) {
return setSelection(getPositionFromIdentifier(identifier), true);
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param identifier
* @param fireOnClick
*/
public boolean setSelectionByIdentifier(int identifier, boolean fireOnClick) {
return setSelection(getPositionFromIdentifier(identifier), fireOnClick);
}
/**
* set the current selection in the footer of the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param identifier
* @param fireOnClick
*/
public void setFooterSelectionByIdentifier(int identifier, boolean fireOnClick) {
setFooterSelection(getPositionFromIdentifier(identifier), fireOnClick);
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view!
*
* @param drawerItem
*/
public boolean setSelection(IDrawerItem drawerItem) {
return setSelection(getPositionFromIdentifier(drawerItem), true);
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param drawerItem
* @param fireOnClick
*/
public boolean setSelection(IDrawerItem drawerItem, boolean fireOnClick) {
return setSelection(getPositionFromIdentifier(drawerItem), fireOnClick);
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view!
*
* @param position the position to select
*/
public boolean setSelection(int position) {
return setSelection(position, true);
}
/*
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param position
* @param fireOnClick
* @return true if the event was consumed
*/
public boolean setSelection(int position, boolean fireOnClick) {
if (mDrawerBuilder.mListView != null) {
return DrawerUtils.setListSelection(mDrawerBuilder, position, fireOnClick, mDrawerBuilder.getDrawerItem(position, false));
}
return false;
}
/**
* set the current selection in the footer of the drawer
* NOTE: This will trigger onDrawerItemSelected without a view!
*
* @param position the position to select
*/
public void setFooterSelection(int position) {
setFooterSelection(position, true);
}
/**
* set the current selection in the footer of the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param position
* @param fireOnClick
*/
public void setFooterSelection(int position, boolean fireOnClick) {
DrawerUtils.setFooterSelection(mDrawerBuilder, position, fireOnClick);
}
/**
* update a specific drawer item :D
* automatically identified by its id
*
* @param drawerItem
*/
public void updateItem(IDrawerItem drawerItem) {
updateItem(drawerItem, getPositionFromIdentifier(drawerItem));
}
/**
* Update a drawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void updateItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Add a drawerItem at the end
*
* @param drawerItem
*/
public void addItem(IDrawerItem drawerItem) {
if (mDrawerBuilder.mDrawerItems != null) {
mDrawerBuilder.mDrawerItems.add(drawerItem);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Add a drawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void addItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mDrawerItems != null) {
mDrawerBuilder.mDrawerItems.add(position, drawerItem);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Set a drawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void setItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mDrawerItems != null) {
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Remove a drawerItem at a specific position
*
* @param position
*/
public void removeItem(int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
mDrawerBuilder.mDrawerItems.remove(position);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Removes all items from drawer
*/
public void removeAllItems() {
mDrawerBuilder.mDrawerItems.clear();
mDrawerBuilder.mAdapter.dataUpdated();
}
/**
* add new Items to the current DrawerItem List
*
* @param drawerItems
*/
public void addItems(IDrawerItem... drawerItems) {
if (mDrawerBuilder.mDrawerItems != null) {
Collections.addAll(mDrawerBuilder.mDrawerItems, drawerItems);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Replace the current DrawerItems with a new ArrayList of items
*
* @param drawerItems
*/
public void setItems(ArrayList<IDrawerItem> drawerItems) {
setItems(drawerItems, false);
}
/**
* replace the current DrawerItems with the new ArrayList.
*
* @param drawerItems
* @param switchedItems
*/
private void setItems(ArrayList<IDrawerItem> drawerItems, boolean switchedItems) {
mDrawerBuilder.mDrawerItems = drawerItems;
//if we are currently at a switched list set the new reference
if (originalDrawerItems != null && !switchedItems) {
originalDrawerItems = drawerItems;
} else {
mDrawerBuilder.mAdapter.setDrawerItems(mDrawerBuilder.mDrawerItems);
}
mDrawerBuilder.mAdapter.dataUpdated();
}
/**
* Update the name of a drawer item if its an instance of nameable
*
* @param nameRes
* @param position
*/
public void updateName(int nameRes, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Nameable) {
((Nameable) drawerItem).setName(null);
((Nameable) drawerItem).setNameRes(nameRes);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the name of a drawer item if its an instance of nameable
*
* @param name
* @param position
*/
public void updateName(String name, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Nameable) {
((Nameable) drawerItem).setNameRes(-1);
((Nameable) drawerItem).setName(name);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the badge of a drawer item if its an instance of badgeable
*
* @param badge
* @param position
*/
public void updateBadge(String badge, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Badgeable) {
((Badgeable) drawerItem).setBadge(badge);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the icon of a drawer item if its an instance of iconable
*
* @param icon
* @param position
*/
public void updateIcon(Drawable icon, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Iconable) {
((Iconable) drawerItem).setIcon(icon);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the icon of a drawer item from an iconRes
*
* @param iconRes
* @param position
*/
public void updateIcon(int iconRes, int position) {
if (mDrawerBuilder.mRootView != null && mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Iconable) {
((Iconable) drawerItem).setIcon(UIUtils.getCompatDrawable(mDrawerBuilder.mRootView.getContext(), iconRes));
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the icon of a drawer item if its an instance of iconable
*
* @param icon
* @param position
*/
public void updateIcon(IIcon icon, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Iconable) {
((Iconable) drawerItem).setIIcon(icon);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* update a specific footerDrawerItem :D
* automatically identified by it's id
*
* @param drawerItem
*/
public void updateFooterItem(IDrawerItem drawerItem) {
updateFooterItem(drawerItem, getFooterPositionFromIdentifier(drawerItem));
}
/**
* update a footerDrawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void updateFooterItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mStickyDrawerItems != null && mDrawerBuilder.mStickyDrawerItems.size() > position) {
mDrawerBuilder.mStickyDrawerItems.set(position, drawerItem);
}
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Add a footerDrawerItem at the end
*
* @param drawerItem
*/
public void addFooterItem(IDrawerItem drawerItem) {
if (mDrawerBuilder.mStickyDrawerItems == null) {
mDrawerBuilder.mStickyDrawerItems = new ArrayList<>();
}
mDrawerBuilder.mStickyDrawerItems.add(drawerItem);
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Add a footerDrawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void addFooterItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mStickyDrawerItems == null) {
mDrawerBuilder.mStickyDrawerItems = new ArrayList<>();
}
mDrawerBuilder.mStickyDrawerItems.add(position, drawerItem);
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Set a footerDrawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void setFooterItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mStickyDrawerItems != null && mDrawerBuilder.mStickyDrawerItems.size() > position) {
mDrawerBuilder.mStickyDrawerItems.set(position, drawerItem);
}
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Remove a footerDrawerItem at a specific position
*
* @param position
*/
public void removeFooterItem(int position) {
if (mDrawerBuilder.mStickyDrawerItems != null && mDrawerBuilder.mStickyDrawerItems.size() > position) {
mDrawerBuilder.mStickyDrawerItems.remove(position);
}
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Removes all footerItems from drawer
*/
public void removeAllFooterItems() {
if (mDrawerBuilder.mStickyDrawerItems != null) {
mDrawerBuilder.mStickyDrawerItems.clear();
}
if (mDrawerBuilder.mStickyFooterView != null) {
mDrawerBuilder.mStickyFooterView.setVisibility(View.GONE);
}
}
/**
* setter for the OnDrawerItemClickListener
*
* @param onDrawerItemClickListener
*/
public void setOnDrawerItemClickListener(OnDrawerItemClickListener onDrawerItemClickListener) {
mDrawerBuilder.mOnDrawerItemClickListener = onDrawerItemClickListener;
}
/**
* method to get the OnDrawerItemClickListener
*
* @return
*/
public OnDrawerItemClickListener getOnDrawerItemClickListener() {
return mDrawerBuilder.mOnDrawerItemClickListener;
}
/**
* setter for the OnDrawerItemLongClickListener
*
* @param onDrawerItemLongClickListener
*/
public void setOnDrawerItemLongClickListener(OnDrawerItemLongClickListener onDrawerItemLongClickListener) {
mDrawerBuilder.mOnDrawerItemLongClickListener = onDrawerItemLongClickListener;
}
/**
* method to get the OnDrawerItemLongClickListener
*
* @return
*/
public OnDrawerItemLongClickListener getOnDrawerItemLongClickListener() {
return mDrawerBuilder.mOnDrawerItemLongClickListener;
}
//variables to store and remember the original list of the drawer
private Drawer.OnDrawerItemClickListener originalOnDrawerItemClickListener;
private ArrayList<IDrawerItem> originalDrawerItems;
private int originalDrawerSelection = -1;
public boolean switchedDrawerContent() {
return !(originalOnDrawerItemClickListener == null && originalDrawerItems == null && originalDrawerSelection == -1);
}
/**
* method to switch the drawer content to new elements
*
* @param onDrawerItemClickListener
* @param drawerItems
* @param drawerSelection
*/
public void switchDrawerContent(OnDrawerItemClickListener onDrawerItemClickListener, ArrayList<IDrawerItem> drawerItems, int drawerSelection) {
//just allow a single switched drawer
if (!switchedDrawerContent()) {
//save out previous values
originalOnDrawerItemClickListener = getOnDrawerItemClickListener();
originalDrawerItems = getDrawerItems();
originalDrawerSelection = getCurrentSelection();
//set the new items
setOnDrawerItemClickListener(onDrawerItemClickListener);
setItems(drawerItems, true);
setSelection(drawerSelection, false);
mDrawerBuilder.mAdapter.resetAnimation();
if (getStickyFooter() != null) {
getStickyFooter().setVisibility(View.GONE);
}
}
}
/**
* helper method to reset to the original drawerContent
*/
public void resetDrawerContent() {
if (switchedDrawerContent()) {
//set the new items
setOnDrawerItemClickListener(originalOnDrawerItemClickListener);
setItems(originalDrawerItems, true);
setSelection(originalDrawerSelection, false);
//remove the references
originalOnDrawerItemClickListener = null;
originalDrawerItems = null;
originalDrawerSelection = -1;
mDrawerBuilder.mAdapter.resetAnimation();
if (getStickyFooter() != null) {
getStickyFooter().setVisibility(View.VISIBLE);
}
}
}
/**
* add the values to the bundle for saveInstanceState
*
* @param savedInstanceState
* @return
*/
public Bundle saveInstanceState(Bundle savedInstanceState) {
if (savedInstanceState != null) {
savedInstanceState.putInt(BUNDLE_SELECTION, mDrawerBuilder.mCurrentSelection);
savedInstanceState.putInt(BUNDLE_FOOTER_SELECTION, mDrawerBuilder.mCurrentFooterSelection);
}
return savedInstanceState;
}
public interface OnDrawerNavigationListener {
/**
* @param clickedView
* @return true if the event was consumed
*/
boolean onNavigationClickListener(View clickedView);
}
public interface OnDrawerItemClickListener {
/**
* @param parent
* @param view
* @param position
* @param id
* @param drawerItem
* @return true if the event was consumed
*/
boolean onItemClick(AdapterView<?> parent, View view, int position, long id, IDrawerItem drawerItem);
}
public interface OnDrawerItemLongClickListener {
/**
* @param parent
* @param view
* @param position
* @param id
* @param drawerItem
* @return true if the event was consumed
*/
boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id, IDrawerItem drawerItem);
}
public interface OnDrawerListener {
/**
* @param drawerView
*/
void onDrawerOpened(View drawerView);
/**
* @param drawerView
*/
void onDrawerClosed(View drawerView);
/**
* @param drawerView
* @param slideOffset
*/
void onDrawerSlide(View drawerView, float slideOffset);
}
public interface OnDrawerItemSelectedListener {
/**
* @param parent
* @param view
* @param position
* @param id
* @param drawerItem
*/
void onItemSelected(AdapterView<?> parent, View view, int position, long id, IDrawerItem drawerItem);
/**
* @param parent
*/
void onNothingSelected(AdapterView<?> parent);
}
}
| library/src/main/java/com/mikepenz/materialdrawer/Drawer.java | package com.mikepenz.materialdrawer;
import android.app.Activity;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.FrameLayout;
import android.widget.ListView;
import android.widget.RelativeLayout;
import com.mikepenz.iconics.typeface.IIcon;
import com.mikepenz.materialdrawer.adapter.BaseDrawerAdapter;
import com.mikepenz.materialdrawer.model.interfaces.Badgeable;
import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem;
import com.mikepenz.materialdrawer.model.interfaces.Iconable;
import com.mikepenz.materialdrawer.model.interfaces.Nameable;
import com.mikepenz.materialdrawer.util.KeyboardUtil;
import com.mikepenz.materialdrawer.util.UIUtils;
import com.mikepenz.materialdrawer.view.ScrimInsetsFrameLayout;
import java.util.ArrayList;
import java.util.Collections;
/**
* Created by mikepenz on 03.02.15.
*/
public class Drawer {
/**
* BUNDLE param to store the selection
*/
protected static final String BUNDLE_SELECTION = "bundle_selection";
protected static final String BUNDLE_FOOTER_SELECTION = "bundle_footer_selection";
/**
* Per the design guidelines, you should show the drawer on launch until the user manually
* expands it. This shared preference tracks this.
*/
protected static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";
private final DrawerBuilder mDrawerBuilder;
private FrameLayout mContentView;
private KeyboardUtil mKeyboardUtil = null;
/**
* the protected Constructor for the result
*
* @param drawerBuilder
*/
protected Drawer(DrawerBuilder drawerBuilder) {
this.mDrawerBuilder = drawerBuilder;
}
/**
* Get the DrawerLayout of the current drawer
*
* @return
*/
public DrawerLayout getDrawerLayout() {
return this.mDrawerBuilder.mDrawerLayout;
}
/**
* Open the drawer
*/
public void openDrawer() {
if (mDrawerBuilder.mDrawerLayout != null && mDrawerBuilder.mSliderLayout != null) {
if (mDrawerBuilder.mDrawerGravity != null) {
mDrawerBuilder.mDrawerLayout.openDrawer(mDrawerBuilder.mDrawerGravity);
} else {
mDrawerBuilder.mDrawerLayout.openDrawer(mDrawerBuilder.mSliderLayout);
}
}
}
/**
* close the drawer
*/
public void closeDrawer() {
if (mDrawerBuilder.mDrawerLayout != null) {
if (mDrawerBuilder.mDrawerGravity != null) {
mDrawerBuilder.mDrawerLayout.closeDrawer(mDrawerBuilder.mDrawerGravity);
} else {
mDrawerBuilder.mDrawerLayout.closeDrawer(mDrawerBuilder.mSliderLayout);
}
}
}
/**
* Get the current state of the drawer.
* True if the drawer is currently open.
*
* @return
*/
public boolean isDrawerOpen() {
if (mDrawerBuilder.mDrawerLayout != null && mDrawerBuilder.mSliderLayout != null) {
return mDrawerBuilder.mDrawerLayout.isDrawerOpen(mDrawerBuilder.mSliderLayout);
}
return false;
}
/**
* set the insetsFrameLayout to display the content in fullscreen
* under the statusBar and navigationBar
*
* @param fullscreen
*/
public void setFullscreen(boolean fullscreen) {
if (mDrawerBuilder.mDrawerContentRoot != null) {
mDrawerBuilder.mDrawerContentRoot.setEnabled(!fullscreen);
}
}
/**
* Set the color for the statusBar
*
* @param statusBarColor
*/
public void setStatusBarColor(int statusBarColor) {
if (mDrawerBuilder.mDrawerContentRoot != null) {
mDrawerBuilder.mDrawerContentRoot.setInsetForeground(statusBarColor);
mDrawerBuilder.mDrawerContentRoot.invalidate();
}
}
/**
* get the drawerContentRoot Layout (ScrimInsetsFrameLayout)
*
* @return
*/
public ScrimInsetsFrameLayout getScrimInsetsFrameLayout() {
return mDrawerBuilder.mDrawerContentRoot;
}
/**
* a helper method to enable the keyboardUtil for a specific activity
* or disable it. note this will cause some frame drops because of the
* listener.
*
* @param activity
* @param enable
*/
public void keyboardSupportEnabled(Activity activity, boolean enable) {
if (getContent() != null && getContent().getChildCount() > 0) {
if (mKeyboardUtil == null) {
mKeyboardUtil = new KeyboardUtil(activity, getContent().getChildAt(0));
mKeyboardUtil.disable();
}
if (enable) {
mKeyboardUtil.enable();
} else {
mKeyboardUtil.disable();
}
}
}
/**
* get the slider layout of the current drawer.
* This is the layout containing the ListView
*
* @return
*/
public RelativeLayout getSlider() {
return mDrawerBuilder.mSliderLayout;
}
/**
* get the container frameLayout of the current drawer
*
* @return
*/
public FrameLayout getContent() {
if (mContentView == null && this.mDrawerBuilder.mDrawerLayout != null) {
mContentView = (FrameLayout) this.mDrawerBuilder.mDrawerLayout.findViewById(R.id.content_layout);
}
return mContentView;
}
/**
* get the listView of the current drawer
*
* @return
*/
public ListView getListView() {
return mDrawerBuilder.mListView;
}
/**
* get the BaseDrawerAdapter of the current drawer
*
* @return
*/
public BaseDrawerAdapter getAdapter() {
return mDrawerBuilder.mAdapter;
}
/**
* get all drawerItems of the current drawer
*
* @return
*/
public ArrayList<IDrawerItem> getDrawerItems() {
return mDrawerBuilder.mDrawerItems;
}
/**
* get the Header View if set else NULL
*
* @return
*/
public View getHeader() {
return mDrawerBuilder.mHeaderView;
}
/**
* get the StickyHeader View if set else NULL
*
* @return
*/
public View getStickyHeader() {
return mDrawerBuilder.mStickyHeaderView;
}
/**
* method to replace a previous set header
*
* @param view
*/
public void setHeader(View view) {
if (getListView() != null) {
BaseDrawerAdapter adapter = getAdapter();
getListView().setAdapter(null);
if (getHeader() != null) {
getListView().removeHeaderView(getHeader());
}
getListView().addHeaderView(view);
getListView().setAdapter(adapter);
mDrawerBuilder.mHeaderView = view;
mDrawerBuilder.mHeaderOffset = 1;
}
}
/**
* method to remove the header of the list
*/
public void removeHeader() {
if (getListView() != null && getHeader() != null) {
getListView().removeHeaderView(getHeader());
mDrawerBuilder.mHeaderView = null;
mDrawerBuilder.mHeaderOffset = 0;
}
}
/**
* get the Footer View if set else NULL
*
* @return
*/
public View getFooter() {
return mDrawerBuilder.mFooterView;
}
/**
* get the StickyFooter View if set else NULL
*
* @return
*/
public View getStickyFooter() {
return mDrawerBuilder.mStickyFooterView;
}
/**
* get the ActionBarDrawerToggle
*
* @return
*/
public ActionBarDrawerToggle getActionBarDrawerToggle() {
return mDrawerBuilder.mActionBarDrawerToggle;
}
/**
* calculates the position of an drawerItem. searching by it's identifier
*
* @param drawerItem
* @return
*/
public int getPositionFromIdentifier(IDrawerItem drawerItem) {
return getPositionFromIdentifier(drawerItem.getIdentifier());
}
/**
* calculates the position of an drawerItem. searching by it's identifier
*
* @param identifier
* @return
*/
public int getPositionFromIdentifier(int identifier) {
return DrawerUtils.getPositionFromIdentifier(mDrawerBuilder, identifier);
}
/**
* calculates the position of an drawerItem. searching by it's identifier
*
* @param drawerItem
* @return
*/
public int getFooterPositionFromIdentifier(IDrawerItem drawerItem) {
return getFooterPositionFromIdentifier(drawerItem.getIdentifier());
}
/**
* calculates the position of an drawerItem inside the footer. searching by it's identfier
*
* @param identifier
* @return
*/
public int getFooterPositionFromIdentifier(int identifier) {
return DrawerUtils.getFooterPositionFromIdentifier(mDrawerBuilder, identifier);
}
/**
* get the current selection
*
* @return
*/
public int getCurrentSelection() {
return mDrawerBuilder.mCurrentSelection;
}
/**
* get the current footer selection
*
* @return
*/
public int getCurrentFooterSelection() {
return mDrawerBuilder.mCurrentFooterSelection;
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view!
*
* @param identifier
*/
public boolean setSelectionByIdentifier(int identifier) {
return setSelection(getPositionFromIdentifier(identifier), true);
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param identifier
* @param fireOnClick
*/
public boolean setSelectionByIdentifier(int identifier, boolean fireOnClick) {
return setSelection(getPositionFromIdentifier(identifier), fireOnClick);
}
/**
* set the current selection in the footer of the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param identifier
* @param fireOnClick
*/
public void setFooterSelectionByIdentifier(int identifier, boolean fireOnClick) {
setFooterSelection(getPositionFromIdentifier(identifier), fireOnClick);
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view!
*
* @param drawerItem
*/
public boolean setSelection(IDrawerItem drawerItem) {
return setSelection(getPositionFromIdentifier(drawerItem), true);
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param drawerItem
* @param fireOnClick
*/
public boolean setSelection(IDrawerItem drawerItem, boolean fireOnClick) {
return setSelection(getPositionFromIdentifier(drawerItem), fireOnClick);
}
/**
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view!
*
* @param position the position to select
*/
public boolean setSelection(int position) {
return setSelection(position, true);
}
/*
* set the current selection in the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param position
* @param fireOnClick
* @return true if the event was consumed
*/
public boolean setSelection(int position, boolean fireOnClick) {
if (mDrawerBuilder.mListView != null) {
return DrawerUtils.setListSelection(mDrawerBuilder, position, fireOnClick, mDrawerBuilder.getDrawerItem(position, false));
}
return false;
}
/**
* set the current selection in the footer of the drawer
* NOTE: This will trigger onDrawerItemSelected without a view!
*
* @param position the position to select
*/
public void setFooterSelection(int position) {
setFooterSelection(position, true);
}
/**
* set the current selection in the footer of the drawer
* NOTE: This will trigger onDrawerItemSelected without a view if you pass fireOnClick = true;
*
* @param position
* @param fireOnClick
*/
public void setFooterSelection(int position, boolean fireOnClick) {
DrawerUtils.setFooterSelection(mDrawerBuilder, position, fireOnClick);
}
/**
* update a specific drawer item :D
* automatically identified by its id
*
* @param drawerItem
*/
public void updateItem(IDrawerItem drawerItem) {
updateItem(drawerItem, getPositionFromIdentifier(drawerItem));
}
/**
* Update a drawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void updateItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Add a drawerItem at the end
*
* @param drawerItem
*/
public void addItem(IDrawerItem drawerItem) {
if (mDrawerBuilder.mDrawerItems != null) {
mDrawerBuilder.mDrawerItems.add(drawerItem);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Add a drawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void addItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mDrawerItems != null) {
mDrawerBuilder.mDrawerItems.add(position, drawerItem);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Set a drawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void setItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mDrawerItems != null) {
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Remove a drawerItem at a specific position
*
* @param position
*/
public void removeItem(int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
mDrawerBuilder.mDrawerItems.remove(position);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Removes all items from drawer
*/
public void removeAllItems() {
mDrawerBuilder.mDrawerItems.clear();
mDrawerBuilder.mAdapter.dataUpdated();
}
/**
* add new Items to the current DrawerItem List
*
* @param drawerItems
*/
public void addItems(IDrawerItem... drawerItems) {
if (mDrawerBuilder.mDrawerItems != null) {
Collections.addAll(mDrawerBuilder.mDrawerItems, drawerItems);
mDrawerBuilder.mAdapter.dataUpdated();
}
}
/**
* Replace the current DrawerItems with a new ArrayList of items
*
* @param drawerItems
*/
public void setItems(ArrayList<IDrawerItem> drawerItems) {
setItems(drawerItems, false);
}
/**
* replace the current DrawerItems with the new ArrayList.
*
* @param drawerItems
* @param switchedItems
*/
private void setItems(ArrayList<IDrawerItem> drawerItems, boolean switchedItems) {
mDrawerBuilder.mDrawerItems = drawerItems;
//if we are currently at a switched list set the new reference
if (originalDrawerItems != null && !switchedItems) {
originalDrawerItems = drawerItems;
} else {
mDrawerBuilder.mAdapter.setDrawerItems(mDrawerBuilder.mDrawerItems);
}
mDrawerBuilder.mAdapter.dataUpdated();
}
/**
* Update the name of a drawer item if its an instance of nameable
*
* @param nameRes
* @param position
*/
public void updateName(int nameRes, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Nameable) {
((Nameable) drawerItem).setName(null);
((Nameable) drawerItem).setNameRes(nameRes);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the name of a drawer item if its an instance of nameable
*
* @param name
* @param position
*/
public void updateName(String name, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Nameable) {
((Nameable) drawerItem).setNameRes(-1);
((Nameable) drawerItem).setName(name);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the badge of a drawer item if its an instance of badgeable
*
* @param badge
* @param position
*/
public void updateBadge(String badge, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Badgeable) {
((Badgeable) drawerItem).setBadge(badge);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the icon of a drawer item if its an instance of iconable
*
* @param icon
* @param position
*/
public void updateIcon(Drawable icon, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Iconable) {
((Iconable) drawerItem).setIcon(icon);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the icon of a drawer item from an iconRes
*
* @param iconRes
* @param position
*/
public void updateIcon(int iconRes, int position) {
if (mDrawerBuilder.mRootView != null && mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Iconable) {
((Iconable) drawerItem).setIcon(UIUtils.getCompatDrawable(mDrawerBuilder.mRootView.getContext(), iconRes));
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* Update the icon of a drawer item if its an instance of iconable
*
* @param icon
* @param position
*/
public void updateIcon(IIcon icon, int position) {
if (mDrawerBuilder.checkDrawerItem(position, false)) {
IDrawerItem drawerItem = mDrawerBuilder.mDrawerItems.get(position);
if (drawerItem instanceof Iconable) {
((Iconable) drawerItem).setIIcon(icon);
}
mDrawerBuilder.mDrawerItems.set(position, drawerItem);
mDrawerBuilder.mAdapter.notifyDataSetChanged();
}
}
/**
* update a specific footerDrawerItem :D
* automatically identified by it's id
*
* @param drawerItem
*/
public void updateFooterItem(IDrawerItem drawerItem) {
updateFooterItem(drawerItem, getFooterPositionFromIdentifier(drawerItem));
}
/**
* update a footerDrawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void updateFooterItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mStickyDrawerItems != null && mDrawerBuilder.mStickyDrawerItems.size() > position) {
mDrawerBuilder.mStickyDrawerItems.set(position, drawerItem);
}
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Add a footerDrawerItem at the end
*
* @param drawerItem
*/
public void addFooterItem(IDrawerItem drawerItem) {
if (mDrawerBuilder.mStickyDrawerItems == null) {
mDrawerBuilder.mStickyDrawerItems = new ArrayList<>();
}
mDrawerBuilder.mStickyDrawerItems.add(drawerItem);
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Add a footerDrawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void addFooterItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mStickyDrawerItems == null) {
mDrawerBuilder.mStickyDrawerItems = new ArrayList<>();
}
mDrawerBuilder.mStickyDrawerItems.add(position, drawerItem);
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Set a footerDrawerItem at a specific position
*
* @param drawerItem
* @param position
*/
public void setFooterItem(IDrawerItem drawerItem, int position) {
if (mDrawerBuilder.mStickyDrawerItems != null && mDrawerBuilder.mStickyDrawerItems.size() > position) {
mDrawerBuilder.mStickyDrawerItems.set(position, drawerItem);
}
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Remove a footerDrawerItem at a specific position
*
* @param position
*/
public void removeFooterItem(int position) {
if (mDrawerBuilder.mStickyDrawerItems != null && mDrawerBuilder.mStickyDrawerItems.size() > position) {
mDrawerBuilder.mStickyDrawerItems.remove(position);
}
DrawerUtils.rebuildFooterView(mDrawerBuilder);
}
/**
* Removes all footerItems from drawer
*/
public void removeAllFooterItems() {
if (mDrawerBuilder.mStickyDrawerItems != null) {
mDrawerBuilder.mStickyDrawerItems.clear();
}
if (mDrawerBuilder.mStickyFooterView != null) {
mDrawerBuilder.mStickyFooterView.setVisibility(View.GONE);
}
}
/**
* setter for the OnDrawerItemClickListener
*
* @param onDrawerItemClickListener
*/
public void setOnDrawerItemClickListener(OnDrawerItemClickListener onDrawerItemClickListener) {
mDrawerBuilder.mOnDrawerItemClickListener = onDrawerItemClickListener;
}
/**
* method to get the OnDrawerItemClickListener
*
* @return
*/
public OnDrawerItemClickListener getOnDrawerItemClickListener() {
return mDrawerBuilder.mOnDrawerItemClickListener;
}
/**
* setter for the OnDrawerItemLongClickListener
*
* @param onDrawerItemLongClickListener
*/
public void setOnDrawerItemLongClickListener(OnDrawerItemLongClickListener onDrawerItemLongClickListener) {
mDrawerBuilder.mOnDrawerItemLongClickListener = onDrawerItemLongClickListener;
}
/**
* method to get the OnDrawerItemLongClickListener
*
* @return
*/
public OnDrawerItemLongClickListener getOnDrawerItemLongClickListener() {
return mDrawerBuilder.mOnDrawerItemLongClickListener;
}
//variables to store and remember the original list of the drawer
private Drawer.OnDrawerItemClickListener originalOnDrawerItemClickListener;
private ArrayList<IDrawerItem> originalDrawerItems;
private int originalDrawerSelection = -1;
public boolean switchedDrawerContent() {
return !(originalOnDrawerItemClickListener == null && originalDrawerItems == null && originalDrawerSelection == -1);
}
/**
* method to switch the drawer content to new elements
*
* @param onDrawerItemClickListener
* @param drawerItems
* @param drawerSelection
*/
public void switchDrawerContent(OnDrawerItemClickListener onDrawerItemClickListener, ArrayList<IDrawerItem> drawerItems, int drawerSelection) {
//just allow a single switched drawer
if (!switchedDrawerContent()) {
//save out previous values
originalOnDrawerItemClickListener = getOnDrawerItemClickListener();
originalDrawerItems = getDrawerItems();
originalDrawerSelection = getCurrentSelection();
//set the new items
setOnDrawerItemClickListener(onDrawerItemClickListener);
setItems(drawerItems, true);
setSelection(drawerSelection, false);
mDrawerBuilder.mAdapter.resetAnimation();
if (getStickyFooter() != null) {
getStickyFooter().setVisibility(View.GONE);
}
}
}
/**
* helper method to reset to the original drawerContent
*/
public void resetDrawerContent() {
if (switchedDrawerContent()) {
//set the new items
setOnDrawerItemClickListener(originalOnDrawerItemClickListener);
setItems(originalDrawerItems, true);
setSelection(originalDrawerSelection, false);
//remove the references
originalOnDrawerItemClickListener = null;
originalDrawerItems = null;
originalDrawerSelection = -1;
mDrawerBuilder.mAdapter.resetAnimation();
if (getStickyFooter() != null) {
getStickyFooter().setVisibility(View.VISIBLE);
}
}
}
/**
* add the values to the bundle for saveInstanceState
*
* @param savedInstanceState
* @return
*/
public Bundle saveInstanceState(Bundle savedInstanceState) {
if (savedInstanceState != null) {
savedInstanceState.putInt(BUNDLE_SELECTION, mDrawerBuilder.mCurrentSelection);
savedInstanceState.putInt(BUNDLE_FOOTER_SELECTION, mDrawerBuilder.mCurrentFooterSelection);
}
return savedInstanceState;
}
public interface OnDrawerNavigationListener {
/**
* @param clickedView
* @return true if the event was consumed
*/
boolean onNavigationClickListener(View clickedView);
}
public interface OnDrawerItemClickListener {
/**
* @param parent
* @param view
* @param position
* @param id
* @param drawerItem
* @return true if the event was consumed
*/
boolean onItemClick(AdapterView<?> parent, View view, int position, long id, IDrawerItem drawerItem);
}
public interface OnDrawerItemLongClickListener {
/**
* @param parent
* @param view
* @param position
* @param id
* @param drawerItem
* @return true if the event was consumed
*/
boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id, IDrawerItem drawerItem);
}
public interface OnDrawerListener {
/**
* @param drawerView
*/
void onDrawerOpened(View drawerView);
/**
* @param drawerView
*/
void onDrawerClosed(View drawerView);
/**
* @param drawerView
* @param slideOffset
*/
void onDrawerSlide(View drawerView, float slideOffset);
}
public interface OnDrawerItemSelectedListener {
/**
* @param parent
* @param view
* @param position
* @param id
* @param drawerItem
*/
void onItemSelected(AdapterView<?> parent, View view, int position, long id, IDrawerItem drawerItem);
/**
* @param parent
*/
void onNothingSelected(AdapterView<?> parent);
}
}
| * switch openDrawer, closeDrawer, isDrawerOpen to use the gravity
| library/src/main/java/com/mikepenz/materialdrawer/Drawer.java | * switch openDrawer, closeDrawer, isDrawerOpen to use the gravity |
|
Java | apache-2.0 | ad576125875a793aa4ed9e6b97ce9e3afb93c49f | 0 | safarijv/ifpress-solr-plugin | package com.ifactory.press.db.solr.spelling.suggest;
import java.io.Closeable;
import java.io.IOException;
import java.text.BreakIterator;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.solr.spelling.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.spell.HighFrequencyDictionary;
import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.SolrCore;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.spelling.SpellingOptions;
import org.apache.solr.spelling.SpellingResult;
import org.apache.solr.spelling.suggest.Suggester;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimaps;
/**
* <h3>A suggester that draws suggestions from terms in multiple fields.</h3>
*
* <p>
* Contributions from each field are weighted by a per-field <b>weight</b>, and
* zero-weighted based on a global minimum <b>threshold</b> term frequency, a per-field
* minimum threshold (<b>minfreq</b>) and a per-field maximum (<b>maxfreq</b>) threshold.
* All thresholds are compared against (term frequency / document count), an estimate of
* the fraction of documents containing the term. Thus setting maximum=0.5 will filter out
* terms occurring in as many or more than half the documents.
* </p>
*
* <p>
* In the normal mode of operation, the given <b>field</b>'s analyzer is used to tokenize the
* stored field values; each resulting token becomes a suggestion.
* </p>
*
* <p>
* An alternate mode of operation provides for unanalyzed stored field values to
* be used as suggestions. This mode is selected by specifying
* <b>analyzerFieldType</b>=string in the suggester configuration. In this mode, every
* suggestion is given the field's constant <b>weight</b>: term frequency is not considered
* as part of the weight, and no filtering is applied based on frequency.
* </p>
*
* <p>
* If <b>filterDuplicates</b> is set to true for a field, then each suggestion generated by
* the field is looked up in the suggester: if it exists already, it is not added again. This
* slows down indexing considerably so it should only be turned on when actually needed.
* </p>
*
* <p>
* The following sample configuration illustrates a setup where suggestions are
* drawn from a title field and a full text field, with different weights and
* thresholds.
* </p>
*
* <pre>
* {@code
* <!-- Suggester -->
* <searchComponent name="suggest-component" class="solr.SpellCheckComponent">
*
* <!-- Multiple "Spell Checkers" can be declared and used by this
* component
* -->
*
* <!-- a spellchecker built from a field of the main index -->
* <lst name="spellchecker">
* <str name="name">suggest-infix-all</str>
* <str name="classname">org.apache.solr.spelling.suggest.MultiSuggester</str>
* <str name="lookupImpl">org.apache.solr.spelling.suggest.fst.AnalyzingInfixLookupFactory</str>
* <str name="suggestAnalyzerFieldType">text</str>
* <int name="maxSuggestionLength">80</int>
* <float name="threshold">0.0</float>
* <!-- true == performance-killer. MultiSuggester handles incremental updates automatically, so there's no need for this anyway. -->
* <str name="buildOnCommit">false</str>
* <lst name="fields">
* <lst name="field">
* <str name="name">fulltext_t</str>
* <float name="weight">1.0</float>
* <float name="minfreq">0.005</float>
* <float name="maxfreq">0.3</float>
* </lst>
* <lst name="field">
* <str name="name">title_ms</str>
* <float name="weight">10.0</float>
* </lst>
* <lst name="field">
* <!-- a field whose values are weighted by the value of another field in the same document -->
* <str name="name">weighted_field_ms</str>
* <str name="weight_field">weight_dv</str>
* <float name="weight">10.0</float>
* </lst>
* <lst name="field">
* <str name="name">title_t</str>
* <analyzerFieldType>string</analyzerFieldType>
* <float name="weight">10.0</float>
* </lst>
* <lst name="field">
* <str name="name">duplicate_title_t</str>
* <str name="analyzerFieldType">string</str>
* <float name="weight">3.0</float>
* <bool name="filterDuplicates">true</bool>
* </lst>
* </lst>
*
* </searchComponent>
* }
* </pre>
*
*
* NOTE: the incremental weighting scheme gives an artifical "advantage" to
* infrequent terms that happen to be indexed first because their weights are
* normalized when the number of documents is low. To avoid this, it's
* recommended to rebuild the index periodically. If the index is large and
* growing relatively slowly, this effect will be very small, though.
*/
@SuppressWarnings("rawtypes")
public class MultiSuggester extends Suggester {
// weights are stored internally as longs, but externally as small
// floating point numbers. The floating point weights are multiplied by
// this factor to convert them to longs with a sufficient
// range. WEIGHT_SCALE should be greater than the number of documents
private static final int WEIGHT_SCALE = 10000000;
private static final Logger LOG = LoggerFactory.getLogger(MultiSuggester.class);
private WeightedField[] fields;
private int maxSuggestionLength;
// use a synchronized Multimap - there may be one with the same name for each
// core
private static final ListMultimap<Object, Object> registry = Multimaps.synchronizedListMultimap(ArrayListMultimap.create());
private static final int DEFAULT_MAX_SUGGESTION_LENGTH = 80;
@Override
public String init(NamedList config, SolrCore coreParam) {
String myname = (String) config.get(DICTIONARY_NAME);
this.core = coreParam;
// Workaround for SOLR-6246 (lock exception on core reload): close
// any suggester registered with the same name.
if (registry.containsKey(myname)) {
MultiSuggester suggesterToClose = null;
for (Object o : registry.get(myname)) {
MultiSuggester suggester = (MultiSuggester) o;
if (suggester.core.getName().equals(coreParam.getName())) {
suggesterToClose = suggester;
break;
}
}
if (suggesterToClose != null) {
registry.remove(myname, suggesterToClose);
try {
suggesterToClose.close();
} catch (IOException e) {
LOG.error("An exception occurred while closing the spellchecker", e);
}
}
}
super.init(config, coreParam);
// effectively disable analysis *by the SpellChecker/Suggester component*
// because this leads
// to independent suggestions for each token; we want AIS to perform
// analysis and consider the tokens together
analyzer = new KeywordAnalyzer();
initWeights((NamedList) config.get("fields"), coreParam);
Integer maxLengthConfig = (Integer) config.get("maxSuggestionLength");
maxSuggestionLength = maxLengthConfig != null ? maxLengthConfig : DEFAULT_MAX_SUGGESTION_LENGTH;
registry.put(myname, this);
core.addCloseHook(new CloseHandler());
return myname;
}
private void initWeights(NamedList fieldConfigs, SolrCore coreParam) {
fields = new WeightedField[fieldConfigs.size()];
for (int ifield = 0; ifield < fieldConfigs.size(); ifield++) {
NamedList fieldConfig = (NamedList) fieldConfigs.getVal(ifield);
String fieldName = (String) fieldConfig.get("name");
Float weight = (Float) fieldConfig.get("weight");
if (weight == null) {
weight = 1.0f;
}
Float minFreq = (Float) fieldConfig.get("minfreq");
if (minFreq == null) {
minFreq = 0.0f;
}
Float maxFreq = (Float) fieldConfig.get("maxfreq");
if (maxFreq == null) {
maxFreq = 1.0f;
}
Boolean filterDuplicates = fieldConfig.getBooleanArg("filterDuplicates");
if (filterDuplicates == null) {
filterDuplicates = false;
}
String analyzerFieldTypeName = (String) fieldConfig.get("analyzerFieldType");
Analyzer fieldAnalyzer;
boolean useStoredField = analyzerFieldTypeName != null;
if (useStoredField) {
// useStoredField - when re-building, we retrieve the stored field value
if ("string".equals(analyzerFieldTypeName)) {
fieldAnalyzer = null;
} else {
fieldAnalyzer = coreParam.getLatestSchema().getFieldTypeByName(analyzerFieldTypeName).getIndexAnalyzer();
}
} else {
// Use the existing term values as analyzed by the field
fieldAnalyzer = coreParam.getLatestSchema().getFieldType(fieldName).getIndexAnalyzer();
}
fields[ifield] = new WeightedField(fieldName, weight, minFreq, maxFreq, fieldAnalyzer, useStoredField, filterDuplicates);
}
Arrays.sort(fields);
}
@Override
public void build(SolrCore coreParam, SolrIndexSearcher searcher) throws IOException {
LOG.info("build suggestion index: " + name);
reader = searcher.getIndexReader();
SafariInfixSuggester ais = (SafariInfixSuggester) lookup;
ais.clear();
// index all the terms-based fields using dictionaries
for (WeightedField fld : fields) {
if (fld.useStoredField) {
buildFromStoredField(fld, searcher);
} else {
// TODO: refactor b/c we're not really using the MultiDictionary's multiple dictionary capability any more
dictionary = new MultiDictionary();
buildFromTerms(fld);
ais.add(dictionary);
ais.refresh();
}
}
LOG.info(String.format("%s suggestion index built: %d suggestions", name, ais.getCount()));
}
private void buildFromStoredField(WeightedField fld, SolrIndexSearcher searcher) throws IOException {
if (fld.fieldAnalyzer != null) {
throw new IllegalStateException("not supported: analyzing stored fields");
}
LOG.info(String.format("build suggestions from values for: %s (%d)", fld.fieldName, fld.weight));
Set<String> fieldsToLoad = new HashSet<String>();
fieldsToLoad.add(fld.fieldName);
int maxDoc = searcher.maxDoc();
for (int idoc = 0; idoc < maxDoc; ++idoc) {
// TODO: exclude deleted documents
Document doc = reader.document(idoc, fieldsToLoad);
String value = doc.get(fld.fieldName);
if (value != null) {
addRaw(fld, value);
}
if (idoc % 10000 == 9999) {
commit(searcher);
}
}
commit(searcher);
}
private void buildFromTerms(WeightedField fld) throws IOException {
HighFrequencyDictionary hfd = new HighFrequencyDictionary(reader, fld.fieldName, fld.minFreq);
int numDocs = reader.getDocCount(fld.fieldName);
int minFreq = (int) (fld.minFreq * numDocs);
int maxFreq = (int) (fld.maxFreq * numDocs);
LOG.info(String.format("build suggestions from terms for: %s (min=%d, max=%d, weight=%d)", fld.fieldName, minFreq, maxFreq, fld.weight));
((MultiDictionary) dictionary).addDictionary(hfd, minFreq, maxFreq, fld.weight / (2 + numDocs));
}
@Override
public void reload(SolrCore coreParam, SolrIndexSearcher searcher) throws IOException {
if (lookup instanceof AnalyzingInfixSuggester) {
// AnalyzingInfixSuggester maintains its own index and sees updates, so we
// don't need to
// build it every time the core starts or is reloaded
AnalyzingInfixSuggester ais = (AnalyzingInfixSuggester) lookup;
if (ais.getCount() > 0) {
LOG.info("load existing suggestion index");
return;
}
}
build(core, searcher);
}
/**
* Adds the field values from the document to the suggester
*
* suggestions for each field are managed using one of the following weighting
* and update strategies: - constant weight: all terms occurring in the field
* are weighted equally - frequency weight: terms have a weight that is the
* field's weight * the number of occurrences frequency-weighted suggestions
* can have their frequency calculated by: - the value of docFreq() on a
* source field - a frequency maintained in a docValues field - the current
* weight in the suggester index
*
* @param doc
* @param searcher
* @throws IOException
*/
public void add(SolrInputDocument doc, SolrIndexSearcher searcher) throws IOException {
if (!(lookup instanceof SafariInfixSuggester)) {
return;
}
for (WeightedField fld : fields) {
if (!doc.containsKey(fld.fieldName)) {
continue;
}
fld.pendingDocCount++;
for (Object value : doc.getFieldValues(fld.fieldName)) {
String strValue = value.toString();
if (fld.fieldAnalyzer == null) {
addRaw(fld, strValue);
} else {
addTokenized(fld, strValue);
}
}
}
}
/**
* Add the value to the suggester, so it will be available as a suggestion.
*
* @param ais
* the suggester
* @param weight
* the weight of the suggestion
* @param value
* the value to add
* @throws IOException
*/
private void addRaw(WeightedField fld, String value) throws IOException {
if (value.length() > maxSuggestionLength) {
// break the value into segments if it's too long
BreakIterator scanner = BreakIterator.getWordInstance();
scanner.setText(value);
int offset = 0;
while (offset < value.length() - maxSuggestionLength) {
int next = scanner.following(offset + maxSuggestionLength - 1);
incPending(fld, value.substring(offset, next));
offset = next;
}
// just drop any trailing goo
} else {
// add the value unchanged
incPending(fld, value);
}
// LOG.debug ("add raw " + value);
}
private void addTokenized(WeightedField fld, String value) throws IOException {
TokenStream tokens = fld.fieldAnalyzer.tokenStream(fld.fieldName, value);
tokens.reset();
CharTermAttribute termAtt = tokens.addAttribute(CharTermAttribute.class);
Set<String> once = new HashSet<String>();
try {
while (tokens.incrementToken()) {
String token = termAtt.toString();
token = MultiDictionary.stripAfflatus(token);
if (once.add(token)) {
// only add each token once per field value to keep frequencies in line with
// HighFrequencyDictionary, which counts using TermsEnum.docFreq()
incPending(fld, token);
// LOG.debug("add token " + token);
}
}
tokens.end();
} finally {
tokens.close();
}
}
private void incPending(WeightedField fld, String suggestion) {
ConcurrentHashMap<String, Integer> pending = fld.pending;
if (pending.containsKey(suggestion)) {
pending.put(suggestion, pending.get(suggestion) + 1);
} else {
pending.put(suggestion, 1);
}
}
public void commit(SolrIndexSearcher searcher) throws IOException {
if (!(lookup instanceof SafariInfixSuggester)) {
return;
}
boolean updated = false;
SafariInfixSuggester ais = (SafariInfixSuggester) lookup;
for (WeightedField fld : fields) {
// get the number of documents having this field
long docCount = searcher.getIndexReader().getDocCount(fld.fieldName) + fld.pendingDocCount;
fld.pendingDocCount = 0;
// swap in a new pending map so we can accept new suggestions while we
// commit
ConcurrentHashMap<String, Integer> batch = fld.pending;
fld.pending = new ConcurrentHashMap<String, Integer>(batch.size());
BytesRef bytes = new BytesRef(maxSuggestionLength);
BytesRefBuilder bytesRefBuilder = new BytesRefBuilder(); // From Lucene docs: BytesRef should not be used as a buffer, use BytesRefBuilder instead
bytesRefBuilder.append(bytes);
Term t = new Term(fld.fieldName, bytesRefBuilder);
long minCount = (long) (fld.minFreq * docCount);
long maxCount = (long) (docCount <= 1 ? Long.MAX_VALUE : (fld.maxFreq * docCount + 1));
updated = updated || !batch.isEmpty();
for (Map.Entry<String, Integer> e : batch.entrySet()) {
String term = e.getKey();
// check for duplicates
if (fld.filterDuplicates && ais.lookup(term, 1, true, false).size() > 0) {
// LOG.debug("skipping duplicate " + term);
continue;
}
// TODO: incorporate external metric (eg popularity) into weight
long weight;
if (fld.fieldAnalyzer == null) {
weight = fld.weight;
} else {
long count = searcher.getIndexReader().docFreq(t);
if (count < 0) {
// FIXME: is this even possible?
count = e.getValue();
} else {
count += e.getValue();
}
if (count < minCount || count > maxCount) {
weight = 0;
} else {
weight = (fld.weight * count) / docCount;
}
}
bytesRefBuilder.copyChars(term);
bytes = bytesRefBuilder.get();
ais.update(bytes, weight);
}
}
// refresh after each field so the counts will accumulate across fields?
if (updated) {
ais.refresh();
}
}
public void close() throws IOException {
if (lookup != null && lookup instanceof Closeable) {
((Closeable) lookup).close();
lookup = null;
}
}
/**
* Note: this class has a natural ordering that is inconsistent with equals.
*/
class WeightedField implements Comparable<WeightedField> {
final static int MAX_TERM_LENGTH = 128;
final String fieldName;
final long weight;
final float minFreq;
final float maxFreq;
final Analyzer fieldAnalyzer;
final boolean useStoredField;
private ConcurrentHashMap<String, Integer> pending;
private int pendingDocCount;
final boolean filterDuplicates;
WeightedField(String name, float weight, float minFreq, float maxFreq, Analyzer analyzer, boolean useStoredField, Boolean filterDuplicates) {
this.fieldName = name;
this.weight = (long) (weight * WEIGHT_SCALE);
this.minFreq = minFreq;
this.maxFreq = maxFreq;
this.fieldAnalyzer = analyzer;
this.useStoredField = useStoredField;
this.filterDuplicates = filterDuplicates;
pending = new ConcurrentHashMap<String, Integer>();
pendingDocCount = 0;
}
@Override
public String toString() {
return fieldName + '^' + weight;
}
@Override
public int compareTo(WeightedField fld) {
// sort from highest to lowest
return (int) (fld.weight - weight);
}
}
class CloseHandler extends CloseHook {
@Override
public void postClose(SolrCore c) {
try {
close();
} catch (IOException e) {
LOG.error("An error occurred while closing: " + e.getMessage(), e);
}
}
@Override
public void preClose(SolrCore c) {
}
}
@Override
public SpellingResult getSuggestions(SpellingOptions options) throws IOException {
SpellingResult result = super.getSuggestions(options);
if (options.extendedResults) {
for (Map.Entry<?, LinkedHashMap<String, Integer>> suggestion : result.getSuggestions().entrySet()) {
Object token = suggestion.getKey();
int freq = 0;
for (Map.Entry<String, Integer> e : suggestion.getValue().entrySet()) {
if (e.getKey().equals(token.toString())) {
freq = e.getValue();
break;
}
}
result.addFrequency((Token) token, freq);
}
}
return result;
}
}
| src/main/java/com/ifactory/press/db/solr/spelling/suggest/MultiSuggester.java | package com.ifactory.press.db.solr.spelling.suggest;
import java.io.Closeable;
import java.io.IOException;
import java.text.BreakIterator;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.spell.HighFrequencyDictionary;
import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.SolrCore;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.spelling.SpellingOptions;
import org.apache.solr.spelling.SpellingResult;
import org.apache.solr.spelling.suggest.Suggester;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimaps;
/**
* <h3>A suggester that draws suggestions from terms in multiple fields.</h3>
*
* <p>
* Contributions from each field are weighted by a per-field <b>weight</b>, and
* zero-weighted based on a global minimum <b>threshold</b> term frequency, a per-field
* minimum threshold (<b>minfreq</b>) and a per-field maximum (<b>maxfreq</b>) threshold.
* All thresholds are compared against (term frequency / document count), an estimate of
* the fraction of documents containing the term. Thus setting maximum=0.5 will filter out
* terms occurring in as many or more than half the documents.
* </p>
*
* <p>
* In the normal mode of operation, the given <b>field</b>'s analyzer is used to tokenize the
* stored field values; each resulting token becomes a suggestion.
* </p>
*
* <p>
* An alternate mode of operation provides for unanalyzed stored field values to
* be used as suggestions. This mode is selected by specifying
* <b>analyzerFieldType</b>=string in the suggester configuration. In this mode, every
* suggestion is given the field's constant <b>weight</b>: term frequency is not considered
* as part of the weight, and no filtering is applied based on frequency.
* </p>
*
* <p>
* If <b>filterDuplicates</b> is set to true for a field, then each suggestion generated by
* the field is looked up in the suggester: if it exists already, it is not added again. This
* slows down indexing considerably so it should only be turned on when actually needed.
* </p>
*
* <p>
* The following sample configuration illustrates a setup where suggestions are
* drawn from a title field and a full text field, with different weights and
* thresholds.
* </p>
*
* <pre>
* {@code
* <!-- Suggester -->
* <searchComponent name="suggest-component" class="solr.SpellCheckComponent">
*
* <!-- Multiple "Spell Checkers" can be declared and used by this
* component
* -->
*
* <!-- a spellchecker built from a field of the main index -->
* <lst name="spellchecker">
* <str name="name">suggest-infix-all</str>
* <str name="classname">org.apache.solr.spelling.suggest.MultiSuggester</str>
* <str name="lookupImpl">org.apache.solr.spelling.suggest.fst.AnalyzingInfixLookupFactory</str>
* <str name="suggestAnalyzerFieldType">text</str>
* <int name="maxSuggestionLength">80</int>
* <float name="threshold">0.0</float>
* <!-- true == performance-killer. MultiSuggester handles incremental updates automatically, so there's no need for this anyway. -->
* <str name="buildOnCommit">false</str>
* <lst name="fields">
* <lst name="field">
* <str name="name">fulltext_t</str>
* <float name="weight">1.0</float>
* <float name="minfreq">0.005</float>
* <float name="maxfreq">0.3</float>
* </lst>
* <lst name="field">
* <str name="name">title_ms</str>
* <float name="weight">10.0</float>
* </lst>
* <lst name="field">
* <!-- a field whose values are weighted by the value of another field in the same document -->
* <str name="name">weighted_field_ms</str>
* <str name="weight_field">weight_dv</str>
* <float name="weight">10.0</float>
* </lst>
* <lst name="field">
* <str name="name">title_t</str>
* <analyzerFieldType>string</analyzerFieldType>
* <float name="weight">10.0</float>
* </lst>
* <lst name="field">
* <str name="name">duplicate_title_t</str>
* <str name="analyzerFieldType">string</str>
* <float name="weight">3.0</float>
* <bool name="filterDuplicates">true</bool>
* </lst>
* </lst>
*
* </searchComponent>
* }
* </pre>
*
*
* NOTE: the incremental weighting scheme gives an artifical "advantage" to
* infrequent terms that happen to be indexed first because their weights are
* normalized when the number of documents is low. To avoid this, it's
* recommended to rebuild the index periodically. If the index is large and
* growing relatively slowly, this effect will be very small, though.
*/
@SuppressWarnings("rawtypes")
public class MultiSuggester extends Suggester {
// weights are stored internally as longs, but externally as small
// floating point numbers. The floating point weights are multiplied by
// this factor to convert them to longs with a sufficient
// range. WEIGHT_SCALE should be greater than the number of documents
private static final int WEIGHT_SCALE = 10000000;
private static final Logger LOG = LoggerFactory.getLogger(MultiSuggester.class);
private WeightedField[] fields;
private int maxSuggestionLength;
// use a synchronized Multimap - there may be one with the same name for each
// core
private static final ListMultimap<Object, Object> registry = Multimaps.synchronizedListMultimap(ArrayListMultimap.create());
private static final int DEFAULT_MAX_SUGGESTION_LENGTH = 80;
@Override
public String init(NamedList config, SolrCore coreParam) {
String myname = (String) config.get(DICTIONARY_NAME);
this.core = coreParam;
// Workaround for SOLR-6246 (lock exception on core reload): close
// any suggester registered with the same name.
if (registry.containsKey(myname)) {
MultiSuggester suggesterToClose = null;
for (Object o : registry.get(myname)) {
MultiSuggester suggester = (MultiSuggester) o;
if (suggester.core.getName().equals(coreParam.getName())) {
suggesterToClose = suggester;
break;
}
}
if (suggesterToClose != null) {
registry.remove(myname, suggesterToClose);
try {
suggesterToClose.close();
} catch (IOException e) {
LOG.error("An exception occurred while closing the spellchecker", e);
}
}
}
super.init(config, coreParam);
// effectively disable analysis *by the SpellChecker/Suggester component*
// because this leads
// to independent suggestions for each token; we want AIS to perform
// analysis and consider the tokens together
analyzer = new KeywordAnalyzer();
initWeights((NamedList) config.get("fields"), coreParam);
Integer maxLengthConfig = (Integer) config.get("maxSuggestionLength");
maxSuggestionLength = maxLengthConfig != null ? maxLengthConfig : DEFAULT_MAX_SUGGESTION_LENGTH;
registry.put(myname, this);
core.addCloseHook(new CloseHandler());
return myname;
}
private void initWeights(NamedList fieldConfigs, SolrCore coreParam) {
fields = new WeightedField[fieldConfigs.size()];
for (int ifield = 0; ifield < fieldConfigs.size(); ifield++) {
NamedList fieldConfig = (NamedList) fieldConfigs.getVal(ifield);
String fieldName = (String) fieldConfig.get("name");
Float weight = (Float) fieldConfig.get("weight");
if (weight == null) {
weight = 1.0f;
}
Float minFreq = (Float) fieldConfig.get("minfreq");
if (minFreq == null) {
minFreq = 0.0f;
}
Float maxFreq = (Float) fieldConfig.get("maxfreq");
if (maxFreq == null) {
maxFreq = 1.0f;
}
Boolean filterDuplicates = fieldConfig.getBooleanArg("filterDuplicates");
if (filterDuplicates == null) {
filterDuplicates = false;
}
String analyzerFieldTypeName = (String) fieldConfig.get("analyzerFieldType");
Analyzer fieldAnalyzer;
boolean useStoredField = analyzerFieldTypeName != null;
if (useStoredField) {
// useStoredField - when re-building, we retrieve the stored field value
if ("string".equals(analyzerFieldTypeName)) {
fieldAnalyzer = null;
} else {
fieldAnalyzer = coreParam.getLatestSchema().getFieldTypeByName(analyzerFieldTypeName).getIndexAnalyzer();
}
} else {
// Use the existing term values as analyzed by the field
fieldAnalyzer = coreParam.getLatestSchema().getFieldType(fieldName).getIndexAnalyzer();
}
fields[ifield] = new WeightedField(fieldName, weight, minFreq, maxFreq, fieldAnalyzer, useStoredField, filterDuplicates);
}
Arrays.sort(fields);
}
@Override
public void build(SolrCore coreParam, SolrIndexSearcher searcher) throws IOException {
LOG.info("build suggestion index: " + name);
reader = searcher.getIndexReader();
SafariInfixSuggester ais = (SafariInfixSuggester) lookup;
ais.clear();
// index all the terms-based fields using dictionaries
for (WeightedField fld : fields) {
if (fld.useStoredField) {
buildFromStoredField(fld, searcher);
} else {
// TODO: refactor b/c we're not really using the MultiDictionary's multiple dictionary capability any more
dictionary = new MultiDictionary();
buildFromTerms(fld);
ais.add(dictionary);
ais.refresh();
}
}
LOG.info(String.format("%s suggestion index built: %d suggestions", name, ais.getCount()));
}
private void buildFromStoredField(WeightedField fld, SolrIndexSearcher searcher) throws IOException {
if (fld.fieldAnalyzer != null) {
throw new IllegalStateException("not supported: analyzing stored fields");
}
LOG.info(String.format("build suggestions from values for: %s (%d)", fld.fieldName, fld.weight));
Set<String> fieldsToLoad = new HashSet<String>();
fieldsToLoad.add(fld.fieldName);
int maxDoc = searcher.maxDoc();
for (int idoc = 0; idoc < maxDoc; ++idoc) {
// TODO: exclude deleted documents
Document doc = reader.document(idoc, fieldsToLoad);
String value = doc.get(fld.fieldName);
if (value != null) {
addRaw(fld, value);
}
if (idoc % 10000 == 9999) {
commit(searcher);
}
}
commit(searcher);
}
private void buildFromTerms(WeightedField fld) throws IOException {
HighFrequencyDictionary hfd = new HighFrequencyDictionary(reader, fld.fieldName, fld.minFreq);
int numDocs = reader.getDocCount(fld.fieldName);
int minFreq = (int) (fld.minFreq * numDocs);
int maxFreq = (int) (fld.maxFreq * numDocs);
LOG.info(String.format("build suggestions from terms for: %s (min=%d, max=%d, weight=%d)", fld.fieldName, minFreq, maxFreq, fld.weight));
((MultiDictionary) dictionary).addDictionary(hfd, minFreq, maxFreq, fld.weight / (2 + numDocs));
}
@Override
public void reload(SolrCore coreParam, SolrIndexSearcher searcher) throws IOException {
if (lookup instanceof AnalyzingInfixSuggester) {
// AnalyzingInfixSuggester maintains its own index and sees updates, so we
// don't need to
// build it every time the core starts or is reloaded
AnalyzingInfixSuggester ais = (AnalyzingInfixSuggester) lookup;
if (ais.getCount() > 0) {
LOG.info("load existing suggestion index");
return;
}
}
build(core, searcher);
}
/**
* Adds the field values from the document to the suggester
*
* suggestions for each field are managed using one of the following weighting
* and update strategies: - constant weight: all terms occurring in the field
* are weighted equally - frequency weight: terms have a weight that is the
* field's weight * the number of occurrences frequency-weighted suggestions
* can have their frequency calculated by: - the value of docFreq() on a
* source field - a frequency maintained in a docValues field - the current
* weight in the suggester index
*
* @param doc
* @param searcher
* @throws IOException
*/
public void add(SolrInputDocument doc, SolrIndexSearcher searcher) throws IOException {
if (!(lookup instanceof SafariInfixSuggester)) {
return;
}
for (WeightedField fld : fields) {
if (!doc.containsKey(fld.fieldName)) {
continue;
}
fld.pendingDocCount++;
for (Object value : doc.getFieldValues(fld.fieldName)) {
String strValue = value.toString();
if (fld.fieldAnalyzer == null) {
addRaw(fld, strValue);
} else {
addTokenized(fld, strValue);
}
}
}
}
/**
* Add the value to the suggester, so it will be available as a suggestion.
*
* @param ais
* the suggester
* @param weight
* the weight of the suggestion
* @param value
* the value to add
* @throws IOException
*/
private void addRaw(WeightedField fld, String value) throws IOException {
if (value.length() > maxSuggestionLength) {
// break the value into segments if it's too long
BreakIterator scanner = BreakIterator.getWordInstance();
scanner.setText(value);
int offset = 0;
while (offset < value.length() - maxSuggestionLength) {
int next = scanner.following(offset + maxSuggestionLength - 1);
incPending(fld, value.substring(offset, next));
offset = next;
}
// just drop any trailing goo
} else {
// add the value unchanged
incPending(fld, value);
}
// LOG.debug ("add raw " + value);
}
private void addTokenized(WeightedField fld, String value) throws IOException {
TokenStream tokens = fld.fieldAnalyzer.tokenStream(fld.fieldName, value);
tokens.reset();
CharTermAttribute termAtt = tokens.addAttribute(CharTermAttribute.class);
Set<String> once = new HashSet<String>();
try {
while (tokens.incrementToken()) {
String token = termAtt.toString();
token = MultiDictionary.stripAfflatus(token);
if (once.add(token)) {
// only add each token once per field value to keep frequencies in line with
// HighFrequencyDictionary, which counts using TermsEnum.docFreq()
incPending(fld, token);
// LOG.debug("add token " + token);
}
}
tokens.end();
} finally {
tokens.close();
}
}
private void incPending(WeightedField fld, String suggestion) {
ConcurrentHashMap<String, Integer> pending = fld.pending;
if (pending.containsKey(suggestion)) {
pending.put(suggestion, pending.get(suggestion) + 1);
} else {
pending.put(suggestion, 1);
}
}
public void commit(SolrIndexSearcher searcher) throws IOException {
if (!(lookup instanceof SafariInfixSuggester)) {
return;
}
boolean updated = false;
SafariInfixSuggester ais = (SafariInfixSuggester) lookup;
for (WeightedField fld : fields) {
// get the number of documents having this field
long docCount = searcher.getIndexReader().getDocCount(fld.fieldName) + fld.pendingDocCount;
fld.pendingDocCount = 0;
// swap in a new pending map so we can accept new suggestions while we
// commit
ConcurrentHashMap<String, Integer> batch = fld.pending;
fld.pending = new ConcurrentHashMap<String, Integer>(batch.size());
BytesRef bytes = new BytesRef(maxSuggestionLength);
BytesRefBuilder bytesRefBuilder = new BytesRefBuilder(); // From Lucene docs: BytesRef should not be used as a buffer, use BytesRefBuilder instead
bytesRefBuilder.append(bytes);
Term t = new Term(fld.fieldName, bytesRefBuilder);
long minCount = (long) (fld.minFreq * docCount);
long maxCount = (long) (docCount <= 1 ? Long.MAX_VALUE : (fld.maxFreq * docCount + 1));
updated = updated || !batch.isEmpty();
for (Map.Entry<String, Integer> e : batch.entrySet()) {
String term = e.getKey();
// check for duplicates
if (fld.filterDuplicates && ais.lookup(term, 1, true, false).size() > 0) {
// LOG.debug("skipping duplicate " + term);
continue;
}
// TODO: incorporate external metric (eg popularity) into weight
long weight;
if (fld.fieldAnalyzer == null) {
weight = fld.weight;
} else {
long count = searcher.getIndexReader().docFreq(t);
if (count < 0) {
// FIXME: is this even possible?
count = e.getValue();
} else {
count += e.getValue();
}
if (count < minCount || count > maxCount) {
weight = 0;
} else {
weight = (fld.weight * count) / docCount;
}
}
bytesRefBuilder.copyChars(term);
bytes = bytesRefBuilder.get();
ais.update(bytes, weight);
}
}
// refresh after each field so the counts will accumulate across fields?
if (updated) {
ais.refresh();
}
}
public void close() throws IOException {
if (lookup != null && lookup instanceof Closeable) {
((Closeable) lookup).close();
lookup = null;
}
}
/**
* Note: this class has a natural ordering that is inconsistent with equals.
*/
class WeightedField implements Comparable<WeightedField> {
final static int MAX_TERM_LENGTH = 128;
final String fieldName;
final long weight;
final float minFreq;
final float maxFreq;
final Analyzer fieldAnalyzer;
final boolean useStoredField;
private ConcurrentHashMap<String, Integer> pending;
private int pendingDocCount;
final boolean filterDuplicates;
WeightedField(String name, float weight, float minFreq, float maxFreq, Analyzer analyzer, boolean useStoredField, Boolean filterDuplicates) {
this.fieldName = name;
this.weight = (long) (weight * WEIGHT_SCALE);
this.minFreq = minFreq;
this.maxFreq = maxFreq;
this.fieldAnalyzer = analyzer;
this.useStoredField = useStoredField;
this.filterDuplicates = filterDuplicates;
pending = new ConcurrentHashMap<String, Integer>();
pendingDocCount = 0;
}
@Override
public String toString() {
return fieldName + '^' + weight;
}
@Override
public int compareTo(WeightedField fld) {
// sort from highest to lowest
return (int) (fld.weight - weight);
}
}
class CloseHandler extends CloseHook {
@Override
public void postClose(SolrCore c) {
try {
close();
} catch (IOException e) {
LOG.error("An error occurred while closing: " + e.getMessage(), e);
}
}
@Override
public void preClose(SolrCore c) {
}
}
@Override
public SpellingResult getSuggestions(SpellingOptions options) throws IOException {
SpellingResult result = super.getSuggestions(options);
if (options.extendedResults) {
for (Map.Entry<?, LinkedHashMap<String, Integer>> suggestion : result.getSuggestions().entrySet()) {
Object token = suggestion.getKey();
int freq = 0;
for (Map.Entry<String, Integer> e : suggestion.getValue().entrySet()) {
if (e.getKey().equals(token.toString())) {
freq = e.getValue();
break;
}
}
result.addFrequency((Token) token, freq);
}
}
return result;
}
}
| Token moved to spelling class, this should be the only place it is used
| src/main/java/com/ifactory/press/db/solr/spelling/suggest/MultiSuggester.java | Token moved to spelling class, this should be the only place it is used |
|
Java | bsd-3-clause | e739bcf351bd658d5ada3ba742ee6d80f8aed23b | 0 | NCIP/cananolab,NCIP/cananolab,NCIP/cananolab | import gov.nih.nci.cagrid.cananolab.client.CaNanoLabServiceClient;
import gov.nih.nci.cagrid.cqlquery.Attribute;
import gov.nih.nci.cagrid.cqlquery.CQLQuery;
import gov.nih.nci.cagrid.cqlquery.Predicate;
import gov.nih.nci.cagrid.cqlresultset.CQLQueryResults;
import gov.nih.nci.cagrid.data.utilities.CQLQueryResultsIterator;
import gov.nih.nci.cananolab.domain.common.ExperimentConfig;
import gov.nih.nci.cananolab.domain.common.File;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Keyword;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.common.Protocol;
import gov.nih.nci.cananolab.domain.common.Publication;
import gov.nih.nci.cananolab.domain.particle.ActivationMethod;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.domain.particle.ChemicalAssociation;
import gov.nih.nci.cananolab.domain.particle.ComposingElement;
import gov.nih.nci.cananolab.domain.particle.Function;
import gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity;
import gov.nih.nci.cananolab.domain.particle.NanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Sample;
import gov.nih.nci.cananolab.domain.particle.SampleComposition;
//import gov.nih.nci.cananolab.domain.nanomaterial.
import gov.nih.nci.cananolab.domain.agentmaterial.*;
import gov.nih.nci.cananolab.domain.characterization.*;
import gov.nih.nci.cananolab.domain.function.*;
import gov.nih.nci.cananolab.domain.characterization.physical.*;
public class GridClientTest {
CaNanoLabServiceClient gridClient;
public GridClientTest(CaNanoLabServiceClient gridClient) {
this.gridClient = gridClient;
}
public void testComposingElement(String id) {
//"6062106"
System.out.println("Testing ComposingElement with id=" + id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.ComposingElement");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.ComposingElement");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
ComposingElement ce = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
ce = (ComposingElement) obj;
System.out.println("ComposingElement: id="+ce.getId() + "\tDesc="+ce.getDescription() + "\tName="+ ce.getName());
}
}catch(Exception e){
System.out.println("Exception getting ComposingElement for id="+ id + ": " + e);
}
}
public void testSampleComposition(String id) {
//"6160390"
System.out.println("Testing SampleComposition with id=" + id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.SampleComposition");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.SampleComposition");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
SampleComposition sc = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
sc = (SampleComposition) obj;
System.out.println("SampleComposition : id="+sc.getId() );
}
}catch(Exception e){
System.out.println("Exception getting SampleComposition for id=" + id + ": " +e);
}
}
public void testGetAllCharacterizationByCQLQuery() {
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.Characterization");
/*Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue("10846210");
target.setAttribute(attribute);*/
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.Characterization");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Characterization chara = null;
System.out.println("Testing GetAllCharacterizationByCQLQuery, for every characterization test \n"+
"GetFinding, GetProtocol, GetExperimentConfigs, and Characterization.");
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
chara = (Characterization) obj;
if(chara != null){
testGetFindingsByCharacterizationId(chara.getId().toString());
testGetProtocolByCharacterizationId(chara.getId().toString());
testGetExperimentConfigsByCharacterizationId(chara.getId().toString());
testCharacterization(chara.getId().toString());
}
}
}catch(Exception e){
System.out.println("Exception getting all Characterization by CQLQuery: " + e);
}
}
public void testProtocol(String id) {
System.out.println("Test Protocol with id="+id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.common.Protocol");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.common.Protocol");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Protocol p = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
p = (Protocol) obj;
System.out.println("Protocol: id="+p.getId() + "\tName=" +p.getName() + "\tAbbreviation=" +p.getAbbreviation() + "\tType=" +p.getType() + "\tVersion=" +p.getVersion());
}
}catch(Exception e){
System.out.println("Exception getting Protocol for id=" + id + ": " +e);
}
}
public void testChemicalAssociation(String id) {
//"8847369"
System.out.println("Test ChemicalAssociation with id="+id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.ChemicalAssociation");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.ChemicalAssociation");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
ChemicalAssociation ca = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
ca = (ChemicalAssociation) obj;
System.out.println("ChemicalAssociation Desc: "+ca.getDescription() + ", id: " + ca.getId());
}
}catch(Exception e){
System.out.println("Exception getting ChemicalAssociation for id="+ id + ": " + e);
}
}
public void testPublication(String id) {
//"8847369"
System.out.println("Test Publication with id="+id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.common.Publication");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.common.Publication");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Publication p = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
p = (Publication) obj;
System.out.println("Publication: id=" + p.getId() + "\tName=" + p.getName() + "\tDesc="+p.getDescription() + "\tTitle=" + p.getTitle());
}
}catch(Exception e){
System.out.println("Exception getting Publication for id=" + id + ": " +e);
}
}
public void testActivationMethod(String id) {
//"3833872"
System.out.println("Testing ActivationMethod with id="+id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.ActivationMethod");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.ActivationMethod");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
ActivationMethod am = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
am = (ActivationMethod) obj;
System.out.println("Activation Effect: id="+ am.getId() + "\tActivationEffect=" +am.getActivationEffect() + ", Type=" + am.getType());
}
}catch(Exception e){
System.out.println("Exception getting ActivationMethod for id=" + id + ": " +e);
}
}
public void testNanomaterialEntity(String id) {
//"6160399"
System.out.println("Testing NanoMaterialEntity with id="+ id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.NanomaterialEntity");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.NanomaterialEntity");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
NanomaterialEntity nanoEntity = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
nanoEntity = (NanomaterialEntity) obj;
System.out.println("NanoMaterial entity: id="+nanoEntity.getId() + "\tDesc=" +nanoEntity.getDescription() + "\tCreatedBy=" +nanoEntity.getCreatedBy());
}
}catch(Exception e){
System.out.println("Exception getting NanomaterialEntity for id=" + id + ": " +e);
}
}
public void testSample(String id) {
System.out.println("Testing Sample with id=" +id);
//"3735553"
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.Sample");
Attribute attribute = new Attribute();
/*attribute.setName("name");
attribute.setPredicate(Predicate.LIKE);
attribute.setValue("NCL-23-1"); //"20917507"); //NCL-23-1*/
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.Sample");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Sample sample = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
sample = (Sample) obj;
System.out.println("Sample: Name="+sample.getName() + ", id=" + sample.getId());
}
}catch(Exception e){
System.out.println("Exception getting Sample for id=" + id + ": " +e);
}
}
public void testFunction(String id) {
//"10944705"
System.out.println("Testing Function with id=" + id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.Function");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.Function");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Function fe = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
fe = (Function) obj;
System.out.println("Function: desc="+fe.getDescription() + "\tId=" + fe.getId());
}
}catch(Exception e){
System.out.println("Exception getting Function for id="+ id + ": " + e);
}
}
public void testFunctionalizingEntity(String id) {
//"6225945"
System.out.println("Testing FunctionalizingEntity with id=" + id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
FunctionalizingEntity fe = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
fe = (FunctionalizingEntity) obj;
System.out.println("FunctionalizingEntity: name="+fe.getName() + "\tId=" + fe.getId());
}
}catch(Exception e){
System.out.println("Exception getting FunctionalizaingEntity for id="+ id + ": " + e);
}
}
public void testCharacterization(String id) {
//"10977286"
System.out.println("Testing characterization with id=" +id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.Characterization");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.Characterization");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Characterization chara = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
chara = (Characterization) obj;
System.out.println("characterization: id="+chara.getId() + "\tDesignMethodDesc: " +chara.getDesignMethodsDescription());
}
}catch(Exception e){
System.out.println("Exception getting Characterization for id="+ id + ": " + e);
}
}
public void testGetKeywordsBySampleId(String sampleId) {
//String sampleId = "20917507";
System.out.println("Testing getKeyworkdsBySampleId: " + sampleId);
try{
Keyword[] keywords = gridClient.getKeywordsBySampleId(sampleId);
if(keywords != null){
for(Keyword keyword: keywords){
if(keyword != null){
System.out.println("Keyword name: " + keyword.getName() + "\tId: " + keyword.getId());
}
}
}
}catch(Exception e){
System.out.println("Exception getting KeywordsBySampleId for sampleId="+ sampleId + ": " + e);
}
System.out.println("Finished printing getKeyworkdsBySampleId results for sampleId: " + sampleId);
}
public void testGetPrimaryPointOfContactBySampleId(String sampleId) {
//String sampleId = "20917507";
System.out.println("Testing getPrimaryPointOfContactBySampleId : " + sampleId);
try{
PointOfContact contact = gridClient.getPrimaryPointOfContactBySampleId(sampleId);
if(contact != null){
System.out.println("primary contact name: " + contact.getFirstName() + "\t" +contact.getLastName() +
"\tId: " + contact.getId() + "\tPhone: " + contact.getPhone() + "\tRole: " +
contact.getRole() + "\tEmail: " +contact.getEmail());
}
}catch(Exception e){
System.out.println("Exception getting PrimaryPointOfContactBySampleId for sampleId="+ sampleId + ": " + e);
}
System.out.println("Finished printing getPrimaryPointOfContactBySampleId results for sampleId: " + sampleId);
}
public void testGetOtherPointOfContactsBySampleId(String sampleId) {
//String sampleId = "3735553";
System.out.println("Testing getOtherPointOfContactsBySampleId : " + sampleId);
try{
PointOfContact[] contacts = gridClient.getOtherPointOfContactsBySampleId(sampleId);
if(contacts != null){
for(PointOfContact contact: contacts){
if(contact != null){
System.out.println("primary contact name: " + contact.getFirstName() + "\t" +contact.getLastName() +
"\tId: " + contact.getId() + "\tPhone: " + contact.getPhone() + "\tRole: " +
contact.getRole() + "\tEmail: " +contact.getEmail());
}
}
}
}catch(Exception e){
System.out.println("Exception getting OtherPointOfContactsBySampleId for sampleId="+ sampleId + ": " + e);
}
System.out.println("Finished printing getPrimaryPointOfContactBySampleId results for sampleId: " + sampleId);
}
public void testGetExperimentConfigsByCharacterizationId(String charId) {
System.out.println("Testing testGetExperimentConfigsByCharacterizationId : " + charId);
try{
ExperimentConfig[] experimentConfigs = gridClient.getExperimentConfigsByCharacterizationId(charId);
for(ExperimentConfig exp: experimentConfigs){
if(exp != null){
System.out.println("ExperimentConfig Id: " + exp.getId() + "\tDesc: " +exp.getDescription() );
}
}
}catch(Exception e){
System.out.println("Exception getting ExperimentConfigsByCharacterizationId for charid="+ charId + ": " + e);
}
System.out.println("Finished printing testGetExperimentConfigsByCharacterizationId results for charId: " + charId);
}
public void testGetFindingsByCharacterizationId(String charId) {
//String charId = "3932251";
System.out.println("Testing testGetFindingsByCharacterizationId : " + charId);
try{
Finding[] findings = gridClient.getFindingsByCharacterizationId(charId);
if(findings != null){
for(Finding f: findings){
if(f != null){
System.out.println("Finding Id: " + f.getId() + "\tCreatedBy: " +f.getCreatedBy() );
}
}
}
}catch(Exception e){
System.out.println("Exception getting FindingsByCharacterizationId for charid="+ charId + ": " + e);
}
System.out.println("Finished printing testGetFindingsByCharacterizationId results for charId: " + charId);
}
public void testGetProtocolByCharacterizationId(String charId) {
//String charId = "21867791";
System.out.println("Testing testGetProtocolByCharacterizationId : " + charId);
try{
Protocol p = gridClient.getProtocolByCharacterizationId(charId);
if(p != null){
System.out.println("Protocol Id: " + p.getId() + "\tCreatedBy: " +p.getCreatedBy() + "\tName: "
+ p.getName() + "\tType: " + p.getType());
}
}catch(Exception e){
System.out.println("Exception getting ProtocolByCharacterizationId for charid="+ charId + ": " + e);
}
System.out.println("Finished printing testGetProtocolByCharacterizationId results for charId: " + charId);
}
public void testGetPublicationBySampleId(String sampleId) {
//String sampleId = "20917507";
System.out.println("Testing getPublicationBySampleId : " + sampleId);
try{
Publication[] pubs = gridClient.getPublicationsBySampleId(sampleId);
if(pubs != null){
for(Publication p: pubs){
if(p != null){
System.out.print("Publication Id: " + p.getId() + "\tDesc: " + p.getDescription() +
"\tName: " + p.getName() + "\tJournalName: " + p.getJournalName() + "\tTitle: " +p.getTitle());
}
}
}
}catch(Exception e){
System.out.println("Exception when testing getPublicationBySampleId for sampleId=" + sampleId);
}
System.out.println("Finished printing getPrimaryPointOfContactBySampleId results for sampleId: " + sampleId);
}
public void testGetSampleIds() {
try{
String[] sampleIds = gridClient.getSampleIds("", "", null, null, null, null, null);
System.out.println("Testing getSampleIds operation.... \n" +
"For every sample, test get Publication, keywords, primary contact and other contact.");
for(String id: sampleIds){
testSample(id);
testGetPublicationBySampleId(id);
testGetKeywordsBySampleId(id);
testGetOtherPointOfContactsBySampleId(id);
testGetPrimaryPointOfContactBySampleId(id);
}
}catch(Exception e){
System.out.println("Exception getting SampleIds: " + e);
}
System.out.println("\nFinished testing samples.... \n");
}
public void testGetPublicationIdsBy() {
try{
String[] pubIds = gridClient.getPublicationIdsBy("", "", "", null, null, null, null, null, null, null, null);
System.out.println("Testing getPublicationIdsBy operation.... \n " +
"For every publication, test Publication");
if(pubIds != null){
for(String id: pubIds){
if(id != null){
testPublication(id);
}
}
}
}catch(Exception e){
System.out.println("Exception getting PublicationIds: " + e);
}
System.out.println("\nFinished testing publications.....\n");
}
public void testGetFileByProtocolId(String protocolId){
//String protocolId = "24390915";
System.out.println("Testing getFileByProtocolId: " + protocolId);
try{
File file = gridClient.getFileByProtocolId(protocolId);
if(file != null){
System.out.println("File desc: " + file.getDescription() + "\tName: "+ file.getName() + "\tUri: " +file.getUri());
}
}catch(Exception e){
System.out.println("Exception getting ExperimentConfigsByCharacterizationId for protocolid="+ protocolId + ": " + e);
}
}
public void testGetFilesByCompositionInfoId(String id, String className) {
//String id = "21376285";//"21376285";
//String className="NanoMaterialEntity";
System.out.println("Test getFilesByCompositionInfoId: id=" + id + ", className=" + className);
try{
File[] files = gridClient.getFilesByCompositionInfoId(id,className);
if(files != null){
for(File file: files){
System.out.println("File desc: " + file.getDescription() + "\tName: "+ file.getName() + "\tUri: " +file.getUri());
}
}
}catch(Exception e){
System.out.println("Exception getting FilesByCompositionInfoId for id="+ id + ", className=" +className + ": " + e);
}
}
public static void main(String[] args) {
System.out.println("Running the Grid Service Client");
try {
if (!(args.length < 2)) {
if (args[0].equals("-url")) {
CaNanoLabServiceClient client = new CaNanoLabServiceClient(
args[1]);
GridClientTest test=new GridClientTest(client);
//test.testGetPrimaryPointOfContactBySampleId("10354688");
//these methods has loops to test other methods
test.testGetSampleIds();
test.testGetPublicationIdsBy();
//test.testGetAllCharacterizationByCQLQuery();
//these methods user can plug in any parameter
test.testGetFindingsByCharacterizationId("3932251");
test.testGetProtocolByCharacterizationId("3932251");
test.testGetExperimentConfigsByCharacterizationId("3932251");
test.testCharacterization("3932251");
test.testGetFileByProtocolId("24390915");
test.testGetFilesByCompositionInfoId("21376285","NanomaterialEntity");
test.testNanomaterialEntity("6160399");
test.testFunction("10944705");
test.testComposingElement("6062106");
test.testFunctionalizingEntity("6225945");
test.testChemicalAssociation("8847369");
test.testSampleComposition("6160390");
test.testActivationMethod("3833872");
test.testProtocol("24390915");
} else {
System.exit(1);
}
} else {
System.exit(1);
}
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
} | software/cananolab-grid/test/src/GridClientTest.java | import gov.nih.nci.cagrid.cananolab.client.CaNanoLabServiceClient;
import gov.nih.nci.cagrid.cqlquery.Attribute;
import gov.nih.nci.cagrid.cqlquery.CQLQuery;
import gov.nih.nci.cagrid.cqlquery.Predicate;
import gov.nih.nci.cagrid.cqlresultset.CQLQueryResults;
import gov.nih.nci.cagrid.data.utilities.CQLQueryResultsIterator;
import gov.nih.nci.cananolab.domain.common.ExperimentConfig;
import gov.nih.nci.cananolab.domain.common.File;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Keyword;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.common.Protocol;
import gov.nih.nci.cananolab.domain.common.Publication;
import gov.nih.nci.cananolab.domain.particle.ActivationMethod;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.domain.particle.ChemicalAssociation;
import gov.nih.nci.cananolab.domain.particle.ComposingElement;
import gov.nih.nci.cananolab.domain.particle.Function;
import gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity;
import gov.nih.nci.cananolab.domain.particle.NanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Sample;
import gov.nih.nci.cananolab.domain.particle.SampleComposition;
//import gov.nih.nci.cananolab.domain.nanomaterial.
import gov.nih.nci.cananolab.domain.agentmaterial.*;
import gov.nih.nci.cananolab.domain.characterization.*;
import gov.nih.nci.cananolab.domain.function.*;
import gov.nih.nci.cananolab.domain.characterization.physical.*;
public class GridClientTest {
CaNanoLabServiceClient gridClient;
public GridClientTest(CaNanoLabServiceClient gridClient) {
this.gridClient = gridClient;
}
public void testComposingElement(String id) {
//"6062106"
System.out.println("Testing ComposingElement with id=" + id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.ComposingElement");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.ComposingElement");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
ComposingElement ce = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
ce = (ComposingElement) obj;
System.out.println("ComposingElement: id="+ce.getId() + "\tDesc="+ce.getDescription() + "\tName="+ ce.getName());
}
}catch(Exception e){
System.out.println("Exception getting ComposingElement for id="+ id + ": " + e);
}
}
public void testSampleComposition(String id) {
//"6160390"
System.out.println("Testing SampleComposition with id=" + id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.SampleComposition");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.SampleComposition");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
SampleComposition sc = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
sc = (SampleComposition) obj;
System.out.println("SampleComposition : id="+sc.getId() );
}
}catch(Exception e){
System.out.println("Exception getting SampleComposition for id=" + id + ": " +e);
}
}
public void testGetAllCharacterizationByCQLQuery() {
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.Characterization");
/*Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue("10846210");
target.setAttribute(attribute);*/
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.Characterization");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Characterization chara = null;
System.out.println("Testing GetAllCharacterizationByCQLQuery, for every characterization test \n"+
"GetFinding, GetProtocol, GetExperimentConfigs, and Characterization.");
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
chara = (Characterization) obj;
if(chara != null){
testGetFindingsByCharacterizationId(chara.getId().toString());
testGetProtocolByCharacterizationId(chara.getId().toString());
testGetExperimentConfigsByCharacterizationId(chara.getId().toString());
testCharacterization(chara.getId().toString());
}
}
}catch(Exception e){
System.out.println("Exception getting all Characterization by CQLQuery: " + e);
}
}
public void testProtocol(String id) {
System.out.println("Test Protocol with id="+id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.common.Protocol");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.common.Protocol");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Protocol p = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
p = (Protocol) obj;
System.out.println("Protocol: id="+p.getId() + "\tName=" +p.getName() + "\tAbbreviation=" +p.getAbbreviation() + "\tType=" +p.getType() + "\tVersion=" +p.getVersion());
}
}catch(Exception e){
System.out.println("Exception getting Protocol for id=" + id + ": " +e);
}
}
public void testChemicalAssociation(String id) {
//"8847369"
System.out.println("Test ChemicalAssociation with id="+id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.ChemicalAssociation");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.ChemicalAssociation");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
ChemicalAssociation ca = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
ca = (ChemicalAssociation) obj;
System.out.println("ChemicalAssociation Desc: "+ca.getDescription() + ", id: " + ca.getId());
}
}catch(Exception e){
System.out.println("Exception getting ChemicalAssociation for id="+ id + ": " + e);
}
}
public void testPublication(String id) {
//"8847369"
System.out.println("Test Publication with id="+id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.common.Publication");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.common.Publication");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Publication p = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
p = (Publication) obj;
System.out.println("Publication: id=" + p.getId() + "\tName=" + p.getName() + "\tDesc="+p.getDescription() + "\tTitle=" + p.getTitle());
}
}catch(Exception e){
System.out.println("Exception getting Publication for id=" + id + ": " +e);
}
}
public void testActivationMethod(String id) {
//"3833872"
System.out.println("Testing ActivationMethod with id="+id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.ActivationMethod");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.ActivationMethod");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
ActivationMethod am = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
am = (ActivationMethod) obj;
System.out.println("Activation Effect: id="+ am.getId() + "\tActivationEffect=" +am.getActivationEffect() + ", Type=" + am.getType());
}
}catch(Exception e){
System.out.println("Exception getting ActivationMethod for id=" + id + ": " +e);
}
}
public void testNanomaterialEntity(String id) {
//"6160399"
System.out.println("Testing NanoMaterialEntity with id="+ id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.NanomaterialEntity");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.NanomaterialEntity");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
NanomaterialEntity nanoEntity = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
nanoEntity = (NanomaterialEntity) obj;
System.out.println("NanoMaterial entity: id="+nanoEntity.getId() + "\tDesc=" +nanoEntity.getDescription() + "\tCreatedBy=" +nanoEntity.getCreatedBy());
}
}catch(Exception e){
System.out.println("Exception getting NanomaterialEntity for id=" + id + ": " +e);
}
}
public void testSample(String id) {
System.out.println("Testing Sample with id=" +id);
//"3735553"
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.Sample");
Attribute attribute = new Attribute();
/*attribute.setName("name");
attribute.setPredicate(Predicate.LIKE);
attribute.setValue("NCL-23-1"); //"20917507"); //NCL-23-1*/
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.Sample");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Sample sample = null;
System.out.println("sample resutls: " + results + iter.hasNext());
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
sample = (Sample) obj;
System.out.println("Sample: Name="+sample.getName() + ", id=" + sample.getId());
}
}catch(Exception e){
System.out.println("Exception getting Sample for id=" + id + ": " +e);
}
}
public void testFunction(String id) {
//"10944705"
System.out.println("Testing Function with id=" + id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.Function");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.Function");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Function fe = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
fe = (Function) obj;
System.out.println("Function: desc="+fe.getDescription() + "\tId=" + fe.getId());
}
}catch(Exception e){
System.out.println("Exception getting Function for id="+ id + ": " + e);
}
}
public void testFunctionalizingEntity(String id) {
//"6225945"
System.out.println("Testing FunctionalizingEntity with id=" + id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
FunctionalizingEntity fe = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
fe = (FunctionalizingEntity) obj;
System.out.println("FunctionalizingEntity: name="+fe.getName() + "\tId=" + fe.getId());
}
}catch(Exception e){
System.out.println("Exception getting FunctionalizaingEntity for id="+ id + ": " + e);
}
}
public void testCharacterization(String id) {
//"10977286"
System.out.println("Testing characterization with id=" +id);
CQLQuery query = new CQLQuery();
gov.nih.nci.cagrid.cqlquery.Object target = new gov.nih.nci.cagrid.cqlquery.Object();
target
.setName("gov.nih.nci.cananolab.domain.particle.Characterization");
Attribute attribute = new Attribute();
attribute.setName("id");
attribute.setPredicate(Predicate.EQUAL_TO);
attribute.setValue(id);
target.setAttribute(attribute);
query.setTarget(target);
try{
CQLQueryResults results = gridClient.query(query);
results
.setTargetClassname("gov.nih.nci.cananolab.domain.particle.Characterization");
CQLQueryResultsIterator iter = new CQLQueryResultsIterator(results);
Characterization chara = null;
while (iter.hasNext()) {
java.lang.Object obj = iter.next();
chara = (Characterization) obj;
System.out.println("characterization: id="+chara.getId() + "\tDesignMethodDesc: " +chara.getDesignMethodsDescription());
}
}catch(Exception e){
System.out.println("Exception getting Characterization for id="+ id + ": " + e);
}
}
public void testGetKeywordsBySampleId(String sampleId) {
//String sampleId = "20917507";
System.out.println("Testing getKeyworkdsBySampleId: " + sampleId);
try{
Keyword[] keywords = gridClient.getKeywordsBySampleId(sampleId);
if(keywords != null){
for(Keyword keyword: keywords){
if(keyword != null){
System.out.println("Keyword name: " + keyword.getName() + "\tId: " + keyword.getId());
}
}
}
}catch(Exception e){
System.out.println("Exception getting KeywordsBySampleId for sampleId="+ sampleId + ": " + e);
}
System.out.println("Finished printing getKeyworkdsBySampleId results for sampleId: " + sampleId);
}
public void testGetPrimaryPointOfContactBySampleId(String sampleId) {
//String sampleId = "20917507";
System.out.println("Testing getPrimaryPointOfContactBySampleId : " + sampleId);
try{
PointOfContact contact = gridClient.getPrimaryPointOfContactBySampleId(sampleId);
if(contact != null){
System.out.println("primary contact name: " + contact.getFirstName() + "\t" +contact.getLastName() +
"\tId: " + contact.getId() + "\tPhone: " + contact.getPhone() + "\tRole: " +
contact.getRole() + "\tEmail: " +contact.getEmail());
}
}catch(Exception e){
System.out.println("Exception getting PrimaryPointOfContactBySampleId for sampleId="+ sampleId + ": " + e);
}
System.out.println("Finished printing getPrimaryPointOfContactBySampleId results for sampleId: " + sampleId);
}
public void testGetOtherPointOfContactsBySampleId(String sampleId) {
//String sampleId = "3735553";
System.out.println("Testing getOtherPointOfContactsBySampleId : " + sampleId);
try{
PointOfContact[] contacts = gridClient.getOtherPointOfContactsBySampleId(sampleId);
if(contacts != null){
for(PointOfContact contact: contacts){
if(contact != null){
System.out.println("primary contact name: " + contact.getFirstName() + "\t" +contact.getLastName() +
"\tId: " + contact.getId() + "\tPhone: " + contact.getPhone() + "\tRole: " +
contact.getRole() + "\tEmail: " +contact.getEmail());
}
}
}
}catch(Exception e){
System.out.println("Exception getting OtherPointOfContactsBySampleId for sampleId="+ sampleId + ": " + e);
}
System.out.println("Finished printing getPrimaryPointOfContactBySampleId results for sampleId: " + sampleId);
}
public void testGetExperimentConfigsByCharacterizationId(String charId) {
System.out.println("Testing testGetExperimentConfigsByCharacterizationId : " + charId);
try{
ExperimentConfig[] experimentConfigs = gridClient.getExperimentConfigsByCharacterizationId(charId);
for(ExperimentConfig exp: experimentConfigs){
if(exp != null){
System.out.println("ExperimentConfig Id: " + exp.getId() + "\tDesc: " +exp.getDescription() );
}
}
}catch(Exception e){
System.out.println("Exception getting ExperimentConfigsByCharacterizationId for charid="+ charId + ": " + e);
}
System.out.println("Finished printing testGetExperimentConfigsByCharacterizationId results for charId: " + charId);
}
public void testGetFindingsByCharacterizationId(String charId) {
//String charId = "3932251";
System.out.println("Testing testGetFindingsByCharacterizationId : " + charId);
try{
Finding[] findings = gridClient.getFindingsByCharacterizationId(charId);
if(findings != null){
for(Finding f: findings){
if(f != null){
System.out.println("Finding Id: " + f.getId() + "\tCreatedBy: " +f.getCreatedBy() );
}
}
}
}catch(Exception e){
System.out.println("Exception getting FindingsByCharacterizationId for charid="+ charId + ": " + e);
}
System.out.println("Finished printing testGetFindingsByCharacterizationId results for charId: " + charId);
}
public void testGetProtocolByCharacterizationId(String charId) {
//String charId = "21867791";
System.out.println("Testing testGetProtocolByCharacterizationId : " + charId);
try{
Protocol p = gridClient.getProtocolByCharacterizationId(charId);
if(p != null){
System.out.println("Protocol Id: " + p.getId() + "\tCreatedBy: " +p.getCreatedBy() + "\tName: "
+ p.getName() + "\tType: " + p.getType());
}
}catch(Exception e){
System.out.println("Exception getting ProtocolByCharacterizationId for charid="+ charId + ": " + e);
}
System.out.println("Finished printing testGetProtocolByCharacterizationId results for charId: " + charId);
}
public void testGetPublicationBySampleId(String sampleId) {
//String sampleId = "20917507";
System.out.println("Testing getPublicationBySampleId : " + sampleId);
try{
Publication[] pubs = gridClient.getPublicationsBySampleId(sampleId);
if(pubs != null){
for(Publication p: pubs){
if(p != null){
System.out.print("Publication Id: " + p.getId() + "\tDesc: " + p.getDescription() +
"\tName: " + p.getName() + "\tJournalName: " + p.getJournalName() + "\tTitle: " +p.getTitle());
}
}
}
}catch(Exception e){
System.out.println("Exception when testing getPublicationBySampleId for sampleId=" + sampleId);
}
System.out.println("Finished printing getPrimaryPointOfContactBySampleId results for sampleId: " + sampleId);
}
public void testGetSampleIds() {
try{
String[] sampleIds = gridClient.getSampleIds("", "", null, null, null, null, null);
System.out.println("Testing getSampleIds operation.... \n" +
"For every sample, test get Publication, keywords, primary contact and other contact.");
for(String id: sampleIds){
testSample(id);
testGetPublicationBySampleId(id);
testGetKeywordsBySampleId(id);
testGetOtherPointOfContactsBySampleId(id);
testGetPrimaryPointOfContactBySampleId(id);
}
}catch(Exception e){
System.out.println("Exception getting SampleIds: " + e);
}
System.out.println("\nFinished testing samples.... \n");
}
public void testGetPublicationIdsBy() {
try{
String[] pubIds = gridClient.getPublicationIdsBy("", "", "", null, null, null, null, null, null, null, null);
System.out.println("Testing getPublicationIdsBy operation.... \n " +
"For every publication, test Publication");
if(pubIds != null){
for(String id: pubIds){
if(id != null){
testPublication(id);
}
}
}
}catch(Exception e){
System.out.println("Exception getting PublicationIds: " + e);
}
System.out.println("\nFinished testing publications.....\n");
}
public void testGetFileByProtocolId(String protocolId){
//String protocolId = "24390915";
System.out.println("Testing getFileByProtocolId: " + protocolId);
try{
File file = gridClient.getFileByProtocolId(protocolId);
if(file != null){
System.out.println("File desc: " + file.getDescription() + "\tName: "+ file.getName() + "\tUri: " +file.getUri());
}
}catch(Exception e){
System.out.println("Exception getting ExperimentConfigsByCharacterizationId for protocolid="+ protocolId + ": " + e);
}
}
public void testGetFilesByCompositionInfoId(String id, String className) {
//String id = "21376285";//"21376285";
//String className="NanoMaterialEntity";
System.out.println("Test getFilesByCompositionInfoId: id=" + id + ", className=" + className);
try{
File[] files = gridClient.getFilesByCompositionInfoId(id,className);
if(files != null){
for(File file: files){
System.out.println("File desc: " + file.getDescription() + "\tName: "+ file.getName() + "\tUri: " +file.getUri());
}
}
}catch(Exception e){
System.out.println("Exception getting FilesByCompositionInfoId for id="+ id + ", className=" +className + ": " + e);
}
}
public static void main(String[] args) {
System.out.println("Running the Grid Service Client");
try {
if (!(args.length < 2)) {
if (args[0].equals("-url")) {
CaNanoLabServiceClient client = new CaNanoLabServiceClient(
args[1]);
GridClientTest test=new GridClientTest(client);
//test.testGetPrimaryPointOfContactBySampleId("10354688");
//these methods has loops to test other methods
test.testGetSampleIds();
test.testGetPublicationIdsBy();
//test.testGetAllCharacterizationByCQLQuery();
//these methods user can plug in any parameter
test.testGetFindingsByCharacterizationId("3932251");
test.testGetProtocolByCharacterizationId("3932251");
test.testGetExperimentConfigsByCharacterizationId("3932251");
test.testCharacterization("3932251");
test.testGetFileByProtocolId("24390915");
test.testGetFilesByCompositionInfoId("21376285","NanomaterialEntity");
test.testNanomaterialEntity("6160399");
test.testFunction("10944705");
test.testComposingElement("6062106");
test.testFunctionalizingEntity("6225945");
test.testChemicalAssociation("8847369");
test.testSampleComposition("6160390");
test.testActivationMethod("3833872");
test.testProtocol("24390915");
} else {
System.exit(1);
}
} else {
System.exit(1);
}
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
} | removed unused codes
SVN-Revision: 19344
| software/cananolab-grid/test/src/GridClientTest.java | removed unused codes |
|
Java | bsd-3-clause | 81cd937a5a8cebc9bfeb053e6e7d0b8771a35975 | 0 | cerebro/ggp-base,cerebro/ggp-base | package apps.kiosk.games;
import java.awt.Color;
import java.awt.Graphics;
import apps.kiosk.GridGameCanvas;
public class TTTxNineCanvas extends GridGameCanvas {
private static final long serialVersionUID = 1L;
public String getGameName() { return "Nine Board Tic Tac Toe"; }
protected String getGameKIF() { return "tictactoex9"; }
protected int getGridHeight() { return 9; }
protected int getGridWidth() { return 9; }
private int xSelectedBoard = 0;
private int ySelectedBoard = 0;
private int xSelectedSpot = 0;
private int ySelectedSpot = 0;
protected void handleClickOnCell(int xCell, int yCell, int xWithin, int yWithin) {
int xBoard = 1 + (xCell / 3);
int yBoard = 1 + (yCell / 3);
int xSpot = 1 + (xCell % 3);
int ySpot = 1 + (yCell % 3);
String theMove = "( mark " + xBoard + " " + yBoard + " " + xSpot + " " + ySpot + " )";
if(gameStateHasLegalMove(theMove)) {
xSelectedSpot = xSpot;
ySelectedSpot = ySpot;
xSelectedBoard = xBoard;
ySelectedBoard = yBoard;
submitWorkingMove(stringToMove(theMove));
}
}
protected void renderCell(int xCell, int yCell, Graphics g) {
int width = g.getClipBounds().width;
int height = g.getClipBounds().height;
int xBoard = 1 + (xCell / 3);
int yBoard = 1 + (yCell / 3);
int xSpot = 1 + (xCell % 3);
int ySpot = 1 + (yCell % 3);
g.setColor(Color.BLACK);
g.drawRect(7, 7, width-14, height-14);
if(gameStateHasFact("( boardtoplay " + xBoard + " " + yBoard + " )") ||
gameStateHasFact("( boardtoplay any any )")) {
g.setColor(Color.BLUE);
}
if(xSpot == 1) g.fillRect(0, 0, 5, height);
if(xSpot == 3) g.fillRect(width-5, 0, 5, height);
if(ySpot == 1) g.fillRect(0, 0, width, 5);
if(ySpot == 3) g.fillRect(0, height-5, width, 5);
if(gameStateHasFact("( cell " + xBoard + " " + yBoard + " " + xSpot + " " + ySpot + " x )")) {
g.setColor(Color.GRAY);
g.fillRect(8, 8, width-15, height-15);
g.setColor(Color.BLACK);
fillWithString(g, "X", 1.2);
} else if(gameStateHasFact("( cell " + xBoard + " " + yBoard + " " + xSpot + " " + ySpot + " o )")) {
g.setColor(Color.GRAY);
g.fillRect(8, 8, width-15, height-15);
g.setColor(Color.WHITE);
fillWithString(g, "O", 1.2);
} else {
;
}
String theMove = "( mark " + xBoard + " " + yBoard + " " + xSpot + " " + ySpot + " )";
if(gameStateHasLegalMove(theMove)) {
g.setColor(Color.GRAY);
for(int i = 8; i < 10; i++)
g.drawRect(i, i, width-2*i, height-2*i);
}
if(xSelectedSpot == xSpot && ySelectedSpot == ySpot &&
xSelectedBoard == xBoard && ySelectedBoard == yBoard) {
g.setColor(Color.GREEN);
g.fillRect(10, 10, width-19, height-19);
}
}
public void clearMoveSelection() {
submitWorkingMove(null);
xSelectedSpot = 0;
ySelectedSpot = 0;
repaint();
}
} | ggp-base/src/apps/kiosk/games/TTTxNineCanvas.java | package apps.kiosk.games;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Image;
import apps.kiosk.GridGameCanvas;
public class TTTxNineCanvas extends GridGameCanvas {
private static final long serialVersionUID = 1L;
private Image xImage = getImage("TTTx.png");
private Image oImage = getImage("TTTo.png");
public String getGameName() { return "Nine Board Tic Tac Toe"; }
protected String getGameKIF() { return "tictactoex9"; }
protected int getGridHeight() { return 9; }
protected int getGridWidth() { return 9; }
private int xSelectedBoard = 0;
private int ySelectedBoard = 0;
private int xSelectedSpot = 0;
private int ySelectedSpot = 0;
protected void handleClickOnCell(int xCell, int yCell, int xWithin, int yWithin) {
int xBoard = 1 + (xCell / 3);
int yBoard = 1 + (yCell / 3);
int xSpot = 1 + (xCell % 3);
int ySpot = 1 + (yCell % 3);
String theMove = "( mark " + xBoard + " " + yBoard + " " + xSpot + " " + ySpot + " )";
if(gameStateHasLegalMove(theMove)) {
xSelectedSpot = xSpot;
ySelectedSpot = ySpot;
xSelectedBoard = xBoard;
ySelectedBoard = yBoard;
submitWorkingMove(stringToMove(theMove));
}
}
protected void renderCell(int xCell, int yCell, Graphics g) {
int width = g.getClipBounds().width;
int height = g.getClipBounds().height;
int xBoard = 1 + (xCell / 3);
int yBoard = 1 + (yCell / 3);
int xSpot = 1 + (xCell % 3);
int ySpot = 1 + (yCell % 3);
g.setColor(Color.BLACK);
g.drawRect(7, 7, width-14, height-14);
if(gameStateHasFact("( boardtoplay " + xBoard + " " + yBoard + " )") ||
gameStateHasFact("( boardtoplay any any )")) {
g.setColor(Color.BLUE);
}
if(xSpot == 1) g.fillRect(0, 0, 5, height);
if(xSpot == 3) g.fillRect(width-5, 0, 5, height);
if(ySpot == 1) g.fillRect(0, 0, width, 5);
if(ySpot == 3) g.fillRect(0, height-5, width, 5);
if(gameStateHasFact("( cell " + xBoard + " " + yBoard + " " + xSpot + " " + ySpot + " x )")) {
g.drawImage(xImage, 5, 5, width-10, height-10, null);
} else if(gameStateHasFact("( cell " + xBoard + " " + yBoard + " " + xSpot + " " + ySpot + " o )")) {
g.drawImage(oImage, 5, 5, width-10, height-10, null);
} else {
;
}
String theMove = "( mark " + xBoard + " " + yBoard + " " + xSpot + " " + ySpot + " )";
if(gameStateHasLegalMove(theMove)) {
g.setColor(Color.GRAY);
for(int i = 8; i < 10; i++)
g.drawRect(i, i, width-2*i, height-2*i);
}
if(xSelectedSpot == xSpot && ySelectedSpot == ySpot &&
xSelectedBoard == xBoard && ySelectedBoard == yBoard) {
g.setColor(Color.GREEN);
g.fillRect(10, 10, width-19, height-19);
}
}
public void clearMoveSelection() {
submitWorkingMove(null);
xSelectedSpot = 0;
ySelectedSpot = 0;
repaint();
}
} | Used the native fonts for 9xTTT, rather than pre-captured images.
git-svn-id: 4739e81c2fe647bfb539b919360e2c658e6121ea@29 716a755e-b13f-cedc-210d-596dafc6fb9b
| ggp-base/src/apps/kiosk/games/TTTxNineCanvas.java | Used the native fonts for 9xTTT, rather than pre-captured images. |
|
Java | apache-2.0 | e8e72f4434fb2416c2065171c44ed411eb8eace3 | 0 | golfstream83/JavaCourse | Modul1-Basic-syntax/Lesson2-Data-type/Calculator.java | package calculator;
public class Calculator {
public double result;
/*
сложение двух чисел
*/
public void add (double first, double second) {
this.result = first + second;
}
/*
вычитание из числа first числа second
*/
public void substruct (double first, double second) {
this.result = first - second;
}
/*
деление числа first на число second
*/
public void div (double first, double second) {
this.result = first / second;
}
/*
умножение числа first на число second
*/
public void multiple (double first, double second) {
this.result = first * second;
}
public void showResult() {
System.out.printf("%s%f%n" ,"Result : ", this.result);
}
} | Delete Calculator.java | Modul1-Basic-syntax/Lesson2-Data-type/Calculator.java | Delete Calculator.java |
||
Java | mit | 0e8a4a11b63626c805db915b6b0c5b08a08472ba | 0 | BrayanRosas/RoomManagerAutomation,PenielDVP/RoomManagerAutomation | package ui;
import ui.pages.HeaderMenuPage;
import ui.pages.LoginPage;
import ui.pages.SidebarMenuPage;
/**
* Created with IntelliJ IDEA.
* User: jhasmanyquiroz
* Date: 12/7/15
* Time: 8:26 PM
* To change this template use File | Settings | File Templates.
*/
public class BaseMainPageObject {
private HeaderMenuPage headerMenu;
private SidebarMenuPage sidebarMenu;
public BaseMainPageObject() {
headerMenu = new HeaderMenuPage();
sidebarMenu = new SidebarMenuPage();
}
public LoginPage clickSignOutSuccessfully(){
System.out.println("Entered to click Sign out");
return headerMenu.clickSignOutSuccessfully();
}
} | src/main/java/ui/BaseMainPageObject.java | package ui;
import org.openqa.selenium.support.ui.ExpectedConditions;
import ui.pages.HeaderMenuPage;
import ui.pages.LoginPage;
import ui.pages.SidebarMenuPage;
/**
* Created with IntelliJ IDEA.
* User: jhasmanyquiroz
* Date: 12/7/15
* Time: 8:26 PM
* To change this template use File | Settings | File Templates.
*/
public class BaseMainPageObject {
private HeaderMenuPage headerMenu;
private SidebarMenuPage sidebarMenu;
public BaseMainPageObject() {
headerMenu = new HeaderMenuPage();
sidebarMenu = new SidebarMenuPage();
}
public LoginPage clickSignOutSuccessfully(){
System.out.println("Entered to click Sign out");
return headerMenu.clickSignOutSuccessfully();
}
} | update BaseMainPageObject
| src/main/java/ui/BaseMainPageObject.java | update BaseMainPageObject |
|
Java | mit | 1412e69d5d61d645e0cd9576efacbee2ba02f56c | 0 | codingchili/chili-core | package com.codingchili.core.benchmarking;
import static com.codingchili.core.configuration.CoreStrings.EXT_JSON;
import static com.codingchili.core.configuration.CoreStrings.getFileFriendlyDate;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import com.codingchili.core.context.CoreContext;
import com.codingchili.core.files.JsonFileStore;
import com.codingchili.core.protocol.Serializer;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
/**
* @author Robin Duda
* <p>
* Micro-benchmarks runner.
* <p>
* Creates and runs a group of benchmarks. Benchmarks are run for one implementation at a time
* and are executed in the same order as they are added to the group. The order for each
* benchmark test is also preserved. No more than one benchmark is executed concurrently.
* <p>
*/
public class BenchmarkExecutor {
private BenchmarkListener listener = new AbstractBenchmarkListener();
private AtomicBoolean warmup = new AtomicBoolean(true);
private CoreContext context;
/**
* Creates a new benchmarkexecutor that executes on the given context.
*
* @param context the context to execute on.
*/
public BenchmarkExecutor(CoreContext context) {
this.context = context;
}
/**
* @param future completed with the results of the benchmark when all benchmarks have passed.
* @param group a group of implementations that contains a set of benchmarks to be performed
*/
public void start(Future<List<BenchmarkGroup>> future, BenchmarkGroup group) {
List<BenchmarkGroup> list = new ArrayList<>();
list.add(group);
start(future, list);
}
/**
* @param future completed with the results of the benchmark when all benchmarks have passed.
* @param groups a list of groups of implementations that contains a set of benchmarks to be performed
*/
public void start(Future<List<BenchmarkGroup>> future, List<BenchmarkGroup> groups) {
Future<BenchmarkGroup> allGroups = Future.succeededFuture();
for (BenchmarkGroup group : groups) {
allGroups = allGroups.compose(v -> {
Future<BenchmarkGroup> benchmark = Future.future();
listener.onGroupStarted(group);
executeImplementations(benchmark, group);
return benchmark;
});
}
allGroups.compose(done -> {
future.complete(groups);
return Future.succeededFuture();
});
}
private void executeImplementations(Future<BenchmarkGroup> future, BenchmarkGroup group) {
Future<Void> allImplementations = Future.succeededFuture();
for (BenchmarkImplementation implementation : group.getImplementations()) {
allImplementations = allImplementations.compose(v -> {
Future<Void> execution = Future.future();
// on initialization: perform a warmup run that executes all benchmarks once
// and then call #reset on the implementation, to prepare for a recorded test run.
implementation.initialize(context,
initialized -> warmup(group, implementation,
warmed -> benchmark(group, implementation,
benched -> implementation.shutdown(execution))));
return execution;
});
}
allImplementations.compose(done -> {
listener.onGroupCompleted(group);
future.complete(group);
return Future.succeededFuture();
});
}
/**
* Runs through the benchmark once without recording results as warmup.
* Calls #reset on the benchmark implementation to prepare for a benchmark run.
*
* @param implementation the implementation to warmup.
* @param handler the handler to call when completed.
*/
private void warmup(BenchmarkGroup group, BenchmarkImplementation implementation, Handler<AsyncResult<Void>>
handler) {
Future<Void> future = Future.future();
warmup.set(true);
listener.onImplementationWarmup(implementation);
future.setHandler(done -> {
warmup.set(false);
listener.onImplementationWarmupComplete(implementation);
implementation.reset(reset -> handler.handle(Future.succeededFuture()));
});
benchmark(group, implementation, future);
}
/**
* Schedule all benchmarks for the given implementation.
*
* @param implementation the implementation to run benchmarks for.
* @param future to complete when all benchmarks has completed.
*/
private void benchmark(BenchmarkGroup group, BenchmarkImplementation implementation,
Handler<AsyncResult<Void>> future) {
List<Benchmark> benchmarks = implementation.getBenchmarks();
if (!warmup.get()) {
listener.onImplementationTestBegin(implementation);
}
Future<Void> allTests = Future.succeededFuture();
for (Benchmark benchmark : benchmarks) {
allTests = allTests.compose(v -> {
Future<Void> next = Future.future();
implementation.next(next);
return next.compose(n -> doBench(group, benchmark));
});
}
allTests.compose(result -> {
if (!warmup.get()) {
listener.onImplementationCompleted(implementation);
}
future.handle(Future.succeededFuture());
return Future.succeededFuture();
});
}
/**
* Performs the actual benchmarking by measuring the time taken to execute the given
* benchmarks operation.
*
* @param benchmark the benchmark to execute
* @return a future that is completed when the benchmark is completed.
*/
private Future<Void> doBench(BenchmarkGroup group, Benchmark benchmark) {
Future<Void> future = Future.future();
AtomicInteger completed = new AtomicInteger(0);
benchmark.start();
for (int i = 0; i < group.getIterations(); i++) {
Future<Void> iteration = Future.<Void>future().setHandler(done -> {
if (completed.incrementAndGet() >= group.getIterations()) {
if (future.tryComplete()) {
benchmark.finish();
if (!warmup.get()) {
listener.onBenchmarkCompleted(benchmark);
}
}
} else if (completed.get() % group.getProgressInterval() == 0) {
listener.onProgressUpdate(benchmark, completed.get());
}
});
benchmark.operation().perform(iteration);
}
return future;
}
/**
* Sets the executor event listener.
*
* @param listener the listener to execute on events.
*/
public BenchmarkExecutor setListener(BenchmarkListener listener) {
this.listener = listener;
return this;
}
}
| core/main/java/com/codingchili/core/benchmarking/BenchmarkExecutor.java | package com.codingchili.core.benchmarking;
import static com.codingchili.core.configuration.CoreStrings.EXT_JSON;
import static com.codingchili.core.configuration.CoreStrings.getFileFriendlyDate;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import com.codingchili.core.context.CoreContext;
import com.codingchili.core.files.JsonFileStore;
import com.codingchili.core.protocol.Serializer;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
/**
* @author Robin Duda
* <p>
* Micro-benchmarks runner.
* <p>
* Creates and runs a group of benchmarks. Benchmarks are run for one implementation at a time
* and are executed in the same order as they are added to the group. The order for each
* benchmark test is also preserved. No more than one benchmark is executed concurrently.
* <p>
*/
public class BenchmarkExecutor {
private BenchmarkListener listener = new AbstractBenchmarkListener();
private AtomicBoolean warmup = new AtomicBoolean(true);
private CoreContext context;
/**
* Creates a new benchmarkexecutor that executes on the given context.
*
* @param context the context to execute on.
*/
public BenchmarkExecutor(CoreContext context) {
this.context = context;
}
/**
* @param future completed with the results of the benchmark when all benchmarks have passed.
* @param group a group of implementations that contains a set of benchmarks to be performed
*/
public void start(Future<List<BenchmarkGroup>> future, BenchmarkGroup group) {
List<BenchmarkGroup> list = new ArrayList<>();
list.add(group);
start(future, list);
}
/**
* @param future completed with the results of the benchmark when all benchmarks have passed.
* @param groups a list of groups of implementations that contains a set of benchmarks to be performed
*/
public void start(Future<List<BenchmarkGroup>> future, List<BenchmarkGroup> groups) {
Future<BenchmarkGroup> allGroups = Future.succeededFuture();
for (BenchmarkGroup group : groups) {
allGroups = allGroups.compose(v -> {
Future<BenchmarkGroup> benchmark = Future.future();
listener.onGroupStarted(group);
executeImplementations(benchmark, group);
return benchmark;
});
}
allGroups.compose(done -> {
saveResultsToFile(groups);
future.complete(groups);
return Future.succeededFuture();
});
}
private void saveResultsToFile(List<BenchmarkGroup> groups) {
JsonFileStore.writeObject(Serializer.json(groups),
getFileFriendlyDate() + EXT_JSON);
}
private void executeImplementations(Future<BenchmarkGroup> future, BenchmarkGroup group) {
Future<Void> allImplementations = Future.succeededFuture();
for (BenchmarkImplementation implementation : group.getImplementations()) {
allImplementations = allImplementations.compose(v -> {
Future<Void> execution = Future.future();
// on initialization: perform a warmup run that executes all benchmarks once
// and then call #reset on the implementation, to prepare for a recorded test run.
implementation.initialize(context,
initialized -> warmup(group, implementation,
warmed -> benchmark(group, implementation,
benched -> implementation.shutdown(execution))));
return execution;
});
}
allImplementations.compose(done -> {
listener.onGroupCompleted(group);
future.complete(group);
return Future.succeededFuture();
});
}
/**
* Runs through the benchmark once without recording results as warmup.
* Calls #reset on the benchmark implementation to prepare for a benchmark run.
*
* @param implementation the implementation to warmup.
* @param handler the handler to call when completed.
*/
private void warmup(BenchmarkGroup group, BenchmarkImplementation implementation, Handler<AsyncResult<Void>>
handler) {
Future<Void> future = Future.future();
warmup.set(true);
listener.onImplementationWarmup(implementation);
future.setHandler(done -> {
warmup.set(false);
listener.onImplementationWarmupComplete(implementation);
implementation.reset(reset -> handler.handle(Future.succeededFuture()));
});
benchmark(group, implementation, future);
}
/**
* Schedule all benchmarks for the given implementation.
*
* @param implementation the implementation to run benchmarks for.
* @param future to complete when all benchmarks has completed.
*/
private void benchmark(BenchmarkGroup group, BenchmarkImplementation implementation,
Handler<AsyncResult<Void>> future) {
List<Benchmark> benchmarks = implementation.getBenchmarks();
if (!warmup.get()) {
listener.onImplementationTestBegin(implementation);
}
Future<Void> allTests = Future.succeededFuture();
for (Benchmark benchmark : benchmarks) {
allTests = allTests.compose(v -> {
Future<Void> next = Future.future();
implementation.next(next);
return next.compose(n -> doBench(group, benchmark));
});
}
allTests.compose(result -> {
if (!warmup.get()) {
listener.onImplementationCompleted(implementation);
}
future.handle(Future.succeededFuture());
return Future.succeededFuture();
});
}
/**
* Performs the actual benchmarking by measuring the time taken to execute the given
* benchmarks operation.
*
* @param benchmark the benchmark to execute
* @return a future that is completed when the benchmark is completed.
*/
private Future<Void> doBench(BenchmarkGroup group, Benchmark benchmark) {
Future<Void> future = Future.future();
AtomicInteger completed = new AtomicInteger(0);
benchmark.start();
for (int i = 0; i < group.getIterations(); i++) {
Future<Void> iteration = Future.<Void>future().setHandler(done -> {
if (completed.incrementAndGet() >= group.getIterations()) {
if (future.tryComplete()) {
benchmark.finish();
if (!warmup.get()) {
listener.onBenchmarkCompleted(benchmark);
}
}
} else if (completed.get() % group.getProgressInterval() == 0) {
listener.onProgressUpdate(benchmark, completed.get());
}
});
benchmark.operation().perform(iteration);
}
return future;
}
/**
* Sets the executor event listener.
*
* @param listener the listener to execute on events.
*/
public BenchmarkExecutor setListener(BenchmarkListener listener) {
this.listener = listener;
return this;
}
}
| Removed save results to file for executor should be done in reporter.
| core/main/java/com/codingchili/core/benchmarking/BenchmarkExecutor.java | Removed save results to file for executor should be done in reporter. |
|
Java | mit | 7fe97520f7c05b3f8f37f43bc21b22377d444636 | 0 | broadinstitute/picard,broadinstitute/picard,annkupi/picard,alecw/picard,annkupi/picard,nh13/picard,broadinstitute/picard,nh13/picard,broadinstitute/picard,nh13/picard,annkupi/picard,broadinstitute/picard,alecw/picard,alecw/picard,alecw/picard,nh13/picard,annkupi/picard | /*
* The MIT License
*
* Copyright (c) 2009 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.sf.samtools;
import net.sf.samtools.util.Iso8601Date;
import net.sf.samtools.util.StringUtil;
import net.sf.samtools.util.DateParser;
import java.util.Map;
import java.util.Date;
import java.text.DateFormat;
import java.text.ParseException;
/**
* Converter between SAM text representation of a tag, and in-memory Object representation.
* Note that this class is not thread-safe, in that some local variables have been made into instance
* variables in order to reduce object creation, but it should not ever be the case that the same
* instance is used in multiple threads.
*/
class TextTagCodec {
private static final int NUM_TAG_FIELDS = 3;
/**
* This is really a local variable of decode(), but allocated here to reduce allocations.
*/
private final String[] fields = new String[NUM_TAG_FIELDS];
/**
* This is really a local variable of decodeTypeAndValue(), but allocated here to reduce allocations.
*/
private final String[] typeAndValueFields = new String[NUM_TAG_FIELDS - 1];
/**
* Convert in-memory representation of tag to SAM text representation.
* @param tagName Two-character tag name.
* @param value Tag value as approriate Object subclass.
* @return SAM text String representation, i.e. name:type:value
*/
String encode(final String tagName, Object value) {
final StringBuilder sb = new StringBuilder(tagName);
sb.append(':');
char tagType = BinaryTagCodec.getTagValueType(value);
switch (tagType) {
case 'c':
case 'C':
case 's':
case 'S':
case 'I':
tagType = 'i';
}
if (tagType == 'H') {
value = StringUtil.bytesToHexString((byte[])value);
} else if (tagType == 'i') {
final long longVal = ((Number) value).longValue();
if (longVal > Integer.MAX_VALUE || longVal < Integer.MIN_VALUE) {
throw new SAMFormatException("Value for tag " + tagName + " cannot be stored in an Integer: " + longVal);
}
}
sb.append(tagType);
sb.append(':');
sb.append(value.toString());
return sb.toString();
}
/**
* Encode a standard tag, which should not have a type field.
* @param tagName 2-character String.
* @param value Not necessarily a String. Some of these are integers but the type is implied by
* the tagName. Converted to String with toString().
* @return Colon-separated text representation suitable for a SAM header, i.e. name:value.
*/
String encodeUntypedTag(final String tagName, final Object value) {
final StringBuilder sb = new StringBuilder(tagName);
sb.append(':');
sb.append(value.toString());
return sb.toString();
}
/**
* Convert typed tag in SAM text format (name:type:value) into tag name and Object value representation.
* @param tag SAM text format name:type:value tag.
* @return Tag name as 2-character String, and tag value in appropriate class based on tag type.
*/
Map.Entry<String, Object> decode(final String tag) {
final int numFields = StringUtil.split(tag, fields, ':');
if (numFields != TextTagCodec.NUM_TAG_FIELDS) {
throw new SAMFormatException("Not enough fields in tag '" + tag + "'");
}
final String key = fields[0];
final String type = fields[1];
final String stringVal = fields[2];
final Object val = convertStringToObject(type, stringVal);
return new Map.Entry<String, Object>() {
public String getKey() {
return key;
}
public Object getValue() {
return val;
}
public Object setValue(final Object o) {
throw new UnsupportedOperationException();
}
};
}
/**
* Similar to decode() method above, but the tag name has already been stripped off.
* @param typeAndValue type:string-value, or, for backward-compatibility, just string-value.
* @return Value converted into the appropriate type.
*/
Object decodeTypeAndValue(final String typeAndValue) {
final int numFields = StringUtil.split(typeAndValue, typeAndValueFields, ':');
if (numFields == 1) {
// For backward compatibility, if no colon, treat as String type
return typeAndValue;
}
return convertStringToObject(typeAndValueFields[0], typeAndValueFields[1]);
}
private Object convertStringToObject(final String type, final String stringVal) {
final Object val;
if (type.equals("Z")) {
val = stringVal;
} else if (type.equals("A")) {
if (stringVal.length() != 1) {
throw new SAMFormatException("Tag of type A should have a single-character value");
}
val = stringVal.charAt(0);
} else if (type.equals("i")) {
try {
val = new Integer(stringVal);
} catch (NumberFormatException e) {
throw new SAMFormatException("Tag of type i should have signed decimal value");
}
} else if (type.equals("f")) {
try {
val = new Float(stringVal);
} catch (NumberFormatException e) {
throw new SAMFormatException("Tag of type f should have single-precision floating point value");
}
} else if (type.equals("H")) {
try {
val = StringUtil.hexStringToBytes(stringVal);
} catch (NumberFormatException e) {
throw new SAMFormatException("Tag of type H should have valid hex string with even number of digits");
}
} else {
throw new SAMFormatException("Unrecognized tag type: " + type);
}
return val;
}
Iso8601Date decodeDate(final String dateStr) {
try {
return new Iso8601Date(dateStr);
} catch (DateParser.InvalidDateException ex) {
try {
return new Iso8601Date(DateFormat.getDateTimeInstance().parse(dateStr));
} catch (ParseException e) {
try {
return new Iso8601Date(new Date(dateStr));
} catch (Exception e1) {
throw new DateParser.InvalidDateException("Could not parse as date: " + dateStr, e);
}
}
}
}
}
| src/java/net/sf/samtools/TextTagCodec.java | /*
* The MIT License
*
* Copyright (c) 2009 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.sf.samtools;
import net.sf.samtools.util.Iso8601Date;
import net.sf.samtools.util.StringUtil;
import net.sf.samtools.util.DateParser;
import java.util.Map;
import java.text.DateFormat;
import java.text.ParseException;
/**
* Converter between SAM text representation of a tag, and in-memory Object representation.
* Note that this class is not thread-safe, in that some local variables have been made into instance
* variables in order to reduce object creation, but it should not ever be the case that the same
* instance is used in multiple threads.
*/
class TextTagCodec {
private static final int NUM_TAG_FIELDS = 3;
/**
* This is really a local variable of decode(), but allocated here to reduce allocations.
*/
private final String[] fields = new String[NUM_TAG_FIELDS];
/**
* This is really a local variable of decodeTypeAndValue(), but allocated here to reduce allocations.
*/
private final String[] typeAndValueFields = new String[NUM_TAG_FIELDS - 1];
/**
* Convert in-memory representation of tag to SAM text representation.
* @param tagName Two-character tag name.
* @param value Tag value as approriate Object subclass.
* @return SAM text String representation, i.e. name:type:value
*/
String encode(final String tagName, Object value) {
final StringBuilder sb = new StringBuilder(tagName);
sb.append(':');
char tagType = BinaryTagCodec.getTagValueType(value);
switch (tagType) {
case 'c':
case 'C':
case 's':
case 'S':
case 'I':
tagType = 'i';
}
if (tagType == 'H') {
value = StringUtil.bytesToHexString((byte[])value);
} else if (tagType == 'i') {
final long longVal = ((Number) value).longValue();
if (longVal > Integer.MAX_VALUE || longVal < Integer.MIN_VALUE) {
throw new SAMFormatException("Value for tag " + tagName + " cannot be stored in an Integer: " + longVal);
}
}
sb.append(tagType);
sb.append(':');
sb.append(value.toString());
return sb.toString();
}
/**
* Encode a standard tag, which should not have a type field.
* @param tagName 2-character String.
* @param value Not necessarily a String. Some of these are integers but the type is implied by
* the tagName. Converted to String with toString().
* @return Colon-separated text representation suitable for a SAM header, i.e. name:value.
*/
String encodeUntypedTag(final String tagName, final Object value) {
final StringBuilder sb = new StringBuilder(tagName);
sb.append(':');
sb.append(value.toString());
return sb.toString();
}
/**
* Convert typed tag in SAM text format (name:type:value) into tag name and Object value representation.
* @param tag SAM text format name:type:value tag.
* @return Tag name as 2-character String, and tag value in appropriate class based on tag type.
*/
Map.Entry<String, Object> decode(final String tag) {
final int numFields = StringUtil.split(tag, fields, ':');
if (numFields != TextTagCodec.NUM_TAG_FIELDS) {
throw new SAMFormatException("Not enough fields in tag '" + tag + "'");
}
final String key = fields[0];
final String type = fields[1];
final String stringVal = fields[2];
final Object val = convertStringToObject(type, stringVal);
return new Map.Entry<String, Object>() {
public String getKey() {
return key;
}
public Object getValue() {
return val;
}
public Object setValue(final Object o) {
throw new UnsupportedOperationException();
}
};
}
/**
* Similar to decode() method above, but the tag name has already been stripped off.
* @param typeAndValue type:string-value, or, for backward-compatibility, just string-value.
* @return Value converted into the appropriate type.
*/
Object decodeTypeAndValue(final String typeAndValue) {
final int numFields = StringUtil.split(typeAndValue, typeAndValueFields, ':');
if (numFields == 1) {
// For backward compatibility, if no colon, treat as String type
return typeAndValue;
}
return convertStringToObject(typeAndValueFields[0], typeAndValueFields[1]);
}
private Object convertStringToObject(final String type, final String stringVal) {
final Object val;
if (type.equals("Z")) {
val = stringVal;
} else if (type.equals("A")) {
if (stringVal.length() != 1) {
throw new SAMFormatException("Tag of type A should have a single-character value");
}
val = stringVal.charAt(0);
} else if (type.equals("i")) {
try {
val = new Integer(stringVal);
} catch (NumberFormatException e) {
throw new SAMFormatException("Tag of type i should have signed decimal value");
}
} else if (type.equals("f")) {
try {
val = new Float(stringVal);
} catch (NumberFormatException e) {
throw new SAMFormatException("Tag of type f should have single-precision floating point value");
}
} else if (type.equals("H")) {
try {
val = StringUtil.hexStringToBytes(stringVal);
} catch (NumberFormatException e) {
throw new SAMFormatException("Tag of type H should have valid hex string with even number of digits");
}
} else {
throw new SAMFormatException("Unrecognized tag type: " + type);
}
return val;
}
Iso8601Date decodeDate(final String dateStr) {
try {
return new Iso8601Date(dateStr);
} catch (DateParser.InvalidDateException ex) {
try {
return new Iso8601Date(DateFormat.getDateTimeInstance().parse(dateStr));
} catch (ParseException e) {
throw new DateParser.InvalidDateException("Could not parse as date: " + dateStr, e);
}
}
}
}
| Add an additional date format for converting to Iso8601
| src/java/net/sf/samtools/TextTagCodec.java | Add an additional date format for converting to Iso8601 |
|
Java | mit | 2a782b7fd558dd796041135bde76d78b3a3fc829 | 0 | pierresouchay/mssql-jdbc,Microsoft/mssql-jdbc | /*
* Microsoft JDBC Driver for SQL Server
*
* Copyright(c) Microsoft Corporation All rights reserved.
*
* This program is made available under the terms of the MIT License. See the LICENSE file in the project root for more information.
*/
package com.microsoft.sqlserver.jdbc;
import java.sql.ParameterMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* SQLServerParameterMetaData provides JDBC 3.0 meta data for prepared statement parameters.
*
* The API javadoc for JDBC API methods that this class implements are not repeated here. Please see Sun's JDBC API interfaces javadoc for those
* details.
*
* Prepared statements are executed with SET FMT ONLY to retrieve column meta data Callable statements : sp_sp_sproc_columns is called to retrieve
* names and meta data for the procedures params.
*/
public final class SQLServerParameterMetaData implements ParameterMetaData {
private final static int SQL_SERVER_2012_VERSION = 11;
private final SQLServerStatement stmtParent;
private SQLServerConnection con;
/* Used for callable statement meta data */
private Statement stmtCall;
private SQLServerResultSet rsProcedureMeta;
static final private java.util.logging.Logger logger = java.util.logging.Logger
.getLogger("com.microsoft.sqlserver.jdbc.internals.SQLServerParameterMetaData");
static private final AtomicInteger baseID = new AtomicInteger(0); // Unique id generator for each instance (used for logging).
final private String traceID = " SQLServerParameterMetaData:" + nextInstanceID();
boolean isTVP = false;
// Returns unique id for each instance.
private static int nextInstanceID() {
return baseID.incrementAndGet();
}
/**
* This is a helper function to provide an ID string suitable for tracing.
*
* @return traceID string
*/
final public String toString() {
return traceID;
}
/**
* Parse the columns in a column set.
*
* @param columnSet
* the list of columns
* @param columnStartToken
* the token that prfixes the column set
*/
/* L2 */ private String parseColumns(String columnSet,
String columnStartToken) {
StringTokenizer st = new StringTokenizer(columnSet, " =?<>!", true);
final int START = 0;
final int PARAMNAME = 1;
final int PARAMVALUE = 2;
int nState = 0;
String sLastField = null;
StringBuilder sb = new StringBuilder();
while (st.hasMoreTokens()) {
String sToken = st.nextToken();
if (sToken.equalsIgnoreCase(columnStartToken)) {
nState = PARAMNAME;
continue;
}
if (nState == START)
continue;
if ((sToken.charAt(0) == '=') || sToken.equalsIgnoreCase("is") || (sToken.charAt(0) == '<') || (sToken.charAt(0) == '>')
|| sToken.equalsIgnoreCase("like") || sToken.equalsIgnoreCase("not") || sToken.equalsIgnoreCase("in")
|| (sToken.charAt(0) == '!')) {
nState = PARAMVALUE;
continue;
}
if (sToken.charAt(0) == '?' && sLastField != null) {
if (sb.length() != 0) {
sb.append(", ");
}
sb.append(sLastField);
nState = PARAMNAME;
sLastField = null;
continue;
}
if (nState == PARAMNAME) {
// space get the next token.
if (sToken.equals(" "))
continue;
String paramN = escapeParse(st, sToken);
if (paramN.length() > 0) {
sLastField = paramN;
}
}
}
return sb.toString();
}
/**
* Parse the column set in an insert syntax.
*
* @param sql
* the sql syntax
* @param columnMarker
* the token that denotes the start of the column set
*/
/* L2 */ private String parseInsertColumns(String sql,
String columnMarker) {
StringTokenizer st = new StringTokenizer(sql, " (),", true);
int nState = 0;
String sLastField = null;
StringBuilder sb = new StringBuilder();
while (st.hasMoreTokens()) {
String sToken = st.nextToken();
if (sToken.equalsIgnoreCase(columnMarker)) {
nState = 1;
continue;
}
if (nState == 0)
continue;
if (sToken.charAt(0) == '=') {
nState = 2;
continue;
}
if ((sToken.charAt(0) == ',' || sToken.charAt(0) == ')' || sToken.charAt(0) == ' ') && sLastField != null) {
if (sb.length() != 0)
sb.append(", ");
sb.append(sLastField);
nState = 1;
sLastField = null;
}
if (sToken.charAt(0) == ')') {
nState = 0;
break;
}
if (nState == 1) {
if (sToken.trim().length() > 0) {
if (sToken.charAt(0) != ',')
sLastField = escapeParse(st, sToken);
}
}
}
return sb.toString();
}
/* Used for prepared statement meta data */
class QueryMeta {
String parameterClassName = null;
int parameterType = 0;
String parameterTypeName = null;
int precision = 0;
int scale = 0;
int isNullable = ParameterMetaData.parameterNullableUnknown;
boolean isSigned = false;
}
Map<Integer, QueryMeta> queryMetaMap = null;
/*
* Parse query metadata.
*/
private void parseQueryMeta(ResultSet rsQueryMeta) throws SQLServerException {
Pattern datatypePattern = Pattern.compile("(.*)\\((.*)(\\)|,(.*)\\))");
try {
while (rsQueryMeta.next()) {
QueryMeta qm = new QueryMeta();
SSType ssType = null;
int paramOrdinal = rsQueryMeta.getInt("parameter_ordinal");
String typename = rsQueryMeta.getString("suggested_system_type_name");
if (null == typename) {
typename = rsQueryMeta.getString("suggested_user_type_name");
SQLServerPreparedStatement pstmt = (SQLServerPreparedStatement) con
.prepareCall("select max_length, precision, scale, is_nullable from sys.assembly_types where name = ?");
pstmt.setNString(1, typename);
ResultSet assemblyRs = pstmt.executeQuery();
if (assemblyRs.next()) {
qm.parameterTypeName = typename;
qm.precision = assemblyRs.getInt("max_length");
qm.scale = assemblyRs.getInt("scale");
ssType = SSType.UDT;
}
}
else {
qm.precision = rsQueryMeta.getInt("suggested_precision");
qm.scale = rsQueryMeta.getInt("suggested_scale");
Matcher matcher = datatypePattern.matcher(typename);
if (matcher.matches()) {
// the datatype has some precision/scale defined explicitly.
ssType = SSType.of(matcher.group(1));
if (typename.equalsIgnoreCase("varchar(max)") || typename.equalsIgnoreCase("varbinary(max)")) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE;
}
else if (typename.equalsIgnoreCase("nvarchar(max)")) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE / 2;
}
else if (SSType.Category.CHARACTER == ssType.category || SSType.Category.BINARY == ssType.category
|| SSType.Category.NCHARACTER == ssType.category) {
try {
// For character/binary data types "suggested_precision" is 0. So get the precision from the type itself.
qm.precision = Integer.parseInt(matcher.group(2));
}
catch (NumberFormatException e) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_metaDataErrorForParameter"));
Object[] msgArgs = {new Integer(paramOrdinal)};
SQLServerException.makeFromDriverError(con, stmtParent, form.format(msgArgs) + " " + e.toString(), null, false);
}
}
}
else
ssType = SSType.of(typename);
// For float and real types suggested_precision returns the number of bits, not digits.
if (SSType.FLOAT == ssType) {
// https://msdn.microsoft.com/en-CA/library/ms173773.aspx
// real is float(24) and is 7 digits. Float is 15 digits.
qm.precision = 15;
}
else if (SSType.REAL == ssType) {
qm.precision = 7;
}
else if (SSType.TEXT == ssType) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE;
}
else if (SSType.NTEXT == ssType) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE / 2;
}
else if (SSType.IMAGE == ssType) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE;
}
else if (SSType.GUID == ssType) {
qm.precision = SQLServerDatabaseMetaData.uniqueidentifierSize;
}
else if (SSType.TIMESTAMP == ssType) {
qm.precision = 8;
}
else if (SSType.XML == ssType) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE / 2;
}
qm.parameterTypeName = ssType.toString();
}
// Check if ssType is null. Was caught by static analysis.
if (null == ssType) {
throw new SQLServerException(SQLServerException.getErrString("R_metaDataErrorForParameter"), null);
}
JDBCType jdbcType = ssType.getJDBCType();
qm.parameterClassName = jdbcType.className();
qm.parameterType = jdbcType.getIntValue();
// The parameter can be signed if it is a NUMERIC type (except bit or tinyint).
qm.isSigned = ((SSType.Category.NUMERIC == ssType.category) && (SSType.BIT != ssType) && (SSType.TINYINT != ssType));
queryMetaMap.put(paramOrdinal, qm);
}
}
catch (SQLException e) {
throw new SQLServerException(SQLServerException.getErrString("R_metaDataErrorForParameter"), e);
}
}
private void parseQueryMetaFor2008(ResultSet rsQueryMeta) throws SQLServerException {
ResultSetMetaData md;
try {
md = rsQueryMeta.getMetaData();
for (int i = 1; i <= md.getColumnCount(); i++) {
QueryMeta qm = new QueryMeta();
qm.parameterClassName = md.getColumnClassName(i);
qm.parameterType = md.getColumnType(i);
qm.parameterTypeName = md.getColumnTypeName(i);
qm.precision = md.getPrecision(i);
qm.scale = md.getScale(i);
qm.isNullable = md.isNullable(i);
qm.isSigned = md.isSigned(i);
queryMetaMap.put(i, qm);
}
}
catch (SQLException e) {
throw new SQLServerException(SQLServerException.getErrString("R_metaDataErrorForParameter"), e);
}
}
/**
* Escape parser, using the tokenizer tokenizes escaped strings properly e.g.[Table Name, ]
*
* @param st
* string tokenizer
* @param firstToken
* @returns the full token
*/
private String escapeParse(StringTokenizer st,
String firstToken) {
String nameFragment;
String fullName;
nameFragment = firstToken;
// skip spaces
while (nameFragment.equals(" ") && st.hasMoreTokens()) {
nameFragment = st.nextToken();
}
fullName = nameFragment;
if (nameFragment.charAt(0) == '[' && nameFragment.charAt(nameFragment.length() - 1) != ']') {
while (st.hasMoreTokens()) {
nameFragment = st.nextToken();
fullName = fullName.concat(nameFragment);
if (nameFragment.charAt(nameFragment.length() - 1) == ']') {
break;
}
}
}
fullName = fullName.trim();
return fullName;
}
private class MetaInfo {
String table;
String fields;
MetaInfo(String table,
String fields) {
this.table = table;
this.fields = fields;
}
}
/**
* Parse a SQL syntax.
*
* @param sql
* String
* @param sTableMarker
* the location of the table in the syntax
*/
private MetaInfo parseStatement(String sql,
String sTableMarker) {
StringTokenizer st = new StringTokenizer(sql, " ,\r\n", true);
/* Find the table */
String metaTable = null;
String metaFields = "";
while (st.hasMoreTokens()) {
String sToken = st.nextToken().trim();
if(sToken.contains("*/")){
sToken = removeCommentsInTheBeginning(sToken, 0, 0, "/*", "*/");
}
if (sToken.equalsIgnoreCase(sTableMarker)) {
if (st.hasMoreTokens()) {
metaTable = escapeParse(st, st.nextToken());
break;
}
}
}
if (null != metaTable) {
if (sTableMarker.equalsIgnoreCase("UPDATE"))
metaFields = parseColumns(sql, "SET"); // Get the set fields
else if (sTableMarker.equalsIgnoreCase("INTO")) // insert
metaFields = parseInsertColumns(sql, "("); // Get the value fields
else
metaFields = parseColumns(sql, "WHERE"); // Get the where fields
return new MetaInfo(metaTable, metaFields);
}
return null;
}
/**
* Parse a SQL syntax.
*
* @param sql
* the syntax
* @throws SQLServerException
*/
private MetaInfo parseStatement(String sql) throws SQLServerException {
StringTokenizer st = new StringTokenizer(sql, " ");
if (st.hasMoreTokens()) {
String sToken = st.nextToken().trim();
// filter out multiple line comments in the beginning of the query
if (sToken.contains("/*")) {
String sqlWithoutCommentsInBeginning = removeCommentsInTheBeginning(sql, 0, 0, "/*", "*/");
return parseStatement(sqlWithoutCommentsInBeginning);
}
// filter out single line comments in the beginning of the query
if (sToken.contains("--")) {
String sqlWithoutCommentsInBeginning = removeCommentsInTheBeginning(sql, 0, 0, "--", "\n");
return parseStatement(sqlWithoutCommentsInBeginning);
}
if (sToken.equalsIgnoreCase("INSERT"))
return parseStatement(sql, "INTO"); // INTO marks the table name
if (sToken.equalsIgnoreCase("UPDATE"))
return parseStatement(sql, "UPDATE");
if (sToken.equalsIgnoreCase("SELECT"))
return parseStatement(sql, "FROM");
if (sToken.equalsIgnoreCase("DELETE"))
return parseStatement(sql, "FROM");
}
return null;
}
private String removeCommentsInTheBeginning(String sql,
int startCommentMarkCount,
int endCommentMarkCount,
String startMark,
String endMark) {
int startCommentMarkIndex = sql.indexOf(startMark);
int endCommentMarkIndex = sql.indexOf(endMark);
if (-1 == startCommentMarkIndex) {
startCommentMarkIndex = Integer.MAX_VALUE;
}
if (-1 == endCommentMarkIndex) {
endCommentMarkIndex = Integer.MAX_VALUE;
}
// Base case. startCommentMarkCount is guaranteed to be bigger than 0 because the method is called when /* occurs
if (startCommentMarkCount == endCommentMarkCount) {
if (startCommentMarkCount != 0 && endCommentMarkCount != 0) {
return sql;
}
}
// filter out first start comment mark
if (startCommentMarkIndex < endCommentMarkIndex) {
String sqlWithoutCommentsInBeginning = sql.substring(startCommentMarkIndex + startMark.length());
return removeCommentsInTheBeginning(sqlWithoutCommentsInBeginning, ++startCommentMarkCount, endCommentMarkCount, startMark, endMark);
}
// filter out first end comment mark
else {
String sqlWithoutCommentsInBeginning = sql.substring(endCommentMarkIndex + endMark.length());
return removeCommentsInTheBeginning(sqlWithoutCommentsInBeginning, startCommentMarkCount, ++endCommentMarkCount, startMark, endMark);
}
}
String parseThreePartNames(String threeName) throws SQLServerException {
int noofitems = 0;
String procedureName = null;
String procedureOwner = null;
String procedureQualifier = null;
StringTokenizer st = new StringTokenizer(threeName, ".", true);
// parse left to right looking for three part name
// note the user can provide three part, two part or one part name
while (st.hasMoreTokens()) {
String sToken = st.nextToken();
String nextItem = escapeParse(st, sToken);
if (nextItem.equals(".") == false) {
switch (noofitems) {
case 2:
procedureQualifier = procedureOwner;
procedureOwner = procedureName;
procedureName = nextItem;
noofitems++;
break;
case 1:
procedureOwner = procedureName;
procedureName = nextItem;
noofitems++;
break;
case 0:
procedureName = nextItem;
noofitems++;
break;
default:
noofitems++;
break;
}
}
}
StringBuilder sb = new StringBuilder(100);
if (noofitems > 3 && 1 < noofitems)
SQLServerException.makeFromDriverError(con, stmtParent, SQLServerException.getErrString("R_noMetadata"), null, false);
switch (noofitems) {
case 3:
sb.append("@procedure_qualifier =");
sb.append(procedureQualifier);
sb.append(", ");
sb.append("@procedure_owner =");
sb.append(procedureOwner);
sb.append(", ");
sb.append("@procedure_name =");
sb.append(procedureName);
sb.append(", ");
break;
case 2:
sb.append("@procedure_owner =");
sb.append(procedureOwner);
sb.append(", ");
sb.append("@procedure_name =");
sb.append(procedureName);
sb.append(", ");
break;
case 1:
sb.append("@procedure_name =");
sb.append(procedureName);
sb.append(", ");
break;
default:
break;
}
return sb.toString();
}
private void checkClosed() throws SQLServerException {
stmtParent.checkClosed();
}
/**
* Create new parameter meta data.
*
* @param st
* the prepared statement
* @param sProcString
* the pricedure name
* @throws SQLServerException
*/
SQLServerParameterMetaData(SQLServerStatement st,
String sProcString) throws SQLServerException {
assert null != st;
stmtParent = st;
con = st.connection;
if (logger.isLoggable(java.util.logging.Level.FINE)) {
logger.fine(toString() + " created by (" + st.toString() + ")");
}
try {
// If the CallableStatement/PreparedStatement is a stored procedure call
// then we can extract metadata using sp_sproc_columns
if (null != st.procedureName) {
SQLServerStatement s = (SQLServerStatement) con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
String sProc = parseThreePartNames(st.procedureName);
if (con.isKatmaiOrLater())
rsProcedureMeta = s.executeQueryInternal("exec sp_sproc_columns_100 " + sProc + " @ODBCVer=3");
else
rsProcedureMeta = s.executeQueryInternal("exec sp_sproc_columns " + sProc + " @ODBCVer=3");
// Sixth is DATA_TYPE
rsProcedureMeta.getColumn(6).setFilter(new DataTypeFilter());
if (con.isKatmaiOrLater()) {
rsProcedureMeta.getColumn(8).setFilter(new ZeroFixupFilter());
rsProcedureMeta.getColumn(9).setFilter(new ZeroFixupFilter());
rsProcedureMeta.getColumn(17).setFilter(new ZeroFixupFilter());
}
}
// Otherwise we just have a parameterized statement.
// if SQL server version is 2012 and above use stored
// procedure "sp_describe_undeclared_parameters" to retrieve parameter meta data
// if SQL server version is 2008, then use FMTONLY
else {
queryMetaMap = new HashMap<Integer, QueryMeta>();
if (con.getServerMajorVersion() >= SQL_SERVER_2012_VERSION) {
// new implementation for SQL verser 2012 and above
String preparedSQL = con.replaceParameterMarkers(((SQLServerPreparedStatement) stmtParent).userSQL,
((SQLServerPreparedStatement) stmtParent).inOutParam, ((SQLServerPreparedStatement) stmtParent).bReturnValueSyntax);
SQLServerCallableStatement cstmt = (SQLServerCallableStatement) con.prepareCall("exec sp_describe_undeclared_parameters ?");
cstmt.setNString(1, preparedSQL);
parseQueryMeta(cstmt.executeQueryInternal());
cstmt.close();
}
else {
// old implementation for SQL server 2008
MetaInfo metaInfo = parseStatement(sProcString);
if (null == metaInfo) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_cantIdentifyTableMetadata"));
Object[] msgArgs = {sProcString};
SQLServerException.makeFromDriverError(con, stmtParent, form.format(msgArgs), null, false);
}
if (metaInfo.fields.length() <= 0)
return;
Statement stmt = con.createStatement();
String sCom = "sp_executesql N'SET FMTONLY ON SELECT " + metaInfo.fields + " FROM " + metaInfo.table + " WHERE 1 = 2'";
ResultSet rs = stmt.executeQuery(sCom);
parseQueryMetaFor2008(rs);
stmt.close();
rs.close();
}
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
}
catch(StringIndexOutOfBoundsException e){
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
}
}
public boolean isWrapperFor(Class<?> iface) throws SQLException {
DriverJDBCVersion.checkSupportsJDBC4();
boolean f = iface.isInstance(this);
return f;
}
public <T> T unwrap(Class<T> iface) throws SQLException {
DriverJDBCVersion.checkSupportsJDBC4();
T t;
try {
t = iface.cast(this);
}
catch (ClassCastException e) {
throw new SQLServerException(e.getMessage(), e);
}
return t;
}
/* L2 */ private void verifyParameterPosition(int param) throws SQLServerException {
boolean bFound = false;
try {
if (((SQLServerPreparedStatement) stmtParent).bReturnValueSyntax && isTVP) {
bFound = rsProcedureMeta.absolute(param);
}
else {
bFound = rsProcedureMeta.absolute(param + 1); // Note row 1 is the 'return value' meta data
}
}
catch (SQLException e) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_metaDataErrorForParameter"));
Object[] msgArgs = {new Integer(param)};
SQLServerException.makeFromDriverError(con, stmtParent, form.format(msgArgs) + " " + e.toString(), null, false);
}
if (!bFound) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_invalidParameterNumber"));
Object[] msgArgs = {new Integer(param)};
SQLServerException.makeFromDriverError(con, stmtParent, form.format(msgArgs), null, false);
}
}
/* L2 */ private void checkParam(int n) throws SQLServerException {
if (!queryMetaMap.containsKey(n)) {
SQLServerException.makeFromDriverError(con, stmtParent, SQLServerException.getErrString("R_noMetadata"), null, false);
}
}
/* L2 */ public String getParameterClassName(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).parameterClassName;
}
else {
verifyParameterPosition(param);
JDBCType jdbcType = JDBCType.of(rsProcedureMeta.getShort("DATA_TYPE"));
return jdbcType.className();
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return null;
}
}
/* L2 */ public int getParameterCount() throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement
return queryMetaMap.size();
}
else {
rsProcedureMeta.last();
int nCount = rsProcedureMeta.getRow() - 1;
if (nCount < 0)
nCount = 0;
return nCount;
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public int getParameterMode(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
checkParam(param);
// if it is not a stored proc, the param can only be input.
return parameterModeIn;
}
else {
verifyParameterPosition(param);
int n = rsProcedureMeta.getInt("COLUMN_TYPE");
switch (n) {
case 1:
return parameterModeIn;
case 2:
return parameterModeOut;
default:
return parameterModeUnknown;
}
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public int getParameterType(int param) throws SQLServerException {
checkClosed();
int parameterType;
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
parameterType = queryMetaMap.get(param).parameterType;
}
else {
verifyParameterPosition(param);
parameterType = rsProcedureMeta.getShort("DATA_TYPE");
}
switch (parameterType) {
case microsoft.sql.Types.DATETIME:
case microsoft.sql.Types.SMALLDATETIME:
parameterType = SSType.DATETIME2.getJDBCType().asJavaSqlType();
break;
case microsoft.sql.Types.MONEY:
case microsoft.sql.Types.SMALLMONEY:
parameterType = SSType.DECIMAL.getJDBCType().asJavaSqlType();
break;
case microsoft.sql.Types.GUID:
parameterType = SSType.CHAR.getJDBCType().asJavaSqlType();
break;
}
return parameterType;
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public String getParameterTypeName(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).parameterTypeName;
}
else {
verifyParameterPosition(param);
return rsProcedureMeta.getString("TYPE_NAME");
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return null;
}
}
/* L2 */ public int getPrecision(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).precision;
}
else {
verifyParameterPosition(param);
int nPrec = rsProcedureMeta.getInt("PRECISION");
return nPrec;
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public int getScale(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).scale;
}
else {
verifyParameterPosition(param);
int nScale = rsProcedureMeta.getInt("SCALE");
return nScale;
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public int isNullable(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).isNullable;
}
else {
verifyParameterPosition(param);
int nNull = rsProcedureMeta.getInt("NULLABLE");
if (nNull == 1)
return parameterNullable;
if (nNull == 0)
return parameterNoNulls;
return parameterNullableUnknown;
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/**
* Verify a supplied parameter index is valid
*
* @param param
* the param index
* @throws SQLServerException
* when an error occurs
* @return boolean
*/
/* L2 */ public boolean isSigned(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).isSigned;
}
else {
verifyParameterPosition(param);
return JDBCType.of(rsProcedureMeta.getShort("DATA_TYPE")).isSigned();
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return false;
}
}
String getTVPSchemaFromStoredProcedure(int param) throws SQLServerException {
checkClosed();
verifyParameterPosition(param);
return rsProcedureMeta.getString("SS_TYPE_SCHEMA_NAME");
}
}
| src/main/java/com/microsoft/sqlserver/jdbc/SQLServerParameterMetaData.java | /*
* Microsoft JDBC Driver for SQL Server
*
* Copyright(c) Microsoft Corporation All rights reserved.
*
* This program is made available under the terms of the MIT License. See the LICENSE file in the project root for more information.
*/
package com.microsoft.sqlserver.jdbc;
import java.sql.ParameterMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* SQLServerParameterMetaData provides JDBC 3.0 meta data for prepared statement parameters.
*
* The API javadoc for JDBC API methods that this class implements are not repeated here. Please see Sun's JDBC API interfaces javadoc for those
* details.
*
* Prepared statements are executed with SET FMT ONLY to retrieve column meta data Callable statements : sp_sp_sproc_columns is called to retrieve
* names and meta data for the procedures params.
*/
public final class SQLServerParameterMetaData implements ParameterMetaData {
private final static int SQL_SERVER_2012_VERSION = 11;
private final SQLServerStatement stmtParent;
private SQLServerConnection con;
/* Used for callable statement meta data */
private Statement stmtCall;
private SQLServerResultSet rsProcedureMeta;
static final private java.util.logging.Logger logger = java.util.logging.Logger
.getLogger("com.microsoft.sqlserver.jdbc.internals.SQLServerParameterMetaData");
static private final AtomicInteger baseID = new AtomicInteger(0); // Unique id generator for each instance (used for logging).
final private String traceID = " SQLServerParameterMetaData:" + nextInstanceID();
boolean isTVP = false;
// Returns unique id for each instance.
private static int nextInstanceID() {
return baseID.incrementAndGet();
}
/**
* This is a helper function to provide an ID string suitable for tracing.
*
* @return traceID string
*/
final public String toString() {
return traceID;
}
/**
* Parse the columns in a column set.
*
* @param columnSet
* the list of columns
* @param columnStartToken
* the token that prfixes the column set
*/
/* L2 */ private String parseColumns(String columnSet,
String columnStartToken) {
StringTokenizer st = new StringTokenizer(columnSet, " =?<>!", true);
final int START = 0;
final int PARAMNAME = 1;
final int PARAMVALUE = 2;
int nState = 0;
String sLastField = null;
StringBuilder sb = new StringBuilder();
while (st.hasMoreTokens()) {
String sToken = st.nextToken();
if (sToken.equalsIgnoreCase(columnStartToken)) {
nState = PARAMNAME;
continue;
}
if (nState == START)
continue;
if ((sToken.charAt(0) == '=') || sToken.equalsIgnoreCase("is") || (sToken.charAt(0) == '<') || (sToken.charAt(0) == '>')
|| sToken.equalsIgnoreCase("like") || sToken.equalsIgnoreCase("not") || sToken.equalsIgnoreCase("in")
|| (sToken.charAt(0) == '!')) {
nState = PARAMVALUE;
continue;
}
if (sToken.charAt(0) == '?' && sLastField != null) {
if (sb.length() != 0) {
sb.append(", ");
}
sb.append(sLastField);
nState = PARAMNAME;
sLastField = null;
continue;
}
if (nState == PARAMNAME) {
// space get the next token.
if (sToken.equals(" "))
continue;
String paramN = escapeParse(st, sToken);
if (paramN.length() > 0) {
sLastField = paramN;
}
}
}
return sb.toString();
}
/**
* Parse the column set in an insert syntax.
*
* @param sql
* the sql syntax
* @param columnMarker
* the token that denotes the start of the column set
*/
/* L2 */ private String parseInsertColumns(String sql,
String columnMarker) {
StringTokenizer st = new StringTokenizer(sql, " (),", true);
int nState = 0;
String sLastField = null;
StringBuilder sb = new StringBuilder();
while (st.hasMoreTokens()) {
String sToken = st.nextToken();
if (sToken.equalsIgnoreCase(columnMarker)) {
nState = 1;
continue;
}
if (nState == 0)
continue;
if (sToken.charAt(0) == '=') {
nState = 2;
continue;
}
if ((sToken.charAt(0) == ',' || sToken.charAt(0) == ')' || sToken.charAt(0) == ' ') && sLastField != null) {
if (sb.length() != 0)
sb.append(", ");
sb.append(sLastField);
nState = 1;
sLastField = null;
}
if (sToken.charAt(0) == ')') {
nState = 0;
break;
}
if (nState == 1) {
if (sToken.trim().length() > 0) {
if (sToken.charAt(0) != ',')
sLastField = escapeParse(st, sToken);
}
}
}
return sb.toString();
}
/* Used for prepared statement meta data */
class QueryMeta {
String parameterClassName = null;
int parameterType = 0;
String parameterTypeName = null;
int precision = 0;
int scale = 0;
int isNullable = ParameterMetaData.parameterNullableUnknown;
boolean isSigned = false;
}
Map<Integer, QueryMeta> queryMetaMap = null;
/*
* Parse query metadata.
*/
private void parseQueryMeta(ResultSet rsQueryMeta) throws SQLServerException {
Pattern datatypePattern = Pattern.compile("(.*)\\((.*)(\\)|,(.*)\\))");
try {
while (rsQueryMeta.next()) {
QueryMeta qm = new QueryMeta();
SSType ssType = null;
int paramOrdinal = rsQueryMeta.getInt("parameter_ordinal");
String typename = rsQueryMeta.getString("suggested_system_type_name");
if (null == typename) {
typename = rsQueryMeta.getString("suggested_user_type_name");
SQLServerPreparedStatement pstmt = (SQLServerPreparedStatement) con
.prepareCall("select max_length, precision, scale, is_nullable from sys.assembly_types where name = ?");
pstmt.setNString(1, typename);
ResultSet assemblyRs = pstmt.executeQuery();
if (assemblyRs.next()) {
qm.parameterTypeName = typename;
qm.precision = assemblyRs.getInt("max_length");
qm.scale = assemblyRs.getInt("scale");
ssType = SSType.UDT;
}
}
else {
qm.precision = rsQueryMeta.getInt("suggested_precision");
qm.scale = rsQueryMeta.getInt("suggested_scale");
Matcher matcher = datatypePattern.matcher(typename);
if (matcher.matches()) {
// the datatype has some precision/scale defined explicitly.
ssType = SSType.of(matcher.group(1));
if (typename.equalsIgnoreCase("varchar(max)") || typename.equalsIgnoreCase("varbinary(max)")) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE;
}
else if (typename.equalsIgnoreCase("nvarchar(max)")) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE / 2;
}
else if (SSType.Category.CHARACTER == ssType.category || SSType.Category.BINARY == ssType.category
|| SSType.Category.NCHARACTER == ssType.category) {
try {
// For character/binary data types "suggested_precision" is 0. So get the precision from the type itself.
qm.precision = Integer.parseInt(matcher.group(2));
}
catch (NumberFormatException e) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_metaDataErrorForParameter"));
Object[] msgArgs = {new Integer(paramOrdinal)};
SQLServerException.makeFromDriverError(con, stmtParent, form.format(msgArgs) + " " + e.toString(), null, false);
}
}
}
else
ssType = SSType.of(typename);
// For float and real types suggested_precision returns the number of bits, not digits.
if (SSType.FLOAT == ssType) {
// https://msdn.microsoft.com/en-CA/library/ms173773.aspx
// real is float(24) and is 7 digits. Float is 15 digits.
qm.precision = 15;
}
else if (SSType.REAL == ssType) {
qm.precision = 7;
}
else if (SSType.TEXT == ssType) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE;
}
else if (SSType.NTEXT == ssType) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE / 2;
}
else if (SSType.IMAGE == ssType) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE;
}
else if (SSType.GUID == ssType) {
qm.precision = SQLServerDatabaseMetaData.uniqueidentifierSize;
}
else if (SSType.TIMESTAMP == ssType) {
qm.precision = 8;
}
else if (SSType.XML == ssType) {
qm.precision = SQLServerDatabaseMetaData.MAXLOBSIZE / 2;
}
qm.parameterTypeName = ssType.toString();
}
// Check if ssType is null. Was caught by static analysis.
if (null == ssType) {
throw new SQLServerException(SQLServerException.getErrString("R_metaDataErrorForParameter"), null);
}
JDBCType jdbcType = ssType.getJDBCType();
qm.parameterClassName = jdbcType.className();
qm.parameterType = jdbcType.getIntValue();
// The parameter can be signed if it is a NUMERIC type (except bit or tinyint).
qm.isSigned = ((SSType.Category.NUMERIC == ssType.category) && (SSType.BIT != ssType) && (SSType.TINYINT != ssType));
queryMetaMap.put(paramOrdinal, qm);
}
}
catch (SQLException e) {
throw new SQLServerException(SQLServerException.getErrString("R_metaDataErrorForParameter"), e);
}
}
private void parseQueryMetaFor2008(ResultSet rsQueryMeta) throws SQLServerException {
ResultSetMetaData md;
try {
md = rsQueryMeta.getMetaData();
for (int i = 1; i <= md.getColumnCount(); i++) {
QueryMeta qm = new QueryMeta();
qm.parameterClassName = md.getColumnClassName(i);
qm.parameterType = md.getColumnType(i);
qm.parameterTypeName = md.getColumnTypeName(i);
qm.precision = md.getPrecision(i);
qm.scale = md.getScale(i);
qm.isNullable = md.isNullable(i);
qm.isSigned = md.isSigned(i);
queryMetaMap.put(i, qm);
}
}
catch (SQLException e) {
throw new SQLServerException(SQLServerException.getErrString("R_metaDataErrorForParameter"), e);
}
}
/**
* Escape parser, using the tokenizer tokenizes escaped strings properly e.g.[Table Name, ]
*
* @param st
* string tokenizer
* @param firstToken
* @returns the full token
*/
private String escapeParse(StringTokenizer st,
String firstToken) {
String nameFragment;
String fullName;
nameFragment = firstToken;
// skip spaces
while (nameFragment.equals(" ") && st.hasMoreTokens()) {
nameFragment = st.nextToken();
}
fullName = nameFragment;
if (nameFragment.charAt(0) == '[' && nameFragment.charAt(nameFragment.length() - 1) != ']') {
while (st.hasMoreTokens()) {
nameFragment = st.nextToken();
fullName = fullName.concat(nameFragment);
if (nameFragment.charAt(nameFragment.length() - 1) == ']') {
break;
}
}
}
fullName = fullName.trim();
return fullName;
}
private class MetaInfo {
String table;
String fields;
MetaInfo(String table,
String fields) {
this.table = table;
this.fields = fields;
}
}
/**
* Parse a SQL syntax.
*
* @param sql
* String
* @param sTableMarker
* the location of the table in the syntax
*/
private MetaInfo parseStatement(String sql,
String sTableMarker) {
StringTokenizer st = new StringTokenizer(sql, " ,\r\n", true);
/* Find the table */
String metaTable = null;
String metaFields = "";
while (st.hasMoreTokens()) {
String sToken = st.nextToken().trim();
if(sToken.contains("*/")){
sToken = removeCommentsInTheBeginning(sToken, 0, 0, "/*", "*/");
}
if (sToken.equalsIgnoreCase(sTableMarker)) {
if (st.hasMoreTokens()) {
metaTable = escapeParse(st, st.nextToken());
break;
}
}
}
if (null != metaTable) {
if (sTableMarker.equalsIgnoreCase("UPDATE"))
metaFields = parseColumns(sql, "SET"); // Get the set fields
else if (sTableMarker.equalsIgnoreCase("INTO")) // insert
metaFields = parseInsertColumns(sql, "("); // Get the value fields
else
metaFields = parseColumns(sql, "WHERE"); // Get the where fields
return new MetaInfo(metaTable, metaFields);
}
return null;
}
/**
* Parse a SQL syntax.
*
* @param sql
* the syntax
* @throws SQLServerException
*/
private MetaInfo parseStatement(String sql) throws SQLServerException {
StringTokenizer st = new StringTokenizer(sql, " ");
if (st.hasMoreTokens()) {
String sToken = st.nextToken().trim();
// filter out multiple line comments in the beginning of the query
if (sToken.contains("/*")) {
String sqlWithoutCommentsInBeginning = removeCommentsInTheBeginning(sql, 0, 0, "/*", "*/");
return parseStatement(sqlWithoutCommentsInBeginning);
}
// filter out single line comments in the beginning of the query
if (sToken.contains("--")) {
String sqlWithoutCommentsInBeginning = removeCommentsInTheBeginning(sql, 0, 0, "--", "\n");
return parseStatement(sqlWithoutCommentsInBeginning);
}
if (sToken.equalsIgnoreCase("INSERT"))
return parseStatement(sql, "INTO"); // INTO marks the table name
if (sToken.equalsIgnoreCase("UPDATE"))
return parseStatement(sql, "UPDATE");
if (sToken.equalsIgnoreCase("SELECT"))
return parseStatement(sql, "FROM");
if (sToken.equalsIgnoreCase("DELETE"))
return parseStatement(sql, "FROM");
}
return null;
}
private String removeCommentsInTheBeginning(String sql,
int startCommentMarkCount,
int endCommentMarkCount,
String startMark,
String endMark) {
int startCommentMarkIndex = sql.indexOf(startMark);
int endCommentMarkIndex = sql.indexOf(endMark);
if (-1 == startCommentMarkIndex) {
startCommentMarkIndex = Integer.MAX_VALUE;
}
if (-1 == endCommentMarkIndex) {
endCommentMarkIndex = Integer.MAX_VALUE;
}
// Base case. startCommentMarkCount is guaranteed to be bigger than 0 because the method is called when /* occurs
if (startCommentMarkCount == endCommentMarkCount) {
if (startCommentMarkCount != 0 && endCommentMarkCount != 0) {
return sql;
}
}
// filter out first start comment mark
if (startCommentMarkIndex < endCommentMarkIndex) {
String sqlWithoutCommentsInBeginning = sql.substring(startCommentMarkIndex + startMark.length());
return removeCommentsInTheBeginning(sqlWithoutCommentsInBeginning, ++startCommentMarkCount, endCommentMarkCount, startMark, endMark);
}
// filter out first end comment mark
else {
String sqlWithoutCommentsInBeginning = sql.substring(endCommentMarkIndex + endMark.length());
return removeCommentsInTheBeginning(sqlWithoutCommentsInBeginning, startCommentMarkCount, ++endCommentMarkCount, startMark, endMark);
}
}
String parseThreePartNames(String threeName) throws SQLServerException {
int noofitems = 0;
String procedureName = null;
String procedureOwner = null;
String procedureQualifier = null;
StringTokenizer st = new StringTokenizer(threeName, ".", true);
// parse left to right looking for three part name
// note the user can provide three part, two part or one part name
while (st.hasMoreTokens()) {
String sToken = st.nextToken();
String nextItem = escapeParse(st, sToken);
if (nextItem.equals(".") == false) {
switch (noofitems) {
case 2:
procedureQualifier = procedureOwner;
procedureOwner = procedureName;
procedureName = nextItem;
noofitems++;
break;
case 1:
procedureOwner = procedureName;
procedureName = nextItem;
noofitems++;
break;
case 0:
procedureName = nextItem;
noofitems++;
break;
default:
noofitems++;
break;
}
}
}
StringBuilder sb = new StringBuilder(100);
if (noofitems > 3 && 1 < noofitems)
SQLServerException.makeFromDriverError(con, stmtParent, SQLServerException.getErrString("R_noMetadata"), null, false);
switch (noofitems) {
case 3:
sb.append("@procedure_qualifier =");
sb.append(procedureQualifier);
sb.append(", ");
sb.append("@procedure_owner =");
sb.append(procedureOwner);
sb.append(", ");
sb.append("@procedure_name =");
sb.append(procedureName);
sb.append(", ");
break;
case 2:
sb.append("@procedure_owner =");
sb.append(procedureOwner);
sb.append(", ");
sb.append("@procedure_name =");
sb.append(procedureName);
sb.append(", ");
break;
case 1:
sb.append("@procedure_name =");
sb.append(procedureName);
sb.append(", ");
break;
default:
break;
}
return sb.toString();
}
private void checkClosed() throws SQLServerException {
stmtParent.checkClosed();
}
/**
* Create new parameter meta data.
*
* @param st
* the prepared statement
* @param sProcString
* the pricedure name
* @throws SQLServerException
*/
SQLServerParameterMetaData(SQLServerStatement st,
String sProcString) throws SQLServerException {
assert null != st;
stmtParent = st;
con = st.connection;
if (logger.isLoggable(java.util.logging.Level.FINE)) {
logger.fine(toString() + " created by (" + st.toString() + ")");
}
try {
// If the CallableStatement/PreparedStatement is a stored procedure call
// then we can extract metadata using sp_sproc_columns
if (null != st.procedureName) {
SQLServerStatement s = (SQLServerStatement) con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
String sProc = parseThreePartNames(st.procedureName);
if (con.isKatmaiOrLater())
rsProcedureMeta = s.executeQueryInternal("exec sp_sproc_columns_100 " + sProc + " @ODBCVer=3");
else
rsProcedureMeta = s.executeQueryInternal("exec sp_sproc_columns " + sProc + " @ODBCVer=3");
// Sixth is DATA_TYPE
rsProcedureMeta.getColumn(6).setFilter(new DataTypeFilter());
if (con.isKatmaiOrLater()) {
rsProcedureMeta.getColumn(8).setFilter(new ZeroFixupFilter());
rsProcedureMeta.getColumn(9).setFilter(new ZeroFixupFilter());
rsProcedureMeta.getColumn(17).setFilter(new ZeroFixupFilter());
}
}
// Otherwise we just have a parameterized statement.
// if SQL server version is 2012 and above use stored
// procedure "sp_describe_undeclared_parameters" to retrieve parameter meta data
// if SQL server version is 2008, then use FMTONLY
else {
queryMetaMap = new HashMap<Integer, QueryMeta>();
if (con.getServerMajorVersion() >= SQL_SERVER_2012_VERSION) {
// new implementation for SQL verser 2012 and above
String preparedSQL = con.replaceParameterMarkers(((SQLServerPreparedStatement) stmtParent).userSQL,
((SQLServerPreparedStatement) stmtParent).inOutParam, ((SQLServerPreparedStatement) stmtParent).bReturnValueSyntax);
SQLServerCallableStatement cstmt = (SQLServerCallableStatement) con.prepareCall("exec sp_describe_undeclared_parameters ?");
cstmt.setNString(1, preparedSQL);
parseQueryMeta(cstmt.executeQueryInternal());
cstmt.close();
}
else {
// old implementation for SQL server 2008
MetaInfo metaInfo = parseStatement(sProcString);
if (null == metaInfo) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_cantIdentifyTableMetadata"));
Object[] msgArgs = {sProcString};
SQLServerException.makeFromDriverError(con, stmtParent, form.format(msgArgs), null, false);
}
if (metaInfo.fields.length() <= 0)
return;
Statement stmt = con.createStatement();
String sCom = "sp_executesql N'SET FMTONLY ON SELECT " + metaInfo.fields + " FROM " + metaInfo.table + " WHERE 1 = 2'";
ResultSet rs = stmt.executeQuery(sCom);
parseQueryMetaFor2008(rs);
stmt.close();
rs.close();
}
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
}
}
public boolean isWrapperFor(Class<?> iface) throws SQLException {
DriverJDBCVersion.checkSupportsJDBC4();
boolean f = iface.isInstance(this);
return f;
}
public <T> T unwrap(Class<T> iface) throws SQLException {
DriverJDBCVersion.checkSupportsJDBC4();
T t;
try {
t = iface.cast(this);
}
catch (ClassCastException e) {
throw new SQLServerException(e.getMessage(), e);
}
return t;
}
/* L2 */ private void verifyParameterPosition(int param) throws SQLServerException {
boolean bFound = false;
try {
if (((SQLServerPreparedStatement) stmtParent).bReturnValueSyntax && isTVP) {
bFound = rsProcedureMeta.absolute(param);
}
else {
bFound = rsProcedureMeta.absolute(param + 1); // Note row 1 is the 'return value' meta data
}
}
catch (SQLException e) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_metaDataErrorForParameter"));
Object[] msgArgs = {new Integer(param)};
SQLServerException.makeFromDriverError(con, stmtParent, form.format(msgArgs) + " " + e.toString(), null, false);
}
if (!bFound) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_invalidParameterNumber"));
Object[] msgArgs = {new Integer(param)};
SQLServerException.makeFromDriverError(con, stmtParent, form.format(msgArgs), null, false);
}
}
/* L2 */ private void checkParam(int n) throws SQLServerException {
if (!queryMetaMap.containsKey(n)) {
SQLServerException.makeFromDriverError(con, stmtParent, SQLServerException.getErrString("R_noMetadata"), null, false);
}
}
/* L2 */ public String getParameterClassName(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).parameterClassName;
}
else {
verifyParameterPosition(param);
JDBCType jdbcType = JDBCType.of(rsProcedureMeta.getShort("DATA_TYPE"));
return jdbcType.className();
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return null;
}
}
/* L2 */ public int getParameterCount() throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement
return queryMetaMap.size();
}
else {
rsProcedureMeta.last();
int nCount = rsProcedureMeta.getRow() - 1;
if (nCount < 0)
nCount = 0;
return nCount;
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public int getParameterMode(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
checkParam(param);
// if it is not a stored proc, the param can only be input.
return parameterModeIn;
}
else {
verifyParameterPosition(param);
int n = rsProcedureMeta.getInt("COLUMN_TYPE");
switch (n) {
case 1:
return parameterModeIn;
case 2:
return parameterModeOut;
default:
return parameterModeUnknown;
}
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public int getParameterType(int param) throws SQLServerException {
checkClosed();
int parameterType;
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
parameterType = queryMetaMap.get(param).parameterType;
}
else {
verifyParameterPosition(param);
parameterType = rsProcedureMeta.getShort("DATA_TYPE");
}
switch (parameterType) {
case microsoft.sql.Types.DATETIME:
case microsoft.sql.Types.SMALLDATETIME:
parameterType = SSType.DATETIME2.getJDBCType().asJavaSqlType();
break;
case microsoft.sql.Types.MONEY:
case microsoft.sql.Types.SMALLMONEY:
parameterType = SSType.DECIMAL.getJDBCType().asJavaSqlType();
break;
case microsoft.sql.Types.GUID:
parameterType = SSType.CHAR.getJDBCType().asJavaSqlType();
break;
}
return parameterType;
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public String getParameterTypeName(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).parameterTypeName;
}
else {
verifyParameterPosition(param);
return rsProcedureMeta.getString("TYPE_NAME");
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return null;
}
}
/* L2 */ public int getPrecision(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).precision;
}
else {
verifyParameterPosition(param);
int nPrec = rsProcedureMeta.getInt("PRECISION");
return nPrec;
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public int getScale(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).scale;
}
else {
verifyParameterPosition(param);
int nScale = rsProcedureMeta.getInt("SCALE");
return nScale;
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/* L2 */ public int isNullable(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).isNullable;
}
else {
verifyParameterPosition(param);
int nNull = rsProcedureMeta.getInt("NULLABLE");
if (nNull == 1)
return parameterNullable;
if (nNull == 0)
return parameterNoNulls;
return parameterNullableUnknown;
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return 0;
}
}
/**
* Verify a supplied parameter index is valid
*
* @param param
* the param index
* @throws SQLServerException
* when an error occurs
* @return boolean
*/
/* L2 */ public boolean isSigned(int param) throws SQLServerException {
checkClosed();
try {
if (rsProcedureMeta == null) {
// PreparedStatement.
checkParam(param);
return queryMetaMap.get(param).isSigned;
}
else {
verifyParameterPosition(param);
return JDBCType.of(rsProcedureMeta.getShort("DATA_TYPE")).isSigned();
}
}
catch (SQLException e) {
SQLServerException.makeFromDriverError(con, stmtParent, e.toString(), null, false);
return false;
}
}
String getTVPSchemaFromStoredProcedure(int param) throws SQLServerException {
checkClosed();
verifyParameterPosition(param);
return rsProcedureMeta.getString("SS_TYPE_SCHEMA_NAME");
}
}
| wrap StringIndexOutOfBoundsException with SQLServerException
| src/main/java/com/microsoft/sqlserver/jdbc/SQLServerParameterMetaData.java | wrap StringIndexOutOfBoundsException with SQLServerException |
|
Java | mit | 0b55b5075595f8e03108248329f5718c21ab97ae | 0 | RaimondKempees/fast-classpath-scanner,lukehutch/fast-classpath-scanner,classgraph/classgraph,lukehutch/fast-classpath-scanner | /*
* This file is part of FastClasspathScanner.
*
* Author: Luke Hutchison
*
* Hosted at: https://github.com/lukehutch/fast-classpath-scanner
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2016 Luke Hutchison
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.github.lukehutch.fastclasspathscanner.utils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class JarUtils {
private static final List<String> JRE_PATHS = new ArrayList<>();
private static String RT_JAR_PATH = null;
private static String getProperty(final String propName) {
try {
return System.getProperty(propName);
} catch (final SecurityException e) {
return null;
}
}
private static void addJREPath(final File dir, final Set<String> jrePathsSet) {
if (ClasspathUtils.canRead(dir) && dir.isDirectory()) {
String path = dir.getPath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
String jrePath = FastPathResolver.resolve("", path);
if (!jrePath.isEmpty()) {
jrePathsSet.add(jrePath);
}
try {
String canonicalPath = dir.getCanonicalPath();
if (!canonicalPath.endsWith(File.separator)) {
canonicalPath += File.separator;
}
String jreCanonicalPath = FastPathResolver.resolve("", canonicalPath);
if (!jreCanonicalPath.equals(jrePath) && !jreCanonicalPath.isEmpty()) {
jrePathsSet.add(jreCanonicalPath);
}
} catch (IOException | SecurityException e) {
}
}
}
// Find JRE jar dirs
static {
final Set<String> jrePathsSet = new HashSet<>();
final String javaHome = getProperty("java.home");
if (javaHome != null && !javaHome.isEmpty()) {
final File javaHomeFile = new File(javaHome);
addJREPath(javaHomeFile, jrePathsSet);
final File libFile = new File(javaHomeFile, "lib");
addJREPath(libFile, jrePathsSet);
final File extFile = new File(libFile, "ext");
addJREPath(extFile, jrePathsSet);
final File rtJarFile = new File(libFile, "rt.jar");
if (ClasspathUtils.canRead(rtJarFile)) {
RT_JAR_PATH = rtJarFile.getPath();
}
if (javaHomeFile.getName().equals("jre")) {
// Handle jre/../lib/tools.jar
final File parent = javaHomeFile.getParentFile();
if (parent != null) {
final File parentLibFile = new File(parent, "lib");
addJREPath(parentLibFile, jrePathsSet);
}
}
}
final String javaExtDirs = getProperty("java.ext.dirs");
if (javaExtDirs != null) {
for (final String javaExtDir : javaExtDirs.split(File.pathSeparator)) {
if (!javaExtDir.isEmpty()) {
final File javaExtDirFile = new File(javaExtDir);
addJREPath(javaExtDirFile, jrePathsSet);
}
}
}
JRE_PATHS.addAll(jrePathsSet);
Collections.sort(JRE_PATHS);
}
/** Get the path of rt.jar */
public static String getRtJarPath() {
return RT_JAR_PATH;
}
/** Log the Java version and the JRE paths that were found. */
public static void logJavaInfo(final LogNode log) {
if (log != null) {
log.log("Operating system: " + getProperty("os.name") + " " + getProperty("os.version") + " "
+ getProperty("os.arch"));
log.log("Java version: " + getProperty("java.version") + " (" + getProperty("java.vendor") + ")");
final LogNode javaLog = log.log("JRE paths:");
for (final String jrePath : JRE_PATHS) {
javaLog.log(jrePath);
}
if (RT_JAR_PATH != null) {
javaLog.log(RT_JAR_PATH);
}
}
}
/** Determine whether a given jarfile is in a JRE system directory (jre, jre/lib, jre/lib/ext, etc.). */
public static boolean isJREJar(final String filePath, final LogNode log) {
for (final String jrePathPrefix : JRE_PATHS) {
if (filePath.startsWith(jrePathPrefix)) {
return true;
}
}
return false;
}
/** Returns true if the path ends with a jarfile extension, ignoring case. */
public static boolean isJar(final String path) {
final int len = path.length();
return path.regionMatches(true, len - 4, ".jar", 0, 4) //
|| path.regionMatches(true, len - 4, ".zip", 0, 4) //
|| path.regionMatches(true, len - 4, ".war", 0, 4) //
|| path.regionMatches(true, len - 4, ".car", 0, 4) //
|| path.regionMatches(true, len - 6, ".wsjar", 0, 6);
}
/** Returns the leafname of a path. */
public static String leafName(final String path) {
final int lastSlashIdx = File.separatorChar == '/' ? path.lastIndexOf('/')
: Math.max(path.lastIndexOf('/'), path.lastIndexOf(File.separatorChar));
// In case of temp files (for jars extracted from within jars), remove the temp filename prefix
int sepIdx = path.indexOf(NestedJarHandler.TEMP_FILENAME_SEPARATOR);
if (sepIdx >= 0) {
sepIdx += NestedJarHandler.TEMP_FILENAME_SEPARATOR.length() - 1;
}
final int maxIdx = Math.max(lastSlashIdx, sepIdx);
return maxIdx < 0 ? path : path.substring(maxIdx + 1);
}
}
| src/main/java/io/github/lukehutch/fastclasspathscanner/utils/JarUtils.java | /*
* This file is part of FastClasspathScanner.
*
* Author: Luke Hutchison
*
* Hosted at: https://github.com/lukehutch/fast-classpath-scanner
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2016 Luke Hutchison
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.github.lukehutch.fastclasspathscanner.utils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class JarUtils {
private static final List<String> JRE_PATHS = new ArrayList<>();
private static String RT_JAR_PATH = null;
private static String getProperty(final String propName) {
try {
return System.getProperty(propName);
} catch (final SecurityException e) {
return null;
}
}
private static void addJREPath(final File dir, final Set<String> jrePathsSet) {
if (ClasspathUtils.canRead(dir) && dir.isDirectory()) {
String path = dir.getPath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
jrePathsSet.add(FastPathResolver.resolve("/", path));
try {
String canonicalPath = dir.getCanonicalPath();
if (!canonicalPath.endsWith(File.separator)) {
canonicalPath += File.separator;
}
if (!canonicalPath.equals(path)) {
jrePathsSet.add(FastPathResolver.resolve("", canonicalPath));
}
} catch (IOException | SecurityException e) {
}
}
}
// Find JRE jar dirs
static {
final Set<String> jrePathsSet = new HashSet<>();
final String javaHome = getProperty("java.home");
if (javaHome != null && !javaHome.isEmpty()) {
final File javaHomeFile = new File(javaHome);
addJREPath(javaHomeFile, jrePathsSet);
final File libFile = new File(javaHomeFile, "lib");
addJREPath(libFile, jrePathsSet);
final File extFile = new File(libFile, "ext");
addJREPath(extFile, jrePathsSet);
final File rtJarFile = new File(libFile, "rt.jar");
if (ClasspathUtils.canRead(rtJarFile)) {
RT_JAR_PATH = rtJarFile.getPath();
}
if (javaHomeFile.getName().equals("jre")) {
final File parent = javaHomeFile.getParentFile();
if (parent != null) {
final File parentLibFile = new File(parent, "lib");
addJREPath(parentLibFile, jrePathsSet);
}
}
}
final String javaExtDirs = getProperty("java.ext.dirs");
if (javaExtDirs != null) {
for (final String javaExtDir : javaExtDirs.split(File.pathSeparator)) {
if (!javaExtDir.isEmpty()) {
final File javaExtDirFile = new File(javaExtDir);
addJREPath(javaExtDirFile, jrePathsSet);
}
}
}
JRE_PATHS.addAll(jrePathsSet);
Collections.sort(JRE_PATHS);
}
/** Get the path of rt.jar */
public static String getRtJarPath() {
return RT_JAR_PATH;
}
/** Log the Java version and the JRE paths that were found. */
public static void logJavaInfo(final LogNode log) {
if (log != null) {
log.log("Operating system: " + getProperty("os.name") + " " + getProperty("os.version") + " "
+ getProperty("os.arch"));
log.log("Java version: " + getProperty("java.version") + " (" + getProperty("java.vendor") + ")");
final LogNode javaLog = log.log("JRE paths:");
for (final String jrePath : JRE_PATHS) {
javaLog.log(jrePath);
}
if (RT_JAR_PATH != null) {
javaLog.log(RT_JAR_PATH);
}
}
}
/** Determine whether a given jarfile is in a JRE system directory (jre, jre/lib, jre/lib/ext, etc.). */
public static boolean isJREJar(final String filePath, final LogNode log) {
for (final String jrePathPrefix : JRE_PATHS) {
if (filePath.startsWith(jrePathPrefix)) {
return true;
}
}
return false;
}
/** Returns true if the path ends with a jarfile extension, ignoring case. */
public static boolean isJar(final String path) {
final int len = path.length();
return path.regionMatches(true, len - 4, ".jar", 0, 4) //
|| path.regionMatches(true, len - 4, ".zip", 0, 4) //
|| path.regionMatches(true, len - 4, ".war", 0, 4) //
|| path.regionMatches(true, len - 4, ".car", 0, 4) //
|| path.regionMatches(true, len - 6, ".wsjar", 0, 6);
}
/** Returns the leafname of a path. */
public static String leafName(final String path) {
final int lastSlashIdx = File.separatorChar == '/' ? path.lastIndexOf('/')
: Math.max(path.lastIndexOf('/'), path.lastIndexOf(File.separatorChar));
// In case of temp files (for jars extracted from within jars), remove the temp filename prefix
int sepIdx = path.indexOf(NestedJarHandler.TEMP_FILENAME_SEPARATOR);
if (sepIdx >= 0) {
sepIdx += NestedJarHandler.TEMP_FILENAME_SEPARATOR.length() - 1;
}
final int maxIdx = Math.max(lastSlashIdx, sepIdx);
return maxIdx < 0 ? path : path.substring(maxIdx + 1);
}
}
| Increase robustness of JRE dir path handling | src/main/java/io/github/lukehutch/fastclasspathscanner/utils/JarUtils.java | Increase robustness of JRE dir path handling |
|
Java | epl-1.0 | f1e58adac7e84d16f4b776496107beadef2a090b | 0 | maeste/ironjacamar,ironjacamar/ironjacamar,jesperpedersen/ironjacamar,ironjacamar/ironjacamar,jandsu/ironjacamar,jpkrohling/ironjacamar,rarguello/ironjacamar,johnaoahra80/ironjacamar,darranl/ironjacamar,ironjacamar/ironjacamar | /*
* JBoss, Home of Professional Open Source.
* Copyright 2008, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.jca.common.api.metadata.ds;
import org.jboss.jca.common.api.metadata.ValidatableMetadata;
import org.jboss.jca.common.api.metadata.common.Extension;
import java.util.HashMap;
import java.util.Map;
/**
*
* A Validation.
*
* @author <a href="[email protected]">Stefano Maestri</a>
*
*/
public interface Validation extends org.jboss.jca.common.api.metadata.common.CommonValidation, ValidatableMetadata
{
/**
* Get the validConnectionChecker
*
* @return the validConnectionChecker
*/
public Extension getValidConnectionChecker();
/**
* Get the checkValidConnectionSql.
*
* @return the checkValidConnectionSql.
*/
public String getCheckValidConnectionSql();
/**
* Get the validateOnMatch.
*
* @return the validateOnMatch.
*/
public Boolean isValidateOnMatch();
/**
* Get the staleConnectionChecker
*
* @return the staleConnectionChecker
*/
public Extension getStaleConnectionChecker();
/**
* Get the exceptionSorter
*
* @return the exceptionSorter
*/
public Extension getExceptionSorter();
/**
*
* A Tag.
*
* @author <a href="[email protected]">Stefano Maestri</a>
*
*/
public enum Tag
{
/** always first
*
*/
UNKNOWN(null),
/**
* validConnectionCheckerClassName tag
*/
VALIDCONNECTIONCHECKER("valid-connection-checker"),
/**
* checkValidConnectionSql tag
*/
CHECKVALIDCONNECTIONSQL("check-valid-connection-sql"),
/**
* validateOnMatch tag
*/
VALIDATEONMATCH("validate-on-match"),
/**
* backgroundValidation tag
*/
BACKGROUNDVALIDATION("background-validation"),
/**
* backgroundValidationMillis tag
*/
BACKGROUNDVALIDATIONMILLIS("background-validation-millis"),
/**
* useFastFail tag
*/
USEFASTFAIL("use-fast-fail"),
/**
* staleConnectionCheckerClassName tag
*/
STALECONNECTIONCHECKER("stale-connection-checker"),
/**
* exceptionSorterClassName tag
*/
EXCEPTIONSORTER("exception-sorter");
private final String name;
/**
*
* Create a new Tag.
*
* @param name a name
*/
Tag(final String name)
{
this.name = name;
}
/**
* Get the local name of this element.
*
* @return the local name
*/
public String getLocalName()
{
return name;
}
private static final Map<String, Tag> MAP;
static
{
final Map<String, Tag> map = new HashMap<String, Tag>();
for (Tag element : values())
{
final String name = element.getLocalName();
if (name != null)
map.put(name, element);
}
MAP = map;
}
/**
*
* Static method to get enum instance given localName XsdString
*
* @param localName a XsdString used as localname (typically tag name as defined in xsd)
* @return the enum instance
*/
public static Tag forName(String localName)
{
final Tag element = MAP.get(localName);
return element == null ? UNKNOWN : element;
}
}
}
| common/src/main/java/org/jboss/jca/common/api/metadata/ds/Validation.java | /*
* JBoss, Home of Professional Open Source.
* Copyright 2008, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.jca.common.api.metadata.ds;
import org.jboss.jca.common.api.metadata.ValidatableMetadata;
import org.jboss.jca.common.api.metadata.common.Extension;
import java.util.HashMap;
import java.util.Map;
/**
*
* A Validation.
*
* @author <a href="[email protected]">Stefano Maestri</a>
*
*/
public interface Validation extends org.jboss.jca.common.api.metadata.common.CommonValidation, ValidatableMetadata
{
/**
* Get the validConnectionChecker
*
* @return the validConnectionChecker
*/
public Extension getValidConnectionChecker();
/**
* Get the checkValidConnectionSql.
*
* @return the checkValidConnectionSql.
*/
public String getCheckValidConnectionSql();
/**
* Get the validateOnMatch.
*
* @return the validateOnMatch.
*/
public Boolean isValidateOnMatch();
/**
* Get the staleConnectionChecker
*
* @return the staleConnectionChecker
*/
public Extension getStaleConnectionChecker();
/**
* Get the exceptionSorter
*
* @return the exceptionSorter
*/
public Extension getExceptionSorter();
/**
*
* A Tag.
*
* @author <a href="[email protected]">Stefano Maestri</a>
*
*/
public enum Tag
{
/** always first
*
*/
UNKNOWN(null),
/**
* validConnectionCheckerClassName tag
*/
VALIDCONNECTIONCHECKER("valid-connection-checker"),
/**
* checkValidConnectionSql tag
*/
CHECKVALIDCONNECTIONSQL("check-valid-connection-sql"),
/**
* validateOnMatch tag
*/
VALIDATEONMATCH("validate-on-match"),
/**
* backgroundValidation tag
*/
BACKGROUNDVALIDATION("background-validation"),
/**
* backgroundValidationMillis tag
*/
BACKGROUNDVALIDATIONMILLIS("background-validation-millis"),
/**
* useFastFail tag
*/
USEFASTFAIL("useFastFail"),
/**
* staleConnectionCheckerClassName tag
*/
STALECONNECTIONCHECKER("stale-connection-checker"),
/**
* exceptionSorterClassName tag
*/
EXCEPTIONSORTER("exception-sorter");
private final String name;
/**
*
* Create a new Tag.
*
* @param name a name
*/
Tag(final String name)
{
this.name = name;
}
/**
* Get the local name of this element.
*
* @return the local name
*/
public String getLocalName()
{
return name;
}
private static final Map<String, Tag> MAP;
static
{
final Map<String, Tag> map = new HashMap<String, Tag>();
for (Tag element : values())
{
final String name = element.getLocalName();
if (name != null)
map.put(name, element);
}
MAP = map;
}
/**
*
* Static method to get enum instance given localName XsdString
*
* @param localName a XsdString used as localname (typically tag name as defined in xsd)
* @return the enum instance
*/
public static Tag forName(String localName)
{
final Tag element = MAP.get(localName);
return element == null ? UNKNOWN : element;
}
}
}
| [JBJCA-643] DS: use-fast-fail
| common/src/main/java/org/jboss/jca/common/api/metadata/ds/Validation.java | [JBJCA-643] DS: use-fast-fail |
|
Java | agpl-3.0 | fe711532f964701c0ce7512e987e39eff82a0607 | 0 | Audiveris/audiveris,Audiveris/audiveris | //-----------------------------------------------------------------------//
// //
// S c o r e C o n s t a n t s //
// //
// Copyright (C) Herve Bitteur 2000-2006. All rights reserved. //
// This software is released under the terms of the GNU General Public //
// License. Please contact the author at [email protected] //
// to report bugs & suggestions. //
//-----------------------------------------------------------------------//
package omr.score;
import omr.constant.Constant;
import omr.constant.ConstantSet;
/**
* Class <code>ScoreConstants</code> gathers all constants related to the
* display of a score.
*
* @author Hervé Bitteur
* @version $Id$
*/
public class ScoreConstants
{
//~ Static variables/initializers -------------------------------------
private static final Constants constants = new Constants();
/** Height in pixels of a stave display */
public static final int STAVE_AREA_HEIGHT = constants.staveAreaheight.getValue();
/** Height in pixels above/under a stave display */
public static final int STAVE_MARGIN_HEIGHT = constants.staveMarginHeight.getValue();
/** Horizontal offset in pixels of the score origin */
public static final int SCORE_INIT_X = constants.scoreInitX.getValue();
/** Vertical offset in pixels of the score origin */
public static final int SCORE_INIT_Y = constants.scoreInitY.getValue();
/** Horizontal gutter in pixels between two systems */
public static final int INTER_SYSTEM = constants.interSystem.getValue();
/** Vertical distance in pixels between two lines of a standard staff : {@value} */
public static final int INTER_LINE = 16;
/** Horizontal gutter in pixels between two pages */
public static final int INTER_PAGE = constants.interPage.getValue();
/** Number of lines in a staff */
public static final int LINE_NB = constants.lineNb.getValue();
/** Height in pixels of one staff */
public static final int STAFF_HEIGHT = (LINE_NB - 1) * INTER_LINE;
/** Used to code fractions with an integer value, with a resolution of 1/{@value} */
public static final int BASE = 1024;
//~ Constructors ------------------------------------------------------
//----------------//
// ScoreConstants // Not to be instantiated
//----------------//
private ScoreConstants ()
{
}
//~ Classes -----------------------------------------------------------
//-----------//
// Constants //
//-----------//
private static class Constants
extends ConstantSet
{
Constant.Integer staveAreaheight = new Constant.Integer
(100,
"Height in pixels of a stave display");
Constant.Integer staveMarginHeight = new Constant.Integer
(40,
"Height in pixels above/under a stave display : ");
Constant.Integer scoreInitX = new Constant.Integer
(200,
"Horizontal offset in pixels of the score origin");
Constant.Integer scoreInitY = new Constant.Integer
(150,
"Vertical offset in pixels of the score origin");
Constant.Integer interSystem = new Constant.Integer
(100,
"Horizontal gutter in pixels between two systems");
Constant.Integer interPage = new Constant.Integer
(200,
"Horizontal gutter in pixels between two pages");
Constant.Integer lineNb = new Constant.Integer
(5,
"Number of lines in a staff");
Constants ()
{
initialize();
}
}
}
| src/main/omr/score/ScoreConstants.java | //-----------------------------------------------------------------------//
// //
// S c o r e C o n s t a n t s //
// //
// Copyright (C) Herve Bitteur 2000-2006. All rights reserved. //
// This software is released under the terms of the GNU General Public //
// License. Please contact the author at [email protected] //
// to report bugs & suggestions. //
//-----------------------------------------------------------------------//
package omr.score;
import omr.constant.Constant;
import omr.constant.ConstantSet;
/**
* Class <code>ScoreConstants</code> gathers all constants related to the
* display of a score.
*
* @author Hervé Bitteur
* @version $Id$
*/
public class ScoreConstants
{
//~ Static variables/initializers -------------------------------------
private static final Constants constants = new Constants();
/** Height in pixels of a stave display */
public static final int STAVE_AREA_HEIGHT = constants.staveAreaheight.getValue();
/** Height in pixels above/under a stave display */
public static final int STAVE_MARGIN_HEIGHT = constants.staveMarginHeight.getValue();
/** Horizontal offset in pixels of the score origin */
public static final int SCORE_INIT_X = constants.scoreInitX.getValue();
/** Vertical offset in pixels of the score origin */
public static final int SCORE_INIT_Y = constants.scoreInitY.getValue();
/** Horizontal gutter in pixels between two systems */
public static final int INTER_SYSTEM = constants.interSystem.getValue();
/** Vertical distance in pixels between two lines of a staff : {@value} */
public static final int INTER_LINE = 16;
/** Horizontal gutter in pixels between two pages */
public static final int INTER_PAGE = constants.interPage.getValue();
/** Number of lines in a staff */
public static final int LINE_NB = constants.lineNb.getValue();
/** Height in pixels of one staff */
public static final int STAFF_HEIGHT = (LINE_NB - 1) * INTER_LINE;
/** Used to code fractions with an integer value, with a resolution of 1/{@value} */
public static final int BASE = 1024;
//~ Constructors ------------------------------------------------------
//----------------//
// ScoreConstants // Not to be instantiated
//----------------//
private ScoreConstants ()
{
}
//~ Classes -----------------------------------------------------------
//-----------//
// Constants //
//-----------//
private static class Constants
extends ConstantSet
{
Constant.Integer staveAreaheight = new Constant.Integer
(100,
"Height in pixels of a stave display");
Constant.Integer staveMarginHeight = new Constant.Integer
(40,
"Height in pixels above/under a stave display : ");
Constant.Integer scoreInitX = new Constant.Integer
(200,
"Horizontal offset in pixels of the score origin");
Constant.Integer scoreInitY = new Constant.Integer
(150,
"Vertical offset in pixels of the score origin");
Constant.Integer interSystem = new Constant.Integer
(100,
"Horizontal gutter in pixels between two systems");
Constant.Integer interPage = new Constant.Integer
(200,
"Horizontal gutter in pixels between two pages");
Constant.Integer lineNb = new Constant.Integer
(5,
"Number of lines in a staff");
Constants ()
{
initialize();
}
}
}
| Better comment
| src/main/omr/score/ScoreConstants.java | Better comment |
|
Java | agpl-3.0 | a099e5ccdd39d3ae2311f2b5aac78ecf23ab88a2 | 0 | paulmartel/voltdb,simonzhangsm/voltdb,migue/voltdb,deerwalk/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,deerwalk/voltdb,VoltDB/voltdb,migue/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,paulmartel/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,migue/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,paulmartel/voltdb,paulmartel/voltdb,migue/voltdb,migue/voltdb,VoltDB/voltdb,migue/voltdb,migue/voltdb,deerwalk/voltdb,VoltDB/voltdb,migue/voltdb,deerwalk/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,deerwalk/voltdb,VoltDB/voltdb | /* This file is part of VoltDB.
* Copyright (C) 2008-2016 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.voltdb.planner;
import java.util.List;
import org.voltdb.expressions.AbstractExpression;
import org.voltdb.plannodes.AbstractJoinPlanNode;
import org.voltdb.plannodes.AbstractPlanNode;
import org.voltdb.plannodes.AbstractScanPlanNode;
import org.voltdb.plannodes.IndexScanPlanNode;
import org.voltdb.plannodes.NestLoopPlanNode;
import org.voltdb.plannodes.PlanNodeTree;
import org.voltdb.plannodes.ReceivePlanNode;
import org.voltdb.types.ExpressionType;
import org.voltdb.types.IndexLookupType;
import org.voltdb.types.JoinType;
import org.voltdb.types.PlanNodeType;
public class TestMultipleOuterJoinPlans extends PlannerTestCase {
private void verifyJoinNode(AbstractPlanNode n, PlanNodeType nodeType, JoinType joinType,
ExpressionType preJoinExpressionType, ExpressionType joinExpressionType, ExpressionType whereExpressionType,
PlanNodeType outerNodeType, PlanNodeType innerNodeType,
String outerTableAlias, String innerTableAlias) {
assertEquals(nodeType, n.getPlanNodeType());
AbstractJoinPlanNode jn = (AbstractJoinPlanNode) n;
assertEquals(joinType, jn.getJoinType());
if (preJoinExpressionType != null) {
assertEquals(preJoinExpressionType, jn.getPreJoinPredicate().getExpressionType());
} else {
assertNull(jn.getPreJoinPredicate());
}
if (joinExpressionType != null) {
assertEquals(joinExpressionType, jn.getJoinPredicate().getExpressionType());
} else {
assertNull(jn.getJoinPredicate());
}
if (whereExpressionType != null) {
assertEquals(whereExpressionType, jn.getWherePredicate().getExpressionType());
} else {
assertNull(jn.getWherePredicate());
}
assertEquals(outerNodeType, jn.getChild(0).getPlanNodeType());
if (outerTableAlias != null) {
assertEquals(outerTableAlias, ((AbstractScanPlanNode) jn.getChild(0)).getTargetTableAlias());
}
if (nodeType == PlanNodeType.NESTLOOP) {
assertEquals(innerNodeType, jn.getChild(1).getPlanNodeType());
}
if (innerTableAlias != null) {
if (nodeType == PlanNodeType.NESTLOOP) {
assertEquals(innerTableAlias, ((AbstractScanPlanNode) jn.getChild(1)).getTargetTableAlias());
} else {
IndexScanPlanNode sn = (IndexScanPlanNode) jn.getInlinePlanNode(PlanNodeType.INDEXSCAN);
assertEquals(innerTableAlias, sn.getTargetTableAlias());
}
}
}
private void verifyJoinNode(AbstractPlanNode n, PlanNodeType nodeType, JoinType joinType,
ExpressionType preJoinExpressionType, ExpressionType joinExpressionType, ExpressionType whereExpressionType,
PlanNodeType outerNodeType, PlanNodeType innerNodeType) {
verifyJoinNode(n, nodeType, joinType, preJoinExpressionType, joinExpressionType, whereExpressionType, outerNodeType, innerNodeType, null, null);
}
private void verifyIndexScanNode(AbstractPlanNode n, IndexLookupType lookupType, ExpressionType predExpressionType) {
assertNotNull(n);
assertEquals(PlanNodeType.INDEXSCAN, n.getPlanNodeType());
IndexScanPlanNode isn = (IndexScanPlanNode) n;
assertEquals(lookupType, isn.getLookupType());
if (predExpressionType != null) {
assertEquals(predExpressionType, isn.getPredicate().getExpressionType());
} else {
assertNull(isn.getPredicate());
}
}
public void testInnerOuterJoin() {
AbstractPlanNode pn;
AbstractPlanNode n;
pn = compile("select * FROM R1 INNER JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
pn = compile("select * FROM R1, R2 LEFT JOIN R3 ON R3.C = R2.C WHERE R1.A = R2.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
}
public void testOuterOuterJoin() {
AbstractPlanNode pn;
AbstractPlanNode n;
pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1");
pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1");
pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C WHERE R1.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
}
public void testMultiTableJoinExpressions() {
AbstractPlanNode pn = compile("select * FROM R1, R2 LEFT JOIN R3 ON R3.A = R2.C OR R3.A = R1.A WHERE R1.C = R2.C");
AbstractPlanNode n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_OR, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
NestLoopPlanNode nlj = (NestLoopPlanNode) n;
AbstractExpression p = nlj.getJoinPredicate();
assertEquals(ExpressionType.CONJUNCTION_OR, p.getExpressionType());
}
public void testPushDownExprJoin() {
AbstractPlanNode pn;
AbstractPlanNode n;
// R3.A > 0 gets pushed down all the way to the R3 scan node and used as an index
pn = compile("select * FROM R3, R2 LEFT JOIN R1 ON R1.C = R2.C WHERE R3.C = R2.C AND R3.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.INDEXSCAN, PlanNodeType.SEQSCAN, "R3", "R2");
// R3.A > 0 is now outer join expression and must stay at the LEFT join
pn = compile("select * FROM R3, R2 LEFT JOIN R1 ON R1.C = R2.C AND R3.A > 0 WHERE R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R3", "R2");
pn = compile("select * FROM R3 JOIN R2 ON R3.C = R2.C RIGHT JOIN R1 ON R1.C = R2.C AND R3.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R1", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R3", "R2");
// R3.A > 0 gets pushed down all the way to the R3 scan node and used as an index
pn = compile("select * FROM R2, R3 LEFT JOIN R1 ON R1.C = R2.C WHERE R3.C = R2.C AND R3.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R2", "R3");
// R3.A = R2.C gets pushed down to the R2, R3 join node scan node and used as an index
pn = compile("select * FROM R2, R3 LEFT JOIN R1 ON R1.C = R2.C WHERE R3.A = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.SEQSCAN, null, "R1");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, null, "R2", "R3");
}
public void testOuterSimplificationJoin() {
// NULL_rejection simplification is the first transformation -
// before the LEFT-to-RIGHT and the WHERE expressions push down
AbstractPlanNode pn;
AbstractPlanNode n;
pn = compile("select * FROM R1, R3 RIGHT JOIN R2 ON R1.A = R2.A WHERE R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// The second R3.C = R2.C join condition is NULL-rejecting for the outer table
// from the first LEFT join - can't simplify (not the inner table)
pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// The second R3.C = R2.C join condition is NULL-rejecting for the first LEFT join
pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// The R3.A = R1.A join condition is NULL-rejecting for the FULL join OUTER (R1) table
// simplifying it to R1 LEFT JOIN R2
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A " +
"RIGHT JOIN R3 ON R3.A = R1.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// The R3.A = R2.A join condition is NULL-rejecting for the FULL join INNER (R2) table
// simplifying it to R1 RIGHT JOIN R2 which gets converted to R2 LEFT JOIN R1
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A " +
"RIGHT JOIN R3 ON R3.A = R2.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1");
// The R1-R2 FULL join is an outer node in the top LEFT join - not simplified
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A " +
"LEFT JOIN R3 ON R3.A = R2.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// The R3.A = R2.A AND R3.A = R1.A join condition is NULL-rejecting for the FULL join
// OUTER (R1) and INNER (R1) tables simplifying it to R1 JOIN R2
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A " +
"RIGHT JOIN R3 ON R3.A = R2.A AND R3.A = R1.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
// HSQL doubles the join expression for the first join. Once it's corrected the join expression type
// should be ExpressionType.COMPARE_EQUAL
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// The R4 FULL join is an outer node in the R5 FULL join and can not be simplified by the R1.A = R5.A ON expression
// R1 RIGHT JOIN R2 ON R1.A = R2.A R1 JOIN R3 ON R1.A = R3.A
// JOIN R3 ON R1.A = R3.A ==> JOIN R2 ON R1.A = R2.A
// FULL JOIN R4 ON R1.A = R4.A FULL JOIN R4 ON R1.A = R4.A
// FULL JOIN R5 ON R1.A = R5.A FULL JOIN R5 ON R1.A = R5.A
pn = compile("select * FROM " +
"R1 RIGHT JOIN R2 ON R1.A = R2.A " +
"JOIN R3 ON R1.A = R3.A " +
"FULL JOIN R4 ON R1.A = R4.A " +
"FULL JOIN R5 ON R1.A = R5.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN, null, "R5");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R4");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.SEQSCAN, null, "R2");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3");
// The R1-R2 LEFT JOIN belongs to the outer node of the top FULL join
// and can't be simplified by the R2.A = R4.A ON join condition
pn = compile("select * FROM " +
"R1 LEFT JOIN R2 ON R1.A = R2.A " +
"JOIN R3 ON R1.A = R3.A " +
"FULL JOIN R4 ON R2.A = R4.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN, null, "R4");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// The R2.A > 0 WHERE expression is NULL rejecting for all outer joins
pn = compile("select * FROM " +
"R1 LEFT JOIN R2 ON R1.A = R2.A " +
"JOIN R3 ON R1.A = R3.A " +
"FULL JOIN R4 ON R1.A = R4.A WHERE R2.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R4");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.SEQSCAN, null, "R2");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3");
// The R1-R2 RIGHT join is an outer node in the top FULL join - not simplified
pn = compile("SELECT * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R3.A = R1.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1");
// The R1-R2 LEFT join is an outer node in the top FULL join - not simplified
pn = compile("SELECT * FROM R1 LEFT JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R3.A = R2.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
}
public void testMultitableDistributedJoin() {
List<AbstractPlanNode> lpn;
AbstractPlanNode n;
// One distributed table
lpn = compileToFragments("select * FROM R3,R1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=R1.A ");
assertTrue(lpn.size() == 2);
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.RECEIVE);
// R3.A and P2.A have an index. P2,R1 is NLIJ/inlined IndexScan because it's an inner join even P2 is distributed
lpn = compileToFragments("select * FROM P2,R1 LEFT JOIN R3 ON R3.A = P2.A WHERE P2.A=R1.A ");
assertTrue(lpn.size() == 2);
n = lpn.get(0).getChild(0).getChild(0);
assertTrue(n instanceof ReceivePlanNode);
n = lpn.get(1).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN);
// R3.A has an index. R3,P2 is NLJ because it's an outer join and P2 is distributed
lpn = compileToFragments("select * FROM R3,R1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=R1.A ");
assertTrue(lpn.size() == 2);
// to debug */ System.out.println("DEBUG 0.0: " + lpn.get(0).toExplainPlanString());
// to debug */ System.out.println("DEBUG 0.1: " + lpn.get(1).toExplainPlanString());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.RECEIVE);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN);
n = lpn.get(1).getChild(0);
// For determinism reason
assertTrue(n instanceof IndexScanPlanNode);
// R3.A has an index. P2,R1 is NLJ because P2 is distributed and it's an outer join
lpn = compileToFragments("select * FROM R1 LEFT JOIN P2 ON R1.A = P2.A, R3 WHERE R1.A=R3.A ");
assertTrue(lpn.size() == 2);
// to debug */ System.out.println("DEBUG 1.0: " + lpn.get(0).toExplainPlanString());
// to debug */ System.out.println("DEBUG 1.1: " + lpn.get(1).toExplainPlanString());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE);
n = lpn.get(1).getChild(0);
// For determinism reason
assertTrue(n instanceof IndexScanPlanNode);
// Two distributed table
lpn = compileToFragments("select * FROM R3,P1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=P1.A ");
assertTrue(lpn.size() == 2);
n = lpn.get(0).getChild(0).getChild(0);
assertTrue(n instanceof ReceivePlanNode);
n = lpn.get(1).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN);
}
public void testFullJoinExpressions() {
AbstractPlanNode pn;
AbstractPlanNode n;
// WHERE outer and inner expressions stay at the FULL NLJ node
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A WHERE R2.C IS NULL AND R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.CONJUNCTION_AND, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// WHERE outer and inner expressions stay at the FULL NLJ node
// The outer node is a join itself
pn = compile("select * FROM " +
"R1 JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R3.C = R2.C WHERE R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
// WHERE outer-inner expressions stay at the FULL NLJ node
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A WHERE R2.C IS NULL OR R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.CONJUNCTION_OR, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// WHERE outer and inner expressions push down process stops at the FULL join (R1,R2) node -
// FULL join is itself an outer node
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C WHERE R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// OUTER JOIN expression (R1.A > 0) is pre-predicate, inner and inner - outer expressions R3.C = R2.C AND R3.C < 0 are predicate
pn = compile("select * FROM R1 JOIN R2 ON R1.A = R2.C FULL JOIN R3 ON R3.C = R2.C AND R1.A > 0 AND R3.C < 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with
// JOIN inner-outer one are part of the join predicate
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A AND R1.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with
// JOIN inner-outer one are part of the join predicate
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A AND R1.C < 0 AND R2.C > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_LESSTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with
// JOIN inner-outer one are part of the join predicate
pn = compile("select * FROM " +
"R1 JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R1.A = R3.C AND R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.OPERATOR_IS_NULL, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
}
public void testFullIndexJoinExpressions() {
AbstractPlanNode pn;
AbstractPlanNode n;
// Simple FULL NLIJ
pn = compile("select * FROM " +
"R3 FULL JOIN R1 ON R3.A = R1.A WHERE R3.C IS NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN);
String json = (new PlanNodeTree(pn)).toJSONString();
// Same Join as above but using FULL OUTER JOIN syntax
pn = compile("select * FROM " +
"R3 FULL OUTER JOIN R1 ON R3.A = R1.A WHERE R3.C IS NULL");
String json1 = (new PlanNodeTree(pn)).toJSONString();
assertEquals(json, json1);
// FULL NLJ. R3.A is an index column but R3.A > 0 expression is used as a PREDICATE only
pn = compile("select * FROM " +
"R1 FULL JOIN R3 ON R3.C = R1.A AND R3.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R3");
// FULL NLIJ, inner join R3.A > 0 is added as a post-predicate to the inline Index scan
pn = compile("select * FROM R1 FULL JOIN R3 ON R3.A = R1.A AND R3.A > 55");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_GREATERTHAN);
// FULL NLIJ, inner join L.A > 0 is added as a pre-predicate to the NLIJ
pn = compile("select * FROM R3 L FULL JOIN R3 R ON L.A = R.A AND L.A > 55");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "L", "R");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, null);
// FULL NLIJ, inner-outer join R3.c = R1.c is a post-predicate for the inline Index scan
pn = compile("select * FROM R1 FULL JOIN R3 ON R3.A = R1.A AND R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_EQUAL);
// FULL NLIJ, outer join (R1, R2) expression R1.A > 0 is a pre-predicate
pn = compile("select * FROM R1 JOIN R2 ON R1.A = R2.C FULL JOIN R3 ON R3.A = R2.C AND R1.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, null);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
}
public void testDistributedFullJoin() {
List<AbstractPlanNode> lpn;
AbstractPlanNode n;
// FULL join on partition column
lpn = compileToFragments("select * FROM " +
"P1 FULL JOIN R2 ON P1.A = R2.A ");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// FULL join on partition column
lpn = compileToFragments("select * FROM " +
"R2 FULL JOIN P1 ON P1.A = R2.A ");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// FULL join on non-partition column
lpn = compileToFragments("select * FROM " +
"P1 FULL JOIN R2 ON P1.C = R2.A ");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// NLJ FULL join (R2, P2) on partition column R2.A > 0 is a pre-predicate, P2.A = R2.A AND P2.E < 0 are join predicate
// It can't be a NLIJ because P2 is partitioned - P2.A index is not used
lpn = compileToFragments("select * FROM " +
"P2 FULL JOIN R2 ON P2.A = R2.A AND R2.A > 0 AND P2.E < 0");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// NLJ FULL join (R2, P2) on partition column P2.E = R2.A AND P2.A > 0 are join predicate
// Inner join expression P2.A > 0 can't be used as index expression with NLJ
lpn = compileToFragments("select * FROM " +
"P2 FULL JOIN R2 ON P2.E = R2.A AND P2.A > 0");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// NLJ (R3, P2) on partition column P2.A. R3.A > 0 is a PRE_PREDICTAE
// NLIJ (P2,R3) on partition column P2.A using index R3.A is an invalid plan for a FULL join
lpn = compileToFragments("select * FROM " +
"P2 FULL JOIN R3 ON P2.A = R3.A AND R3.A > 0 AND P2.E < 0");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R3", null);
// FULL NLJ join of two partition tables on partition column
lpn = compileToFragments("select * FROM P1 FULL JOIN P4 ON P1.A = P4.A ");
assertEquals(2, lpn.size());
n = lpn.get(1).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "P1", "P4");
// FULL NLIJ (P1,P2) on partition column P2.A
lpn = compileToFragments("select * FROM P2 FULL JOIN P1 ON P1.A = P2.A AND P2.A > 0");
assertEquals(2, lpn.size());
n = lpn.get(1).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "P1", "P2");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_GREATERTHAN);
// FULL join of two partition tables on non-partition column
failToCompile("select * FROM P1 FULL JOIN P4 ON P1.C = P4.A ",
"Join of multiple partitioned tables has insufficient join criteria");
}
@Override
protected void setUp() throws Exception {
setupSchema(TestJoinOrder.class.getResource("testplans-join-ddl.sql"), "testplansjoin", false);
}
}
| tests/frontend/org/voltdb/planner/TestMultipleOuterJoinPlans.java | /* This file is part of VoltDB.
* Copyright (C) 2008-2016 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.voltdb.planner;
import java.util.List;
import org.voltdb.expressions.AbstractExpression;
import org.voltdb.plannodes.AbstractJoinPlanNode;
import org.voltdb.plannodes.AbstractPlanNode;
import org.voltdb.plannodes.AbstractScanPlanNode;
import org.voltdb.plannodes.IndexScanPlanNode;
import org.voltdb.plannodes.NestLoopPlanNode;
import org.voltdb.plannodes.PlanNodeTree;
import org.voltdb.plannodes.ReceivePlanNode;
import org.voltdb.types.ExpressionType;
import org.voltdb.types.IndexLookupType;
import org.voltdb.types.JoinType;
import org.voltdb.types.PlanNodeType;
public class TestMultipleOuterJoinPlans extends PlannerTestCase {
private void verifyJoinNode(AbstractPlanNode n, PlanNodeType nodeType, JoinType joinType,
ExpressionType preJoinExpressionType, ExpressionType joinExpressionType, ExpressionType whereExpressionType,
PlanNodeType outerNodeType, PlanNodeType innerNodeType,
String outerTableAlias, String innerTableAlias) {
assertEquals(nodeType, n.getPlanNodeType());
AbstractJoinPlanNode jn = (AbstractJoinPlanNode) n;
assertEquals(joinType, jn.getJoinType());
if (preJoinExpressionType != null) {
assertEquals(preJoinExpressionType, jn.getPreJoinPredicate().getExpressionType());
} else {
assertNull(jn.getPreJoinPredicate());
}
if (joinExpressionType != null) {
assertEquals(joinExpressionType, jn.getJoinPredicate().getExpressionType());
} else {
assertNull(jn.getJoinPredicate());
}
if (whereExpressionType != null) {
assertEquals(whereExpressionType, jn.getWherePredicate().getExpressionType());
} else {
assertNull(jn.getWherePredicate());
}
assertEquals(outerNodeType, jn.getChild(0).getPlanNodeType());
if (outerTableAlias != null) {
assertEquals(outerTableAlias, ((AbstractScanPlanNode) jn.getChild(0)).getTargetTableAlias());
}
if (nodeType == PlanNodeType.NESTLOOP) {
assertEquals(innerNodeType, jn.getChild(1).getPlanNodeType());
}
if (innerTableAlias != null) {
if (nodeType == PlanNodeType.NESTLOOP) {
assertEquals(innerTableAlias, ((AbstractScanPlanNode) jn.getChild(1)).getTargetTableAlias());
} else {
IndexScanPlanNode sn = (IndexScanPlanNode) jn.getInlinePlanNode(PlanNodeType.INDEXSCAN);
assertEquals(innerTableAlias, sn.getTargetTableAlias());
}
}
}
private void verifyJoinNode(AbstractPlanNode n, PlanNodeType nodeType, JoinType joinType,
ExpressionType preJoinExpressionType, ExpressionType joinExpressionType, ExpressionType whereExpressionType,
PlanNodeType outerNodeType, PlanNodeType innerNodeType) {
verifyJoinNode(n, nodeType, joinType, preJoinExpressionType, joinExpressionType, whereExpressionType, outerNodeType, innerNodeType, null, null);
}
private void verifyIndexScanNode(AbstractPlanNode n, IndexLookupType lookupType, ExpressionType predExpressionType) {
assertNotNull(n);
assertEquals(PlanNodeType.INDEXSCAN, n.getPlanNodeType());
IndexScanPlanNode isn = (IndexScanPlanNode) n;
assertEquals(lookupType, isn.getLookupType());
if (predExpressionType != null) {
assertEquals(predExpressionType, isn.getPredicate().getExpressionType());
} else {
assertNull(isn.getPredicate());
}
}
public void testInnerOuterJoin() {
AbstractPlanNode pn;
AbstractPlanNode n;
pn = compile("select * FROM R1 INNER JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
pn = compile("select * FROM R1, R2 LEFT JOIN R3 ON R3.C = R2.C WHERE R1.A = R2.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
}
public void testOuterOuterJoin() {
AbstractPlanNode pn;
AbstractPlanNode n;
pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1");
pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1");
pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C WHERE R1.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
}
public void testMultiTableJoinExpressions() {
AbstractPlanNode pn = compile("select * FROM R1, R2 LEFT JOIN R3 ON R3.A = R2.C OR R3.A = R1.A WHERE R1.C = R2.C");
AbstractPlanNode n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_OR, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
NestLoopPlanNode nlj = (NestLoopPlanNode) n;
AbstractExpression p = nlj.getJoinPredicate();
assertEquals(ExpressionType.CONJUNCTION_OR, p.getExpressionType());
}
public void testPushDownExprJoin() {
AbstractPlanNode pn;
AbstractPlanNode n;
// R3.A > 0 gets pushed down all the way to the R3 scan node and used as an index
pn = compile("select * FROM R3, R2 LEFT JOIN R1 ON R1.C = R2.C WHERE R3.C = R2.C AND R3.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.INDEXSCAN, PlanNodeType.SEQSCAN, "R3", "R2");
// R3.A > 0 is now outer join expression and must stay at the LEFT join
pn = compile("select * FROM R3, R2 LEFT JOIN R1 ON R1.C = R2.C AND R3.A > 0 WHERE R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R3", "R2");
pn = compile("select * FROM R3 JOIN R2 ON R3.C = R2.C RIGHT JOIN R1 ON R1.C = R2.C AND R3.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R1", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R3", "R2");
// R3.A > 0 gets pushed down all the way to the R3 scan node and used as an index
pn = compile("select * FROM R2, R3 LEFT JOIN R1 ON R1.C = R2.C WHERE R3.C = R2.C AND R3.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R2", "R3");
// R3.A = R2.C gets pushed down to the R2, R3 join node scan node and used as an index
pn = compile("select * FROM R2, R3 LEFT JOIN R1 ON R1.C = R2.C WHERE R3.A = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.SEQSCAN, null, "R1");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, null, "R2", "R3");
}
public void testOuterSimplificationJoin() {
// NULL_rejection simplification is the first transformation -
// before the LEFT-to-RIGHT and the WHERE expressions push down
AbstractPlanNode pn;
AbstractPlanNode n;
pn = compile("select * FROM R1, R3 RIGHT JOIN R2 ON R1.A = R2.A WHERE R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// The second R3.C = R2.C join condition is NULL-rejecting for the outer table
// from the first LEFT join - can't simplify (not the inner table)
pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// The second R3.C = R2.C join condition is NULL-rejecting for the first LEFT join
pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// The R3.A = R1.A join condition is NULL-rejecting for the FULL join OUTER (R1) table
// simplifying it to R1 LEFT JOIN R2
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A " +
"RIGHT JOIN R3 ON R3.A = R1.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// The R3.A = R2.A join condition is NULL-rejecting for the FULL join INNER (R2) table
// simplifying it to R1 RIGHT JOIN R2 which gets converted to R2 LEFT JOIN R1
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A " +
"RIGHT JOIN R3 ON R3.A = R2.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1");
// The R1-R2 FULL join is an outer node in the top LEFT join - not simplified
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A " +
"LEFT JOIN R3 ON R3.A = R2.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// The R3.A = R2.A AND R3.A = R1.A join condition is NULL-rejecting for the FULL join
// OUTER (R1) and INNER (R1) tables simplifying it to R1 JOIN R2
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A " +
"RIGHT JOIN R3 ON R3.A = R2.A AND R3.A = R1.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null);
n = n.getChild(1);
// HSQL doubles the join expression for the first join. Once it's corrected the join expression type
// should be ExpressionType.COMPARE_EQUAL
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// The R4 FULL join is an outer node in the R5 FULL join and can not be simplified by the R1.A = R5.A ON expression
// R1 RIGHT JOIN R2 ON R1.A = R2.A R1 JOIN R3 ON R1.A = R3.A
// JOIN R3 ON R1.A = R3.A ==> JOIN R2 ON R1.A = R2.A
// FULL JOIN R4 ON R1.A = R4.A FULL JOIN R4 ON R1.A = R4.A
// FULL JOIN R5 ON R1.A = R5.A FULL JOIN R5 ON R1.A = R5.A
pn = compile("select * FROM " +
"R1 RIGHT JOIN R2 ON R1.A = R2.A " +
"JOIN R3 ON R1.A = R3.A " +
"FULL JOIN R4 ON R1.A = R4.A " +
"FULL JOIN R5 ON R1.A = R5.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN, null, "R5");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R4");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.SEQSCAN, null, "R2");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3");
// The R1-R2 RIGHT join is an outer node in the top FULL join - not simplified
pn = compile("SELECT * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R3.A = R1.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1");
// The R1-R2 LEFT join is an outer node in the top FULL join - not simplified
pn = compile("SELECT * FROM R1 LEFT JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R3.A = R2.A");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
}
public void testMultitableDistributedJoin() {
List<AbstractPlanNode> lpn;
AbstractPlanNode n;
// One distributed table
lpn = compileToFragments("select * FROM R3,R1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=R1.A ");
assertTrue(lpn.size() == 2);
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.RECEIVE);
// R3.A and P2.A have an index. P2,R1 is NLIJ/inlined IndexScan because it's an inner join even P2 is distributed
lpn = compileToFragments("select * FROM P2,R1 LEFT JOIN R3 ON R3.A = P2.A WHERE P2.A=R1.A ");
assertTrue(lpn.size() == 2);
n = lpn.get(0).getChild(0).getChild(0);
assertTrue(n instanceof ReceivePlanNode);
n = lpn.get(1).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN);
// R3.A has an index. R3,P2 is NLJ because it's an outer join and P2 is distributed
lpn = compileToFragments("select * FROM R3,R1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=R1.A ");
assertTrue(lpn.size() == 2);
// to debug */ System.out.println("DEBUG 0.0: " + lpn.get(0).toExplainPlanString());
// to debug */ System.out.println("DEBUG 0.1: " + lpn.get(1).toExplainPlanString());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.RECEIVE);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN);
n = lpn.get(1).getChild(0);
// For determinism reason
assertTrue(n instanceof IndexScanPlanNode);
// R3.A has an index. P2,R1 is NLJ because P2 is distributed and it's an outer join
lpn = compileToFragments("select * FROM R1 LEFT JOIN P2 ON R1.A = P2.A, R3 WHERE R1.A=R3.A ");
assertTrue(lpn.size() == 2);
// to debug */ System.out.println("DEBUG 1.0: " + lpn.get(0).toExplainPlanString());
// to debug */ System.out.println("DEBUG 1.1: " + lpn.get(1).toExplainPlanString());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE);
n = lpn.get(1).getChild(0);
// For determinism reason
assertTrue(n instanceof IndexScanPlanNode);
// Two distributed table
lpn = compileToFragments("select * FROM R3,P1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=P1.A ");
assertTrue(lpn.size() == 2);
n = lpn.get(0).getChild(0).getChild(0);
assertTrue(n instanceof ReceivePlanNode);
n = lpn.get(1).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN);
}
public void testFullJoinExpressions() {
AbstractPlanNode pn;
AbstractPlanNode n;
// WHERE outer and inner expressions stay at the FULL NLJ node
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A WHERE R2.C IS NULL AND R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.CONJUNCTION_AND, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// WHERE outer and inner expressions stay at the FULL NLJ node
// The outer node is a join itself
pn = compile("select * FROM " +
"R1 JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R3.C = R2.C WHERE R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
// WHERE outer-inner expressions stay at the FULL NLJ node
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A WHERE R2.C IS NULL OR R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.CONJUNCTION_OR, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// WHERE outer and inner expressions push down process stops at the FULL join (R1,R2) node -
// FULL join is itself an outer node
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C WHERE R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// OUTER JOIN expression (R1.A > 0) is pre-predicate, inner and inner - outer expressions R3.C = R2.C AND R3.C < 0 are predicate
pn = compile("select * FROM R1 JOIN R2 ON R1.A = R2.C FULL JOIN R3 ON R3.C = R2.C AND R1.A > 0 AND R3.C < 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3");
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2");
// NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with
// JOIN inner-outer one are part of the join predicate
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A AND R1.C = R2.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with
// JOIN inner-outer one are part of the join predicate
pn = compile("select * FROM " +
"R1 FULL JOIN R2 ON R1.A = R2.A AND R1.C < 0 AND R2.C > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_LESSTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
// NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with
// JOIN inner-outer one are part of the join predicate
pn = compile("select * FROM " +
"R1 JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R1.A = R3.C AND R1.C is NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.OPERATOR_IS_NULL, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN);
}
public void testFullIndexJoinExpressions() {
AbstractPlanNode pn;
AbstractPlanNode n;
// Simple FULL NLIJ
pn = compile("select * FROM " +
"R3 FULL JOIN R1 ON R3.A = R1.A WHERE R3.C IS NULL");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN);
String json = (new PlanNodeTree(pn)).toJSONString();
// Same Join as above but using FULL OUTER JOIN syntax
pn = compile("select * FROM " +
"R3 FULL OUTER JOIN R1 ON R3.A = R1.A WHERE R3.C IS NULL");
String json1 = (new PlanNodeTree(pn)).toJSONString();
assertEquals(json, json1);
// FULL NLJ. R3.A is an index column but R3.A > 0 expression is used as a PREDICATE only
pn = compile("select * FROM " +
"R1 FULL JOIN R3 ON R3.C = R1.A AND R3.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R3");
// FULL NLIJ, inner join R3.A > 0 is added as a post-predicate to the inline Index scan
pn = compile("select * FROM R1 FULL JOIN R3 ON R3.A = R1.A AND R3.A > 55");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_GREATERTHAN);
// FULL NLIJ, inner join L.A > 0 is added as a pre-predicate to the NLIJ
pn = compile("select * FROM R3 L FULL JOIN R3 R ON L.A = R.A AND L.A > 55");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "L", "R");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, null);
// FULL NLIJ, inner-outer join R3.c = R1.c is a post-predicate for the inline Index scan
pn = compile("select * FROM R1 FULL JOIN R3 ON R3.A = R1.A AND R3.C = R1.C");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_EQUAL);
// FULL NLIJ, outer join (R1, R2) expression R1.A > 0 is a pre-predicate
pn = compile("select * FROM R1 JOIN R2 ON R1.A = R2.C FULL JOIN R3 ON R3.A = R2.C AND R1.A > 0");
n = pn.getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, null);
n = n.getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN);
}
public void testDistributedFullJoin() {
List<AbstractPlanNode> lpn;
AbstractPlanNode n;
// FULL join on partition column
lpn = compileToFragments("select * FROM " +
"P1 FULL JOIN R2 ON P1.A = R2.A ");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// FULL join on partition column
lpn = compileToFragments("select * FROM " +
"R2 FULL JOIN P1 ON P1.A = R2.A ");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// FULL join on non-partition column
lpn = compileToFragments("select * FROM " +
"P1 FULL JOIN R2 ON P1.C = R2.A ");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// NLJ FULL join (R2, P2) on partition column R2.A > 0 is a pre-predicate, P2.A = R2.A AND P2.E < 0 are join predicate
// It can't be a NLIJ because P2 is partitioned - P2.A index is not used
lpn = compileToFragments("select * FROM " +
"P2 FULL JOIN R2 ON P2.A = R2.A AND R2.A > 0 AND P2.E < 0");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// NLJ FULL join (R2, P2) on partition column P2.E = R2.A AND P2.A > 0 are join predicate
// Inner join expression P2.A > 0 can't be used as index expression with NLJ
lpn = compileToFragments("select * FROM " +
"P2 FULL JOIN R2 ON P2.E = R2.A AND P2.A > 0");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null);
// NLJ (R3, P2) on partition column P2.A. R3.A > 0 is a PRE_PREDICTAE
// NLIJ (P2,R3) on partition column P2.A using index R3.A is an invalid plan for a FULL join
lpn = compileToFragments("select * FROM " +
"P2 FULL JOIN R3 ON P2.A = R3.A AND R3.A > 0 AND P2.E < 0");
assertEquals(2, lpn.size());
n = lpn.get(0).getChild(0).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R3", null);
// FULL NLJ join of two partition tables on partition column
lpn = compileToFragments("select * FROM P1 FULL JOIN P4 ON P1.A = P4.A ");
assertEquals(2, lpn.size());
n = lpn.get(1).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "P1", "P4");
// FULL NLIJ (P1,P2) on partition column P2.A
lpn = compileToFragments("select * FROM P2 FULL JOIN P1 ON P1.A = P2.A AND P2.A > 0");
assertEquals(2, lpn.size());
n = lpn.get(1).getChild(0);
verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "P1", "P2");
verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_GREATERTHAN);
// FULL join of two partition tables on non-partition column
failToCompile("select * FROM P1 FULL JOIN P4 ON P1.C = P4.A ",
"Join of multiple partitioned tables has insufficient join criteria");
}
@Override
protected void setUp() throws Exception {
setupSchema(TestJoinOrder.class.getResource("testplans-join-ddl.sql"), "testplansjoin", false);
}
}
| One more testcase for NULL rejecting WHERE expression
| tests/frontend/org/voltdb/planner/TestMultipleOuterJoinPlans.java | One more testcase for NULL rejecting WHERE expression |
|
Java | lgpl-2.1 | e92c46f7c7cb452f822f339a42b1e35e5b89c995 | 0 | wolfgangmm/exist,lcahlander/exist,dizzzz/exist,olvidalo/exist,kohsah/exist,RemiKoutcherawy/exist,RemiKoutcherawy/exist,wshager/exist,wshager/exist,ambs/exist,patczar/exist,MjAbuz/exist,opax/exist,adamretter/exist,kohsah/exist,zwobit/exist,lcahlander/exist,zwobit/exist,patczar/exist,olvidalo/exist,opax/exist,wolfgangmm/exist,eXist-db/exist,dizzzz/exist,MjAbuz/exist,kohsah/exist,ljo/exist,windauer/exist,eXist-db/exist,opax/exist,eXist-db/exist,hungerburg/exist,windauer/exist,zwobit/exist,jessealama/exist,opax/exist,patczar/exist,windauer/exist,eXist-db/exist,kohsah/exist,shabanovd/exist,adamretter/exist,hungerburg/exist,jensopetersen/exist,zwobit/exist,wshager/exist,shabanovd/exist,jessealama/exist,jensopetersen/exist,ljo/exist,MjAbuz/exist,jessealama/exist,opax/exist,wolfgangmm/exist,hungerburg/exist,patczar/exist,wshager/exist,lcahlander/exist,ljo/exist,jessealama/exist,jessealama/exist,windauer/exist,olvidalo/exist,hungerburg/exist,wolfgangmm/exist,eXist-db/exist,dizzzz/exist,ambs/exist,joewiz/exist,windauer/exist,lcahlander/exist,olvidalo/exist,kohsah/exist,jensopetersen/exist,RemiKoutcherawy/exist,ambs/exist,adamretter/exist,dizzzz/exist,patczar/exist,ljo/exist,joewiz/exist,lcahlander/exist,shabanovd/exist,MjAbuz/exist,shabanovd/exist,ljo/exist,zwobit/exist,MjAbuz/exist,jensopetersen/exist,RemiKoutcherawy/exist,MjAbuz/exist,kohsah/exist,ambs/exist,lcahlander/exist,patczar/exist,dizzzz/exist,wshager/exist,ambs/exist,jensopetersen/exist,joewiz/exist,olvidalo/exist,RemiKoutcherawy/exist,joewiz/exist,wolfgangmm/exist,adamretter/exist,hungerburg/exist,joewiz/exist,shabanovd/exist,wolfgangmm/exist,dizzzz/exist,shabanovd/exist,jessealama/exist,windauer/exist,ljo/exist,adamretter/exist,eXist-db/exist,RemiKoutcherawy/exist,ambs/exist,zwobit/exist,joewiz/exist,adamretter/exist,wshager/exist,jensopetersen/exist | package org.exist.xquery.test;
import java.net.BindException;
import java.util.Iterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.transform.OutputKeys;
import org.custommonkey.xmlunit.XMLTestCase;
import org.exist.StandaloneServer;
import org.exist.storage.DBBroker;
import org.exist.xmldb.CollectionImpl;
import org.exist.xmldb.DatabaseInstanceManager;
import org.exist.xmldb.XPathQueryServiceImpl;
import org.exist.xquery.XPathException;
import org.mortbay.util.MultiException;
import org.w3c.dom.Node;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.CompiledExpression;
import org.xmldb.api.base.Database;
import org.xmldb.api.base.Resource;
import org.xmldb.api.base.ResourceIterator;
import org.xmldb.api.base.ResourceSet;
import org.xmldb.api.base.XMLDBException;
import org.xmldb.api.modules.CollectionManagementService;
import org.xmldb.api.modules.XMLResource;
import org.xmldb.api.modules.XPathQueryService;
import org.xmldb.api.modules.XQueryService;
public class XPathQueryTest extends XMLTestCase {
private final static String nested =
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<test><c></c><b><c><b></b></c></b><b></b><c></c></test>";
private final static String numbers =
"<test>"
+ "<item id='1' type='alphanum'><price>5.6</price><stock>22</stock></item>"
+ "<item id='2'><price>7.4</price><stock>43</stock></item>"
+ "<item id='3'><price>18.4</price><stock>5</stock></item>"
+ "<item id='4'><price>65.54</price><stock>16</stock></item>"
+ "</test>";
private final static String numbers2 =
"<test xmlns=\"http://numbers.org\">"
+ "<item id='1' type='alphanum'><price>5.6</price><stock>22</stock></item>"
+ "<item id='2'><price>7.4</price><stock>43</stock></item>"
+ "<item id='3'><price>18.4</price><stock>5</stock></item>"
+ "<item id='4'><price>65.54</price><stock>16</stock></item>"
+ "</test>";
private final static String namespaces =
"<test xmlns='http://www.foo.com'>"
+ " <section>"
+ " <title>Test Document</title>"
+ " <c:comment xmlns:c='http://www.other.com'>This is my comment</c:comment>"
+ " </section>"
+ "</test>";
private final static String strings =
"<test>"
+ "<string>Hello World!</string>"
+ "<string value='Hello World!'/>"
+ "<string>Hello</string>"
+ "</test>";
private final static String nested2 =
"<RootElement>" +
"<ChildA>" +
"<ChildB id=\"2\"/>" +
"</ChildA>" +
"</RootElement>";
private final static String nested3 =
"<test>" +
" <a>" +
" <t>1</t>" +
" <a>" +
" <t>2</t>" +
" <a>" +
" <t>3</t>" +
" </a>" +
" </a>" +
" </a>" +
"</test>";
private final static String siblings =
"<test>" +
" <a> <s>A</s> <n>1</n> </a>" +
" <a> <s>Z</s> <n>2</n> </a>" +
" <a> <s>B</s> <n>3</n> </a>" +
" <a> <s>Z</s> <n>4</n> </a>" +
" <a> <s>C</s> <n>5</n> </a>" +
" <a> <s>Z</s> <n>6</n> </a>" +
"</test>";
private final static String ids =
"<!DOCTYPE test [" +
"<!ELEMENT test (a | b | c | d)*>" +
"<!ATTLIST test xml:space CDATA #IMPLIED>" +
"<!ELEMENT a EMPTY>" +
"<!ELEMENT b (name)>" +
"<!ELEMENT c (name)>" +
"<!ELEMENT d EMPTY>" +
"<!ATTLIST d ref IDREF #IMPLIED>" +
"<!ELEMENT name (#PCDATA)>" +
"<!ATTLIST a ref IDREF #IMPLIED>" +
"<!ATTLIST b id ID #IMPLIED>" +
"<!ATTLIST c xml:id ID #IMPLIED>]>" +
"<test xml:space=\"preserve\">" +
"<a ref=\"id1\"/>" +
"<a ref=\"id1\"/>" +
"<d ref=\"id2\"/>" +
"<b id=\"id1\"><name>one</name></b>" +
"<c xml:id=\" id2 \"><name>two</name></c>" +
"</test>";
private final static String quotes =
"<test><title>"Hello"</title></test>";
private final static String ws =
"<test><parent xml:space=\"preserve\"><text> </text><text xml:space=\"default\"> </text></parent></test>";
private final static String self =
"<test-self><a>Hello</a><b>World!</b></test-self>";
private static String uri = "xmldb:exist://" + DBBroker.ROOT_COLLECTION;
public static void setURI(String collectionURI) {
uri = collectionURI;
}
private static StandaloneServer server = null;
private Collection testCollection;
private String query;
protected void setUp() {
if (uri.startsWith("xmldb:exist://localhost"))
initServer();
try {
// initialize driver
Class cl = Class.forName("org.exist.xmldb.DatabaseImpl");
Database database = (Database) cl.newInstance();
database.setProperty("create-database", "true");
DatabaseManager.registerDatabase(database);
Collection root =
DatabaseManager.getCollection(
uri,
"admin",
null);
CollectionManagementService service =
(CollectionManagementService) root.getService(
"CollectionManagementService",
"1.0");
testCollection = service.createCollection("test");
assertNotNull(testCollection);
} catch (ClassNotFoundException e) {
} catch (InstantiationException e) {
} catch (IllegalAccessException e) {
} catch (XMLDBException e) {
e.printStackTrace();
}
}
private void initServer() {
try {
if (server == null) {
server = new StandaloneServer();
if (!server.isStarted()) {
try {
System.out.println("Starting standalone server...");
String[] args = {};
server.run(args);
while (!server.isStarted()) {
Thread.sleep(1000);
}
} catch (MultiException e) {
boolean rethrow = true;
Iterator i = e.getExceptions().iterator();
while (i.hasNext()) {
Exception e0 = (Exception)i.next();
if (e0 instanceof BindException) {
System.out.println("A server is running already !");
rethrow = false;
break;
}
}
if (rethrow) throw e;
}
}
}
} catch(Exception e) {
fail(e.getMessage());
}
}
protected void tearDown() throws Exception {
try {
if (!((CollectionImpl) testCollection).isRemoteCollection()) {
DatabaseInstanceManager dim =
(DatabaseInstanceManager) testCollection.getService(
"DatabaseInstanceManager", "1.0");
dim.shutdown();
}
testCollection = null;
System.out.println("tearDown PASSED");
} catch (XMLDBException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public void testPathExpression() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
//Invalid path expression left operand (not a node set).
String message = "";
try {
queryAndAssert(service, "('a', 'b', 'c')/position()", -1, null);
} catch (XMLDBException e) {
message = e.getMessage();
}
assertTrue("Exception wanted: " + message, message.indexOf("XPTY0019") > -1);
//Undefined context sequence
message = "";
try {
queryAndAssert(service, "for $a in (<a/>, <b/>, doh, <c/>) return $a", -1, null);
} catch (XMLDBException e) {
message = e.getMessage();
}
assertTrue("Exception wanted: " + message, message.indexOf("XPDY0002") > -1);
message = "";
try {
//"1 to 2" is resolved as a (1, 2), i.e. a sequence of *integers* which is *not* a singleton
queryAndAssert(service, "let $a := (1, 2, 3) for $b in $a[1 to 2] return $b", -1, null);
} catch (XMLDBException e) {
message = e.getMessage();
}
//No effective boolean value for such a kind of sequence !
assertTrue("Exception wanted: " + message, message.indexOf("FORG0006") >-1);
queryAndAssert(service, "let $a := ('a', 'b', 'c') return $a[2 to 2]", 1, null);
queryAndAssert(service, "let $a := ('a', 'b', 'c') return $a[(2 to 2)]", 1, null);
queryAndAssert(service, "()/position()", 0, null);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
/** test simple queries involving attributes */
public void testAttributes() {
ResourceSet result;
try {
String testDocument = "numbers.xml";
String query;
XQueryService service = storeXMLStringAndGetQueryService(
testDocument, numbers);
query = "/test/item[ @id='1' ]";
result = service.queryResource(testDocument, query);
System.out.println("testAttributes 1: ========");
printResult(result);
assertEquals("XPath: " + query, 1, result.getSize());
XMLResource resource = (XMLResource)result.getResource(0);
Node node = resource.getContentAsDOM();
if (node.getNodeType() == Node.DOCUMENT_NODE)
node = node.getFirstChild();
assertEquals("XPath: " + query, "item", node.getNodeName());
query = "/test/item [ @type='alphanum' ]";
result = service.queryResource(testDocument, query);
System.out.println("testAttributes 2: ========");
printResult(result);
assertEquals("XPath: " + query, 1, result.getSize());
} catch (XMLDBException e) {
System.out.println("testAttributes(): XMLDBException: " + e);
fail(e.getMessage());
}
}
public void testStarAxis() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
result = service.queryResource("numbers.xml", "/*/item");
System.out.println("testStarAxis 1: ========");
printResult(result);
assertEquals("XPath: /*/item", 4, result.getSize());
result = service.queryResource("numbers.xml", "/test/*");
System.out.println("testStarAxis 2: ========");
printResult(result);
assertEquals("XPath: /test/*", 4, result.getSize());
result = service.queryResource("numbers.xml", "/test/descendant-or-self::*");
System.out.println("testStarAxis 3: ========");
printResult(result);
assertEquals( "XPath: /test/descendant-or-self::*", 13, result.getSize());
result = service.queryResource("numbers.xml", "/*/*");
System.out.println("testStarAxis 4: ========" );
printResult(result);
//Strange !!! Should be 8
assertEquals("XPath: /*/*", 4, result.getSize());
} catch (XMLDBException e) {
System.out.println("testStarAxis(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testStarAxisConstraints() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("namespaces.xml", namespaces);
service.setNamespace("t", "http://www.foo.com");
query = "// t:title/text() [ . != 'aaaa' ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxis2 : ========" ); printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
result = service.queryResource("namespaces.xml", "/t:test/*:section[. &= 'comment']");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "/t:test/t:*[. &= 'comment']");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "/t:test/t:section[. &= 'comment']");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "/t:test/t:section/*[. &= 'comment']");
assertEquals("", 1, result.getSize());
query = "/ * / * [ t:title ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxis2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
query = "/ t:test / t:section [ t:title ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxis2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
query = "/ t:test / t:section";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxis2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
} catch (XMLDBException e) {
System.out.println("testStarAxis(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testStarAxisConstraints2() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("namespaces.xml", namespaces);
service.setNamespace("t", "http://www.foo.com");
query = "/ * [ ./ * / t:title ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxisConstraints2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
query = "/ * [ * / t:title ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxisConstraints2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
} catch (XMLDBException e) {
//org.xmldb.api.base.XMLDBException: Internal evaluation error: context node is missing for node 3 !
System.out.println("testStarAxisConstraints2(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testStarAxisConstraints3() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("namespaces.xml", namespaces);
service.setNamespace("t", "http://www.foo.com");
query = "// * [ . = 'Test Document' ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxisConstraints3 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
query = "// * [ . &= 'Test Document' ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxisConstraints3 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
} catch (XMLDBException e) {
//org.xmldb.api.base.XMLDBException: Internal evaluation error: context node is missing !
System.out.println("testStarAxisConstraints3(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testRoot() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested2.xml", nested2);
String query = "let $doc := <a><b/></a> return root($doc)";
ResourceSet result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 1, result.getSize());
XMLResource resource = (XMLResource)result.getResource(0);
assertEquals("XPath: " + query, "a", resource.getContentAsDOM().getFirstChild().getLocalName());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testParentAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested2.xml", nested2);
queryResource(service, "nested2.xml", "(<a/>, <b/>, <c/>)/parent::*", 0);
queryResource(service, "nested2.xml", "/RootElement//ChildB/parent::*", 1);
queryResource(service, "nested2.xml", "/RootElement//ChildB/parent::*/ChildB", 1);
queryResource(service, "nested2.xml", "/RootElement/ChildA/parent::*/ChildA/ChildB", 1);
service =
storeXMLStringAndGetQueryService("numbers.xml", numbers2);
service.setNamespace("n", "http://numbers.org");
queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::*[@id = '3']", 1);
queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::n:item[@id = '3']", 1);
queryResource(service, "numbers.xml", "//n:price/parent::n:item[@id = '3']", 1);
ResourceSet result =
queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::n:*/string(@id)", 1);
assertEquals(result.getResource(0).getContent().toString(), "3");
result = queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::*:item/string(@id)", 1);
assertEquals(result.getResource(0).getContent().toString(), "3");
result = queryResource(service, "numbers.xml", "//n:price[. = 18.4]/../string(@id)", 1);
assertEquals(result.getResource(0).getContent().toString(), "3");
result = queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::n:item/string(@id)", 1);
assertEquals(result.getResource(0).getContent().toString(), "3");
queryResource(service, "numbers.xml",
"for $price in //n:price where $price/parent::*[@id = '3']/n:stock = '5' return $price", 1);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testParentSelfAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested2.xml", nested2);
queryResource(service, "nested2.xml", "/RootElement/descendant::*/parent::ChildA", 1);
queryResource(service, "nested2.xml", "/RootElement/descendant::*[self::ChildB]/parent::RootElement", 0);
queryResource(service, "nested2.xml", "/RootElement/descendant::*[self::ChildA]/parent::RootElement", 1);
queryResource(service, "nested2.xml", "let $a := ('', 'b', '', '') for $b in $a[.] return <blah>{$b}</blah>", 1);
String query = "let $doc := <root><page><a>a</a><b>b</b></page></root>" +
"return " +
"for $element in $doc/page/* " +
"return " +
"if($element[self::a] or $element[self::b]) then (<found/>) else (<notfound/>)";
ResourceSet result = service.queryResource("numbers.xml", query);
assertEquals(2, result.getSize());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testSelfAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("self.xml", self);
queryResource(service, "self.xml", "/test-self/*[not(self::a)]", 1);
queryResource(service, "self.xml", "/test-self/*[self::a]", 1);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testAncestorAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested3.xml", nested3);
// test ancestor axis with positional predicate
queryResource(service, "nested3.xml", "//a[ancestor::a[2]/t = '1']", 1);
queryResource(service, "nested3.xml", "//a[ancestor::*[2]/t = '1']", 1);
queryResource(service, "nested3.xml", "//a[ancestor::a[1]/t = '2']", 1);
queryResource(service, "nested3.xml", "//a[ancestor::*[1]/t = '2']", 1);
queryResource(service, "nested3.xml", "//a[ancestor-or-self::*[3]/t = '1']", 1);
queryResource(service, "nested3.xml", "//a[ancestor-or-self::a[3]/t = '1']", 1);
// Following test fails
// queryResource(service, "nested3.xml", "//a[ancestor-or-self::*[2]/t = '2']", 1);
queryResource(service, "nested3.xml", "//a[ancestor-or-self::a[2]/t = '2']", 1);
queryResource(service, "nested3.xml", "//a[t = '3'][ancestor-or-self::a[3]/t = '1']", 1);
queryResource(service, "nested3.xml", "//a[t = '3'][ancestor-or-self::*[3]/t = '1']", 1);
} catch (XMLDBException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public void testAncestorIndex() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested2.xml", nested2);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[1]/self::ChildA", 1);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[2]/self::RootElement", 1);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[position() = 1]/self::ChildA", 1);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[position() = 2]/self::RootElement", 1);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[position() = 2]/self::RootElement", 1);
queryResource(service, "nested2.xml", "(<a/>, <b/>, <c/>)/ancestor::*", 0);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testPrecedingSiblingAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("siblings.xml", siblings);
service.setProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
service.setProperty(OutputKeys.INDENT, "no");
ResourceSet result = queryResource(service, "siblings.xml", "//a[preceding-sibling::*[1]/s = 'B']", 1);
assertXMLEqual("<a><s>Z</s><n>4</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[preceding-sibling::a[1]/s = 'B']", 1);
assertXMLEqual("<a><s>Z</s><n>4</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[preceding-sibling::*[2]/s = 'B']", 1);
assertXMLEqual("<a><s>C</s><n>5</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[preceding-sibling::a[2]/s = 'B']", 1);
assertXMLEqual("<a><s>C</s><n>5</n></a>", result.getResource(0).getContent().toString());
queryResource(service, "siblings.xml", "(<a/>, <b/>, <c/>)/following-sibling::*", 0);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public void testFollowingSiblingAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("siblings.xml", siblings);
service.setProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
service.setProperty(OutputKeys.INDENT, "no");
ResourceSet result = queryResource(service, "siblings.xml", "//a[following-sibling::*[1]/s = 'B']", 1);
assertXMLEqual("<a><s>Z</s><n>2</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[following-sibling::a[1]/s = 'B']", 1);
assertXMLEqual("<a><s>Z</s><n>2</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[following-sibling::*[2]/s = 'B']", 1);
assertXMLEqual("<a><s>A</s><n>1</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[following-sibling::a[2]/s = 'B']", 1);
assertXMLEqual("<a><s>A</s><n>1</n></a>", result.getResource(0).getContent().toString());
queryResource(service, "siblings.xml", "(<a/>, <b/>, <c/>)/following-sibling::*", 0);
} catch (Exception e) {
fail(e.getMessage());
}
}
public void testFollowingAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("siblings.xml", siblings);
service.setProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
service.setProperty(OutputKeys.INDENT, "no");
queryResource(service, "siblings.xml", "//a/s[. = 'B']/following::s", 3);
ResourceSet result = queryResource(service, "siblings.xml", "//a/s[. = 'B']/following::s[1]", 1);
assertXMLEqual("<s>Z</s>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a/s[. = 'B']/following::s[2]", 1);
assertXMLEqual("<s>C</s>", result.getResource(0).getContent().toString());
} catch (Exception e) {
fail(e.getMessage());
}
}
public void testPosition() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
queryResource(service, "numbers.xml", "//item[position() = 3]", 1);
queryResource(service, "numbers.xml", "//item[position() < 3]", 2);
queryResource(service, "numbers.xml", "//item[position() <= 3]", 3);
queryResource(service, "numbers.xml", "//item[position() > 3]", 1);
queryResource(service, "numbers.xml", "//item[position() >= 3]", 2);
queryResource(service, "numbers.xml", "//item[position() eq 3]", 1);
queryResource(service, "numbers.xml", "//item[position() lt 3]", 2);
queryResource(service, "numbers.xml", "//item[position() le 3]", 3);
queryResource(service, "numbers.xml", "//item[position() gt 3]", 1);
queryResource(service, "numbers.xml", "//item[position() ge 3]", 2);
// Currently fails with error XPTY0004
queryResource(service, "numbers.xml", "//item[last() - 1]", 1);
queryResource(service, "numbers.xml", "//item[count(('a','b')) - 1]", 1);
String query = "for $a in (<a/>, <b/>, <c/>) return $a/position()";
ResourceSet result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 3, result.getSize());
XMLResource resource = (XMLResource)result.getResource(0);
assertEquals("XPath: " + query, "1", resource.getContent().toString());
resource = (XMLResource)result.getResource(1);
assertEquals("XPath: " + query, "1", resource.getContent().toString());
resource = (XMLResource)result.getResource(2);
assertEquals("XPath: " + query, "1", resource.getContent().toString());
query = "declare variable $doc { <root>" +
"<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a><a>6</a><a>7</a>" +
"</root> }; " +
"(for $x in $doc/a return $x)[position() mod 3 = 2]";
result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 2, result.getSize());
query = "declare variable $doc { <root>" +
"<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a><a>6</a><a>7</a>" +
"</root> }; " +
"for $x in $doc/a return $x[position() mod 3 = 2]";
result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 0, result.getSize());
query = "declare variable $doc { <root>" +
"<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a><a>6</a><a>7</a>" +
"</root> }; " +
"for $x in $doc/a[position() mod 3 = 2] return $x";
result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 2, result.getSize());
// TODO: clarify
// query = "let $a := ('a', 'b', 'c') for $b in $a[position()] return <blah>{$b}</blah>";
// result = service.queryResource("numbers.xml", query);
// assertEquals("XPath: " + query, 3, result.getSize());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testNumbers() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
ResourceSet result = queryResource(service, "numbers.xml", "sum(/test/item/price)", 1);
assertEquals( "96.94", result.getResource(0).getContent() );
result = queryResource(service, "numbers.xml", "round(sum(/test/item/price))", 1);
assertEquals( "97", result.getResource(0).getContent() );
result = queryResource(service, "numbers.xml", "floor(sum(/test/item/stock))", 1);
assertEquals( "86", result.getResource(0).getContent());
queryResource(service, "numbers.xml", "/test/item[round(price + 3) > 60]", 1);
result = queryResource(service, "numbers.xml", "min(( 123456789123456789123456789, " +
"123456789123456789123456789123456789123456789 ))", 1);
assertEquals("minimum of big integers",
"123456789123456789123456789",
result.getResource(0).getContent() );
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testPredicates() throws Exception {
String numbers =
"<test>"
+ "<item id='1' type='alphanum'><price>5.6</price><stock>22</stock></item>"
+ "<item id='2'><price>7.4</price><stock>43</stock></item>"
+ "<item id='3'><price>18.4</price><stock>5</stock></item>"
+ "<item id='4'><price>65.54</price><stock>16</stock></item>"
+ "</test>";
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
service.setProperty(OutputKeys.INDENT, "no");
ResourceSet result = queryResource(service, "numbers.xml", "/test/item[2]/price/text()", 1);
assertEquals("7.4", result.getResource(0).getContent().toString());
result = queryResource(service, "numbers.xml", "/test/item[5]", 0);
result = queryResource(service, "numbers.xml", "/test/item[@id='4'][1]/price[1]/text()", 1);
assertEquals("65.54",
result.getResource(0).getContent().toString());
result = queryResource(service, "numbers.xml", "for $i in //item return " +
"<item>{$i/price, $i/stock}</item>", 4);
assertXMLEqual("<item><price>5.6</price><stock>22</stock></item>",
result.getResource(0).getContent().toString());
assertXMLEqual("<item><price>65.54</price><stock>16</stock></item>",
result.getResource(3).getContent().toString());
// test positional predicates
result = queryResource(service, "numbers.xml", "/test/node()[2]", 1);
assertXMLEqual("<item id='2'><price>7.4</price><stock>43</stock></item>",
result.getResource(0).getContent().toString());
result = queryResource(service, "numbers.xml", "/test/element()[2]", 1);
assertXMLEqual("<item id='2'><price>7.4</price><stock>43</stock></item>",
result.getResource(0).getContent().toString());
// positional predicate on sequence of atomic values
result = queryResource(service, "numbers.xml", "('test', 'pass')[2]", 1);
assertEquals(result.getResource(0).getContent().toString(), "pass");
result = queryResource(service, "numbers.xml", "let $credentials := ('test', 'pass') let $user := $credentials[1] return $user", 1);
assertEquals(result.getResource(0).getContent().toString(), "test");
result = queryResource(service, "numbers.xml", "let $credentials := ('test', 'pass') let $user := $credentials[2] return $user", 1);
assertEquals(result.getResource(0).getContent().toString(), "pass");
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testPredicates2() throws Exception {
String numbers =
"<test>"
+ "<item id='1' type='alphanum'><price>5.6</price><stock>22</stock></item>"
+ "<item id='2'><price>7.4</price><stock>43</stock></item>"
+ "<item id='3'><price>18.4</price><stock>5</stock></item>"
+ "<item id='4'><price>65.54</price><stock>16</stock></item>"
+ "</test>";
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
service.setProperty(OutputKeys.INDENT, "no");
String query = "let $t := <test>" + "<a> <s>A</s> 1 </a>"
+ "<a> <s>Z</s> 2 </a>" + "<a> <s>B</s> 3 </a>"
+ "<a> <s>Z</s> 4 </a>" + "<a> <s>C</s> 5 </a>"
+ "<a> <s>Z</s> 6 </a>" + "</test>"
+ "return $t//a[s='Z' and preceding-sibling::*[1]/s='B']";
ResourceSet result = queryResource(service, "numbers.xml", query, 1);
assertXMLEqual("<a><s>Z</s> 4 </a>", result.getResource(0)
.getContent().toString());
query = "let $t := <test>" + "<a> <s>A</s> 1 </a>"
+ "<a> <s>Z</s> 2 </a>" + "<a> <s>B</s> 3 </a>"
+ "<a> <s>Z</s> 4 </a>" + "<a> <s>C</s> 5 </a>"
+ "<a> <s>Z</s> 6 </a>" + "</test>"
+ "return $t//a[s='Z' and ./preceding-sibling::*[1]/s='B']";
result = queryResource(service, "numbers.xml", query, 1);
assertXMLEqual("<a><s>Z</s> 4 </a>", result.getResource(0)
.getContent().toString());
query = "let $doc := <doc><rec n='1'><a>first</a><b>second</b></rec>" +
"<rec n='2'><a>first</a><b>third</b></rec></doc> " +
"return $doc//rec[fn:not(b = 'second') and (./a = 'first')]";
result = queryResource(service, "numbers.xml", query, 1);
assertXMLEqual("<rec n=\"2\"><a>first</a><b>third</b></rec>", result.getResource(0)
.getContent().toString());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
// @see http://sourceforge.net/tracker/index.php?func=detail&aid=1460610&group_id=17691&atid=117691
public void testPredicatesBUG1460610() throws Exception {
String xQuery = "(1, 2, 3)[ . lt 3]";
XQueryService service = getQueryService();
ResourceSet rs = service.query(xQuery);
assertEquals("SFBUG 1460610 nr of results", 2, rs.getSize());
assertEquals("SFBUG 1460610 1st result", "1",
rs.getResource(0).getContent());
assertEquals("SFBUG 1460610 2nd result", "2",
rs.getResource(1).getContent());
}
public void testStrings() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("strings.xml", strings);
ResourceSet result = queryResource(service, "strings.xml", "substring(/test/string[1], 1, 5)", 1);
assertEquals( "Hello", result.getResource(0).getContent() );
queryResource(service, "strings.xml", "/test/string[starts-with(string(.), 'Hello')]", 2);
result = queryResource(service, "strings.xml", "count(/test/item/price)", 1,
"Query should return an empty set (wrong document)");
assertEquals("0", result.getResource(0).getContent());
} catch (XMLDBException e) {
System.out.println("testStrings(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testQuotes() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("quotes.xml", quotes);
// ResourceSet result =
queryResource(service, "quotes.xml", "/test[title = '"Hello"']", 1);
service.declareVariable("content", ""Hello"");
// result =
queryResource(service, "quotes.xml", "/test[title = $content]", 1);
} catch (XMLDBException e) {
System.out.println("testQuotes(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testBoolean() {
try {
System.out.println("Testing effective boolean value of expressions ...");
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
ResourceSet result = queryResource(service, "numbers.xml", "boolean(1.0)", 1);
assertEquals("boolean value of 1.0 should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(0.0)", 1);
assertEquals("boolean value of 0.0 should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:double(0.0))", 1);
assertEquals("boolean value of double 0.0 should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:double(1.0))", 1);
assertEquals("boolean value of double 1.0 should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:float(1.0))", 1);
assertEquals("boolean value of float 1.0 should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:float(0.0))", 1);
assertEquals("boolean value of float 0.0 should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:integer(0))", 1);
assertEquals("boolean value of integer 0 should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:integer(1))", 1);
assertEquals("boolean value of integer 1 should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "'true' cast as xs:boolean", 1);
assertEquals("boolean value of 'true' cast to xs:boolean should be true",
"true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "'false' cast as xs:boolean", 1);
assertEquals("boolean value of 'false' cast to xs:boolean should be false",
"false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean('Hello')", 1);
assertEquals("boolean value of string 'Hello' should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean('')", 1);
assertEquals("boolean value of empty string should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(())", 1);
assertEquals("boolean value of empty sequence should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(('Hello'))", 1);
assertEquals("boolean value of sequence with non-empty string should be true",
"true", result.getResource(0).getContent());
// result = queryResource(service, "numbers.xml", "boolean((0.0, 0.0))", 1);
// assertEquals("boolean value of sequence with two elements should be true", "true",
// result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(//item[@id = '1']/price)", 1);
assertEquals("boolean value of 5.6 should be true", "true",
result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(current-time())", 1);
assertEquals("boolean value of current-time() should be true", "true",
result.getResource(0).getContent());
} catch (XMLDBException e) {
System.out.println("testBoolean(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testNot() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("strings.xml", strings);
queryResource(service, "strings.xml", "/test/string[not(@value)]", 2);
ResourceSet result = queryResource(service, "strings.xml", "not(/test/abcd)", 1);
Resource r = result.getResource(0);
assertEquals("true", r.getContent().toString());
result = queryResource(service, "strings.xml", "not(/test)", 1);
r = result.getResource(0);
assertEquals("false", r.getContent().toString());
result = queryResource(service, "strings.xml", "/test/string[not(@id)]", 3);
r = result.getResource(0);
assertEquals("<string>Hello World!</string>", r.getContent().toString());
// test with non-existing items
queryResource( service, "strings.xml", "document()/blah[not(blah)]", 0);
queryResource(service, "strings.xml", "//*[string][not(@value)]", 1);
queryResource(service, "strings.xml", "//*[string][not(@blah)]", 1);
queryResource(service, "strings.xml", "//*[blah][not(@blah)]", 0);
} catch (XMLDBException e) {
System.out.println("testStrings(): XMLDBException: "+e);
e.printStackTrace();
fail(e.getMessage());
}
}
public void testIds() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("ids.xml", ids);
queryResource(service, "ids.xml", "//a/id(@ref)", 1);
queryResource(service, "ids.xml", "id(//a/@ref)", 1);
ResourceSet result = queryResource(service, "ids.xml", "//a/id(@ref)/name", 1);
Resource r = result.getResource(0);
assertEquals("<name>one</name>", r.getContent().toString());
result = queryResource(service, "ids.xml", "//d/id(@ref)/name", 1);
r = result.getResource(0);
assertEquals("<name>two</name>", r.getContent().toString());
String update = "update insert <t xml:id=\"id3\">Hello</t> into /test";
queryResource(service, "ids.xml", update, 0);
queryResource(service, "ids.xml", "id('id3')", 1);
update = "update value //t/@xml:id with 'id4'";
queryResource(service, "ids.xml", update, 0);
queryResource(service, "ids.xml", "id('id4')", 1);
} catch (XMLDBException e) {
System.out.println("testIds(): XMLDBException: "+e);
e.printStackTrace();
fail(e.getMessage());
}
}
public void testExternalVars() {
try {
XQueryService service = (XQueryService)
storeXMLStringAndGetQueryService("strings.xml", strings);
String query =
"declare variable $local:string external;" +
"/test/string[. = $local:string]";
CompiledExpression expr = service.compile(query);
service.declareVariable("local:string", "Hello");
ResourceSet result = service.execute(expr);
XMLResource r = (XMLResource) result.getResource(0);
Node node = r.getContentAsDOM();
if (node.getNodeType() == Node.DOCUMENT_NODE)
node = node.getFirstChild();
assertEquals("string", node.getNodeName());
} catch (XMLDBException e) {
System.out.println("testExternalVars(): XMLDBException");
e.printStackTrace();
fail(e.getMessage());
}
}
public void testQueryResource() {
try {
XMLResource doc =
(XMLResource) testCollection.createResource(
"strings.xml", "XMLResource" );
doc.setContent(strings);
testCollection.storeResource(doc);
doc =
(XMLResource) testCollection.createResource(
"strings2.xml", "XMLResource" );
doc.setContent(strings);
testCollection.storeResource(doc);
XPathQueryService query =
(XPathQueryService) testCollection.getService(
"XPathQueryService",
"1.0");
ResourceSet result = query.queryResource("strings2.xml", "/test/string[. = 'Hello World!']");
assertEquals(1, result.getSize());
result = query.query("/test/string[. = 'Hello World!']");
assertEquals(2, result.getSize());
} catch (XMLDBException e) {
System.out.println("testQueryResource(): XMLDBException: "+e);
fail(e.getMessage());
}
}
/** test involving ancestor::
* >>>>>>> currently this produces variable corruption :
* The result is the ancestor <<<<<<<<<< */
public void testAncestor() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers );
query =
"let $all_items := /test/item " +
"(: Note: variable non used but computed anyway :)" +
"let $unused_variable :=" +
" for $one_item in $all_items " +
" / ancestor::* (: <<<<< if you remove this line all is normal :)" +
" return 'foo'" +
"return $all_items";
result = service.queryResource("numbers.xml", query );
assertEquals(4, result.getSize());
} catch (XMLDBException e) {
System.out.println("testAncestor(): XMLDBException: "+e);
fail(e.getMessage());
}
}
/** Helper that performs an XQuery and does JUnit assertion on result size.
* @see #queryResource(XQueryService, String, String, int, String)
*/
private ResourceSet queryResource(XQueryService service, String resource,
String query, int expected) throws XMLDBException {
return queryResource(service, resource, query, expected, null);
}
/** Helper that performs an XQuery and does JUnit assertion on result size.
* @param service XQuery service
* @param resource database resource (collection) to query
* @param query
* @param expected size of result
* @param message for JUnit
* @return a ResourceSet, allowing to do more assertions if necessary.
* @throws XMLDBException
*/
private ResourceSet queryResource(XQueryService service, String resource,
String query, int expected, String message) throws XMLDBException {
ResourceSet result = service.queryResource(resource, query);
if(message == null)
assertEquals(query, expected, result.getSize());
else
assertEquals(message, expected, result.getSize());
return result;
}
/** For queries without associated data */
private ResourceSet queryAndAssert(XQueryService service, String query,
int expected, String message) throws XMLDBException {
ResourceSet result = service.query(query);
if(message == null)
assertEquals(expected, result.getSize());
else
assertEquals(message, expected, result.getSize());
return result;
}
/** For queries without associated data */
private XQueryService getQueryService() throws XMLDBException {
XQueryService service = (XQueryService) testCollection.getService(
"XPathQueryService", "1.0");
return service;
}
/** stores XML String and get Query Service
* @param documentName to be stored in the DB
* @param content to be stored in the DB
* @return the XQuery Service
* @throws XMLDBException
*/
private XQueryService storeXMLStringAndGetQueryService(String documentName,
String content) throws XMLDBException {
XMLResource doc =
(XMLResource) testCollection.createResource(
documentName, "XMLResource" );
doc.setContent(content);
testCollection.storeResource(doc);
XQueryService service =
(XQueryService) testCollection.getService(
"XPathQueryService",
"1.0");
return service;
}
public void testNamespaces() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("namespaces.xml", namespaces);
service.setNamespace("t", "http://www.foo.com");
service.setNamespace("c", "http://www.other.com");
ResourceSet result =
service.queryResource("namespaces.xml", "//t:section");
assertEquals(1, result.getSize());
result =
service.queryResource("namespaces.xml", "/t:test//c:comment");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "//c:*");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "//*:comment");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "namespace-uri(//t:test)");
assertEquals(1, result.getSize());
assertEquals("http://www.foo.com", result.getResource(0).getContent());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testPreserveSpace() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("whitespace.xml", ws);
ResourceSet result =
service.queryResource("whitespace.xml", "//text");
assertEquals(2, result.getSize());
String item = result.getResource(0).getContent().toString();
assertXMLEqual("<text> </text>", item);
item = result.getResource(1).getContent().toString();
assertXMLEqual("<text xml:space=\"default\"/>", item);
} catch (Exception e) {
fail(e.getMessage());
}
}
public void testNestedElements() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested.xml", nested);
ResourceSet result = service.queryResource("nested.xml", "//c");
printResult(result);
assertEquals( 3, result.getSize() );
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testStaticVariables() {
ResourceSet result = null;
try {
XMLResource doc =
(XMLResource) testCollection.createResource(
"numbers.xml", "XMLResource" );
doc.setContent(numbers);
testCollection.storeResource(doc);
XPathQueryService service =
(XPathQueryService) testCollection.getService(
"XPathQueryService",
"1.0");
XPathQueryServiceImpl service2 = (XPathQueryServiceImpl) service;
service2.declareVariable("name", "MONTAGUE");
service2.declareVariable("name", "43");
//ResourceSet result = service.query("//SPEECH[SPEAKER=$name]");
result = service2.query( doc, "//item[stock=$name]");
System.out.println( "testStaticVariables 1: ========" ); printResult(result);
result = service2.query("$name");
assertEquals( 1, result.getSize() );
System.out.println("testStaticVariables 2: ========" ); printResult(result);
result = service2.query( doc, "//item[stock=43]");
assertEquals( 1, result.getSize() );
System.out.println("testStaticVariables 3: ========" ); printResult(result);
result = service2.query( doc, "//item");
assertEquals( 4, result.getSize() );
// assertEquals( 10, result.getSize() );
} catch (XMLDBException e) {
System.out.println("testStaticVariables(): XMLDBException: "+e);
fail(e.getMessage());
}
}
/**
* @param result
* @throws XMLDBException
*/
private void printResult(ResourceSet result) throws XMLDBException {
for (ResourceIterator i = result.getIterator();
i.hasMoreResources();
) {
Resource r = i.nextResource();
System.out.println(r.getContent());
}
}
public void testMembersAsResource() {
try {
// XPathQueryService service =
// (XPathQueryService) testCollection.getService(
// "XPathQueryService",
// "1.0");
// ResourceSet result = service.query("//SPEECH[LINE &= 'marriage']");
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
ResourceSet result = service.query("//item/price");
Resource r = result.getMembersAsResource();
String content = (String)r.getContent();
System.out.println(content);
Pattern p = Pattern.compile( ".*(<price>.*){4}", Pattern.DOTALL);
Matcher m = p.matcher(content);
assertTrue( "get whole document numbers.xml", m.matches() );
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testSatisfies() {
try {
XQueryService service = getQueryService();
ResourceSet result;
result = queryAndAssert( service,
"every $foo in (1,2,3) satisfies" +
" let $bar := 'baz'" +
" return false() ",
1, "" );
assertEquals( "satisfies + FLWR expression allways false 1", "false", result.getResource(0).getContent() );
result = queryAndAssert( service,
"declare function local:foo() { false() };" +
" every $bar in (1,2,3) satisfies" +
" local:foo()",
1, "" );
assertEquals( "satisfies + FLWR expression allways false 2", "false", result.getResource(0).getContent() );
query = "every $x in () satisfies false()";
result = queryAndAssert( service, query, 1, "" );
assertEquals( query, "true", result.getResource(0).getContent() );
query = "some $x in () satisfies true()";
result = queryAndAssert( service, query, 1, "" );
assertEquals( query, "false", result.getResource(0).getContent() );
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testIntersect() {
try {
XQueryService service = getQueryService();
ResourceSet result;
query = "() intersect ()";
result = queryAndAssert( service, query, 0, "");
query = "() intersect (1)";
result = queryAndAssert( service, query, 0, "");
query = "(1) intersect ()";
result = queryAndAssert( service, query, 0, "");
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testUnion() {
try {
XQueryService service = getQueryService();
ResourceSet result;
query = "() union ()";
result = queryAndAssert( service, query, 0, "");
boolean exceptionThrown = false;
String message = "";
try {
query = "() union (1)";
result = queryAndAssert( service, query, 0, "");
} catch (XMLDBException e) {
exceptionThrown = true;
message = e.getMessage();
}
assertTrue(message.indexOf("XPTY0004") > -1);
exceptionThrown = false;
message = "";
try {
query = "(1) union ()";
result = queryAndAssert( service, query, 0, "");
} catch (XMLDBException e) {
exceptionThrown = true;
message = e.getMessage();
}
assertTrue(message.indexOf("XPTY0004") > -1);
query = "<a/> union ()";
result = queryAndAssert( service, query, 1, "");
query = "() union <a/>";
result = queryAndAssert( service, query, 1, "");
//Not the same nodes
query = "<a/> union <a/>";
result = queryAndAssert( service, query, 2, "");
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testExcept() {
try {
XQueryService service = getQueryService();
query = "() except ()";
queryAndAssert( service, query, 0, "");
query = "() except (1)";
queryAndAssert( service, query, 0, "");
String message = "";
try {
query = "(1) except ()";
queryAndAssert( service, query, 0, "");
} catch (XMLDBException e) {
message = e.getMessage();
}
assertTrue(message.indexOf("XPTY0004") > -1);
query = "<a/> except ()";
queryAndAssert( service, query, 1, "");
query = "() except <a/>";
queryAndAssert( service, query, 0, "");
//Not the same nodes
query = "<a/> except <a/>";
queryAndAssert( service, query, 1, "");
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testConvertToBoolean() throws XMLDBException {
XQueryService service = getQueryService();
ResourceSet result;
try {
result = queryAndAssert(
service,
"let $doc := <element attribute=''/>" + "return ("
+ " <true>{boolean(($doc,2,3))}</true> ,"
+ " <true>{boolean(($doc/@*,2,3))}</true> ,"
+ " <true>{boolean(true())}</true> ,"
+ " <true>{boolean('test')}</true> ,"
+ " <true>{boolean(number(1))}</true> ,"
+ " <false>{boolean((0))}</false> ,"
+ " <false>{boolean(false())}</false> ,"
+ " <false>{boolean('')}</false> ,"
+ " <false>{boolean(number(0))}</false> ,"
+ " <false>{boolean(number('NaN'))}</false>" + ")",
10, "");
for (int i = 0; i < 5; i++) {
assertEquals("true " + (i + 1), "<true>true</true>", result
.getResource(i).getContent());
}
for (int i = 5; i < 10; i++) {
assertEquals("false " + (i + 1), "<false>false</false>", result
.getResource(i).getContent());
}
} catch (XMLDBException e) {
fail(e.getMessage());
}
boolean exceptionThrown = false;
String message = "";
try {
result = queryAndAssert(service,
"let $doc := <element attribute=''/>"
+ " return boolean( (1,2,$doc) )", 1, "");
} catch (XMLDBException e) {
exceptionThrown = true;
message = e.getMessage();
}
assertTrue("Exception wanted: " + message, exceptionThrown);
}
public void testCompile() throws XMLDBException {
String invalidQuery = "for $i in (1 to 10)\n return $b";
String validQuery = "for $i in (1 to 10) return $i";
String validModule = "module namespace foo=\"urn:foo\";\n" +
"declare function foo:test() { \"Hello World!\" };";
String invalidModule = "module namespace foo=\"urn:foo\";\n" +
"declare function foo:test() { \"Hello World! };";
org.exist.xmldb.XQueryService service = (org.exist.xmldb.XQueryService) getQueryService();
boolean exceptionOccurred = false;
try {
service.compile(invalidQuery);
} catch (XMLDBException e) {
assertEquals(((XPathException)e.getCause()).getLine(), 2);
exceptionOccurred = true;
}
assertTrue("Expected an exception", exceptionOccurred);
exceptionOccurred = false;
try {
service.compileAndCheck(invalidModule);
} catch (XPathException e) {
exceptionOccurred = true;
}
assertTrue("Expected an exception", exceptionOccurred);
try {
service.compile(validQuery);
service.compile(validModule);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public static void main(String[] args) {
junit.textui.TestRunner.run(XPathQueryTest.class);
}
} | src/org/exist/xquery/test/XPathQueryTest.java | package org.exist.xquery.test;
import java.net.BindException;
import java.util.Iterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.transform.OutputKeys;
import org.custommonkey.xmlunit.XMLTestCase;
import org.exist.StandaloneServer;
import org.exist.storage.DBBroker;
import org.exist.xmldb.CollectionImpl;
import org.exist.xmldb.DatabaseInstanceManager;
import org.exist.xmldb.XPathQueryServiceImpl;
import org.exist.xquery.XPathException;
import org.mortbay.util.MultiException;
import org.w3c.dom.Node;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.CompiledExpression;
import org.xmldb.api.base.Database;
import org.xmldb.api.base.Resource;
import org.xmldb.api.base.ResourceIterator;
import org.xmldb.api.base.ResourceSet;
import org.xmldb.api.base.XMLDBException;
import org.xmldb.api.modules.CollectionManagementService;
import org.xmldb.api.modules.XMLResource;
import org.xmldb.api.modules.XPathQueryService;
import org.xmldb.api.modules.XQueryService;
public class XPathQueryTest extends XMLTestCase {
private final static String nested =
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<test><c></c><b><c><b></b></c></b><b></b><c></c></test>";
private final static String numbers =
"<test>"
+ "<item id='1' type='alphanum'><price>5.6</price><stock>22</stock></item>"
+ "<item id='2'><price>7.4</price><stock>43</stock></item>"
+ "<item id='3'><price>18.4</price><stock>5</stock></item>"
+ "<item id='4'><price>65.54</price><stock>16</stock></item>"
+ "</test>";
private final static String numbers2 =
"<test xmlns=\"http://numbers.org\">"
+ "<item id='1' type='alphanum'><price>5.6</price><stock>22</stock></item>"
+ "<item id='2'><price>7.4</price><stock>43</stock></item>"
+ "<item id='3'><price>18.4</price><stock>5</stock></item>"
+ "<item id='4'><price>65.54</price><stock>16</stock></item>"
+ "</test>";
private final static String namespaces =
"<test xmlns='http://www.foo.com'>"
+ " <section>"
+ " <title>Test Document</title>"
+ " <c:comment xmlns:c='http://www.other.com'>This is my comment</c:comment>"
+ " </section>"
+ "</test>";
private final static String strings =
"<test>"
+ "<string>Hello World!</string>"
+ "<string value='Hello World!'/>"
+ "<string>Hello</string>"
+ "</test>";
private final static String nested2 =
"<RootElement>" +
"<ChildA>" +
"<ChildB id=\"2\"/>" +
"</ChildA>" +
"</RootElement>";
private final static String nested3 =
"<test>" +
" <a>" +
" <t>1</t>" +
" <a>" +
" <t>2</t>" +
" <a>" +
" <t>3</t>" +
" </a>" +
" </a>" +
" </a>" +
"</test>";
private final static String siblings =
"<test>" +
" <a> <s>A</s> <n>1</n> </a>" +
" <a> <s>Z</s> <n>2</n> </a>" +
" <a> <s>B</s> <n>3</n> </a>" +
" <a> <s>Z</s> <n>4</n> </a>" +
" <a> <s>C</s> <n>5</n> </a>" +
" <a> <s>Z</s> <n>6</n> </a>" +
"</test>";
private final static String ids =
"<!DOCTYPE test [" +
"<!ELEMENT test (a | b | c | d)*>" +
"<!ATTLIST test xml:space CDATA #IMPLIED>" +
"<!ELEMENT a EMPTY>" +
"<!ELEMENT b (name)>" +
"<!ELEMENT c (name)>" +
"<!ELEMENT d EMPTY>" +
"<!ATTLIST d ref IDREF #IMPLIED>" +
"<!ELEMENT name (#PCDATA)>" +
"<!ATTLIST a ref IDREF #IMPLIED>" +
"<!ATTLIST b id ID #IMPLIED>" +
"<!ATTLIST c xml:id ID #IMPLIED>]>" +
"<test xml:space=\"preserve\">" +
"<a ref=\"id1\"/>" +
"<a ref=\"id1\"/>" +
"<d ref=\"id2\"/>" +
"<b id=\"id1\"><name>one</name></b>" +
"<c xml:id=\" id2 \"><name>two</name></c>" +
"</test>";
private final static String quotes =
"<test><title>"Hello"</title></test>";
private final static String ws =
"<test><parent xml:space=\"preserve\"><text> </text><text xml:space=\"default\"> </text></parent></test>";
private final static String self =
"<test-self><a>Hello</a><b>World!</b></test-self>";
private static String uri = "xmldb:exist://" + DBBroker.ROOT_COLLECTION;
public static void setURI(String collectionURI) {
uri = collectionURI;
}
private static StandaloneServer server = null;
private Collection testCollection;
private String query;
protected void setUp() {
if (uri.startsWith("xmldb:exist://localhost"))
initServer();
try {
// initialize driver
Class cl = Class.forName("org.exist.xmldb.DatabaseImpl");
Database database = (Database) cl.newInstance();
database.setProperty("create-database", "true");
DatabaseManager.registerDatabase(database);
Collection root =
DatabaseManager.getCollection(
uri,
"admin",
null);
CollectionManagementService service =
(CollectionManagementService) root.getService(
"CollectionManagementService",
"1.0");
testCollection = service.createCollection("test");
assertNotNull(testCollection);
} catch (ClassNotFoundException e) {
} catch (InstantiationException e) {
} catch (IllegalAccessException e) {
} catch (XMLDBException e) {
e.printStackTrace();
}
}
private void initServer() {
try {
if (server == null) {
server = new StandaloneServer();
if (!server.isStarted()) {
try {
System.out.println("Starting standalone server...");
String[] args = {};
server.run(args);
while (!server.isStarted()) {
Thread.sleep(1000);
}
} catch (MultiException e) {
boolean rethrow = true;
Iterator i = e.getExceptions().iterator();
while (i.hasNext()) {
Exception e0 = (Exception)i.next();
if (e0 instanceof BindException) {
System.out.println("A server is running already !");
rethrow = false;
break;
}
}
if (rethrow) throw e;
}
}
}
} catch(Exception e) {
fail(e.getMessage());
}
}
protected void tearDown() throws Exception {
try {
if (!((CollectionImpl) testCollection).isRemoteCollection()) {
DatabaseInstanceManager dim =
(DatabaseInstanceManager) testCollection.getService(
"DatabaseInstanceManager", "1.0");
dim.shutdown();
}
testCollection = null;
System.out.println("tearDown PASSED");
} catch (XMLDBException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public void testPathExpression() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
//Invalid path expression left operand (not a node set).
String message = "";
try {
queryAndAssert(service, "('a', 'b', 'c')/position()", -1, null);
} catch (XMLDBException e) {
message = e.getMessage();
}
assertTrue("Exception wanted: " + message, message.indexOf("XPTY0019") > -1);
//Undefined context sequence
message = "";
try {
queryAndAssert(service, "for $a in (<a/>, <b/>, doh, <c/>) return $a", -1, null);
} catch (XMLDBException e) {
message = e.getMessage();
}
assertTrue("Exception wanted: " + message, message.indexOf("XPDY0002") > -1);
message = "";
try {
//"1 to 2" is resolved as a (1, 2), i.e. a sequence of *integers* which is *not* a singleton
queryAndAssert(service, "let $a := (1, 2, 3) for $b in $a[1 to 2] return $b", -1, null);
} catch (XMLDBException e) {
message = e.getMessage();
}
//No effective boolean value for such a kind of sequence !
assertTrue("Exception wanted: " + message, message.indexOf("FORG0006") >-1);
queryAndAssert(service, "let $a := ('a', 'b', 'c') return $a[2 to 2]", 1, null);
queryAndAssert(service, "let $a := ('a', 'b', 'c') return $a[(2 to 2)]", 1, null);
queryAndAssert(service, "()/position()", 0, null);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
/** test simple queries involving attributes */
public void testAttributes() {
ResourceSet result;
try {
String testDocument = "numbers.xml";
String query;
XQueryService service = storeXMLStringAndGetQueryService(
testDocument, numbers);
query = "/test/item[ @id='1' ]";
result = service.queryResource(testDocument, query);
System.out.println("testAttributes 1: ========");
printResult(result);
assertEquals("XPath: " + query, 1, result.getSize());
XMLResource resource = (XMLResource)result.getResource(0);
Node node = resource.getContentAsDOM();
if (node.getNodeType() == Node.DOCUMENT_NODE)
node = node.getFirstChild();
assertEquals("XPath: " + query, "item", node.getNodeName());
query = "/test/item [ @type='alphanum' ]";
result = service.queryResource(testDocument, query);
System.out.println("testAttributes 2: ========");
printResult(result);
assertEquals("XPath: " + query, 1, result.getSize());
} catch (XMLDBException e) {
System.out.println("testAttributes(): XMLDBException: " + e);
fail(e.getMessage());
}
}
public void testStarAxis() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
result = service.queryResource("numbers.xml", "/*/item");
System.out.println("testStarAxis 1: ========");
printResult(result);
assertEquals("XPath: /*/item", 4, result.getSize());
result = service.queryResource("numbers.xml", "/test/*");
System.out.println("testStarAxis 2: ========");
printResult(result);
assertEquals("XPath: /test/*", 4, result.getSize());
result = service.queryResource("numbers.xml", "/test/descendant-or-self::*");
System.out.println("testStarAxis 3: ========");
printResult(result);
assertEquals( "XPath: /test/descendant-or-self::*", 13, result.getSize());
result = service.queryResource("numbers.xml", "/*/*");
System.out.println("testStarAxis 4: ========" );
printResult(result);
//Strange !!! Should be 8
assertEquals("XPath: /*/*", 4, result.getSize());
} catch (XMLDBException e) {
System.out.println("testStarAxis(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testStarAxisConstraints() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("namespaces.xml", namespaces);
service.setNamespace("t", "http://www.foo.com");
query = "// t:title/text() [ . != 'aaaa' ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxis2 : ========" ); printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
result = service.queryResource("namespaces.xml", "/t:test/*:section[. &= 'comment']");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "/t:test/t:*[. &= 'comment']");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "/t:test/t:section[. &= 'comment']");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "/t:test/t:section/*[. &= 'comment']");
assertEquals("", 1, result.getSize());
query = "/ * / * [ t:title ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxis2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
query = "/ t:test / t:section [ t:title ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxis2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
query = "/ t:test / t:section";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxis2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
} catch (XMLDBException e) {
System.out.println("testStarAxis(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testStarAxisConstraints2() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("namespaces.xml", namespaces);
service.setNamespace("t", "http://www.foo.com");
query = "/ * [ ./ * / t:title ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxisConstraints2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
query = "/ * [ * / t:title ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxisConstraints2 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
} catch (XMLDBException e) {
//org.xmldb.api.base.XMLDBException: Internal evaluation error: context node is missing for node 3 !
System.out.println("testStarAxisConstraints2(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testStarAxisConstraints3() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("namespaces.xml", namespaces);
service.setNamespace("t", "http://www.foo.com");
query = "// * [ . = 'Test Document' ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxisConstraints3 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
query = "// * [ . &= 'Test Document' ]";
result = service.queryResource( "namespaces.xml", query );
System.out.println("testStarAxisConstraints3 : ========" );
printResult(result);
assertEquals( "XPath: "+query, 1, result.getSize() );
} catch (XMLDBException e) {
//org.xmldb.api.base.XMLDBException: Internal evaluation error: context node is missing !
System.out.println("testStarAxisConstraints3(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testRoot() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested2.xml", nested2);
String query = "let $doc := <a><b/></a> return root($doc)";
ResourceSet result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 1, result.getSize());
XMLResource resource = (XMLResource)result.getResource(0);
assertEquals("XPath: " + query, "a", resource.getContentAsDOM().getFirstChild().getLocalName());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testParentAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested2.xml", nested2);
queryResource(service, "nested2.xml", "(<a/>, <b/>, <c/>)/parent::*", 0);
queryResource(service, "nested2.xml", "/RootElement//ChildB/parent::*", 1);
queryResource(service, "nested2.xml", "/RootElement//ChildB/parent::*/ChildB", 1);
queryResource(service, "nested2.xml", "/RootElement/ChildA/parent::*/ChildA/ChildB", 1);
service =
storeXMLStringAndGetQueryService("numbers.xml", numbers2);
service.setNamespace("n", "http://numbers.org");
queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::*[@id = '3']", 1);
queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::n:item[@id = '3']", 1);
queryResource(service, "numbers.xml", "//n:price/parent::n:item[@id = '3']", 1);
ResourceSet result =
queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::n:*/string(@id)", 1);
assertEquals(result.getResource(0).getContent().toString(), "3");
result = queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::*:item/string(@id)", 1);
assertEquals(result.getResource(0).getContent().toString(), "3");
result = queryResource(service, "numbers.xml", "//n:price[. = 18.4]/../string(@id)", 1);
assertEquals(result.getResource(0).getContent().toString(), "3");
result = queryResource(service, "numbers.xml", "//n:price[. = 18.4]/parent::n:item/string(@id)", 1);
assertEquals(result.getResource(0).getContent().toString(), "3");
queryResource(service, "numbers.xml",
"for $price in //n:price where $price/parent::*[@id = '3']/n:stock = '5' return $price", 1);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testParentSelfAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested2.xml", nested2);
queryResource(service, "nested2.xml", "/RootElement/descendant::*/parent::ChildA", 1);
queryResource(service, "nested2.xml", "/RootElement/descendant::*[self::ChildB]/parent::RootElement", 0);
queryResource(service, "nested2.xml", "/RootElement/descendant::*[self::ChildA]/parent::RootElement", 1);
queryResource(service, "nested2.xml", "let $a := ('', 'b', '', '') for $b in $a[.] return <blah>{$b}</blah>", 1);
String query = "let $doc := <root><page><a>a</a><b>b</b></page></root>" +
"return " +
"for $element in $doc/page/* " +
"return " +
"if($element[self::a] or $element[self::b]) then (<found/>) else (<notfound/>)";
ResourceSet result = service.queryResource("numbers.xml", query);
assertEquals(2, result.getSize());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testSelfAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("self.xml", self);
queryResource(service, "self.xml", "/test-self/*[not(self::a)]", 1);
queryResource(service, "self.xml", "/test-self/*[self::a]", 1);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testAncestorAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested3.xml", nested3);
// test ancestor axis with positional predicate
queryResource(service, "nested3.xml", "//a[ancestor::a[2]/t = '1']", 1);
queryResource(service, "nested3.xml", "//a[ancestor::*[2]/t = '1']", 1);
queryResource(service, "nested3.xml", "//a[ancestor::a[1]/t = '2']", 1);
queryResource(service, "nested3.xml", "//a[ancestor::*[1]/t = '2']", 1);
queryResource(service, "nested3.xml", "//a[ancestor-or-self::*[3]/t = '1']", 1);
queryResource(service, "nested3.xml", "//a[ancestor-or-self::a[3]/t = '1']", 1);
// Following test fails
// queryResource(service, "nested3.xml", "//a[ancestor-or-self::*[2]/t = '2']", 1);
queryResource(service, "nested3.xml", "//a[ancestor-or-self::a[2]/t = '2']", 1);
queryResource(service, "nested3.xml", "//a[t = '3'][ancestor-or-self::a[3]/t = '1']", 1);
queryResource(service, "nested3.xml", "//a[t = '3'][ancestor-or-self::*[3]/t = '1']", 1);
} catch (XMLDBException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public void testAncestorIndex() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested2.xml", nested2);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[1]/self::ChildA", 1);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[2]/self::RootElement", 1);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[position() = 1]/self::ChildA", 1);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[position() = 2]/self::RootElement", 1);
queryResource(service, "nested2.xml", "//ChildB/ancestor::*[position() = 2]/self::RootElement", 1);
queryResource(service, "nested2.xml", "(<a/>, <b/>, <c/>)/ancestor::*", 0);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testPrecedingSiblingAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("siblings.xml", siblings);
service.setProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
service.setProperty(OutputKeys.INDENT, "no");
ResourceSet result = queryResource(service, "siblings.xml", "//a[preceding-sibling::*[1]/s = 'B']", 1);
assertXMLEqual("<a><s>Z</s><n>4</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[preceding-sibling::a[1]/s = 'B']", 1);
assertXMLEqual("<a><s>Z</s><n>4</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[preceding-sibling::*[2]/s = 'B']", 1);
assertXMLEqual("<a><s>C</s><n>5</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[preceding-sibling::a[2]/s = 'B']", 1);
assertXMLEqual("<a><s>C</s><n>5</n></a>", result.getResource(0).getContent().toString());
queryResource(service, "siblings.xml", "(<a/>, <b/>, <c/>)/following-sibling::*", 0);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public void testFollowingSiblingAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("siblings.xml", siblings);
service.setProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
service.setProperty(OutputKeys.INDENT, "no");
ResourceSet result = queryResource(service, "siblings.xml", "//a[following-sibling::*[1]/s = 'B']", 1);
assertXMLEqual("<a><s>Z</s><n>2</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[following-sibling::a[1]/s = 'B']", 1);
assertXMLEqual("<a><s>Z</s><n>2</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[following-sibling::*[2]/s = 'B']", 1);
assertXMLEqual("<a><s>A</s><n>1</n></a>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a[following-sibling::a[2]/s = 'B']", 1);
assertXMLEqual("<a><s>A</s><n>1</n></a>", result.getResource(0).getContent().toString());
queryResource(service, "siblings.xml", "(<a/>, <b/>, <c/>)/following-sibling::*", 0);
} catch (Exception e) {
fail(e.getMessage());
}
}
public void testFollowingAxis() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("siblings.xml", siblings);
service.setProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
service.setProperty(OutputKeys.INDENT, "no");
queryResource(service, "siblings.xml", "//a/s[. = 'B']/following::s", 3);
ResourceSet result = queryResource(service, "siblings.xml", "//a/s[. = 'B']/following::s[1]", 1);
assertXMLEqual("<s>Z</s>", result.getResource(0).getContent().toString());
result = queryResource(service, "siblings.xml", "//a/s[. = 'B']/following::s[2]", 1);
assertXMLEqual("<s>C</s>", result.getResource(0).getContent().toString());
} catch (Exception e) {
fail(e.getMessage());
}
}
public void testPosition() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
queryResource(service, "numbers.xml", "//item[position() = 3]", 1);
queryResource(service, "numbers.xml", "//item[position() < 3]", 2);
queryResource(service, "numbers.xml", "//item[position() <= 3]", 3);
queryResource(service, "numbers.xml", "//item[position() > 3]", 1);
queryResource(service, "numbers.xml", "//item[position() >= 3]", 2);
queryResource(service, "numbers.xml", "//item[position() eq 3]", 1);
queryResource(service, "numbers.xml", "//item[position() lt 3]", 2);
queryResource(service, "numbers.xml", "//item[position() le 3]", 3);
queryResource(service, "numbers.xml", "//item[position() gt 3]", 1);
queryResource(service, "numbers.xml", "//item[position() ge 3]", 2);
// Currently fails with error XPTY0004
queryResource(service, "numbers.xml", "//item[last() - 1]", 1);
queryResource(service, "numbers.xml", "//item[count(('a','b')) - 1]", 1);
String query = "for $a in (<a/>, <b/>, <c/>) return $a/position()";
ResourceSet result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 3, result.getSize());
XMLResource resource = (XMLResource)result.getResource(0);
assertEquals("XPath: " + query, "1", resource.getContent().toString());
resource = (XMLResource)result.getResource(1);
assertEquals("XPath: " + query, "1", resource.getContent().toString());
resource = (XMLResource)result.getResource(2);
assertEquals("XPath: " + query, "1", resource.getContent().toString());
query = "declare variable $doc { <root>" +
"<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a><a>6</a><a>7</a>" +
"</root> }; " +
"(for $x in $doc/a return $x)[position() mod 3 = 2]";
result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 2, result.getSize());
query = "declare variable $doc { <root>" +
"<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a><a>6</a><a>7</a>" +
"</root> }; " +
"for $x in $doc/a return $x[position() mod 3 = 2]";
result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 0, result.getSize());
query = "declare variable $doc { <root>" +
"<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a><a>6</a><a>7</a>" +
"</root> }; " +
"for $x in $doc/a[position() mod 3 = 2] return $x";
result = service.queryResource("numbers.xml", query);
assertEquals("XPath: " + query, 2, result.getSize());
// TODO: clarify
// query = "let $a := ('a', 'b', 'c') for $b in $a[position()] return <blah>{$b}</blah>";
// result = service.queryResource("numbers.xml", query);
// assertEquals("XPath: " + query, 3, result.getSize());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testNumbers() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
ResourceSet result = queryResource(service, "numbers.xml", "sum(/test/item/price)", 1);
assertEquals( "96.94", result.getResource(0).getContent() );
result = queryResource(service, "numbers.xml", "round(sum(/test/item/price))", 1);
assertEquals( "97", result.getResource(0).getContent() );
result = queryResource(service, "numbers.xml", "floor(sum(/test/item/stock))", 1);
assertEquals( "86", result.getResource(0).getContent());
queryResource(service, "numbers.xml", "/test/item[round(price + 3) > 60]", 1);
result = queryResource(service, "numbers.xml", "min(( 123456789123456789123456789, " +
"123456789123456789123456789123456789123456789 ))", 1);
assertEquals("minimum of big integers",
"123456789123456789123456789",
result.getResource(0).getContent() );
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testPredicates() throws Exception {
String numbers =
"<test>"
+ "<item id='1' type='alphanum'><price>5.6</price><stock>22</stock></item>"
+ "<item id='2'><price>7.4</price><stock>43</stock></item>"
+ "<item id='3'><price>18.4</price><stock>5</stock></item>"
+ "<item id='4'><price>65.54</price><stock>16</stock></item>"
+ "</test>";
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
service.setProperty(OutputKeys.INDENT, "no");
ResourceSet result = queryResource(service, "numbers.xml", "/test/item[2]/price/text()", 1);
assertEquals("7.4", result.getResource(0).getContent().toString());
result = queryResource(service, "numbers.xml", "/test/item[5]", 0);
result = queryResource(service, "numbers.xml", "/test/item[@id='4'][1]/price[1]/text()", 1);
assertEquals("65.54",
result.getResource(0).getContent().toString());
result = queryResource(service, "numbers.xml", "for $i in //item return " +
"<item>{$i/price, $i/stock}</item>", 4);
assertXMLEqual("<item><price>5.6</price><stock>22</stock></item>",
result.getResource(0).getContent().toString());
assertXMLEqual("<item><price>65.54</price><stock>16</stock></item>",
result.getResource(3).getContent().toString());
// test positional predicates
result = queryResource(service, "numbers.xml", "/test/node()[2]", 1);
assertXMLEqual("<item id='2'><price>7.4</price><stock>43</stock></item>",
result.getResource(0).getContent().toString());
result = queryResource(service, "numbers.xml", "/test/element()[2]", 1);
assertXMLEqual("<item id='2'><price>7.4</price><stock>43</stock></item>",
result.getResource(0).getContent().toString());
// positional predicate on sequence of atomic values
result = queryResource(service, "numbers.xml", "('test', 'pass')[2]", 1);
assertEquals(result.getResource(0).getContent().toString(), "pass");
result = queryResource(service, "numbers.xml", "let $credentials := ('test', 'pass') let $user := $credentials[1] return $user", 1);
assertEquals(result.getResource(0).getContent().toString(), "test");
result = queryResource(service, "numbers.xml", "let $credentials := ('test', 'pass') let $user := $credentials[2] return $user", 1);
assertEquals(result.getResource(0).getContent().toString(), "pass");
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testPredicates2() throws Exception {
String numbers =
"<test>"
+ "<item id='1' type='alphanum'><price>5.6</price><stock>22</stock></item>"
+ "<item id='2'><price>7.4</price><stock>43</stock></item>"
+ "<item id='3'><price>18.4</price><stock>5</stock></item>"
+ "<item id='4'><price>65.54</price><stock>16</stock></item>"
+ "</test>";
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
service.setProperty(OutputKeys.INDENT, "no");
String query = "let $t := <test>" + "<a> <s>A</s> 1 </a>"
+ "<a> <s>Z</s> 2 </a>" + "<a> <s>B</s> 3 </a>"
+ "<a> <s>Z</s> 4 </a>" + "<a> <s>C</s> 5 </a>"
+ "<a> <s>Z</s> 6 </a>" + "</test>"
+ "return $t//a[s='Z' and preceding-sibling::*[1]/s='B']";
ResourceSet result = queryResource(service, "numbers.xml", query, 1);
assertXMLEqual("<a><s>Z</s> 4 </a>", result.getResource(0)
.getContent().toString());
query = "let $t := <test>" + "<a> <s>A</s> 1 </a>"
+ "<a> <s>Z</s> 2 </a>" + "<a> <s>B</s> 3 </a>"
+ "<a> <s>Z</s> 4 </a>" + "<a> <s>C</s> 5 </a>"
+ "<a> <s>Z</s> 6 </a>" + "</test>"
+ "return $t//a[s='Z' and ./preceding-sibling::*[1]/s='B']";
result = queryResource(service, "numbers.xml", query, 1);
assertXMLEqual("<a><s>Z</s> 4 </a>", result.getResource(0)
.getContent().toString());
query = "let $doc := <doc><rec n='1'><a>first</a><b>second</b></rec>" +
"<rec n='2'><a>first</a><b>third</b></rec></doc> " +
"return $doc//rec[fn:not(b = 'second') and (./a = 'first')]";
result = queryResource(service, "numbers.xml", query, 1);
assertXMLEqual("<rec n=\"2\"><a>first</a><b>third</b></rec>", result.getResource(0)
.getContent().toString());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testStrings() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("strings.xml", strings);
ResourceSet result = queryResource(service, "strings.xml", "substring(/test/string[1], 1, 5)", 1);
assertEquals( "Hello", result.getResource(0).getContent() );
queryResource(service, "strings.xml", "/test/string[starts-with(string(.), 'Hello')]", 2);
result = queryResource(service, "strings.xml", "count(/test/item/price)", 1,
"Query should return an empty set (wrong document)");
assertEquals("0", result.getResource(0).getContent());
} catch (XMLDBException e) {
System.out.println("testStrings(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testQuotes() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("quotes.xml", quotes);
// ResourceSet result =
queryResource(service, "quotes.xml", "/test[title = '"Hello"']", 1);
service.declareVariable("content", ""Hello"");
// result =
queryResource(service, "quotes.xml", "/test[title = $content]", 1);
} catch (XMLDBException e) {
System.out.println("testQuotes(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testBoolean() {
try {
System.out.println("Testing effective boolean value of expressions ...");
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
ResourceSet result = queryResource(service, "numbers.xml", "boolean(1.0)", 1);
assertEquals("boolean value of 1.0 should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(0.0)", 1);
assertEquals("boolean value of 0.0 should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:double(0.0))", 1);
assertEquals("boolean value of double 0.0 should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:double(1.0))", 1);
assertEquals("boolean value of double 1.0 should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:float(1.0))", 1);
assertEquals("boolean value of float 1.0 should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:float(0.0))", 1);
assertEquals("boolean value of float 0.0 should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:integer(0))", 1);
assertEquals("boolean value of integer 0 should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(xs:integer(1))", 1);
assertEquals("boolean value of integer 1 should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "'true' cast as xs:boolean", 1);
assertEquals("boolean value of 'true' cast to xs:boolean should be true",
"true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "'false' cast as xs:boolean", 1);
assertEquals("boolean value of 'false' cast to xs:boolean should be false",
"false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean('Hello')", 1);
assertEquals("boolean value of string 'Hello' should be true", "true", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean('')", 1);
assertEquals("boolean value of empty string should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(())", 1);
assertEquals("boolean value of empty sequence should be false", "false", result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(('Hello'))", 1);
assertEquals("boolean value of sequence with non-empty string should be true",
"true", result.getResource(0).getContent());
// result = queryResource(service, "numbers.xml", "boolean((0.0, 0.0))", 1);
// assertEquals("boolean value of sequence with two elements should be true", "true",
// result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(//item[@id = '1']/price)", 1);
assertEquals("boolean value of 5.6 should be true", "true",
result.getResource(0).getContent());
result = queryResource(service, "numbers.xml", "boolean(current-time())", 1);
assertEquals("boolean value of current-time() should be true", "true",
result.getResource(0).getContent());
} catch (XMLDBException e) {
System.out.println("testBoolean(): XMLDBException: "+e);
fail(e.getMessage());
}
}
public void testNot() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("strings.xml", strings);
queryResource(service, "strings.xml", "/test/string[not(@value)]", 2);
ResourceSet result = queryResource(service, "strings.xml", "not(/test/abcd)", 1);
Resource r = result.getResource(0);
assertEquals("true", r.getContent().toString());
result = queryResource(service, "strings.xml", "not(/test)", 1);
r = result.getResource(0);
assertEquals("false", r.getContent().toString());
result = queryResource(service, "strings.xml", "/test/string[not(@id)]", 3);
r = result.getResource(0);
assertEquals("<string>Hello World!</string>", r.getContent().toString());
// test with non-existing items
queryResource( service, "strings.xml", "document()/blah[not(blah)]", 0);
queryResource(service, "strings.xml", "//*[string][not(@value)]", 1);
queryResource(service, "strings.xml", "//*[string][not(@blah)]", 1);
queryResource(service, "strings.xml", "//*[blah][not(@blah)]", 0);
} catch (XMLDBException e) {
System.out.println("testStrings(): XMLDBException: "+e);
e.printStackTrace();
fail(e.getMessage());
}
}
public void testIds() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("ids.xml", ids);
queryResource(service, "ids.xml", "//a/id(@ref)", 1);
queryResource(service, "ids.xml", "id(//a/@ref)", 1);
ResourceSet result = queryResource(service, "ids.xml", "//a/id(@ref)/name", 1);
Resource r = result.getResource(0);
assertEquals("<name>one</name>", r.getContent().toString());
result = queryResource(service, "ids.xml", "//d/id(@ref)/name", 1);
r = result.getResource(0);
assertEquals("<name>two</name>", r.getContent().toString());
String update = "update insert <t xml:id=\"id3\">Hello</t> into /test";
queryResource(service, "ids.xml", update, 0);
queryResource(service, "ids.xml", "id('id3')", 1);
update = "update value //t/@xml:id with 'id4'";
queryResource(service, "ids.xml", update, 0);
queryResource(service, "ids.xml", "id('id4')", 1);
} catch (XMLDBException e) {
System.out.println("testIds(): XMLDBException: "+e);
e.printStackTrace();
fail(e.getMessage());
}
}
public void testExternalVars() {
try {
XQueryService service = (XQueryService)
storeXMLStringAndGetQueryService("strings.xml", strings);
String query =
"declare variable $local:string external;" +
"/test/string[. = $local:string]";
CompiledExpression expr = service.compile(query);
service.declareVariable("local:string", "Hello");
ResourceSet result = service.execute(expr);
XMLResource r = (XMLResource) result.getResource(0);
Node node = r.getContentAsDOM();
if (node.getNodeType() == Node.DOCUMENT_NODE)
node = node.getFirstChild();
assertEquals("string", node.getNodeName());
} catch (XMLDBException e) {
System.out.println("testExternalVars(): XMLDBException");
e.printStackTrace();
fail(e.getMessage());
}
}
public void testQueryResource() {
try {
XMLResource doc =
(XMLResource) testCollection.createResource(
"strings.xml", "XMLResource" );
doc.setContent(strings);
testCollection.storeResource(doc);
doc =
(XMLResource) testCollection.createResource(
"strings2.xml", "XMLResource" );
doc.setContent(strings);
testCollection.storeResource(doc);
XPathQueryService query =
(XPathQueryService) testCollection.getService(
"XPathQueryService",
"1.0");
ResourceSet result = query.queryResource("strings2.xml", "/test/string[. = 'Hello World!']");
assertEquals(1, result.getSize());
result = query.query("/test/string[. = 'Hello World!']");
assertEquals(2, result.getSize());
} catch (XMLDBException e) {
System.out.println("testQueryResource(): XMLDBException: "+e);
fail(e.getMessage());
}
}
/** test involving ancestor::
* >>>>>>> currently this produces variable corruption :
* The result is the ancestor <<<<<<<<<< */
public void testAncestor() {
ResourceSet result;
try {
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers );
query =
"let $all_items := /test/item " +
"(: Note: variable non used but computed anyway :)" +
"let $unused_variable :=" +
" for $one_item in $all_items " +
" / ancestor::* (: <<<<< if you remove this line all is normal :)" +
" return 'foo'" +
"return $all_items";
result = service.queryResource("numbers.xml", query );
assertEquals(4, result.getSize());
} catch (XMLDBException e) {
System.out.println("testAncestor(): XMLDBException: "+e);
fail(e.getMessage());
}
}
/** Helper that performs an XQuery and does JUnit assertion on result size.
* @see #queryResource(XQueryService, String, String, int, String)
*/
private ResourceSet queryResource(XQueryService service, String resource,
String query, int expected) throws XMLDBException {
return queryResource(service, resource, query, expected, null);
}
/** Helper that performs an XQuery and does JUnit assertion on result size.
* @param service XQuery service
* @param resource database resource (collection) to query
* @param query
* @param expected size of result
* @param message for JUnit
* @return a ResourceSet, allowing to do more assertions if necessary.
* @throws XMLDBException
*/
private ResourceSet queryResource(XQueryService service, String resource,
String query, int expected, String message) throws XMLDBException {
ResourceSet result = service.queryResource(resource, query);
if(message == null)
assertEquals(query, expected, result.getSize());
else
assertEquals(message, expected, result.getSize());
return result;
}
/** For queries without associated data */
private ResourceSet queryAndAssert(XQueryService service, String query,
int expected, String message) throws XMLDBException {
ResourceSet result = service.query(query);
if(message == null)
assertEquals(expected, result.getSize());
else
assertEquals(message, expected, result.getSize());
return result;
}
/** For queries without associated data */
private XQueryService getQueryService() throws XMLDBException {
XQueryService service = (XQueryService) testCollection.getService(
"XPathQueryService", "1.0");
return service;
}
/** stores XML String and get Query Service
* @param documentName to be stored in the DB
* @param content to be stored in the DB
* @return the XQuery Service
* @throws XMLDBException
*/
private XQueryService storeXMLStringAndGetQueryService(String documentName,
String content) throws XMLDBException {
XMLResource doc =
(XMLResource) testCollection.createResource(
documentName, "XMLResource" );
doc.setContent(content);
testCollection.storeResource(doc);
XQueryService service =
(XQueryService) testCollection.getService(
"XPathQueryService",
"1.0");
return service;
}
public void testNamespaces() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("namespaces.xml", namespaces);
service.setNamespace("t", "http://www.foo.com");
service.setNamespace("c", "http://www.other.com");
ResourceSet result =
service.queryResource("namespaces.xml", "//t:section");
assertEquals(1, result.getSize());
result =
service.queryResource("namespaces.xml", "/t:test//c:comment");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "//c:*");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "//*:comment");
assertEquals(1, result.getSize());
result = service.queryResource("namespaces.xml", "namespace-uri(//t:test)");
assertEquals(1, result.getSize());
assertEquals("http://www.foo.com", result.getResource(0).getContent());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testPreserveSpace() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("whitespace.xml", ws);
ResourceSet result =
service.queryResource("whitespace.xml", "//text");
assertEquals(2, result.getSize());
String item = result.getResource(0).getContent().toString();
assertXMLEqual("<text> </text>", item);
item = result.getResource(1).getContent().toString();
assertXMLEqual("<text xml:space=\"default\"/>", item);
} catch (Exception e) {
fail(e.getMessage());
}
}
public void testNestedElements() {
try {
XQueryService service =
storeXMLStringAndGetQueryService("nested.xml", nested);
ResourceSet result = service.queryResource("nested.xml", "//c");
printResult(result);
assertEquals( 3, result.getSize() );
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testStaticVariables() {
ResourceSet result = null;
try {
XMLResource doc =
(XMLResource) testCollection.createResource(
"numbers.xml", "XMLResource" );
doc.setContent(numbers);
testCollection.storeResource(doc);
XPathQueryService service =
(XPathQueryService) testCollection.getService(
"XPathQueryService",
"1.0");
XPathQueryServiceImpl service2 = (XPathQueryServiceImpl) service;
service2.declareVariable("name", "MONTAGUE");
service2.declareVariable("name", "43");
//ResourceSet result = service.query("//SPEECH[SPEAKER=$name]");
result = service2.query( doc, "//item[stock=$name]");
System.out.println( "testStaticVariables 1: ========" ); printResult(result);
result = service2.query("$name");
assertEquals( 1, result.getSize() );
System.out.println("testStaticVariables 2: ========" ); printResult(result);
result = service2.query( doc, "//item[stock=43]");
assertEquals( 1, result.getSize() );
System.out.println("testStaticVariables 3: ========" ); printResult(result);
result = service2.query( doc, "//item");
assertEquals( 4, result.getSize() );
// assertEquals( 10, result.getSize() );
} catch (XMLDBException e) {
System.out.println("testStaticVariables(): XMLDBException: "+e);
fail(e.getMessage());
}
}
/**
* @param result
* @throws XMLDBException
*/
private void printResult(ResourceSet result) throws XMLDBException {
for (ResourceIterator i = result.getIterator();
i.hasMoreResources();
) {
Resource r = i.nextResource();
System.out.println(r.getContent());
}
}
public void testMembersAsResource() {
try {
// XPathQueryService service =
// (XPathQueryService) testCollection.getService(
// "XPathQueryService",
// "1.0");
// ResourceSet result = service.query("//SPEECH[LINE &= 'marriage']");
XQueryService service =
storeXMLStringAndGetQueryService("numbers.xml", numbers);
ResourceSet result = service.query("//item/price");
Resource r = result.getMembersAsResource();
String content = (String)r.getContent();
System.out.println(content);
Pattern p = Pattern.compile( ".*(<price>.*){4}", Pattern.DOTALL);
Matcher m = p.matcher(content);
assertTrue( "get whole document numbers.xml", m.matches() );
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testSatisfies() {
try {
XQueryService service = getQueryService();
ResourceSet result;
result = queryAndAssert( service,
"every $foo in (1,2,3) satisfies" +
" let $bar := 'baz'" +
" return false() ",
1, "" );
assertEquals( "satisfies + FLWR expression allways false 1", "false", result.getResource(0).getContent() );
result = queryAndAssert( service,
"declare function local:foo() { false() };" +
" every $bar in (1,2,3) satisfies" +
" local:foo()",
1, "" );
assertEquals( "satisfies + FLWR expression allways false 2", "false", result.getResource(0).getContent() );
query = "every $x in () satisfies false()";
result = queryAndAssert( service, query, 1, "" );
assertEquals( query, "true", result.getResource(0).getContent() );
query = "some $x in () satisfies true()";
result = queryAndAssert( service, query, 1, "" );
assertEquals( query, "false", result.getResource(0).getContent() );
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testIntersect() {
try {
XQueryService service = getQueryService();
ResourceSet result;
query = "() intersect ()";
result = queryAndAssert( service, query, 0, "");
query = "() intersect (1)";
result = queryAndAssert( service, query, 0, "");
query = "(1) intersect ()";
result = queryAndAssert( service, query, 0, "");
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testUnion() {
try {
XQueryService service = getQueryService();
ResourceSet result;
query = "() union ()";
result = queryAndAssert( service, query, 0, "");
boolean exceptionThrown = false;
String message = "";
try {
query = "() union (1)";
result = queryAndAssert( service, query, 0, "");
} catch (XMLDBException e) {
exceptionThrown = true;
message = e.getMessage();
}
assertTrue(message.indexOf("XPTY0004") > -1);
exceptionThrown = false;
message = "";
try {
query = "(1) union ()";
result = queryAndAssert( service, query, 0, "");
} catch (XMLDBException e) {
exceptionThrown = true;
message = e.getMessage();
}
assertTrue(message.indexOf("XPTY0004") > -1);
query = "<a/> union ()";
result = queryAndAssert( service, query, 1, "");
query = "() union <a/>";
result = queryAndAssert( service, query, 1, "");
//Not the same nodes
query = "<a/> union <a/>";
result = queryAndAssert( service, query, 2, "");
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testExcept() {
try {
XQueryService service = getQueryService();
query = "() except ()";
queryAndAssert( service, query, 0, "");
query = "() except (1)";
queryAndAssert( service, query, 0, "");
String message = "";
try {
query = "(1) except ()";
queryAndAssert( service, query, 0, "");
} catch (XMLDBException e) {
message = e.getMessage();
}
assertTrue(message.indexOf("XPTY0004") > -1);
query = "<a/> except ()";
queryAndAssert( service, query, 1, "");
query = "() except <a/>";
queryAndAssert( service, query, 0, "");
//Not the same nodes
query = "<a/> except <a/>";
queryAndAssert( service, query, 1, "");
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public void testConvertToBoolean() throws XMLDBException {
XQueryService service = getQueryService();
ResourceSet result;
try {
result = queryAndAssert(
service,
"let $doc := <element attribute=''/>" + "return ("
+ " <true>{boolean(($doc,2,3))}</true> ,"
+ " <true>{boolean(($doc/@*,2,3))}</true> ,"
+ " <true>{boolean(true())}</true> ,"
+ " <true>{boolean('test')}</true> ,"
+ " <true>{boolean(number(1))}</true> ,"
+ " <false>{boolean((0))}</false> ,"
+ " <false>{boolean(false())}</false> ,"
+ " <false>{boolean('')}</false> ,"
+ " <false>{boolean(number(0))}</false> ,"
+ " <false>{boolean(number('NaN'))}</false>" + ")",
10, "");
for (int i = 0; i < 5; i++) {
assertEquals("true " + (i + 1), "<true>true</true>", result
.getResource(i).getContent());
}
for (int i = 5; i < 10; i++) {
assertEquals("false " + (i + 1), "<false>false</false>", result
.getResource(i).getContent());
}
} catch (XMLDBException e) {
fail(e.getMessage());
}
boolean exceptionThrown = false;
String message = "";
try {
result = queryAndAssert(service,
"let $doc := <element attribute=''/>"
+ " return boolean( (1,2,$doc) )", 1, "");
} catch (XMLDBException e) {
exceptionThrown = true;
message = e.getMessage();
}
assertTrue("Exception wanted: " + message, exceptionThrown);
}
public void testCompile() throws XMLDBException {
String invalidQuery = "for $i in (1 to 10)\n return $b";
String validQuery = "for $i in (1 to 10) return $i";
String validModule = "module namespace foo=\"urn:foo\";\n" +
"declare function foo:test() { \"Hello World!\" };";
String invalidModule = "module namespace foo=\"urn:foo\";\n" +
"declare function foo:test() { \"Hello World! };";
org.exist.xmldb.XQueryService service = (org.exist.xmldb.XQueryService) getQueryService();
boolean exceptionOccurred = false;
try {
service.compile(invalidQuery);
} catch (XMLDBException e) {
assertEquals(((XPathException)e.getCause()).getLine(), 2);
exceptionOccurred = true;
}
assertTrue("Expected an exception", exceptionOccurred);
exceptionOccurred = false;
try {
service.compileAndCheck(invalidModule);
} catch (XPathException e) {
exceptionOccurred = true;
}
assertTrue("Expected an exception", exceptionOccurred);
try {
service.compile(validQuery);
service.compile(validModule);
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
public static void main(String[] args) {
junit.textui.TestRunner.run(XPathQueryTest.class);
}
} | Added regression test for sfnet [1460610] Predicate issue: ClassCastException
svn path=/trunk/eXist-1.0/; revision=3046
| src/org/exist/xquery/test/XPathQueryTest.java | Added regression test for sfnet [1460610] Predicate issue: ClassCastException |
|
Java | lgpl-2.1 | 374ad44d60368b7f95a2841321dd8194996af760 | 0 | travisbrown/marc4j-old | // $Id$
/**
* Copyright (C) 2002 Bas Peters
*
* This file is part of MARC4J
*
* MARC4J is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* MARC4J is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MARC4J; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.marc4j.marcxml;
import java.io.*;
import java.net.URL;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.helpers.XMLFilterImpl;
import org.xml.sax.helpers.AttributesImpl;
import org.marc4j.MarcHandler;
import org.marc4j.ErrorHandler;
import org.marc4j.MarcReader;
import org.marc4j.marc.MarcConstants;
import org.marc4j.marc.Leader;
import org.marc4j.marc.Tag;
import org.marc4j.util.CharacterConverter;
import org.marc4j.util.CharacterConverterLoader;
import org.marc4j.util.CharacterConverterLoaderException;
import org.marc4j.util.AnselToUnicode;
/**
* <p><code>MarcXmlFilter</code> is an <code>XMLFilter</code> that
* consumes <code>MarcHandler</code> events and reports events to
* a SAX2 <code>ContentHandler</code>. </p>
*
* @author <a href="mailto:[email protected]">Bas Peters</a>
* @version $Revision$
*
* @see ExtendedFilter
* @see MarcHandler
* @see ContentHandler
*/
public class MarcXmlFilter extends ExtendedFilter
implements MarcHandler {
/** Enables pretty printing */
private boolean prettyPrinting = true;
/** Empty attributes */
private static final Attributes EMPTY_ATTS =
new AttributesImpl();
/** MARC4J error handler property */
private static final String ERROR_HANDLER =
"http://marc4j.org/properties/error-handler";
/** MARC4J ansel to unicode conversion */
private static final String ANSEL_TO_UNICODE =
"http://marc4j.org/features/ansel-to-unicode";
/** MARC4J character conversion */
private static final String CHARACTER_CONVERTER =
"http://marc4j.org/properties/character-conversion";
/** MARC4J pretty printing */
private static final String PRETTY_PRINTING =
"http://marc4j.org/features/pretty-printing";
/** MARC4J document type declaration property */
private static final String DOC_TYPE_DECL =
"http://marc4j.org/properties/document-type-declaration";
/** MARC4J schema location property */
private static final String SCHEMA_LOC =
"http://marc4j.org/properties/schema-location";
/** Namespace for MARCXML */
private static final String NS_URI =
"http://www.loc.gov/MARC21/slim";
/** Namespace for W3C XML Schema instance */
private static final String NS_XSI =
"http://www.w3.org/2001/XMLSchema-instance";
/** Schema location */
private String schemaLocation = null;
/** {@link DocType} object */
private DoctypeDecl doctype = null;
/** {@link ContentHandler} object */
private ContentHandler ch;
/** {@link ErrorHandler} object */
private ErrorHandler eh;
private CharacterConverter charconv = null;
/**
* <p>Sets the object for the given property.</p>
*
* @param name the property name
* @param obj the property object
*/
public void setProperty(String name, Object obj)
throws SAXNotRecognizedException, SAXNotSupportedException {
if (DOC_TYPE_DECL.equals(name))
this.doctype = (DoctypeDecl)obj;
else if (ERROR_HANDLER.equals(name))
this.eh = (ErrorHandler)obj;
else if (SCHEMA_LOC.equals(name))
this.schemaLocation = (String)obj;
else if (CHARACTER_CONVERTER.equals(name))
this.charconv = (CharacterConverter)obj;
else
super.setProperty(name, obj);
}
/**
* <p>Sets the boolean for the feature with the given name.</p>
*
* @param name the name of the feature
* @param value the boolean value
*/
public void setFeature(String name, boolean value)
throws SAXNotRecognizedException, SAXNotSupportedException {
if (ANSEL_TO_UNICODE.equals(name))
setCharacterConverter(true);
else if (PRETTY_PRINTING.equals(name))
this.prettyPrinting = value;
else
super.setFeature(name, value);
}
/**
* <p>Sends the input source to the <code>MarcReader</code>.</p>
*
* @param input the {@link InputSource}
*/
public void parse(InputSource input) {
ch = getContentHandler();
if (ch == null) {
return;
}
try {
// Convert the InputSource into a BufferedReader.
BufferedReader br = null;
if (input.getCharacterStream() != null) {
br = new BufferedReader(input.getCharacterStream());
} else if (input.getByteStream() != null) {
br = new BufferedReader(new InputStreamReader(input.getByteStream(), "ISO8859_1"));
} else if (input.getSystemId() != null) {
java.net.URL url = new URL(input.getSystemId());
br = new BufferedReader(new InputStreamReader(url.openStream(), "ISO8859_1"));
} else {
throw new SAXException("Invalid InputSource object");
}
// Create a new MarcReader object.
MarcReader marcReader = new MarcReader();
// Register the MarcHandler implementation.
marcReader.setMarcHandler(this);
// Register the ErrorHandler implementation.
if (eh != null)
marcReader.setErrorHandler(eh);
// Send the file to the parse method.
marcReader.parse(br);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* <p>Returns the document handler being used, starts the document
* and reports the root element. </p>
*
*/
public void startCollection() {
try {
AttributesImpl atts = new AttributesImpl();
// Report start of XML document.
ch.startDocument();
// Report document type declaration
if (lh != null && doctype != null && schemaLocation == null) {
lh.startDTD(doctype.getName(),
doctype.getPublicId(),
doctype.getSystemId());
lh.endDTD();
}
// Outputting namespace declarations through the attribute object,
// since the startPrefixMapping refuses to output namespace declarations.
if (schemaLocation != null) {
atts.addAttribute("", "xsi", "xmlns:xsi", "CDATA", NS_XSI);
atts.addAttribute(NS_XSI, "schemaLocation", "xsi:schemaLocation",
"CDATA", schemaLocation);
}
// Do not output the namespace declaration for MARCXML
// together with a document type declaration
if (doctype == null)
atts.addAttribute("", "", "xmlns", "CDATA", NS_URI);
// Report start of prefix mapping for MARCXML
// OK together with Document Type Delcaration?
ch.startPrefixMapping("", NS_URI);
// Report root element
ch.startElement(NS_URI, "collection", "collection", atts);
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the starting element for a record and the leader node. </p>
*
* @param leader the leader
*/
public void startRecord(Leader leader) {
try {
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 3);
ch.startElement(NS_URI, "record", "record", EMPTY_ATTS);
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 5);
writeElement(NS_URI,"leader","leader", EMPTY_ATTS, leader.marshal());
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports a control field node (001-009).</p>
*
* @param tag the tag name
* @param data the data element
*/
public void controlField(String tag, char[] data) {
try {
AttributesImpl atts = new AttributesImpl();
atts.addAttribute("", "tag", "tag", "CDATA", tag);
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 5);
writeElement(NS_URI,"controlfield","controlfield", atts, data);
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the starting element for a data field (010-999).</p>
*
* @param tag the tag name
* @param ind1 the first indicator value
* @param ind2 the second indicator value
*/
public void startDataField(String tag, char ind1, char ind2) {
try {
AttributesImpl atts = new AttributesImpl();
atts.addAttribute("", "tag", "tag", "CDATA", tag);
atts.addAttribute("", "ind1", "ind1", "CDATA", String.valueOf(ind1));
atts.addAttribute("", "ind2", "ind2", "CDATA", String.valueOf(ind2));
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 5);
ch.startElement(NS_URI,"datafield","datafield", atts);
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports a subfield node.</p>
*
* @param code the data element identifier
* @param data the data element
*/
public void subfield(char code, char[] data) {
try {
AttributesImpl atts = new AttributesImpl();
atts.addAttribute("", "code", "code", "CDATA", String.valueOf(code));
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 7);
ch.startElement(NS_URI,"subfield","subfield", atts);
if (charconv != null) {
char[] unicodeData = charconv.convert(data);
ch.characters(unicodeData, 0, unicodeData.length);
} else {
ch.characters(data, 0, data.length);
}
ch.endElement(NS_URI,"subfield","subfield");
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the closing element for a data field.</p>
*
* @param tag the tag name
*/
public void endDataField(String tag) {
try {
ch.ignorableWhitespace("\n ".toCharArray(), 0, 5);
ch.endElement(NS_URI,"datafield","datafield");
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the closing element for a record.</p>
*
*/
public void endRecord() {
try {
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 3);
ch.endElement(NS_URI,"record","record");
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the closing element for the root, reports the end
* of the prefix mapping and the end a document. </p>
*
*/
public void endCollection() {
try {
if (prettyPrinting)
ch.ignorableWhitespace("\n".toCharArray(), 0, 1);
ch.endElement(NS_URI,"collection","collection");
ch.endPrefixMapping("");
ch.endDocument();
} catch (SAXException e) {
e.printStackTrace();
}
}
private void writeElement(String uri, String localName,
String qName, Attributes atts, String content)
throws SAXException {
writeElement(uri, localName, qName, atts, content.toCharArray());
}
private void writeElement(String uri, String localName,
String qName, Attributes atts, char content)
throws SAXException {
writeElement(uri, localName, qName, atts, String.valueOf(content).toCharArray());
}
private void writeElement(String uri, String localName,
String qName, Attributes atts, char[] content)
throws SAXException {
ch.startElement(uri, localName, qName, atts);
ch.characters(content, 0, content.length);
ch.endElement(uri, localName, qName);
}
private void setCharacterConverter(boolean convert) {
if (convert) {
try {
charconv = (CharacterConverter)CharacterConverterLoader
.createCharacterConverter("org.marc4j.charconv",
"org.marc4j.util.AnselToUnicode");
} catch (CharacterConverterLoaderException e) {
e.printStackTrace();
}
}
}
}
| src/org/marc4j/marcxml/MarcXmlFilter.java | // $Id$
/**
* Copyright (C) 2002 Bas Peters
*
* This file is part of MARC4J
*
* MARC4J is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* MARC4J is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MARC4J; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.marc4j.marcxml;
import java.io.*;
import java.net.URL;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.helpers.XMLFilterImpl;
import org.xml.sax.helpers.AttributesImpl;
import org.marc4j.MarcHandler;
import org.marc4j.ErrorHandler;
import org.marc4j.MarcReader;
import org.marc4j.marc.MarcConstants;
import org.marc4j.marc.Leader;
import org.marc4j.marc.Tag;
import org.marc4j.util.CharacterConverter;
import org.marc4j.util.CharacterConverterLoader;
import org.marc4j.util.CharacterConverterLoaderException;
import org.marc4j.util.AnselToUnicode;
/**
* <p><code>MarcXmlFilter</code> is an <code>XMLFilter</code> that
* consumes <code>MarcHandler</code> events and reports events to
* a SAX2 <code>ContentHandler</code>. </p>
*
* @author <a href="mailto:[email protected]">Bas Peters</a>
* @version $Revision$
*
* @see ExtendedFilter
* @see MarcHandler
* @see ContentHandler
*/
public class MarcXmlFilter extends ExtendedFilter
implements MarcHandler {
/** Enables pretty printing */
private boolean prettyPrinting = true;
/** Empty attributes */
private static final Attributes EMPTY_ATTS =
new AttributesImpl();
/** MARC4J error handler property */
private static final String ERROR_HANDLER =
"http://marc4j.org/properties/error-handler";
/** MARC4J ansel to unicode conversion */
private static final String ANSEL_TO_UNICODE =
"http://marc4j.org/features/ansel-to-unicode";
/** MARC4J character conversion */
private static final String CHARACTER_CONVERTER =
"http://marc4j.org/features/charconv";
/** MARC4J pretty printing */
private static final String PRETTY_PRINTING =
"http://marc4j.org/features/pretty-printing";
/** MARC4J document type declaration property */
private static final String DOC_TYPE_DECL =
"http://marc4j.org/properties/document-type-declaration";
/** MARC4J schema location property */
private static final String SCHEMA_LOC =
"http://marc4j.org/properties/schema-location";
/** Namespace for MARCXML */
private static final String NS_URI =
"http://www.loc.gov/MARC21/slim";
/** Namespace for W3C XML Schema instance */
private static final String NS_XSI =
"http://www.w3.org/2001/XMLSchema-instance";
/** Schema location */
private String schemaLocation = null;
/** {@link DocType} object */
private DoctypeDecl doctype = null;
/** {@link ContentHandler} object */
private ContentHandler ch;
/** {@link ErrorHandler} object */
private ErrorHandler eh;
private CharacterConverter charconv = null;
/**
* <p>Sets the object for the given property.</p>
*
* @param name the property name
* @param obj the property object
*/
public void setProperty(String name, Object obj)
throws SAXNotRecognizedException, SAXNotSupportedException {
if (DOC_TYPE_DECL.equals(name))
this.doctype = (DoctypeDecl)obj;
else if (ERROR_HANDLER.equals(name))
this.eh = (ErrorHandler)obj;
else if (SCHEMA_LOC.equals(name))
this.schemaLocation = (String)obj;
else
super.setProperty(name, obj);
}
/**
* <p>Sets the boolean for the feature with the given name.</p>
*
* @param name the name of the feature
* @param value the boolean value
*/
public void setFeature(String name, boolean value)
throws SAXNotRecognizedException, SAXNotSupportedException {
if (ANSEL_TO_UNICODE.equals(name))
setCharacterConverter(true);
else if (CHARACTER_CONVERTER.equals(name))
setCharacterConverter(true);
else if (PRETTY_PRINTING.equals(name))
this.prettyPrinting = value;
else
super.setFeature(name, value);
}
/**
* <p>Sends the input source to the <code>MarcReader</code>.</p>
*
* @param input the {@link InputSource}
*/
public void parse(InputSource input) {
ch = getContentHandler();
if (ch == null) {
return;
}
try {
// Convert the InputSource into a BufferedReader.
BufferedReader br = null;
if (input.getCharacterStream() != null) {
br = new BufferedReader(input.getCharacterStream());
} else if (input.getByteStream() != null) {
br = new BufferedReader(new InputStreamReader(input.getByteStream(), "ISO8859_1"));
} else if (input.getSystemId() != null) {
java.net.URL url = new URL(input.getSystemId());
br = new BufferedReader(new InputStreamReader(url.openStream(), "ISO8859_1"));
} else {
throw new SAXException("Invalid InputSource object");
}
// Create a new MarcReader object.
MarcReader marcReader = new MarcReader();
// Register the MarcHandler implementation.
marcReader.setMarcHandler(this);
// Register the ErrorHandler implementation.
if (eh != null)
marcReader.setErrorHandler(eh);
// Send the file to the parse method.
marcReader.parse(br);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* <p>Returns the document handler being used, starts the document
* and reports the root element. </p>
*
*/
public void startCollection() {
try {
AttributesImpl atts = new AttributesImpl();
// Report start of XML document.
ch.startDocument();
// Report document type declaration
if (lh != null && doctype != null && schemaLocation == null) {
lh.startDTD(doctype.getName(),
doctype.getPublicId(),
doctype.getSystemId());
lh.endDTD();
}
// Outputting namespace declarations through the attribute object,
// since the startPrefixMapping refuses to output namespace declarations.
if (schemaLocation != null) {
atts.addAttribute("", "xsi", "xmlns:xsi", "CDATA", NS_XSI);
atts.addAttribute(NS_XSI, "schemaLocation", "xsi:schemaLocation",
"CDATA", schemaLocation);
}
// Do not output the namespace declaration for MARCXML
// together with a document type declaration
if (doctype == null)
atts.addAttribute("", "", "xmlns", "CDATA", NS_URI);
// Report start of prefix mapping for MARCXML
// OK together with Document Type Delcaration?
ch.startPrefixMapping("", NS_URI);
// Report root element
ch.startElement(NS_URI, "collection", "collection", atts);
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the starting element for a record and the leader node. </p>
*
* @param leader the leader
*/
public void startRecord(Leader leader) {
try {
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 3);
ch.startElement(NS_URI, "record", "record", EMPTY_ATTS);
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 5);
writeElement(NS_URI,"leader","leader", EMPTY_ATTS, leader.marshal());
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports a control field node (001-009).</p>
*
* @param tag the tag name
* @param data the data element
*/
public void controlField(String tag, char[] data) {
try {
AttributesImpl atts = new AttributesImpl();
atts.addAttribute("", "tag", "tag", "CDATA", tag);
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 5);
writeElement(NS_URI,"controlfield","controlfield", atts, data);
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the starting element for a data field (010-999).</p>
*
* @param tag the tag name
* @param ind1 the first indicator value
* @param ind2 the second indicator value
*/
public void startDataField(String tag, char ind1, char ind2) {
try {
AttributesImpl atts = new AttributesImpl();
atts.addAttribute("", "tag", "tag", "CDATA", tag);
atts.addAttribute("", "ind1", "ind1", "CDATA", String.valueOf(ind1));
atts.addAttribute("", "ind2", "ind2", "CDATA", String.valueOf(ind2));
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 5);
ch.startElement(NS_URI,"datafield","datafield", atts);
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports a subfield node.</p>
*
* @param code the data element identifier
* @param data the data element
*/
public void subfield(char code, char[] data) {
try {
AttributesImpl atts = new AttributesImpl();
atts.addAttribute("", "code", "code", "CDATA", String.valueOf(code));
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 7);
ch.startElement(NS_URI,"subfield","subfield", atts);
if (charconv != null) {
char[] unicodeData = charconv.convert(data);
ch.characters(unicodeData, 0, unicodeData.length);
} else {
ch.characters(data, 0, data.length);
}
ch.endElement(NS_URI,"subfield","subfield");
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the closing element for a data field.</p>
*
* @param tag the tag name
*/
public void endDataField(String tag) {
try {
ch.ignorableWhitespace("\n ".toCharArray(), 0, 5);
ch.endElement(NS_URI,"datafield","datafield");
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the closing element for a record.</p>
*
*/
public void endRecord() {
try {
if (prettyPrinting)
ch.ignorableWhitespace("\n ".toCharArray(), 0, 3);
ch.endElement(NS_URI,"record","record");
} catch (SAXException se) {
se.printStackTrace();
}
}
/**
* <p>Reports the closing element for the root, reports the end
* of the prefix mapping and the end a document. </p>
*
*/
public void endCollection() {
try {
if (prettyPrinting)
ch.ignorableWhitespace("\n".toCharArray(), 0, 1);
ch.endElement(NS_URI,"collection","collection");
ch.endPrefixMapping("");
ch.endDocument();
} catch (SAXException e) {
e.printStackTrace();
}
}
private void writeElement(String uri, String localName,
String qName, Attributes atts, String content)
throws SAXException {
writeElement(uri, localName, qName, atts, content.toCharArray());
}
private void writeElement(String uri, String localName,
String qName, Attributes atts, char content)
throws SAXException {
writeElement(uri, localName, qName, atts, String.valueOf(content).toCharArray());
}
private void writeElement(String uri, String localName,
String qName, Attributes atts, char[] content)
throws SAXException {
ch.startElement(uri, localName, qName, atts);
ch.characters(content, 0, content.length);
ch.endElement(uri, localName, qName);
}
private void setCharacterConverter(boolean convert) {
if (convert) {
try {
charconv = (CharacterConverter)CharacterConverterLoader
.createCharacterConverter("org.marc4j.charconv",
"org.marc4j.util.AnselToUnicode");
} catch (CharacterConverterLoaderException e) {
e.printStackTrace();
}
}
}
}
| Changed character converter handling
| src/org/marc4j/marcxml/MarcXmlFilter.java | Changed character converter handling |
|
Java | lgpl-2.1 | 840249a2a748a58f8299da02494be51ddfaf0bc9 | 0 | mbatchelor/pentaho-platform-plugin-reporting,mbatchelor/pentaho-platform-plugin-reporting,mbatchelor/pentaho-platform-plugin-reporting | /*
* This program is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software
* Foundation.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
* or from the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* Copyright 2008 Pentaho Corporation. All rights reserved.
*/
package org.pentaho.reporting.platform.plugin;
import java.io.FileNotFoundException;
import java.io.InputStream;
import org.pentaho.platform.api.engine.ISolutionFile;
import org.pentaho.platform.api.repository.ISolutionRepository;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.reporting.libraries.resourceloader.ResourceData;
import org.pentaho.reporting.libraries.resourceloader.ResourceKey;
import org.pentaho.reporting.libraries.resourceloader.ResourceLoadingException;
import org.pentaho.reporting.libraries.resourceloader.ResourceManager;
import org.pentaho.reporting.libraries.resourceloader.loader.AbstractResourceData;
/**
* This class is implemented to support loading solution files from the pentaho repository into JFreeReport
*
* @author Will Gorman/Michael D'Amour
*/
public class RepositoryResourceData extends AbstractResourceData {
public static final String PENTAHO_REPOSITORY_KEY = "pentahoRepositoryKey"; //$NON-NLS-1$
private String filename;
private ResourceKey key;
/**
* constructor which takes a resource key for data loading specifics
*
* @param key
* resource key
*/
public RepositoryResourceData(final ResourceKey key) {
if (key == null) {
throw new NullPointerException();
}
this.key = key;
this.filename = (String) key.getIdentifier();
}
/**
* gets a resource stream from the runtime context.
*
* @param caller
* resource manager
* @return input stream
*/
public InputStream getResourceAsStream(ResourceManager caller) throws ResourceLoadingException {
try {
ISolutionRepository solutionRepository = PentahoSystem.get(ISolutionRepository.class);
String fileIdentifier = key.getIdentifierAsString();
if (fileIdentifier.startsWith("/")) { // Remove the leading slash so this doesn't bum out a file based repository
fileIdentifier = fileIdentifier.substring(1);
}
return solutionRepository.getResourceInputStream(fileIdentifier, false, ISolutionRepository.ACTION_EXECUTE);
} catch (FileNotFoundException e) {
// might be due to access denial
throw new ResourceLoadingException(e.getLocalizedMessage(), e);
}
}
/**
* returns a requested attribute, currently only supporting filename.
*
* @param key
* attribute requested
* @return attribute value
*/
public Object getAttribute(String lookupKey) {
if (lookupKey.equals(ResourceData.FILENAME)) {
return filename;
}
return null;
}
/**
* return the version number
*
* @param caller
* resource manager
*
* @return version
*/
public long getVersion(ResourceManager caller) throws ResourceLoadingException {
ISolutionRepository solutionRepository = PentahoSystem.get(ISolutionRepository.class);
ISolutionFile file = solutionRepository.getSolutionFile(key.getIdentifier().toString(), ISolutionRepository.ACTION_EXECUTE);
// if we got a FileNotFoundException on getResourceInputStream then we will get a null file; avoid NPE
if (file != null) {
return file.getLastModified();
} else {
return -1;
}
}
/**
* get the resource key
*
* @return resource key
*/
public ResourceKey getKey() {
return key;
}
}
| src/org/pentaho/reporting/platform/plugin/RepositoryResourceData.java | /*
* This program is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software
* Foundation.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
* or from the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* Copyright 2008 Pentaho Corporation. All rights reserved.
*/
package org.pentaho.reporting.platform.plugin;
import java.io.FileNotFoundException;
import java.io.InputStream;
import org.pentaho.platform.api.engine.ISolutionFile;
import org.pentaho.platform.api.repository.ISolutionRepository;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.reporting.libraries.resourceloader.ResourceData;
import org.pentaho.reporting.libraries.resourceloader.ResourceKey;
import org.pentaho.reporting.libraries.resourceloader.ResourceLoadingException;
import org.pentaho.reporting.libraries.resourceloader.ResourceManager;
import org.pentaho.reporting.libraries.resourceloader.loader.AbstractResourceData;
/**
* This class is implemented to support loading solution files from the pentaho repository into JFreeReport
*
* @author Will Gorman/Michael D'Amour
*/
public class RepositoryResourceData extends AbstractResourceData {
public static final String PENTAHO_REPOSITORY_KEY = "pentahoRepositoryKey"; //$NON-NLS-1$
private String filename;
private ResourceKey key;
/**
* constructor which takes a resource key for data loading specifics
*
* @param key
* resource key
*/
public RepositoryResourceData(final ResourceKey key) {
if (key == null) {
throw new NullPointerException();
}
this.key = key;
this.filename = (String) key.getIdentifier();
}
/**
* gets a resource stream from the runtime context.
*
* @param caller
* resource manager
* @return input stream
*/
public InputStream getResourceAsStream(ResourceManager caller) throws ResourceLoadingException {
try {
ISolutionRepository solutionRepository = PentahoSystem.get(ISolutionRepository.class);
return solutionRepository.getResourceInputStream(key.getIdentifier().toString(), false, ISolutionRepository.ACTION_EXECUTE);
} catch (FileNotFoundException e) {
// might be due to access denial
throw new ResourceLoadingException(e.getLocalizedMessage(), e);
}
}
/**
* returns a requested attribute, currently only supporting filename.
*
* @param key
* attribute requested
* @return attribute value
*/
public Object getAttribute(String lookupKey) {
if (lookupKey.equals(ResourceData.FILENAME)) {
return filename;
}
return null;
}
/**
* return the version number
*
* @param caller
* resource manager
*
* @return version
*/
public long getVersion(ResourceManager caller) throws ResourceLoadingException {
ISolutionRepository solutionRepository = PentahoSystem.get(ISolutionRepository.class);
ISolutionFile file = solutionRepository.getSolutionFile(key.getIdentifier().toString(), ISolutionRepository.ACTION_EXECUTE);
// if we got a FileNotFoundException on getResourceInputStream then we will get a null file; avoid NPE
if (file != null) {
return file.getLastModified();
} else {
return -1;
}
}
/**
* get the resource key
*
* @return resource key
*/
public ResourceKey getKey() {
return key;
}
}
| Fixed pathing problem.
| src/org/pentaho/reporting/platform/plugin/RepositoryResourceData.java | Fixed pathing problem. |
|
Java | lgpl-2.1 | 876a5765795c8b76c9d88ab147779acbb61e89bd | 0 | nhochberger/Utilities | package hochberger.utilities.timing;
import org.apache.log4j.Logger;
public class Timing {
private long startNanos;
private long stopNanos;
private boolean running = false;
public Timing() {
super();
}
public void start() {
this.running = true;
this.startNanos = System.nanoTime();
}
public void stop() {
this.stopNanos = System.nanoTime();
this.running = false;
}
public long getNanos() {
if (this.running) {
return this.stopNanos - this.startNanos;
}
return System.nanoTime() - this.startNanos;
}
public void reportOn(final Logger logger) {
logger.info("Time measured: " + String.valueOf(getNanos())
+ " nanoseconds.");
}
}
| src/hochberger/utilities/timing/Timing.java | package hochberger.utilities.timing;
import org.apache.log4j.Logger;
public class Timing {
private long startNanos;
private long stopNanos;
public Timing() {
super();
}
public void start() {
this.startNanos = System.nanoTime();
}
public void stop() {
this.stopNanos = System.nanoTime();
}
public long getNanos() {
return this.stopNanos - this.startNanos;
}
public void reportOn(final Logger logger) {
logger.info("Time measured: " + String.valueOf(getNanos())
+ " nanoseconds.");
}
}
| further work on Timing | src/hochberger/utilities/timing/Timing.java | further work on Timing |
|
Java | lgpl-2.1 | 990da75e12b103915efbd6b3163e9941ea59d1c7 | 0 | MenoData/Time4J | /*
* -----------------------------------------------------------------------
* Copyright © 2013 Meno Hochschild, <http://www.menodata.de/>
* -----------------------------------------------------------------------
* This file (PlainDuration.java) is part of project Time4J.
*
* Time4J is free software: You can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Time4J is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Time4J. If not, see <http://www.gnu.org/licenses/>.
* -----------------------------------------------------------------------
*/
package net.time4j;
import net.time4j.base.MathUtils;
import net.time4j.engine.AbstractDuration;
import net.time4j.engine.ChronoException;
import net.time4j.engine.ChronoOperator;
import net.time4j.engine.ChronoUnit;
import net.time4j.engine.Normalizer;
import net.time4j.engine.TimeAxis;
import net.time4j.engine.TimeMetric;
import net.time4j.engine.TimePoint;
import net.time4j.engine.TimeSpan;
import net.time4j.tz.TZID;
import net.time4j.tz.TransitionStrategy;
import net.time4j.tz.ZonalOffset;
import java.io.Serializable;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* <p>ISO-konforme Zeitspanne zwischen zwei Zeitpunkten. </p>
*
* <p>Instanzen können über folgende Fabrikmethoden erzeugt
* werden: </p>
*
* <ul>
* <li>{@link #of(long, IsoUnit) of(long, U)}</li>
* <li>{@link #ofCalendarUnits(int, int, int)}</li>
* <li>{@link #ofClockUnits(int, int, int)}</li>
* <li>{@link #ofPositive()} (<i>builder</i>-Muster)</li>
* <li>{@link #ofNegative()} (<i>builder</i>-Muster)</li>
* <li>{@link #parse(String)}</li>
* <li>{@link #parseCalendarPeriod(String)}</li>
* <li>{@link #parseClockPeriod(String)}</li>
* </ul>
*
* <p>Alle Instanzen sind <i>immutable</i>, aber geänderte Kopien lassen
* sich über die Methoden {@code plus()}, {@code minus()}, {@code with()},
* {@code union()}, {@code multipliedBy()}, {@code abs()} und {@code negate()}
* erzeugen. Hierbei werden die Zeiteinheiten {@code ClockUnit.MILLIS} und
* {@code ClockUnit.MICROS} intern immer zu Nanosekunden normalisiert. Ansonsten
* muß eine Normalisierung explizit mittels {@code with(Normalizer)}
* angestoßen werden. </p>
*
* <p>Notiz: Die Definition eines optionalen negativen Vorzeichens ist streng
* genommen nicht Bestandteil des ISO-Standards, ist aber Bestandteil der
* XML-Schema-Spezifikation und legt die Lage zweier Zeitpunkte relativ
* zueinander fest. Eine Manipulation des Vorzeichens ist mit der Methode
* {@code negate()} möglich. </p>
*
* <p>Die Zeitarithmetik behandelt die Addition und Subtraktion einer Zeitspanne
* bezogen auf einen Zeitpunkt abhängig vom Vorzeichen der Zeitspanne wie
* im <a href="engine/AbstractDuration.html#algorithm">Standardalgorithmus</a>
* von Time4J beschrieben. </p>
*
* @param <U> generic type of time units
* @author Meno Hochschild
* @concurrency <immutable>
*/
public final class PlainDuration<U extends IsoUnit>
extends AbstractDuration<U>
implements Serializable {
//~ Statische Felder/Initialisierungen --------------------------------
private static final char ISO_DECIMAL_SEPARATOR = (
Boolean.getBoolean("net.time4j.format.iso.decimal.dot")
? '.'
: ',' // Empfehlung des ISO-Standards
);
private static final long MRD = 1000000000L;
private static final long MIO = 1000000L;
private static final Comparator<ChronoUnit> UNIT_COMPARATOR =
new Comparator<ChronoUnit>() {
@Override
public int compare(
ChronoUnit o1,
ChronoUnit o2
) {
return PlainDuration.compare(o1, o2);
}
};
private static final
Comparator<Item<? extends ChronoUnit>> ITEM_COMPARATOR =
new Comparator<Item<? extends ChronoUnit>>() {
@Override
public int compare(
Item<? extends ChronoUnit> o1,
Item<? extends ChronoUnit> o2
) {
return PlainDuration.compare(o1.getUnit(), o2.getUnit());
}
};
/**
* <p>Normalisiert die Zeitspannenelemente einer Zeitspanne auf der Basis
* {@code 1 Jahr = 12 Monate} und {@code 1 Tag = 24 Stunden} und
* {@code 1 Stunde = 60 Minuten} und {@code 1 Minute = 60 Sekunden},
* jedoch ohne die Tage zu Monaten zu konvertieren. </p>
*
* <p>VORSICHT: Zeitzonenbedingte Veränderungen der Tageslänge
* oder Schaltsekunden werden hier ignoriert. Deshalb sollte diese
* Normalisierung möglichst nur auf ISO-Zeitstempel ohne Zeitzonen-
* oder UTC-Unterstützung angewandt werden. Nur Zeiteinheiten der
* Enums {@link CalendarUnit} und {@link ClockUnit} können normalisiert
* werden. </p>
*
* <p>Wochen werden genau dann zu Tagen konvertiert, wenn sie nicht das
* einzige datumsbezogene Zeitspannenelement darstellen. </p>
*
* @see PlainTimestamp
*/
public static Normalizer<IsoUnit> STD_PERIOD = new TimestampNormalizer();
/**
* <p>Normalisiert die Datumselemente einer Zeitspanne auf der Basis
* {@code 1 Jahr = 12 Monate}, jedoch ohne die Tage zu Monaten zu
* konvertieren. </p>
*
* <p>Wochen werden genau dann zu Tagen konvertiert, wenn sie nicht das
* einzige datumsbezogene Zeitspannenelement darstellen. Nur Zeiteinheiten
* des Enums {@link CalendarUnit} werden normalisiert. </p>
*
* @see PlainDate
*/
public static Normalizer<CalendarUnit> STD_CALENDAR_PERIOD =
new DateNormalizer();
/**
* <p>Normalisiert die Uhrzeitelemente einer Zeitspanne auf der Basis
* {@code 1 Tag = 24 Stunden} und {@code 1 Stunde = 60 Minuten} und
* {@code 1 Minute = 60 Sekunden}. </p>
*
* <p>VORSICHT: Zeitzonenbedingte Veränderungen der Tageslänge
* oder UTC-Schaltsekunden werden hier ignoriert. Deshalb sollte diese
* Normalisierung nicht auf Zeitzonen- oder UTC-sensible Zeitpunkttypen
* angewandt werden. Nur Zeiteinheiten des Enums {@link ClockUnit}
* werden normalisiert. </p>
*
* @see PlainTime
*/
public static Normalizer<ClockUnit> STD_CLOCK_PERIOD = new TimeNormalizer();
private static final PlainDuration<IsoUnit> ZERO =
new PlainDuration<IsoUnit>(false);
private static final long serialVersionUID = -6321211763598951499L;
//~ Instanzvariablen --------------------------------------------------
/**
* @serial list of amounts and units
*/
private final List<Item<U>> items;
/**
* @serial marks a negative time span
*/
private final boolean negative;
/**
* @serial marks a calendrical only time span
*/
private final boolean calendrical;
//~ Konstruktoren -----------------------------------------------------
// Standard-Konstruktor
private PlainDuration(
List<Item<U>> items,
boolean negative,
boolean calendrical
) {
super();
boolean empty = items.isEmpty();
if (empty) {
this.items = Collections.emptyList();
} else {
Collections.sort(items, ITEM_COMPARATOR);
this.items = Collections.unmodifiableList(items);
}
this.negative = (empty ? false : negative);
this.calendrical = calendrical;
}
// Kopiekonstruktor (siehe negate())
private PlainDuration(
PlainDuration<U> duration,
boolean inverse
) {
super();
this.items = duration.items;
this.negative = (inverse ? !duration.negative : duration.negative);
this.calendrical = duration.calendrical;
}
// leere Zeitspanne
private PlainDuration(boolean calendrical) {
super();
this.items = Collections.emptyList();
this.negative = false;
this.calendrical = calendrical;
}
//~ Methoden ----------------------------------------------------------
/**
* <p>Erzeugt eine neue Zeitspanne, die auf nur einer Zeiteinheit
* beruht. </p>
*
* <p>Ist der angegebene Betrag negativ, so wird auch die Zeitspanne
* negativ sein. Ist er {@code 0}, wird eine leere Zeitspanne
* generiert. </p>
*
* @param <U> generic unit type
* @param amount amount as count of units
* @param unit single time unit
* @return new duration
*/
public static <U extends IsoUnit> PlainDuration<U> of(
long amount,
U unit
) {
if (amount == 0) {
return new PlainDuration<U>(unit.isCalendrical());
}
List<Item<U>> items = new ArrayList<Item<U>>(1);
items.add(
new Item<U>(
((amount < 0) ? MathUtils.safeNegate(amount) : amount),
unit)
);
return new PlainDuration<U>(items, (amount < 0), unit.isCalendrical());
}
/**
* <p>Konstruiert über den Umweg des <i>builder</i>-Entwurfsmusters
* eine neue ISO-konforme positive Zeitspanne für kombinierte Datums- und
* Uhrzeiteinheiten. </p>
*
* @return help object for building a positive {@code PlainDuration}
*/
public static Builder ofPositive() {
return new Builder(false);
}
/**
* <p>Konstruiert über den Umweg des <i>builder</i>-Entwurfsmusters
* eine neue ISO-konforme negative Zeitspanne für kombinierte Datums- und
* Uhrzeiteinheiten. </p>
*
* @return help object for building a negative {@code PlainDuration}
*/
public static Builder ofNegative() {
return new Builder(true);
}
/**
* <p>Erzeugt eine positive Zeitspanne in Jahren, Monaten und Tagen. </p>
*
* <p>Alle Argumente dürfen nicht negativ sein. Ist ein Argument
* gleich {@code 0}, wird es ignoriert. Wird eine negative Zeitspanne
* gewünscht, kann auf dem Ergebnis einfach {@code negate()}
* aufgerufen werden. </p>
*
* @param years amount in years
* @param months amount in months
* @param days amount in days
* @return new duration
* @throws IllegalArgumentException if any argument is negative
* @see #negate()
*/
public static PlainDuration<CalendarUnit> ofCalendarUnits(
int years,
int months,
int days
) {
return PlainDuration.ofCalendarUnits(years, months, days, false);
}
/**
* <p>Erzeugt eine positive Zeitspanne in Stunden, Minuten und
* Sekunden. </p>
*
* <p>Alle Argumente dürfen nicht negativ sein. Ist ein Argument
* gleich {@code 0}, wird es ignoriert. Wird eine negative Zeitspanne
* gewünscht, kann auf dem Ergebnis einfach {@code negate()}
* aufgerufen werden. </p>
*
* @param hours amount in hours
* @param minutes amount in minutes
* @param seconds amount in seconds
* @return new duration
* @throws IllegalArgumentException if any argument is negative
* @see #negate()
*/
public static PlainDuration<ClockUnit> ofClockUnits(
int hours,
int minutes,
int seconds
) {
return PlainDuration.ofClockUnits(hours, minutes, seconds, 0, false);
}
/**
* <p>Konstruiert eine Metrik für beliebige Standard-Zeiteinheiten
* in normalisierter Form. </p>
*
* <p><strong>WICHTIG:</strong> Fehlt die der Präzision der zu
* vergleichenden Zeitpunkte entsprechende kleinste Zeiteinheit, wird
* im allgemeinen ein Subtraktionsrest übrigbleiben. Das Ergebnis
* der Metrikberechnung wird dann nicht den vollständigen zeitlichen
* Abstand zwischen den Zeitpunkten ausdrücken. Für die
* Vollständigkeit der Berechnung ist bei Datumsangaben mindestens
* die explizite Angabe der Tageseinheit notwendig. </p>
*
* @param <U> generic unit type
* @param units time units to be used in calculation
* @return immutable metric for calculating a duration in given units
* @throws IllegalArgumentException if any time unit is missing or
* if there are unit duplicates
*/
public static <U extends IsoUnit>
TimeMetric<U, PlainDuration<U>> in(U... units) {
if (units.length == 0) {
throw new IllegalArgumentException("Missing units.");
}
for (int i = 0; i < units.length - 1; i++) {
for (int j = i + 1; j < units.length; j++) {
if (units[i].equals(units[j])) {
throw new IllegalArgumentException(
"Duplicate unit: " + units[i]);
}
}
}
Arrays.sort(units, UNIT_COMPARATOR);
return new Metric<U>((units.length > 1), Arrays.asList(units));
}
/**
* <p>Konstruiert eine Metrik in Jahren, Monaten und Tagen. </p>
*
* <p>Am Ende wird die Darstellung automatisch normalisiert, also kleine
* Zeiteinheiten so weit wie möglich in große Einheiten
* umgerechnet. </p>
*
* @return immutable metric for calculating a duration in years,
* months and days
* @see #in(IsoUnit[]) in(U[])
* @see CalendarUnit#YEARS
* @see CalendarUnit#MONTHS
* @see CalendarUnit#DAYS
*/
public static
TimeMetric<CalendarUnit, PlainDuration<CalendarUnit>> inYearsMonthsDays() {
return PlainDuration.in(
CalendarUnit.YEARS,
CalendarUnit.MONTHS,
CalendarUnit.DAYS
);
}
/**
* <p>Konstruiert eine Metrik in Stunden, Minuten, Sekunden und Nanos. </p>
*
* <p>Am Ende wird die Darstellung automatisch normalisiert, also kleine
* Zeiteinheiten so weit wie möglich in große Einheiten
* umgerechnet. </p>
*
* @return immutable metric for calculating a duration in clock units
* @see #in(IsoUnit[]) in(U[])
* @see ClockUnit#HOURS
* @see ClockUnit#MINUTES
* @see ClockUnit#SECONDS
* @see ClockUnit#NANOS
*/
public static
TimeMetric<ClockUnit, PlainDuration<ClockUnit>> inClockUnits() {
return PlainDuration.in(
ClockUnit.HOURS,
ClockUnit.MINUTES,
ClockUnit.SECONDS,
ClockUnit.NANOS
);
}
@Override
public List<Item<U>> getTotalLength() {
return this.items;
}
@Override
public boolean isNegative() {
return this.negative;
}
/**
* <p>Ist die angegebene Zeiteinheit in dieser Zeitspanne enthalten? </p>
*
* <p>Eine Zeiteinheit ist auch dann enthalten, wenn sie als
* Sekundenbruchteil (Ziffer in Symboldarstellung) erst konvertiert
* werden muß. </p>
*
* @param unit time unit to be checked (optional)
* @return {@code true} if this duration contains given unit
* else {@code false}
* @see #getPartialAmount(ChronoUnit) getPartialAmount(U)
*/
@Override
public boolean contains(ChronoUnit unit) {
if (unit instanceof IsoUnit) {
IsoUnit isoUnit = (IsoUnit) unit;
boolean fractional = isFractionUnit(isoUnit);
for (int i = 0, n = this.items.size(); i < n; i++) {
Item<U> item = this.items.get(i);
U u = item.getUnit();
if (
u.equals(unit)
|| (fractional && isFractionUnit(u))
) {
return (item.getAmount() > 0);
}
}
}
return false;
}
/**
* <p>Liefert den Betrag zu einer Zeiteinheit. </p>
*
* <p>Wenn die angegebene Zeiteinheit nicht in der Zeitspanne enthalten ist,
* liefert die Methode den Wert {@code 0}. Sekundenbruchteile, die an der
* Symboldarstellung ihrer Einheiten erkennbar sind, werden automatisch
* konvertiert. Konkret: Wenn eine Zeitspanne z.B. Nanosekunden speichert,
* aber nach Mikrosekunden gefragt wird, dann wird der in der Zeitspanne
* enthaltene Nanosekundenwert mit dem Faktor {@code 1000} multipliziert
* und zurückgegeben. </p>
*
* @param unit time unit the amount is queried for (optional)
* @return non-negative amount associated with given unit ({@code >= 0})
*/
@Override
public long getPartialAmount(ChronoUnit unit) {
if (unit instanceof IsoUnit) {
IsoUnit isoUnit = (IsoUnit) unit;
boolean fractional = isFractionUnit(isoUnit);
for (int i = 0, n = this.items.size(); i < n; i++) {
Item<U> item = this.items.get(i);
U u = item.getUnit();
if (u.equals(unit)) {
return item.getAmount();
} else if (
fractional
&& isFractionUnit(u)
) {
int d1 = u.getSymbol() - '0';
int d2 = isoUnit.getSymbol() - '0';
int factor = 1;
for (int j = 0, m = Math.abs(d1 - d2); j < m; j++) {
factor *= 10;
}
if (d1 >= d2) {
return item.getAmount() / factor;
} else {
return item.getAmount() * factor;
}
}
}
}
return 0;
}
/**
* <p>Liefert ein Hilfsobjekt zum Vergleichen von Zeitspannenobjekten
* auf Basis ihrer Länge. </p>
*
* <p>Erzeugt einen {@code Comparator}, der letztlich auf dem Ausdruck
* {@code base.plus(duration1).compareTo(base.plus(duration2))} beruht.
* Der Basiszeitpunkt ist notwendig, weil sonst Zeitspannenobjekte dieser
* Klasse nicht notwendig eine physikalisch feste Länge haben.
* Zum Beispiel sind Monate variable Zeiteinheiten mit unterschiedlich
* vielen Tagen. </p>
*
* @param <U> generic unit type
* @param <T> generic type of time point
* @param base base time point which durations will use for comparison
* @return {@code Comparator} for plain durations
* @see TimePoint#compareTo(TimePoint) TimePoint.compareTo(T)
*/
public static <U extends IsoUnit, T extends TimePoint<? super U, T>>
Comparator<PlainDuration<U>> comparator(T base) {
return new LengthComparator<U, T>(base);
}
/**
* <p>Liefert eine Kopie dieser Instanz, in der der angegebene Betrag zum
* mit der angegebenen Zeiteinheit assoziierten Feldwert addiert wird. </p>
*
* <p>Die Methode berücksichtigt auch das Vorzeichen der Zeitspanne.
* Beispiel in Pseudo-Code: {@code [P5M].plus(-6, CalendarUnit.MONTHS)} wird
* zu {@code [-P1M]}. Ist der zu addierende Betrag {@code 0}, liefert die
* Methode einfach diese Instanz selbst. Um eine gemischte Zeitspanne mit
* Wochen und anderen Datumselementen zu verhindern, werden Wochen bei
* Bedarf automatisch zu Tagen normalisiert. </p>
*
* <p>Notiz: Gemischte Vorzeichen im Ergebnis sind nicht zulässig und
* werden mit einem Abbruch quittiert. Zum Beispiel ist folgender Ausdruck
* nicht erlaubt: {@code [-P1M].plus(30, CalendarUnit.DAYS)}</p>
*
* @param amount temporal amount to be added (maybe negative)
* @param unit associated time unit
* @return new changed duration while this duration remains unaffected
* @throws IllegalStateException if the result gets mixed signs by
* adding the partial amounts
* @throws ArithmeticException in case of long overflow
* @see #with(long, IsoUnit) with(long, U)
*/
public PlainDuration<U> plus(
long amount,
U unit
) {
checkUnit(unit);
long originalAmount = amount;
U originalUnit = unit;
boolean negatedValue = false;
if (amount == 0) {
return this;
} else if (amount < 0) {
amount = MathUtils.safeNegate(amount);
negatedValue = true;
}
// Millis, Micros und Weeks ersetzen
List<Item<U>> temp = new ArrayList<Item<U>>(this.getTotalLength());
Item<U> item = replaceItem(this.getTotalLength(), amount, unit);
if (item != null) {
amount = item.getAmount();
unit = item.getUnit();
}
if (this.isEmpty()) {
temp.add((item == null) ? new Item<U>(amount, unit) : item);
return new PlainDuration<U>(
temp,
negatedValue,
this.calendrical && unit.isCalendrical());
}
int index = -1;
if (unit.isCalendrical() && (unit != CalendarUnit.WEEKS)) {
index = replaceWeeksForDays(temp, unit);
}
if (index == -1) {
index = this.getIndex(unit);
}
// Items aktualisieren
boolean resultNegative = this.isNegative();
if (index < 0) {
if (this.isNegative() == negatedValue) {
temp.add(new Item<U>(amount, unit));
} else {
this.throwMixedSignsException(originalAmount, originalUnit);
}
} else {
long sum =
MathUtils.safeAdd(
MathUtils.safeMultiply(
temp.get(index).getAmount(),
(this.isNegative() ? -1 : 1)
),
MathUtils.safeMultiply(
amount,
(negatedValue ? -1 : 1)
)
);
if (sum == 0) {
temp.remove(index);
} else if (
(this.count() == 1)
|| (this.isNegative() == (sum < 0))
) {
long absSum = ((sum < 0) ? MathUtils.safeNegate(sum) : sum);
temp.set(index, new Item<U>(absSum, unit));
resultNegative = (sum < 0);
} else {
this.throwMixedSignsException(originalAmount, originalUnit);
}
}
return new PlainDuration<U>(
temp,
resultNegative,
this.calendrical && unit.isCalendrical());
}
/**
* <p>Liefert eine Kopie dieser Instanz, in der der angegebene Betrag
* vom mit der angegebenen Zeiteinheit assoziierten Feldwert subtrahiert
* wird. </p>
*
* <p>Entspricht {@code plus(-amount, unit)}. </p>
*
* @param amount temporal amount to be subtracted (maybe negative)
* @param unit associated time unit
* @return new changed duration while this duration remains unaffected
* @throws IllegalStateException if the result gets mixed signs by
* subtracting the partial amounts
* @throws ArithmeticException in case of long overflow
* @see #plus(long, IsoUnit) plus(long, U)
*/
public PlainDuration<U> minus(
long amount,
U unit
) {
return this.plus(MathUtils.safeNegate(amount), unit);
}
/**
* <p>Erzeugt eine neue Zeitspanne als Vereinigung dieser und der
* angegebenen Zeitspanne, wobei Beträge zu gleichen Zeiteinheiten
* addiert werden. </p>
*
* <p>Diese Methode vereinigt anders als {@code union()} nur
* Zeitspannen mit dem gleichen Einheitstyp. Weitere Details sind
* gleich und der Beschreibung von {@link #union(TimeSpan)} zu
* entnehmen. </p>
*
* @param timespan other time span this duration will be merged
* with by adding the partial amounts
* @return new merged duration
* @throws IllegalStateException if the result gets mixed signs by
* adding the partial amounts
* @throws ArithmeticException in case of long overflow
*/
public PlainDuration<U> plus(TimeSpan<? extends U> timespan) {
return add(this, timespan, false);
}
/**
* <p>Erzeugt eine neue Zeitspanne als Vereinigung dieser und der
* angegebenen Zeitspanne, wobei die Beträge ds Arguments zu
* gleichen Zeiteinheiten subtrahiert werden. </p>
*
* <p>Weitere Details siehe {@link #plus(TimeSpan)}. </p>
*
* @param timespan other time span this duration will be merged
* with by subtracting the partial amounts
* @return new merged duration
* @throws IllegalStateException if the result gets mixed signs by
* subtracting the partial amounts
* @throws ArithmeticException in case of long overflow
*/
public PlainDuration<U> minus(TimeSpan<? extends U> timespan) {
return add(this, timespan, true);
}
/**
* <p>Liefert eine Kopie dieser Instanz mit dem angegebenen geänderten
* Wert. </p>
*
* <p>Entspricht {@code plus(amount - getAmount(unit), unit)}. </p>
*
* @param amount temporal amount to be set (maybe negative)
* @param unit associated time unit
* @return new changed duration while this duration remains unaffected
* @throws IllegalStateException if the result gets mixed signs by
* setting the partial amounts
* @throws ArithmeticException in case of long overflow
* @see #plus(long, IsoUnit) plus(long, U)
*/
public PlainDuration<U> with(
long amount,
U unit
) {
long absAmount =
((amount < 0) ? MathUtils.safeNegate(amount) : amount);
Item<U> item = replaceItem(this.getTotalLength(), absAmount, unit);
if (item != null) {
absAmount = item.getAmount();
unit = item.getUnit();
}
long oldAmount;
if (
unit.equals(CalendarUnit.DAYS)
&& this.contains(CalendarUnit.WEEKS)
) {
oldAmount =
MathUtils.safeMultiply(
this.getPartialAmount(CalendarUnit.WEEKS),
7L
);
} else {
oldAmount = this.getPartialAmount(unit);
}
return this.plus(
MathUtils.safeSubtract(
MathUtils.safeMultiply(
absAmount,
(amount < 0) ? - 1 : 1
),
MathUtils.safeMultiply(
oldAmount,
this.isNegative() ? -1 : 1
)
),
unit
);
}
/**
* <p>Liefert die absolute immer positive Variante dieser Zeitspanne. </p>
*
* <p>Beispiel: {@code [-P5M].abs()} wird zu {@code [P5M]}. </p>
*
* @return new positive duration if this duration is negative else this
* duration unchanged
* @see #isNegative()
* @see #negate()
*/
public PlainDuration<U> abs() {
if (this.isNegative()) {
return this.negate();
} else {
return this;
}
}
/**
* <p>Liefert eine Kopie dieser Instanz, die das negative Äquivalent
* darstellt. </p>
*
* <p>Ein zweifacher Aufruf dieser Methode liefert wieder eine
* inhaltlich gleiche Instanz. Also gilt immer folgende Beziehung:
* {@code this.negate().negate().equals(this) == true}. Liegt der
* Sonderfall einer leeren Zeitspanne vor, dann ist diese Methode ohne
* Wirkung und liefert nur die gleiche Instanz zurück. Entspricht
* dem Ausdruck {@code multipliedBy(-1)}. </p>
*
* <p>Beispiel: {@code [-P5M].negate()} wird zu {@code [P5M]}. </p>
*
* @return new negative duration if this duration is positive else a new
* positive duration with the same partial amounts and units
* @see #isNegative()
* @see #multipliedBy(int)
*/
@Override
public PlainDuration<U> negate() {
return this.multipliedBy(-1);
}
/**
* <p>Multipliziert alle enthaltenen Beträge mit dem angegebenen
* Faktor. </p>
*
* <p>Ist der Faktor {@code 0}, ist die neue Zeitspanne leer. Mit dem
* Faktor {@code 1} wird diese Instanz selbst unverändert
* zurückgegeben. Bei einem negativen Faktor wird zusätzlich
* das Vorzeichen geändert. </p>
*
* @param factor multiplication factor
* @return new duration with all amounts multiplied while this duration
* remains unaffected
* @throws ArithmeticException in case of long overflow
*/
public PlainDuration<U> multipliedBy(int factor) {
if (
this.isEmpty()
|| (factor == 1)
) {
return this;
} else if (factor == 0) {
return new PlainDuration<U>(this.calendrical);
} else if (factor == -1) {
return new PlainDuration<U>(this, true);
}
List<Item<U>> newItems = new ArrayList<Item<U>>(this.count());
int scalar = Math.abs(factor);
for (int i = 0, n = this.count(); i < n; i++) {
Item<U> item = this.getTotalLength().get(i);
newItems.add(
new Item<U>(
MathUtils.safeMultiply(item.getAmount(), scalar),
item.getUnit()
)
);
}
return new PlainDuration<U>(
newItems,
((factor < 0) ? !this.isNegative() : this.isNegative()),
this.calendrical
);
}
/**
* <p>Erzeugt eine neue Zeitspanne als Vereinigung dieser und der
* angegebenen Zeitspanne, wobei Beträge zu gleichen Zeiteinheiten
* addiert werden. </p>
*
* <p><i>Vereinigung von Zeitspannen in Datum und Uhrzeit</i></p>
* <pre>
* PlainDuration<CalendarUnit> dateDuration =
* PlainDuration.ofCalendarUnits(2, 7, 10);
* PlainDuration<ClockUnit> timeDuration =
* PlainDuration.ofClockUnits(0, 30, 0);
* System.out.println(dateDuration.union(timeDuration)); // P2Y7M10DT30M
* </pre>
*
* <p><i>Vereinigung als Addition von Zeitspannen</i></p>
* <pre>
* PlainDuration<CalendarUnit> p1 =
* PlainDuration.ofCalendarUnits(0, 0, 10);
* PlainDuration<CalendarUnit> p2 =
* PlainDuration.of(3, CalendarUnit.WEEKS);
* System.out.println(p1.union(p2)); // P31D
* </pre>
*
* <p>Um eine gemischte Zeitspanne mit Wochen und anderen Datumselementen
* zu verhindern, werden Wochen bei Bedarf automatisch zu Tagen
* normalisiert (siehe auch letztes Beispiel). </p>
*
* <p>Falls die Vorzeichen beider Zeitspannen verschieden sind, müssen
* im Ergebnis trotzdem die Vorzeichen aller Beträge gleich sein, damit
* eindeutig das Vorzeichen der Ergebnis-Zeitspanne feststeht. Beispiel in
* Pseudo-Code: [P4D] union [-P1M34D] = [-P1M30D]. Hingegen führt die
* Vereinigung [P5M4D] union [-P4M34D] zum Abbruch, weil [P+1M-30D] keine
* sinnvolle Vorzeichenregelung erlaubt. </p>
*
* <p>Notiz: Anders als in {@code javax.xml.datatype.Duration} ist die
* Anforderung an gleiche Vorzeichen hier härter, weil diese Klasse
* auch zur Verwendung in Zeitzonenkontexten vorgesehen ist, wo kein
* Verlaß auf feste Umrechnungen à la 1 Tag = 24 Stunden besteht.
* Allerdings besteht die Möglichkeit, Zeitspannen vor der Vereinigung
* geeignet zu normalisieren. </p>
*
* @param timespan other time span this duration is to be merged with
* @return new merged duration with {@code IsoUnit} as unit type
* @throws IllegalStateException if the result gets mixed signs by
* adding the partial amounts
*/
public PlainDuration<IsoUnit> union(TimeSpan<? extends IsoUnit> timespan) {
return ZERO.plus(this).plus(timespan);
}
/**
* <p>Normalisiert diese Zeitspanne über den angegebenen
* Mechanismus. </p>
*
* @param normalizer help object for normalizing this duration
* @return new normalized duration while this duration remains unaffected
* @see #STD_PERIOD
* @see #STD_CALENDAR_PERIOD
* @see #STD_CLOCK_PERIOD
*/
public PlainDuration<U> with(Normalizer<U> normalizer) {
return convert(normalizer.normalize(this));
}
/**
* <p>Wendet diese Dauer so auf einen {@code Moment} an, daß in
* der UTC-Zeitzone der frühere lokale Zeitstempel berechnet
* wird. </p>
*
* @return operator applicable on {@code Moment}-objects
* @see #later()
*/
public ChronoOperator<Moment> earlier() {
return this.earlier(ZonalOffset.UTC, TransitionStrategy.PUSH_FORWARD);
}
/**
* <p>Wendet diese Dauer so auf einen {@code Moment} an, daß in
* der angegebenen Zeitzone der frühere lokale Zeitstempel berechnet
* wird. </p>
*
* @param timezone time zone id
* @param strategy conflict resolving strategy
* @return operator applicable on {@code Moment}-objects
* @see #later(TZID,TransitionStrategy)
*/
public ChronoOperator<Moment> earlier(
final TZID timezone,
final TransitionStrategy strategy
) {
return new ChronoOperator<Moment>() {
@Override
public Moment apply(Moment entity) {
PlainTimestamp ts =
entity.inTimezone(timezone).minus(PlainDuration.this);
return ts.inTimezone(timezone, strategy);
}
};
}
/**
* <p>Wendet diese Dauer so auf einen {@code Moment} an, daß in
* der UTC-Zeitzone der spätere lokale Zeitstempel berechnet
* wird. </p>
*
* @return operator applicable on {@code Moment}-objects
* @see #earlier()
*/
public ChronoOperator<Moment> later() {
return this.later(ZonalOffset.UTC, TransitionStrategy.PUSH_FORWARD);
}
/**
* <p>Wendet diese Dauer so auf einen {@code Moment} an, daß in
* der angegebenen Zeitzone der spätere lokale Zeitstempel berechnet
* wird. </p>
*
* @param timezone time zone id
* @param strategy conflict resolving strategy
* @return operator applicable on {@code Moment}-objects
* @see #earlier(TZID,TransitionStrategy)
*/
public ChronoOperator<Moment> later(
final TZID timezone,
final TransitionStrategy strategy
) {
return new ChronoOperator<Moment>() {
@Override
public Moment apply(Moment entity) {
PlainTimestamp ts =
entity.inTimezone(timezone).plus(PlainDuration.this);
return ts.inTimezone(timezone, strategy);
}
};
}
/**
* <p>Basiert auf allen gespeicherten Zeitspannenelementen und dem
* Vorzeichen. </p>
*
* @return {@code true} if {@code obj} is also a {@code PlainDuration},
* has the same units and amounts, the same sign and the same
* calendrical status else {@code false}
* @see #getTotalLength()
* @see #isNegative()
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
} else if (obj instanceof PlainDuration) {
PlainDuration<?> that = PlainDuration.class.cast(obj);
return (
(this.negative == that.negative)
&& (this.calendrical == that.calendrical)
&& this.getTotalLength().equals(that.getTotalLength())
);
} else {
return false;
}
}
/**
* <p>Basiert auf allen gespeicherten Zeitspannenelementen und dem
* Vorzeichen passend zur Definition von {@code equals()}. </p>
*/
@Override
public int hashCode() {
int hash = this.getTotalLength().hashCode();
if (this.negative) {
hash ^= hash;
}
return hash;
}
/**
* <p>Liefert eine kanonische Darstellung analog zur
* ISO-8601-Definition. </p>
*
* <p>Entspricht {@code toString(false)}. </p>
*
* @see #toString(boolean)
* @see #parse(String)
*/
@Override
public String toString() {
return this.toString(false);
}
/**
* <p>Liefert eine kanonische Darstellung, die optional mit einem negativen
* Vorzeichen beginnt, dann mit dem Buchstaben "P" fortsetzt,
* gefolgt von einer Reihe von alphanumerischen Zeichen analog zur
* ISO8601-Definition. </p>
*
* <p>Beispiel: Im ISO8601-Format ist eine Zeitspanne von 1 Monat, 3 Tagen
* und 4 Stunden als "P1M3DT4H" beschrieben, wobei der Buchstabe
* "T" Datums- und Uhrzeitteil trennt. </p>
*
* <p>Ist die Zeitspanne negativ, so wird in Übereinstimmung mit der
* XML-Schema-Norm ein Minuszeichen vorangestellt (z.B. "-P2D"),
* während eine leere Zeitspanne das Format "PT0S" hat
* (Sekunde als universelles Zeitmaß). Hat der Sekundenteil einen
* Bruchteil, wird als Dezimaltrennzeichen das Komma entsprechend der
* Empfehlung des ISO-Standards gewählt, es sei denn, über den
* xml-Parameter wurde die Verwendung für XML geregelt (dort ist nur
* ein Punkt zulässig). Speziell für XML gilt auch, daß
* ein vorhandenes Wochenfeld zu Tagen auf der Basis (1 Woche = 7 Tage)
* normalisiert wird. </p>
*
* <p>Hinweis: Die ISO-Empfehlung, ein Komma als Dezimaltrennzeichen zu
* verwenden, kann mit Hilfe der bool'schen System-Property
* "net.time4j.format.iso.decimal.dot" so geändert
* werden, daß die angelsächsiche Variante mit Punkt statt
* Komma verwendet wird. </p>
*
* @param xml Is a XML-Schema-compatible output required?
* @return String
* @throws ChronoException if in xml-mode any special units shall be
* output, but units of type {@code CalendarUnit} will be
* translated to xml-compatible units if necessary
* @see #parse(String)
* @see IsoUnit#getSymbol()
*/
public String toString(boolean xml) {
if (this.isEmpty()) {
return (this.calendrical ? "P0D" : "PT0S");
}
StringBuilder sb = new StringBuilder();
if (this.isNegative()) {
sb.append('-');
}
sb.append('P');
boolean timeAppended = false;
long nanos = 0;
long seconds = 0;
for (
int index = 0, limit = this.getTotalLength().size();
index < limit;
index++
) {
Item<U> item = this.getTotalLength().get(index);
U unit = item.getUnit();
if (!timeAppended && !unit.isCalendrical()) {
sb.append('T');
timeAppended = true;
}
long amount = item.getAmount();
char symbol = unit.getSymbol();
if ((symbol > '0') && (symbol <= '9')) {
assert (symbol == '9');
nanos = amount;
} else if (symbol == 'S') {
seconds = amount;
} else {
if (xml) {
switch (symbol) {
case 'D':
case 'M':
case 'Y':
case 'H':
sb.append(amount);
break;
case 'W':
sb.append(MathUtils.safeMultiply(amount, 7));
symbol = 'D';
break;
case 'Q':
sb.append(MathUtils.safeMultiply(amount, 3));
symbol = 'M';
break;
case 'E':
sb.append(MathUtils.safeMultiply(amount, 10));
symbol = 'Y';
break;
case 'C':
sb.append(MathUtils.safeMultiply(amount, 100));
symbol = 'Y';
break;
case 'I':
sb.append(MathUtils.safeMultiply(amount, 1000));
symbol = 'Y';
break;
default:
throw new ChronoException(
"Special units cannot be output in xml-mode: "
+ this.toString(false));
}
} else {
sb.append(amount);
}
if (symbol == '\u0000') {
sb.append('{');
sb.append(unit);
sb.append('}');
} else {
sb.append(symbol);
}
}
}
if (nanos != 0) {
seconds = MathUtils.safeAdd(seconds, nanos / MRD);
sb.append(seconds);
sb.append(xml ? '.' : ISO_DECIMAL_SEPARATOR);
String f = String.valueOf(nanos % MRD);
for (int i = 0, len = 9 - f.length(); i < len; i++) {
sb.append('0');
}
sb.append(f);
sb.append('S');
} else if (seconds != 0) {
sb.append(seconds);
sb.append('S');
}
return sb.toString();
}
/**
* <p>Parst eine kanonische ISO-konforme Darstellung zu einer
* Zeitspanne. </p>
*
* <p>Syntax in RegExp-ähnlicher Notation: </p>
*
* <pre>
* amount := [0-9]+
* fraction := [,\.]{amount}
* years-months-days := ({amount}Y)?({amount}M)?({amount}D)?
* weeks := ({amount}W)?
* date := {years-months-days} | {weeks}
* time := ({amount}H)?({amount}M)?({amount}{fraction}?S)?
* duration := P{date}(T{time})? | PT{time}
* </pre>
*
* <p>Die in {@link CalendarUnit} definierten Zeiteinheiten MILLENNIA,
* CENTURIES, DECADES und QUARTERS werden mitsamt ihren Symbolen ebenfalls
* unterstützt. </p>
*
* <p>Weiterhin gilt die Einschränkung, daß die Symbole P und T
* mindestens ein Zeitfeld nach sich ziehen müssen. Alle Felder mit
* {@code 0}-Beträgen werden beim Parsen ignoriert. Das einzig erlaubte
* Dezimalfeld der Sekunden kann sowohl einen Punkt wie auch ein Komma
* als Dezimaltrennzeichen haben. Im ISO-Standard ist das Komma das
* bevorzugte Zeichen, in XML-Schema nur der Punkt zulässig. Speziell
* für die Verwendung in XML-Schema (Typ xs:duration) ist zu beachten,
* daß Wochenfelder anders als im ISO-Standard nicht vorkommen. Die
* Methode {@code toString(true)} berücksichtigt diese Besonderheiten
* von XML-Schema (abgesehen davon, daß XML-Schema potentiell
* unbegrenzt große Zahlen zuläßt, aber Time4J eine
* Zeitspanne nur im long-Bereich mit maximal Nanosekunden-Genauigkeit
* definiert). </p>
*
* <p>Beispiele für unterstützte Formate: </p>
*
* <pre>
* date := -P7Y4M3D (negativ: 7 Jahre, 4 Monate, 3 Tage)
* time := PT3H2M1,4S (positiv: 3 Stunden, 2 Minuten, 1400 Millisekunden)
* date-time := P1Y1M5DT15H59M10.400S (Punkt als Dezimaltrennzeichen)
* </pre>
*
* @param duration duration in ISO-8601-format
* @return parsed duration in all possible units of date and time
* @throws ParseException if parsing fails
* @see #parseCalendarPeriod(String)
* @see #parseClockPeriod(String)
* @see #toString()
* @see #toString(boolean)
*/
public static PlainDuration<IsoUnit> parse(String duration)
throws ParseException {
return parse(duration, IsoUnit.class);
}
/**
* <p>Parst eine kanonische ISO-konforme Darstellung nur mit
* Datumskomponenten zu einer Zeitspanne. </p>
*
* @param duration duration in ISO-8601-format
* @return parsed calendrical duration
* @throws ParseException if parsing fails
* @see #parse(String)
* @see #parseClockPeriod(String)
*/
public static
PlainDuration<CalendarUnit> parseCalendarPeriod(String duration)
throws ParseException {
return parse(duration, CalendarUnit.class);
}
/**
* <p>Parst eine kanonische ISO-konforme Darstellung nur mit
* Uhrzeitkomponenten zu einer Zeitspanne. </p>
*
* @param duration duration in ISO-8601-format
* @return parsed time-only duration
* @throws ParseException if parsing fails
* @see #parse(String)
* @see #parseCalendarPeriod(String)
*/
public static
PlainDuration<ClockUnit> parseClockPeriod(String duration)
throws ParseException {
return parse(duration, ClockUnit.class);
}
private int count() {
return this.getTotalLength().size();
}
// wildcard capture
private static <U> boolean isEmpty(TimeSpan<U> timespan) {
List<Item<U>> items = timespan.getTotalLength();
for (int i = 0, n = items.size(); i < n; i++) {
if (items.get(i).getAmount() > 0) {
return false;
}
}
return true;
}
private static <U extends IsoUnit> PlainDuration<U> add(
PlainDuration<U> duration,
TimeSpan<? extends U> timespan,
boolean inverse
) {
if (duration.isEmpty()) {
if (isEmpty(timespan)) {
return duration;
} else if (timespan instanceof PlainDuration) {
PlainDuration<U> result = cast(timespan);
return (inverse ? result.negate() : result);
}
}
boolean calendrical = duration.calendrical;
Map<U, Long> map = new HashMap<U, Long>();
for (int i = 0, n = duration.count(); i < n; i++) {
Item<U> item = duration.getTotalLength().get(i);
map.put(
item.getUnit(),
Long.valueOf(
MathUtils.safeMultiply(
item.getAmount(),
(duration.isNegative() ? -1 : 1)
)
)
);
}
boolean tsign = timespan.isNegative();
if (inverse) {
tsign = !tsign;
}
for (int i = 0, n = timespan.getTotalLength().size(); i < n; i++) {
TimeSpan.Item<? extends U> e = timespan.getTotalLength().get(i);
U unit = e.getUnit();
long amount = e.getAmount();
if (calendrical && !unit.isCalendrical()) {
calendrical = false;
}
// Millis, Micros und Weeks ersetzen
Item<U> item =
replaceItem(duration.getTotalLength(), amount, unit);
if (item != null) {
amount = item.getAmount();
unit = item.getUnit();
}
boolean overwrite = false;
if (unit.isCalendrical() && (unit != CalendarUnit.WEEKS)) {
overwrite = replaceWeeksForDays(map, unit);
}
if (!overwrite) {
overwrite = map.containsKey(unit);
}
// Items aktualisieren
if (overwrite) {
map.put(
unit,
Long.valueOf(
MathUtils.safeAdd(
map.get(unit).longValue(),
MathUtils.safeMultiply(amount, (tsign ? -1 : 1))
)
)
);
} else {
map.put(
unit,
MathUtils.safeMultiply(amount, (tsign ? -1 : 1))
);
}
}
Boolean neg = null;
if (duration.isNegative() == tsign) {
neg = Boolean.valueOf(duration.isNegative());
} else {
for (Map.Entry<U, Long> entry : map.entrySet()) {
boolean nsign = (entry.getValue().longValue() < 0);
if (neg == null) {
neg = Boolean.valueOf(nsign);
} else if (neg.booleanValue() != nsign) {
throw new IllegalStateException(
"Mixed signs in result time span not allowed: "
+ duration
+ " UNION "
+ (inverse ? "-" : "") + timespan);
}
}
}
if (neg.booleanValue()) {
for (Map.Entry<U, Long> entry : map.entrySet()) {
long value = entry.getValue().longValue();
map.put(
entry.getKey(),
Long.valueOf(
(value < 0)
? MathUtils.safeNegate(value)
: value)
);
}
}
return PlainDuration.create(map, neg.booleanValue(), calendrical);
}
private static PlainDuration<CalendarUnit> ofCalendarUnits(
long years,
long months,
long days,
boolean negative
) {
List<Item<CalendarUnit>> items = new ArrayList<Item<CalendarUnit>>(3);
if (years != 0) {
items.add(new Item<CalendarUnit>(years, CalendarUnit.YEARS));
}
if (months != 0) {
items.add(new Item<CalendarUnit>(months, CalendarUnit.MONTHS));
}
if (days != 0) {
items.add(new Item<CalendarUnit>(days, CalendarUnit.DAYS));
}
return new PlainDuration<CalendarUnit>(items, negative, true);
}
private static PlainDuration<ClockUnit> ofClockUnits(
long hours,
long minutes,
long seconds,
long nanos,
boolean negative
) {
List<Item<ClockUnit>> items = new ArrayList<Item<ClockUnit>>(4);
if (hours != 0) {
items.add(new Item<ClockUnit>(hours, ClockUnit.HOURS));
}
if (minutes != 0) {
items.add(new Item<ClockUnit>(minutes, ClockUnit.MINUTES));
}
if (seconds != 0) {
items.add(new Item<ClockUnit>(seconds, ClockUnit.SECONDS));
}
if (nanos != 0) {
items.add(new Item<ClockUnit>(nanos, ClockUnit.NANOS));
}
return new PlainDuration<ClockUnit>(items, negative, false);
}
private static <U extends IsoUnit> PlainDuration<U> create(
Map<U, Long> map,
boolean negative,
boolean calendrical
) {
if (map.isEmpty()) {
return new PlainDuration<U>(calendrical);
}
List<Item<U>> temp = new ArrayList<Item<U>>(map.size());
long weeks = 0;
long days = 0;
long nanos = 0;
U weekUnit = null;
U dayUnit = null;
for (Map.Entry<U, Long> entry : map.entrySet()) {
long amount = entry.getValue().longValue();
U key = entry.getKey();
if (amount == 0) {
continue;
} else if (key == CalendarUnit.WEEKS) {
weeks = amount;
weekUnit = key;
} else if (key == CalendarUnit.DAYS) {
days = amount;
dayUnit = key;
} else if (key == ClockUnit.MILLIS) {
nanos =
MathUtils.safeAdd(
nanos,
MathUtils.safeMultiply(amount, MIO));
} else if (key == ClockUnit.MICROS) {
nanos =
MathUtils.safeAdd(
nanos,
MathUtils.safeMultiply(amount, 1000));
} else if (key == ClockUnit.NANOS) {
nanos = MathUtils.safeAdd(nanos, amount);
} else {
temp.add(new Item<U>(amount, key));
}
}
if (
(days != 0)
&& (weeks != 0)
) {
days =
MathUtils.safeAdd(
days,
MathUtils.safeMultiply(weeks, 7));
weeks = 0;
}
if (weeks != 0) {
temp.add(new Item<U>(weeks, weekUnit));
}
if (days != 0) {
temp.add(new Item<U>(days, dayUnit));
}
if (nanos != 0) {
U key = cast(ClockUnit.NANOS);
temp.add(new Item<U>(nanos, key));
}
return new PlainDuration<U>(temp, negative, calendrical);
}
// binäre Suche
private int getIndex(ChronoUnit unit) {
return getIndex(unit, this.getTotalLength());
}
// binäre Suche
private static <U extends ChronoUnit> int getIndex(
ChronoUnit unit,
List<Item<U>> list
) {
int low = 0;
int high = list.size() - 1;
while (low <= high) {
int mid = (low + high) >>> 1;
ChronoUnit midUnit = list.get(mid).getUnit();
int cmp = compare(midUnit, unit);
if (cmp < 0) {
low = mid + 1;
} else if (cmp > 0) {
high = mid - 1;
} else {
return mid; // gefunden
}
}
return -1;
}
private static int compare(
ChronoUnit u1,
ChronoUnit u2
) {
return Double.compare(u2.getLength(), u1.getLength());
}
private static <U extends IsoUnit> int replaceWeeksForDays(
List<Item<U>> temp,
U unit
) {
int weekIndex = getIndex(CalendarUnit.WEEKS, temp);
if (weekIndex >= 0) {
temp.set(
weekIndex,
new Item<U>(
MathUtils.safeMultiply(
temp.get(weekIndex).getAmount(), 7L),
PlainDuration.<U>cast(CalendarUnit.DAYS)
)
);
if (unit.equals(CalendarUnit.DAYS)) {
return weekIndex; // Summenbildung: oldDays + amount
}
}
return -1;
}
private static <U extends IsoUnit> boolean replaceWeeksForDays(
Map<U, Long> temp,
U unit
) {
Long amount = temp.get(CalendarUnit.WEEKS);
if (amount != null) {
temp.remove(CalendarUnit.WEEKS);
temp.put(
PlainDuration.<U>cast(CalendarUnit.DAYS),
Long.valueOf(MathUtils.safeMultiply(amount.longValue(), 7L))
);
if (unit.equals(CalendarUnit.DAYS)) {
return true;
}
}
return false;
}
// optional
private static <U extends IsoUnit> Item<U> replaceItem(
List<Item<U>> items,
long amount,
U unit
) {
if (unit.equals(ClockUnit.MILLIS)) {
amount = MathUtils.safeMultiply(amount, MIO);
unit = cast(ClockUnit.NANOS);
} else if (unit.equals(ClockUnit.MICROS)) {
amount = MathUtils.safeMultiply(amount, 1000L);
unit = cast(ClockUnit.NANOS);
} else if (unit.equals(CalendarUnit.WEEKS)) {
for (int i = 0, n = items.size(); i < n; i++) {
U test = items.get(i).getUnit();
if (test.isCalendrical() && (test != CalendarUnit.WEEKS)) {
amount = MathUtils.safeMultiply(amount, 7L);
unit = cast(CalendarUnit.DAYS);
break;
}
}
} else {
return null;
}
return new Item<U>(amount, unit);
}
private void checkUnit(ChronoUnit unit) {
if (unit == null) {
throw new NullPointerException("Missing chronological unit.");
}
}
private void throwMixedSignsException(
long amount,
ChronoUnit unit
) {
StringBuilder sb = new StringBuilder(128);
sb.append("Mixed signs in result time span not allowed: ");
sb.append(this);
sb.append(" + (");
sb.append(amount);
sb.append(' ');
sb.append(unit);
sb.append(')');
throw new IllegalStateException(sb.toString());
}
private static <U extends IsoUnit>
PlainDuration<U> convert(TimeSpan<U> timespan) {
if (timespan instanceof PlainDuration) {
return cast(timespan);
} else {
boolean calendrical = true;
for (Item<U> item : timespan.getTotalLength()) {
if (!item.getUnit().isCalendrical()) {
calendrical = false;
}
}
PlainDuration<U> zero = new PlainDuration<U>(calendrical);
return zero.plus(timespan);
}
}
private boolean isFractionUnit(IsoUnit unit) {
char symbol = unit.getSymbol();
return ((symbol >= '1') && (symbol <= '9'));
}
@SuppressWarnings("unchecked")
private static <T> T cast(Object obj) {
return (T) obj;
}
//~ Parse-Routinen ----------------------------------------------------
private static <U extends IsoUnit> PlainDuration<U> parse(
String duration,
Class<U> type
) throws ParseException {
int index = 0;
boolean negative = false;
if (duration.length() == 0) {
throw new ParseException("Empty duration string.", index);
} else if (duration.charAt(0) == '-') {
negative = true;
index = 1;
}
try {
if (duration.charAt(index) != 'P') {
throw new ParseException(
"Format symbol \'P\' expected: " + duration, index);
} else {
index++;
}
List<Item<U>> items = new ArrayList<Item<U>>();
int sep = duration.indexOf('T', index);
boolean calendrical = (sep == -1);
if (calendrical) {
if (type == ClockUnit.class) {
throw new ParseException(
"Format symbol \'T\' expected: " + duration, index);
} else {
parseItems(duration, index, duration.length(), true, items);
}
} else {
if (sep > index) {
if (type == ClockUnit.class) {
throw new ParseException(
"Unexpected date component found: " + duration,
index);
} else {
parseItems(duration, index, sep, true, items);
}
}
if (type == CalendarUnit.class) {
throw new ParseException(
"Unexpected time component found: " + duration, sep);
} else {
parseItems(
duration,
sep + 1,
duration.length(),
false,
items);
}
}
return new PlainDuration<U>(items, negative, calendrical);
} catch (IndexOutOfBoundsException ex) {
ParseException pe =
new ParseException(
"Unexpected termination of duration: " + duration, index);
pe.initCause(ex);
throw pe;
}
}
private static <U extends ChronoUnit> void parseItems(
String duration,
int from,
int to,
boolean date,
List<Item<U>> items
) throws ParseException {
if (from == to) {
throw new ParseException(duration, from);
}
StringBuilder num = null;
boolean endOfItem = false;
ChronoUnit last = null;
int index = from;
boolean decimal = false;
int dateElements = 0;
boolean weekSymbol = false;
for (int i = from; i < to; i++) {
char c = duration.charAt(i);
if ((c >= '0') && (c <= '9')) {
if (num == null) {
num = new StringBuilder();
endOfItem = false;
index = i;
}
num.append(c);
} else if ((c == ',') || (c == '.')) {
if ((num == null) || date) {
throw new ParseException(
"Decimal separator misplaced: " + duration, i);
} else {
endOfItem = true;
long amount = parseAmount(duration, num.toString(), index);
ChronoUnit unit = ClockUnit.SECONDS;
last =
addParsedItem(unit, last, amount, duration, i, items);
num = null;
decimal = true;
}
} else if (endOfItem) {
throw new ParseException(
"Unexpected char \'" + c + "\' found: " + duration, i);
} else if (decimal) {
if (c != 'S') {
throw new ParseException(
"Second symbol expected: " + duration, i);
} else if (num == null) {
throw new ParseException(
"Decimal separator misplaced: " + duration, i - 1);
} else if (num.length() > 9) {
num.delete(9, num.length());
}
for (int j = num.length(); j < 9; j++) {
num.append('0');
}
endOfItem = true;
long amount = parseAmount(duration, num.toString(), index);
ChronoUnit unit = ClockUnit.NANOS;
num = null;
last = addParsedItem(unit, last, amount, duration, i, items);
} else {
endOfItem = true;
long amount =
parseAmount(
duration,
(num == null) ? String.valueOf(c) : num.toString(),
index);
num = null;
ChronoUnit unit = (
date
? parseDateSymbol(c, dateElements, weekSymbol, duration, i)
: parseTimeSymbol(c, duration, i));
if (date) {
if (unit.equals(CalendarUnit.WEEKS)) {
weekSymbol = true;
}
dateElements++;
}
last = addParsedItem(unit, last, amount, duration, i, items);
}
}
if (!endOfItem) {
throw new ParseException("Unit symbol expected: " + duration, to);
}
}
private static CalendarUnit parseDateSymbol(
char c,
int dateElements,
boolean weekSymbol,
String duration,
int index
) throws ParseException {
switch (c) {
case 'I':
return CalendarUnit.MILLENNIA;
case 'C':
return CalendarUnit.CENTURIES;
case 'E':
return CalendarUnit.DECADES;
case 'Y':
return CalendarUnit.YEARS;
case 'Q':
return CalendarUnit.QUARTERS;
case 'M':
return CalendarUnit.MONTHS;
case 'W':
if (dateElements > 0) {
throw new ParseException(
"Mixed date symbols with weeks not supported: "
+ duration,
index);
} else {
return CalendarUnit.WEEKS;
}
case 'D':
if (weekSymbol) {
throw new ParseException(
"Mixed date symbols with weeks not supported: "
+ duration,
index);
} else {
return CalendarUnit.DAYS;
}
default:
throw new ParseException(
"Symbol \'" + c + "\' not supported: " + duration, index);
}
}
private static ClockUnit parseTimeSymbol(
char c,
String duration,
int index
) throws ParseException {
switch (c) {
case 'H':
return ClockUnit.HOURS;
case 'M':
return ClockUnit.MINUTES;
case 'S':
return ClockUnit.SECONDS;
default:
throw new ParseException(
"Symbol \'" + c + "\' not supported: " + duration, index);
}
}
private static <U extends ChronoUnit> ChronoUnit addParsedItem(
ChronoUnit unit,
ChronoUnit last, // optional
long amount,
String duration,
int index,
List<Item<U>> items
) throws ParseException {
if (
(last == null)
|| (Double.compare(unit.getLength(), last.getLength()) < 0)
) {
if (amount != 0) {
U reified = cast(unit);
items.add(new Item<U>(amount, reified));
}
return unit;
} else if (unit.getLength() == last.getLength()) {
throw new ParseException(
"Duplicate unit items: " + duration, index);
} else {
throw new ParseException(
"Wrong order of unit items: " + duration, index);
}
}
private static long parseAmount(
String duration,
String number,
int index
) throws ParseException {
try {
return Long.parseLong(number);
} catch (NumberFormatException nfe) {
ParseException pe = new ParseException(duration, index);
pe.initCause(nfe);
throw pe;
}
}
//~ Innere Klassen ----------------------------------------------------
/**
* <p>Hilfsobjekt zum Bauen einer ISO-konformen Zeitspanne bestehend aus
* Jahren, Monaten, Tagen und allen Uhrzeiteinheiten. </p>
*
* <p>Lediglich die Wocheneinheit ist ausgenommen, da eine wochenbasierte
* Zeitspanne nach dem ISO-Standard für sich alleine stehen sollte.
* Eine wochenbasierte Zeitspanne kann auf einfache Weise mit dem Ausdruck
* {@code PlainDuration.of(amount, CalendarUnit.WEEKS)} erzeugt werden. </p>
*
* <p>Eine Instanz wird mittels {@link PlainDuration#ofPositive()} oder
* {@link PlainDuration#ofNegative()} erzeugt. Diese Instanz ist nur zur
* lokalen Verwendung in einem Thread gedacht, da keine Thread-Sicherheit
* gegeben ist. </p>
*/
public static class Builder {
//~ Instanzvariablen ----------------------------------------------
private final List<Item<IsoUnit>> items;
private final boolean negative;
private Boolean calendrical = null;
private boolean millisSet = false;
private boolean microsSet = false;
private boolean nanosSet = false;
//~ Konstruktoren -------------------------------------------------
/**
* <p>Konstruiert ein Hilfsobjekt zum Bauen einer Zeitspanne. </p>
*
* @param negative Is a negative duration asked for?
*/
Builder(boolean negative) {
super();
this.items = new ArrayList<Item<IsoUnit>>(10);
this.negative = negative;
}
//~ Methoden ------------------------------------------------------
/**
* <p>Erzeugt eine Länge in Jahren. </p>
*
* @param num count of years {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder years(int num) {
this.set(num, CalendarUnit.YEARS);
if (this.calendrical == null) {
this.calendrical = Boolean.TRUE;
}
return this;
}
/**
* <p>Erzeugt eine Länge in Monaten. </p>
*
* @param num count of months {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder months(int num) {
this.set(num, CalendarUnit.MONTHS);
if (this.calendrical == null) {
this.calendrical = Boolean.TRUE;
}
return this;
}
/**
* <p>Erzeugt eine Länge in Tagen. </p>
*
* @param num count of days {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder days(int num) {
this.set(num, CalendarUnit.DAYS);
if (this.calendrical == null) {
this.calendrical = Boolean.TRUE;
}
return this;
}
/**
* <p>Erzeugt eine Länge in Stunden. </p>
*
* @param num count of hours {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder hours(int num) {
this.set(num, ClockUnit.HOURS);
this.calendrical = Boolean.FALSE;
return this;
}
/**
* <p>Erzeugt eine Länge in Minuten. </p>
*
* @param num count of minutes {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder minutes(int num) {
this.set(num, ClockUnit.MINUTES);
this.calendrical = Boolean.FALSE;
return this;
}
/**
* <p>Erzeugt eine Länge in Sekunden. </p>
*
* @param num count of seconds {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder seconds(int num) {
this.set(num, ClockUnit.SECONDS);
this.calendrical = Boolean.FALSE;
return this;
}
/**
* <p>Erzeugt eine Länge in Millisekunden. </p>
*
* <p>Es wird eine Normalisierung durchgeführt, indem das Argument
* mit dem Faktor {@code 1} Million multipliziert und in Nanosekunden
* gespeichert wird. </p>
*
* @param num count of milliseconds {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder millis(int num) {
this.millisCalled();
this.update(num, MIO);
return this;
}
/**
* <p>Erzeugt eine Länge in Mikrosekunden. </p>
*
* <p>Es wird eine Normalisierung durchgeführt, indem das Argument
* mit dem Faktor {@code 1000} multipliziert und in Nanosekunden
* gespeichert wird. </p>
*
* @param num count of microseconds {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder micros(int num) {
this.microsCalled();
this.update(num, 1000L);
return this;
}
/**
* <p>Erzeugt eine Länge in Nanosekunden. </p>
*
* @param num count of nanoseconds {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder nanos(int num) {
this.nanosCalled();
this.update(num, 1L);
return this;
}
/**
* <p>Erzeugt eine neue ISO-konforme Zeitspanne. </p>
*
* @return new {@code PlainDuration}
*/
public PlainDuration<IsoUnit> build() {
if (this.calendrical == null) {
throw new IllegalStateException("Not set any amount and unit.");
}
return new PlainDuration<IsoUnit>(
this.items,
this.negative,
this.calendrical.booleanValue()
);
}
private Builder set(
long amount,
IsoUnit unit
) {
for (int i = 0, n = this.items.size(); i < n; i++) {
if (this.items.get(i).getUnit() == unit) {
throw new IllegalStateException(
"Already registered: " + unit);
}
}
if (amount != 0) {
Item<IsoUnit> item = new Item<IsoUnit>(amount, unit);
this.items.add(item);
}
return this;
}
private void update(
long amount,
long factor
) {
this.calendrical = Boolean.FALSE;
if (amount >= 0) {
for (int i = this.items.size() - 1; i >= 0; i--) {
Item<IsoUnit> item = this.items.get(i);
if (item.getUnit().equals(ClockUnit.NANOS)) {
this.items.set(
i,
new Item<IsoUnit>(
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, factor),
item.getAmount()
),
ClockUnit.NANOS
)
);
return;
}
}
if (amount != 0) {
this.items.add(
new Item<IsoUnit>(
MathUtils.safeMultiply(amount, factor),
ClockUnit.NANOS
)
);
}
} else {
throw new IllegalArgumentException(
"Illegal negative amount: " + amount);
}
}
private void millisCalled() {
if (this.millisSet) {
throw new IllegalStateException(
"Called twice for: " + ClockUnit.MILLIS.name());
}
this.millisSet = true;
}
private void microsCalled() {
if (this.microsSet) {
throw new IllegalStateException(
"Called twice for: " + ClockUnit.MICROS.name());
}
this.microsSet = true;
}
private void nanosCalled() {
if (this.nanosSet) {
throw new IllegalStateException(
"Called twice for: " + ClockUnit.NANOS.name());
}
this.nanosSet = true;
}
}
private static class TimestampNormalizer
implements Normalizer<IsoUnit> {
//~ Methoden ------------------------------------------------------
@Override
public PlainDuration<IsoUnit>
normalize(TimeSpan<? extends IsoUnit> timespan) {
int count = timespan.getTotalLength().size();
List<Item<IsoUnit>> items =
new ArrayList<Item<IsoUnit>>(count);
long years = 0, months = 0, weeks = 0, days = 0;
long hours = 0, minutes = 0, seconds = 0, nanos = 0;
for (int i = 0; i < count; i++) {
Item<? extends IsoUnit> item =
timespan.getTotalLength().get(i);
long amount = item.getAmount();
IsoUnit unit = item.getUnit();
if (unit instanceof CalendarUnit) {
switch ((CalendarUnit.class.cast(unit))) {
case MILLENNIA:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 1000),
years
);
break;
case CENTURIES:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 100),
years
);
break;
case DECADES:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 10),
years
);
break;
case YEARS:
years = MathUtils.safeAdd(amount, years);
break;
case QUARTERS:
months =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 3),
months
);
break;
case MONTHS:
months = MathUtils.safeAdd(amount, months);
break;
case WEEKS:
weeks = amount;
break;
case DAYS:
days = amount;
break;
default:
throw new UnsupportedOperationException(
unit.toString());
}
} else if (unit instanceof ClockUnit) {
switch ((ClockUnit.class.cast(unit))) {
case HOURS:
hours = amount;
break;
case MINUTES:
minutes = amount;
break;
case SECONDS:
seconds = amount;
break;
case MILLIS:
nanos =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, MIO),
nanos
);
break;
case MICROS:
nanos =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 1000L),
nanos
);
break;
case NANOS:
nanos = MathUtils.safeAdd(amount, nanos);
break;
default:
throw new UnsupportedOperationException(
unit.toString());
}
} else {
items.add(new Item<IsoUnit>(amount, unit));
}
}
long f = 0, s = 0, n = 0, h = 0;
if ((hours | minutes | seconds | nanos) != 0) {
f = nanos % MRD;
seconds = MathUtils.safeAdd(seconds, nanos / MRD);
s = seconds % 60;
minutes = MathUtils.safeAdd(minutes, seconds / 60);
n = minutes % 60;
hours = MathUtils.safeAdd(hours, minutes / 60);
h = hours % 24;
days = MathUtils.safeAdd(days, hours / 24);
}
if ((years | months | days) != 0) {
long y = MathUtils.safeAdd(years, months / 12);
long m = months % 12;
long d =
MathUtils.safeAdd(
MathUtils.safeMultiply(weeks, 7),
days
);
if (y != 0) {
items.add(new Item<IsoUnit>(y, CalendarUnit.YEARS));
}
if (m != 0) {
items.add(new Item<IsoUnit>(m, CalendarUnit.MONTHS));
}
if (d != 0) {
items.add(new Item<IsoUnit>(d, CalendarUnit.DAYS));
}
} else if (weeks != 0) {
items.add(new Item<IsoUnit>(weeks, CalendarUnit.WEEKS));
}
if (h != 0) {
items.add(new Item<IsoUnit>(h, ClockUnit.HOURS));
}
if (n != 0) {
items.add(new Item<IsoUnit>(n, ClockUnit.MINUTES));
}
if (s != 0) {
items.add(new Item<IsoUnit>(s, ClockUnit.SECONDS));
}
if (f != 0) {
items.add(new Item<IsoUnit>(f, ClockUnit.NANOS));
}
return new PlainDuration<IsoUnit>(
items,
timespan.isNegative(),
false
);
}
}
private static class DateNormalizer
implements Normalizer<CalendarUnit> {
//~ Methoden ------------------------------------------------------
@Override
public PlainDuration<CalendarUnit>
normalize(TimeSpan<? extends CalendarUnit> timespan) {
int count = timespan.getTotalLength().size();
long years = 0, months = 0, weeks = 0, days = 0;
for (int i = 0; i < count; i++) {
Item<? extends CalendarUnit> item =
timespan.getTotalLength().get(i);
long amount = item.getAmount();
CalendarUnit unit = item.getUnit();
switch (unit) {
case MILLENNIA:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 1000),
years
);
break;
case CENTURIES:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 100),
years
);
break;
case DECADES:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 10),
years
);
break;
case YEARS:
years = MathUtils.safeAdd(amount, years);
break;
case QUARTERS:
months =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 3),
months
);
break;
case MONTHS:
months = MathUtils.safeAdd(amount, months);
break;
case WEEKS:
weeks = amount;
break;
case DAYS:
days = amount;
break;
default:
throw new UnsupportedOperationException(
unit.toString());
}
}
boolean negative = timespan.isNegative();
if ((years | months | days) != 0) {
long y = MathUtils.safeAdd(years, months / 12);
long m = months % 12;
long d =
MathUtils.safeAdd(
MathUtils.safeMultiply(weeks, 7),
days
);
return PlainDuration.ofCalendarUnits(y, m, d, negative);
} else if (weeks != 0) {
if (negative) {
weeks = MathUtils.safeNegate(weeks);
}
return PlainDuration.of(weeks, CalendarUnit.WEEKS);
}
return PlainDuration.of(0, CalendarUnit.DAYS);
}
}
private static class TimeNormalizer
implements Normalizer<ClockUnit> {
//~ Methoden ------------------------------------------------------
@Override
public PlainDuration<ClockUnit>
normalize(TimeSpan<? extends ClockUnit> timespan) {
int count = timespan.getTotalLength().size();
long hours = 0, minutes = 0, seconds = 0, nanos = 0;
for (int i = 0; i < count; i++) {
Item<? extends ClockUnit> item =
timespan.getTotalLength().get(i);
long amount = item.getAmount();
ClockUnit unit = item.getUnit();
switch (unit) {
case HOURS:
hours = amount;
break;
case MINUTES:
minutes = amount;
break;
case SECONDS:
seconds = amount;
break;
case MILLIS:
nanos =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, MIO),
nanos
);
break;
case MICROS:
nanos =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 1000L),
nanos
);
break;
case NANOS:
nanos = MathUtils.safeAdd(amount, nanos);
break;
default:
throw new UnsupportedOperationException(unit.name());
}
}
long f = 0, s = 0, n = 0, h = 0;
if ((hours | minutes | seconds | nanos) != 0) {
f = nanos % MRD;
seconds = MathUtils.safeAdd(seconds, nanos / MRD);
s = seconds % 60;
minutes = MathUtils.safeAdd(minutes, seconds / 60);
n = minutes % 60;
hours = MathUtils.safeAdd(hours, minutes / 60);
h = hours;
}
return PlainDuration.ofClockUnits(
h,
n,
s,
f,
timespan.isNegative()
);
}
}
private static class Metric<U extends IsoUnit>
implements TimeMetric<U, PlainDuration<U>> {
//~ Instanzvariablen ----------------------------------------------
private final List<U> sortedUnits;
private final boolean calendrical;
private boolean normalizing;
//~ Konstruktoren -------------------------------------------------
private Metric(
boolean normalizing,
List<U> units
) {
super();
boolean c = true;
for (U unit : units) {
if (!unit.isCalendrical()) {
c = false;
break;
}
}
this.calendrical = c;
this.sortedUnits = Collections.unmodifiableList(units);
this.normalizing = normalizing;
}
//~ Methoden ------------------------------------------------------
@Override
public <T extends TimePoint<? super U, T>> PlainDuration<U> between(
T start,
T end
) {
if (end.equals(start)) {
return new PlainDuration<U>(this.calendrical);
}
T t1 = start;
T t2 = end;
boolean negative = false;
// Lage von Start und Ende bestimmen
if (t1.compareTo(t2) > 0) {
T temp = t1;
t1 = end;
t2 = temp;
negative = true;
}
List<TimeSpan.Item<U>> resultList =
new ArrayList<TimeSpan.Item<U>>(10);
TimeAxis<? super U, T> engine = start.getChronology();
U unit = null;
long amount = 0;
int index = 0;
int endIndex = this.sortedUnits.size();
while (index < endIndex) {
// Nächste Subtraktion vorbereiten
if (amount != 0) {
t1 = t1.plus(amount, unit);
}
// Aktuelle Zeiteinheit bestimmen
unit = resolve(this.sortedUnits.get(index));
if (
(this.getLength(engine, unit) < 1.0)
&& (index < endIndex - 1)
) {
amount = 0; // Millis oder Mikros vor Nanos nicht berechnen
} else {
// konvertierbare Einheiten zusammenfassen
int k = index + 1;
long factor = 1;
while (k < endIndex) {
U nextUnit = this.sortedUnits.get(k);
factor *= this.getFactor(engine, unit, nextUnit);
if (
!Double.isNaN(factor)
&& (factor < MIO)
&& engine.isConvertible(unit, nextUnit)
) {
unit = nextUnit;
} else {
break;
}
k++;
}
index = k - 1;
// Differenz in einer Einheit berechnen
amount = t1.until(t2, unit);
if (amount > 0) {
resultList.add(new TimeSpan.Item<U>(amount, unit));
} else if (amount < 0) {
throw new IllegalStateException(
"Implementation error: "
+ "Cannot compute timespan "
+ "due to illegal negative timespan amounts.");
}
}
index++;
}
if (this.normalizing) {
this.normalize(engine, this.sortedUnits, resultList);
}
return new PlainDuration<U>(resultList, negative, this.calendrical);
}
@SuppressWarnings("unchecked")
private static <U> U resolve(U unit) {
if (unit instanceof OverflowUnit) {
return (U) ((OverflowUnit) unit).getCalendarUnit();
}
return unit;
}
private <T extends TimePoint<? super U, T>> void normalize(
TimeAxis<? super U, T> engine,
List<U> sortedUnits,
List<TimeSpan.Item<U>> resultList
) {
for (int i = sortedUnits.size() - 1; i >= 0; i--) {
if (i > 0) {
U currentUnit = sortedUnits.get(i);
U nextUnit = sortedUnits.get(i - 1);
long factor = this.getFactor(engine, nextUnit, currentUnit);
if (
!Double.isNaN(factor)
&& (factor < MIO)
&& engine.isConvertible(nextUnit, currentUnit)
) {
TimeSpan.Item<U> currentItem =
getItem(resultList, currentUnit);
if (currentItem != null) {
long currentValue = currentItem.getAmount();
long overflow = currentValue / factor;
if (overflow > 0) {
long a = currentValue % factor;
if (a == 0) {
removeItem(resultList, currentUnit);
} else {
putItem(resultList, engine, a, currentUnit);
}
TimeSpan.Item<U> nextItem =
getItem(resultList, nextUnit);
if (nextItem == null) {
putItem(
resultList, engine, overflow, nextUnit);
} else {
putItem(
resultList,
engine,
MathUtils.safeAdd(
nextItem.getAmount(),
overflow),
nextUnit
);
}
}
}
}
}
}
}
private static <U> TimeSpan.Item<U> getItem(
List<TimeSpan.Item<U>> items,
U unit
) {
for (int i = 0, n = items.size(); i < n; i++) {
TimeSpan.Item<U> item = items.get(i);
if (item.getUnit().equals(unit)) {
return item;
}
}
return null;
}
private static <U> void putItem(
List<TimeSpan.Item<U>> items,
Comparator<? super U> comparator,
long amount,
U unit
) {
TimeSpan.Item<U> item = new TimeSpan.Item<U>(amount, unit);
int insert = 0;
for (int i = 0, n = items.size(); i < n; i++) {
U u = items.get(i).getUnit();
if (u.equals(unit)) {
items.set(i, item);
return;
} else if (
(insert == i)
&& (comparator.compare(u, unit) < 0)
) {
insert++;
}
}
items.add(insert, item);
}
private static <U> void removeItem(
List<TimeSpan.Item<U>> items,
U unit
) {
for (int i = 0, n = items.size(); i < n; i++) {
if (items.get(i).getUnit().equals(unit)) {
items.remove(i);
return;
}
}
}
private <T extends TimePoint<? super U, T>> long getFactor(
TimeAxis<? super U, T> engine,
U unit1,
U unit2
) {
double d1 = this.getLength(engine, unit1);
double d2 = this.getLength(engine, unit2);
return Math.round(d1 / d2);
}
private <T extends TimePoint<? super U, T>> double getLength(
TimeAxis<? super U, T> engine,
U unit
) {
return engine.getLength(unit);
}
}
private static class LengthComparator
<U extends IsoUnit, T extends TimePoint<? super U, T>>
implements Comparator<PlainDuration<U>> {
//~ Instanzvariablen ----------------------------------------------
private final T base;
//~ Konstruktoren -------------------------------------------------
private LengthComparator(T base) {
super();
if (base == null) {
throw new NullPointerException("Missing base time point.");
}
this.base = base;
}
//~ Methoden ------------------------------------------------------
@Override
public int compare(
PlainDuration<U> d1,
PlainDuration<U> d2
) {
boolean sign1 = d1.isNegative();
boolean sign2 = d2.isNegative();
if (sign1 && !sign2) {
return -1;
} else if (!sign1 && sign2) {
return 1;
} else if (d1.isEmpty() && d2.isEmpty()) {
return 0;
}
return this.base.plus(d1).compareTo(this.base.plus(d2));
}
}
}
| src/main/java/net/time4j/PlainDuration.java | /*
* -----------------------------------------------------------------------
* Copyright © 2013 Meno Hochschild, <http://www.menodata.de/>
* -----------------------------------------------------------------------
* This file (PlainDuration.java) is part of project Time4J.
*
* Time4J is free software: You can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Time4J is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Time4J. If not, see <http://www.gnu.org/licenses/>.
* -----------------------------------------------------------------------
*/
package net.time4j;
import net.time4j.base.MathUtils;
import net.time4j.engine.AbstractDuration;
import net.time4j.engine.ChronoException;
import net.time4j.engine.ChronoUnit;
import net.time4j.engine.Normalizer;
import net.time4j.engine.TimeAxis;
import net.time4j.engine.TimeMetric;
import net.time4j.engine.TimePoint;
import net.time4j.engine.TimeSpan;
import java.io.Serializable;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* <p>ISO-konforme Zeitspanne zwischen zwei Zeitpunkten. </p>
*
* <p>Instanzen können über folgende Fabrikmethoden erzeugt
* werden: </p>
*
* <ul>
* <li>{@link #of(long, IsoUnit) of(long, U)}</li>
* <li>{@link #ofCalendarUnits(int, int, int)}</li>
* <li>{@link #ofClockUnits(int, int, int)}</li>
* <li>{@link #ofPositive()} (<i>builder</i>-Muster)</li>
* <li>{@link #ofNegative()} (<i>builder</i>-Muster)</li>
* <li>{@link #parse(String)}</li>
* <li>{@link #parseCalendarPeriod(String)}</li>
* <li>{@link #parseClockPeriod(String)}</li>
* </ul>
*
* <p>Alle Instanzen sind <i>immutable</i>, aber geänderte Kopien lassen
* sich über die Methoden {@code plus()}, {@code minus()}, {@code with()},
* {@code union()}, {@code multipliedBy()}, {@code abs()} und {@code negate()}
* erzeugen. Hierbei werden die Zeiteinheiten {@code ClockUnit.MILLIS} und
* {@code ClockUnit.MICROS} intern immer zu Nanosekunden normalisiert. Ansonsten
* muß eine Normalisierung explizit mittels {@code with(Normalizer)}
* angestoßen werden. </p>
*
* <p>Notiz: Die Definition eines optionalen negativen Vorzeichens ist streng
* genommen nicht Bestandteil des ISO-Standards, ist aber Bestandteil der
* XML-Schema-Spezifikation und legt die Lage zweier Zeitpunkte relativ
* zueinander fest. Eine Manipulation des Vorzeichens ist mit der Methode
* {@code negate()} möglich. </p>
*
* <p>Die Zeitarithmetik behandelt die Addition und Subtraktion einer Zeitspanne
* bezogen auf einen Zeitpunkt abhängig vom Vorzeichen der Zeitspanne wie
* im <a href="engine/AbstractDuration.html#algorithm">Standardalgorithmus</a>
* von Time4J beschrieben. </p>
*
* @param <U> generic type of time units
* @author Meno Hochschild
* @concurrency <immutable>
*/
public final class PlainDuration<U extends IsoUnit>
extends AbstractDuration<U>
implements Serializable {
//~ Statische Felder/Initialisierungen --------------------------------
private static final char ISO_DECIMAL_SEPARATOR = (
Boolean.getBoolean("net.time4j.format.iso.decimal.dot")
? '.'
: ',' // Empfehlung des ISO-Standards
);
private static final long MRD = 1000000000L;
private static final long MIO = 1000000L;
private static final Comparator<ChronoUnit> UNIT_COMPARATOR =
new Comparator<ChronoUnit>() {
@Override
public int compare(
ChronoUnit o1,
ChronoUnit o2
) {
return PlainDuration.compare(o1, o2);
}
};
private static final
Comparator<Item<? extends ChronoUnit>> ITEM_COMPARATOR =
new Comparator<Item<? extends ChronoUnit>>() {
@Override
public int compare(
Item<? extends ChronoUnit> o1,
Item<? extends ChronoUnit> o2
) {
return PlainDuration.compare(o1.getUnit(), o2.getUnit());
}
};
/**
* <p>Normalisiert die Zeitspannenelemente einer Zeitspanne auf der Basis
* {@code 1 Jahr = 12 Monate} und {@code 1 Tag = 24 Stunden} und
* {@code 1 Stunde = 60 Minuten} und {@code 1 Minute = 60 Sekunden},
* jedoch ohne die Tage zu Monaten zu konvertieren. </p>
*
* <p>VORSICHT: Zeitzonenbedingte Veränderungen der Tageslänge
* oder Schaltsekunden werden hier ignoriert. Deshalb sollte diese
* Normalisierung möglichst nur auf ISO-Zeitstempel ohne Zeitzonen-
* oder UTC-Unterstützung angewandt werden. Nur Zeiteinheiten der
* Enums {@link CalendarUnit} und {@link ClockUnit} können normalisiert
* werden. </p>
*
* <p>Wochen werden genau dann zu Tagen konvertiert, wenn sie nicht das
* einzige datumsbezogene Zeitspannenelement darstellen. </p>
*
* @see PlainTimestamp
*/
public static Normalizer<IsoUnit> STD_PERIOD = new TimestampNormalizer();
/**
* <p>Normalisiert die Datumselemente einer Zeitspanne auf der Basis
* {@code 1 Jahr = 12 Monate}, jedoch ohne die Tage zu Monaten zu
* konvertieren. </p>
*
* <p>Wochen werden genau dann zu Tagen konvertiert, wenn sie nicht das
* einzige datumsbezogene Zeitspannenelement darstellen. Nur Zeiteinheiten
* des Enums {@link CalendarUnit} werden normalisiert. </p>
*
* @see PlainDate
*/
public static Normalizer<CalendarUnit> STD_CALENDAR_PERIOD =
new DateNormalizer();
/**
* <p>Normalisiert die Uhrzeitelemente einer Zeitspanne auf der Basis
* {@code 1 Tag = 24 Stunden} und {@code 1 Stunde = 60 Minuten} und
* {@code 1 Minute = 60 Sekunden}. </p>
*
* <p>VORSICHT: Zeitzonenbedingte Veränderungen der Tageslänge
* oder UTC-Schaltsekunden werden hier ignoriert. Deshalb sollte diese
* Normalisierung nicht auf Zeitzonen- oder UTC-sensible Zeitpunkttypen
* angewandt werden. Nur Zeiteinheiten des Enums {@link ClockUnit}
* werden normalisiert. </p>
*
* @see PlainTime
*/
public static Normalizer<ClockUnit> STD_CLOCK_PERIOD = new TimeNormalizer();
private static final PlainDuration<IsoUnit> ZERO =
new PlainDuration<IsoUnit>(false);
private static final long serialVersionUID = -6321211763598951499L;
//~ Instanzvariablen --------------------------------------------------
/**
* @serial list of amounts and units
*/
private final List<Item<U>> items;
/**
* @serial marks a negative time span
*/
private final boolean negative;
/**
* @serial marks a calendrical only time span
*/
private final boolean calendrical;
//~ Konstruktoren -----------------------------------------------------
// Standard-Konstruktor
private PlainDuration(
List<Item<U>> items,
boolean negative,
boolean calendrical
) {
super();
boolean empty = items.isEmpty();
if (empty) {
this.items = Collections.emptyList();
} else {
Collections.sort(items, ITEM_COMPARATOR);
this.items = Collections.unmodifiableList(items);
}
this.negative = (empty ? false : negative);
this.calendrical = calendrical;
}
// Kopiekonstruktor (siehe negate())
private PlainDuration(
PlainDuration<U> duration,
boolean inverse
) {
super();
this.items = duration.items;
this.negative = (inverse ? !duration.negative : duration.negative);
this.calendrical = duration.calendrical;
}
// leere Zeitspanne
private PlainDuration(boolean calendrical) {
super();
this.items = Collections.emptyList();
this.negative = false;
this.calendrical = calendrical;
}
//~ Methoden ----------------------------------------------------------
/**
* <p>Erzeugt eine neue Zeitspanne, die auf nur einer Zeiteinheit
* beruht. </p>
*
* <p>Ist der angegebene Betrag negativ, so wird auch die Zeitspanne
* negativ sein. Ist er {@code 0}, wird eine leere Zeitspanne
* generiert. </p>
*
* @param <U> generic unit type
* @param amount amount as count of units
* @param unit single time unit
* @return new duration
*/
public static <U extends IsoUnit> PlainDuration<U> of(
long amount,
U unit
) {
if (amount == 0) {
return new PlainDuration<U>(unit.isCalendrical());
}
List<Item<U>> items = new ArrayList<Item<U>>(1);
items.add(
new Item<U>(
((amount < 0) ? MathUtils.safeNegate(amount) : amount),
unit)
);
return new PlainDuration<U>(items, (amount < 0), unit.isCalendrical());
}
/**
* <p>Konstruiert über den Umweg des <i>builder</i>-Entwurfsmusters
* eine neue ISO-konforme positive Zeitspanne für kombinierte Datums- und
* Uhrzeiteinheiten. </p>
*
* @return help object for building a positive {@code PlainDuration}
*/
public static Builder ofPositive() {
return new Builder(false);
}
/**
* <p>Konstruiert über den Umweg des <i>builder</i>-Entwurfsmusters
* eine neue ISO-konforme negative Zeitspanne für kombinierte Datums- und
* Uhrzeiteinheiten. </p>
*
* @return help object for building a negative {@code PlainDuration}
*/
public static Builder ofNegative() {
return new Builder(true);
}
/**
* <p>Erzeugt eine positive Zeitspanne in Jahren, Monaten und Tagen. </p>
*
* <p>Alle Argumente dürfen nicht negativ sein. Ist ein Argument
* gleich {@code 0}, wird es ignoriert. Wird eine negative Zeitspanne
* gewünscht, kann auf dem Ergebnis einfach {@code negate()}
* aufgerufen werden. </p>
*
* @param years amount in years
* @param months amount in months
* @param days amount in days
* @return new duration
* @throws IllegalArgumentException if any argument is negative
* @see #negate()
*/
public static PlainDuration<CalendarUnit> ofCalendarUnits(
int years,
int months,
int days
) {
return PlainDuration.ofCalendarUnits(years, months, days, false);
}
/**
* <p>Erzeugt eine positive Zeitspanne in Stunden, Minuten und
* Sekunden. </p>
*
* <p>Alle Argumente dürfen nicht negativ sein. Ist ein Argument
* gleich {@code 0}, wird es ignoriert. Wird eine negative Zeitspanne
* gewünscht, kann auf dem Ergebnis einfach {@code negate()}
* aufgerufen werden. </p>
*
* @param hours amount in hours
* @param minutes amount in minutes
* @param seconds amount in seconds
* @return new duration
* @throws IllegalArgumentException if any argument is negative
* @see #negate()
*/
public static PlainDuration<ClockUnit> ofClockUnits(
int hours,
int minutes,
int seconds
) {
return PlainDuration.ofClockUnits(hours, minutes, seconds, 0, false);
}
/**
* <p>Konstruiert eine Metrik für beliebige Standard-Zeiteinheiten
* in normalisierter Form. </p>
*
* <p><strong>WICHTIG:</strong> Fehlt die der Präzision der zu
* vergleichenden Zeitpunkte entsprechende kleinste Zeiteinheit, wird
* im allgemeinen ein Subtraktionsrest übrigbleiben. Das Ergebnis
* der Metrikberechnung wird dann nicht den vollständigen zeitlichen
* Abstand zwischen den Zeitpunkten ausdrücken. Für die
* Vollständigkeit der Berechnung ist bei Datumsangaben mindestens
* die explizite Angabe der Tageseinheit notwendig. </p>
*
* @param <U> generic unit type
* @param units time units to be used in calculation
* @return immutable metric for calculating a duration in given units
* @throws IllegalArgumentException if any time unit is missing or
* if there are unit duplicates
*/
public static <U extends IsoUnit>
TimeMetric<U, PlainDuration<U>> in(U... units) {
if (units.length == 0) {
throw new IllegalArgumentException("Missing units.");
}
for (int i = 0; i < units.length - 1; i++) {
for (int j = i + 1; j < units.length; j++) {
if (units[i].equals(units[j])) {
throw new IllegalArgumentException(
"Duplicate unit: " + units[i]);
}
}
}
Arrays.sort(units, UNIT_COMPARATOR);
return new Metric<U>((units.length > 1), Arrays.asList(units));
}
/**
* <p>Konstruiert eine Metrik in Jahren, Monaten und Tagen. </p>
*
* <p>Am Ende wird die Darstellung automatisch normalisiert, also kleine
* Zeiteinheiten so weit wie möglich in große Einheiten
* umgerechnet. </p>
*
* @return immutable metric for calculating a duration in years,
* months and days
* @see #in(IsoUnit[]) in(U[])
* @see CalendarUnit#YEARS
* @see CalendarUnit#MONTHS
* @see CalendarUnit#DAYS
*/
public static
TimeMetric<CalendarUnit, PlainDuration<CalendarUnit>> inYearsMonthsDays() {
return PlainDuration.in(
CalendarUnit.YEARS,
CalendarUnit.MONTHS,
CalendarUnit.DAYS
);
}
/**
* <p>Konstruiert eine Metrik in Stunden, Minuten, Sekunden und Nanos. </p>
*
* <p>Am Ende wird die Darstellung automatisch normalisiert, also kleine
* Zeiteinheiten so weit wie möglich in große Einheiten
* umgerechnet. </p>
*
* @return immutable metric for calculating a duration in clock units
* @see #in(IsoUnit[]) in(U[])
* @see ClockUnit#HOURS
* @see ClockUnit#MINUTES
* @see ClockUnit#SECONDS
* @see ClockUnit#NANOS
*/
public static
TimeMetric<ClockUnit, PlainDuration<ClockUnit>> inClockUnits() {
return PlainDuration.in(
ClockUnit.HOURS,
ClockUnit.MINUTES,
ClockUnit.SECONDS,
ClockUnit.NANOS
);
}
@Override
public List<Item<U>> getTotalLength() {
return this.items;
}
@Override
public boolean isNegative() {
return this.negative;
}
/**
* <p>Ist die angegebene Zeiteinheit in dieser Zeitspanne enthalten? </p>
*
* <p>Eine Zeiteinheit ist auch dann enthalten, wenn sie als
* Sekundenbruchteil (Ziffer in Symboldarstellung) erst konvertiert
* werden muß. </p>
*
* @param unit time unit to be checked (optional)
* @return {@code true} if this duration contains given unit
* else {@code false}
* @see #getPartialAmount(ChronoUnit) getPartialAmount(U)
*/
@Override
public boolean contains(ChronoUnit unit) {
if (unit instanceof IsoUnit) {
IsoUnit isoUnit = (IsoUnit) unit;
boolean fractional = isFractionUnit(isoUnit);
for (int i = 0, n = this.items.size(); i < n; i++) {
Item<U> item = this.items.get(i);
U u = item.getUnit();
if (
u.equals(unit)
|| (fractional && isFractionUnit(u))
) {
return (item.getAmount() > 0);
}
}
}
return false;
}
/**
* <p>Liefert den Betrag zu einer Zeiteinheit. </p>
*
* <p>Wenn die angegebene Zeiteinheit nicht in der Zeitspanne enthalten ist,
* liefert die Methode den Wert {@code 0}. Sekundenbruchteile, die an der
* Symboldarstellung ihrer Einheiten erkennbar sind, werden automatisch
* konvertiert. Konkret: Wenn eine Zeitspanne z.B. Nanosekunden speichert,
* aber nach Mikrosekunden gefragt wird, dann wird der in der Zeitspanne
* enthaltene Nanosekundenwert mit dem Faktor {@code 1000} multipliziert
* und zurückgegeben. </p>
*
* @param unit time unit the amount is queried for (optional)
* @return non-negative amount associated with given unit ({@code >= 0})
*/
@Override
public long getPartialAmount(ChronoUnit unit) {
if (unit instanceof IsoUnit) {
IsoUnit isoUnit = (IsoUnit) unit;
boolean fractional = isFractionUnit(isoUnit);
for (int i = 0, n = this.items.size(); i < n; i++) {
Item<U> item = this.items.get(i);
U u = item.getUnit();
if (u.equals(unit)) {
return item.getAmount();
} else if (
fractional
&& isFractionUnit(u)
) {
int d1 = u.getSymbol() - '0';
int d2 = isoUnit.getSymbol() - '0';
int factor = 1;
for (int j = 0, m = Math.abs(d1 - d2); j < m; j++) {
factor *= 10;
}
if (d1 >= d2) {
return item.getAmount() / factor;
} else {
return item.getAmount() * factor;
}
}
}
}
return 0;
}
/**
* <p>Liefert ein Hilfsobjekt zum Vergleichen von Zeitspannenobjekten
* auf Basis ihrer Länge. </p>
*
* <p>Erzeugt einen {@code Comparator}, der letztlich auf dem Ausdruck
* {@code base.plus(duration1).compareTo(base.plus(duration2))} beruht.
* Der Basiszeitpunkt ist notwendig, weil sonst Zeitspannenobjekte dieser
* Klasse nicht notwendig eine physikalisch feste Länge haben.
* Zum Beispiel sind Monate variable Zeiteinheiten mit unterschiedlich
* vielen Tagen. </p>
*
* @param <U> generic unit type
* @param <T> generic type of time point
* @param base base time point which durations will use for comparison
* @return {@code Comparator} for plain durations
* @see TimePoint#compareTo(TimePoint) TimePoint.compareTo(T)
*/
public static <U extends IsoUnit, T extends TimePoint<? super U, T>>
Comparator<PlainDuration<U>> comparator(T base) {
return new LengthComparator<U, T>(base);
}
/**
* <p>Liefert eine Kopie dieser Instanz, in der der angegebene Betrag zum
* mit der angegebenen Zeiteinheit assoziierten Feldwert addiert wird. </p>
*
* <p>Die Methode berücksichtigt auch das Vorzeichen der Zeitspanne.
* Beispiel in Pseudo-Code: {@code [P5M].plus(-6, CalendarUnit.MONTHS)} wird
* zu {@code [-P1M]}. Ist der zu addierende Betrag {@code 0}, liefert die
* Methode einfach diese Instanz selbst. Um eine gemischte Zeitspanne mit
* Wochen und anderen Datumselementen zu verhindern, werden Wochen bei
* Bedarf automatisch zu Tagen normalisiert. </p>
*
* <p>Notiz: Gemischte Vorzeichen im Ergebnis sind nicht zulässig und
* werden mit einem Abbruch quittiert. Zum Beispiel ist folgender Ausdruck
* nicht erlaubt: {@code [-P1M].plus(30, CalendarUnit.DAYS)}</p>
*
* @param amount temporal amount to be added (maybe negative)
* @param unit associated time unit
* @return new changed duration while this duration remains unaffected
* @throws IllegalStateException if the result gets mixed signs by
* adding the partial amounts
* @throws ArithmeticException in case of long overflow
* @see #with(long, IsoUnit) with(long, U)
*/
public PlainDuration<U> plus(
long amount,
U unit
) {
checkUnit(unit);
long originalAmount = amount;
U originalUnit = unit;
boolean negatedValue = false;
if (amount == 0) {
return this;
} else if (amount < 0) {
amount = MathUtils.safeNegate(amount);
negatedValue = true;
}
// Millis, Micros und Weeks ersetzen
List<Item<U>> temp = new ArrayList<Item<U>>(this.getTotalLength());
Item<U> item = replaceItem(this.getTotalLength(), amount, unit);
if (item != null) {
amount = item.getAmount();
unit = item.getUnit();
}
if (this.isEmpty()) {
temp.add((item == null) ? new Item<U>(amount, unit) : item);
return new PlainDuration<U>(
temp,
negatedValue,
this.calendrical && unit.isCalendrical());
}
int index = -1;
if (unit.isCalendrical() && (unit != CalendarUnit.WEEKS)) {
index = replaceWeeksForDays(temp, unit);
}
if (index == -1) {
index = this.getIndex(unit);
}
// Items aktualisieren
boolean resultNegative = this.isNegative();
if (index < 0) {
if (this.isNegative() == negatedValue) {
temp.add(new Item<U>(amount, unit));
} else {
this.throwMixedSignsException(originalAmount, originalUnit);
}
} else {
long sum =
MathUtils.safeAdd(
MathUtils.safeMultiply(
temp.get(index).getAmount(),
(this.isNegative() ? -1 : 1)
),
MathUtils.safeMultiply(
amount,
(negatedValue ? -1 : 1)
)
);
if (sum == 0) {
temp.remove(index);
} else if (
(this.count() == 1)
|| (this.isNegative() == (sum < 0))
) {
long absSum = ((sum < 0) ? MathUtils.safeNegate(sum) : sum);
temp.set(index, new Item<U>(absSum, unit));
resultNegative = (sum < 0);
} else {
this.throwMixedSignsException(originalAmount, originalUnit);
}
}
return new PlainDuration<U>(
temp,
resultNegative,
this.calendrical && unit.isCalendrical());
}
/**
* <p>Liefert eine Kopie dieser Instanz, in der der angegebene Betrag
* vom mit der angegebenen Zeiteinheit assoziierten Feldwert subtrahiert
* wird. </p>
*
* <p>Entspricht {@code plus(-amount, unit)}. </p>
*
* @param amount temporal amount to be subtracted (maybe negative)
* @param unit associated time unit
* @return new changed duration while this duration remains unaffected
* @throws IllegalStateException if the result gets mixed signs by
* subtracting the partial amounts
* @throws ArithmeticException in case of long overflow
* @see #plus(long, IsoUnit) plus(long, U)
*/
public PlainDuration<U> minus(
long amount,
U unit
) {
return this.plus(MathUtils.safeNegate(amount), unit);
}
/**
* <p>Erzeugt eine neue Zeitspanne als Vereinigung dieser und der
* angegebenen Zeitspanne, wobei Beträge zu gleichen Zeiteinheiten
* addiert werden. </p>
*
* <p>Diese Methode vereinigt anders als {@code union()} nur
* Zeitspannen mit dem gleichen Einheitstyp. Weitere Details sind
* gleich und der Beschreibung von {@link #union(TimeSpan)} zu
* entnehmen. </p>
*
* @param timespan other time span this duration will be merged
* with by adding the partial amounts
* @return new merged duration
* @throws IllegalStateException if the result gets mixed signs by
* adding the partial amounts
* @throws ArithmeticException in case of long overflow
*/
public PlainDuration<U> plus(TimeSpan<? extends U> timespan) {
return add(this, timespan, false);
}
/**
* <p>Erzeugt eine neue Zeitspanne als Vereinigung dieser und der
* angegebenen Zeitspanne, wobei die Beträge ds Arguments zu
* gleichen Zeiteinheiten subtrahiert werden. </p>
*
* <p>Weitere Details siehe {@link #plus(TimeSpan)}. </p>
*
* @param timespan other time span this duration will be merged
* with by subtracting the partial amounts
* @return new merged duration
* @throws IllegalStateException if the result gets mixed signs by
* subtracting the partial amounts
* @throws ArithmeticException in case of long overflow
*/
public PlainDuration<U> minus(TimeSpan<? extends U> timespan) {
return add(this, timespan, true);
}
/**
* <p>Liefert eine Kopie dieser Instanz mit dem angegebenen geänderten
* Wert. </p>
*
* <p>Entspricht {@code plus(amount - getAmount(unit), unit)}. </p>
*
* @param amount temporal amount to be set (maybe negative)
* @param unit associated time unit
* @return new changed duration while this duration remains unaffected
* @throws IllegalStateException if the result gets mixed signs by
* setting the partial amounts
* @throws ArithmeticException in case of long overflow
* @see #plus(long, IsoUnit) plus(long, U)
*/
public PlainDuration<U> with(
long amount,
U unit
) {
long absAmount =
((amount < 0) ? MathUtils.safeNegate(amount) : amount);
Item<U> item = replaceItem(this.getTotalLength(), absAmount, unit);
if (item != null) {
absAmount = item.getAmount();
unit = item.getUnit();
}
long oldAmount;
if (
unit.equals(CalendarUnit.DAYS)
&& this.contains(CalendarUnit.WEEKS)
) {
oldAmount =
MathUtils.safeMultiply(
this.getPartialAmount(CalendarUnit.WEEKS),
7L
);
} else {
oldAmount = this.getPartialAmount(unit);
}
return this.plus(
MathUtils.safeSubtract(
MathUtils.safeMultiply(
absAmount,
(amount < 0) ? - 1 : 1
),
MathUtils.safeMultiply(
oldAmount,
this.isNegative() ? -1 : 1
)
),
unit
);
}
/**
* <p>Liefert die absolute immer positive Variante dieser Zeitspanne. </p>
*
* <p>Beispiel: {@code [-P5M].abs()} wird zu {@code [P5M]}. </p>
*
* @return new positive duration if this duration is negative else this
* duration unchanged
* @see #isNegative()
* @see #negate()
*/
public PlainDuration<U> abs() {
if (this.isNegative()) {
return this.negate();
} else {
return this;
}
}
/**
* <p>Liefert eine Kopie dieser Instanz, die das negative Äquivalent
* darstellt. </p>
*
* <p>Ein zweifacher Aufruf dieser Methode liefert wieder eine
* inhaltlich gleiche Instanz. Also gilt immer folgende Beziehung:
* {@code this.negate().negate().equals(this) == true}. Liegt der
* Sonderfall einer leeren Zeitspanne vor, dann ist diese Methode ohne
* Wirkung und liefert nur die gleiche Instanz zurück. Entspricht
* dem Ausdruck {@code multipliedBy(-1)}. </p>
*
* <p>Beispiel: {@code [-P5M].negate()} wird zu {@code [P5M]}. </p>
*
* @return new negative duration if this duration is positive else a new
* positive duration with the same partial amounts and units
* @see #isNegative()
* @see #multipliedBy(int)
*/
@Override
public PlainDuration<U> negate() {
return this.multipliedBy(-1);
}
/**
* <p>Multipliziert alle enthaltenen Beträge mit dem angegebenen
* Faktor. </p>
*
* <p>Ist der Faktor {@code 0}, ist die neue Zeitspanne leer. Mit dem
* Faktor {@code 1} wird diese Instanz selbst unverändert
* zurückgegeben. Bei einem negativen Faktor wird zusätzlich
* das Vorzeichen geändert. </p>
*
* @param factor multiplication factor
* @return new duration with all amounts multiplied while this duration
* remains unaffected
* @throws ArithmeticException in case of long overflow
*/
public PlainDuration<U> multipliedBy(int factor) {
if (
this.isEmpty()
|| (factor == 1)
) {
return this;
} else if (factor == 0) {
return new PlainDuration<U>(this.calendrical);
} else if (factor == -1) {
return new PlainDuration<U>(this, true);
}
List<Item<U>> newItems = new ArrayList<Item<U>>(this.count());
int scalar = Math.abs(factor);
for (int i = 0, n = this.count(); i < n; i++) {
Item<U> item = this.getTotalLength().get(i);
newItems.add(
new Item<U>(
MathUtils.safeMultiply(item.getAmount(), scalar),
item.getUnit()
)
);
}
return new PlainDuration<U>(
newItems,
((factor < 0) ? !this.isNegative() : this.isNegative()),
this.calendrical
);
}
/**
* <p>Erzeugt eine neue Zeitspanne als Vereinigung dieser und der
* angegebenen Zeitspanne, wobei Beträge zu gleichen Zeiteinheiten
* addiert werden. </p>
*
* <p><i>Vereinigung von Zeitspannen in Datum und Uhrzeit</i></p>
* <pre>
* PlainDuration<CalendarUnit> dateDuration =
* PlainDuration.ofCalendarUnits(2, 7, 10);
* PlainDuration<ClockUnit> timeDuration =
* PlainDuration.ofClockUnits(0, 30, 0);
* System.out.println(dateDuration.union(timeDuration)); // P2Y7M10DT30M
* </pre>
*
* <p><i>Vereinigung als Addition von Zeitspannen</i></p>
* <pre>
* PlainDuration<CalendarUnit> p1 =
* PlainDuration.ofCalendarUnits(0, 0, 10);
* PlainDuration<CalendarUnit> p2 =
* PlainDuration.of(3, CalendarUnit.WEEKS);
* System.out.println(p1.union(p2)); // P31D
* </pre>
*
* <p>Um eine gemischte Zeitspanne mit Wochen und anderen Datumselementen
* zu verhindern, werden Wochen bei Bedarf automatisch zu Tagen
* normalisiert (siehe auch letztes Beispiel). </p>
*
* <p>Falls die Vorzeichen beider Zeitspannen verschieden sind, müssen
* im Ergebnis trotzdem die Vorzeichen aller Beträge gleich sein, damit
* eindeutig das Vorzeichen der Ergebnis-Zeitspanne feststeht. Beispiel in
* Pseudo-Code: [P4D] union [-P1M34D] = [-P1M30D]. Hingegen führt die
* Vereinigung [P5M4D] union [-P4M34D] zum Abbruch, weil [P+1M-30D] keine
* sinnvolle Vorzeichenregelung erlaubt. </p>
*
* <p>Notiz: Anders als in {@code javax.xml.datatype.Duration} ist die
* Anforderung an gleiche Vorzeichen hier härter, weil diese Klasse
* auch zur Verwendung in Zeitzonenkontexten vorgesehen ist, wo kein
* Verlaß auf feste Umrechnungen à la 1 Tag = 24 Stunden besteht.
* Allerdings besteht die Möglichkeit, Zeitspannen vor der Vereinigung
* geeignet zu normalisieren. </p>
*
* @param timespan other time span this duration is to be merged with
* @return new merged duration with {@code IsoUnit} as unit type
* @throws IllegalStateException if the result gets mixed signs by
* adding the partial amounts
*/
public PlainDuration<IsoUnit> union(TimeSpan<? extends IsoUnit> timespan) {
return ZERO.plus(this).plus(timespan);
}
/**
* <p>Normalisiert diese Zeitspanne über den angegebenen
* Mechanismus. </p>
*
* @param normalizer help object for normalizing this duration
* @return new normalized duration while this duration remains unaffected
* @see #STD_PERIOD
* @see #STD_CALENDAR_PERIOD
* @see #STD_CLOCK_PERIOD
*/
public PlainDuration<U> with(Normalizer<U> normalizer) {
return convert(normalizer.normalize(this));
}
/**
* <p>Basiert auf allen gespeicherten Zeitspannenelementen und dem
* Vorzeichen. </p>
*
* @return {@code true} if {@code obj} is also a {@code PlainDuration},
* has the same units and amounts, the same sign and the same
* calendrical status else {@code false}
* @see #getTotalLength()
* @see #isNegative()
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
} else if (obj instanceof PlainDuration) {
PlainDuration<?> that = PlainDuration.class.cast(obj);
return (
(this.negative == that.negative)
&& (this.calendrical == that.calendrical)
&& this.getTotalLength().equals(that.getTotalLength())
);
} else {
return false;
}
}
/**
* <p>Basiert auf allen gespeicherten Zeitspannenelementen und dem
* Vorzeichen passend zur Definition von {@code equals()}. </p>
*/
@Override
public int hashCode() {
int hash = this.getTotalLength().hashCode();
if (this.negative) {
hash ^= hash;
}
return hash;
}
/**
* <p>Liefert eine kanonische Darstellung analog zur
* ISO-8601-Definition. </p>
*
* <p>Entspricht {@code toString(false)}. </p>
*
* @see #toString(boolean)
* @see #parse(String)
*/
@Override
public String toString() {
return this.toString(false);
}
/**
* <p>Liefert eine kanonische Darstellung, die optional mit einem negativen
* Vorzeichen beginnt, dann mit dem Buchstaben "P" fortsetzt,
* gefolgt von einer Reihe von alphanumerischen Zeichen analog zur
* ISO8601-Definition. </p>
*
* <p>Beispiel: Im ISO8601-Format ist eine Zeitspanne von 1 Monat, 3 Tagen
* und 4 Stunden als "P1M3DT4H" beschrieben, wobei der Buchstabe
* "T" Datums- und Uhrzeitteil trennt. </p>
*
* <p>Ist die Zeitspanne negativ, so wird in Übereinstimmung mit der
* XML-Schema-Norm ein Minuszeichen vorangestellt (z.B. "-P2D"),
* während eine leere Zeitspanne das Format "PT0S" hat
* (Sekunde als universelles Zeitmaß). Hat der Sekundenteil einen
* Bruchteil, wird als Dezimaltrennzeichen das Komma entsprechend der
* Empfehlung des ISO-Standards gewählt, es sei denn, über den
* xml-Parameter wurde die Verwendung für XML geregelt (dort ist nur
* ein Punkt zulässig). Speziell für XML gilt auch, daß
* ein vorhandenes Wochenfeld zu Tagen auf der Basis (1 Woche = 7 Tage)
* normalisiert wird. </p>
*
* <p>Hinweis: Die ISO-Empfehlung, ein Komma als Dezimaltrennzeichen zu
* verwenden, kann mit Hilfe der bool'schen System-Property
* "net.time4j.format.iso.decimal.dot" so geändert
* werden, daß die angelsächsiche Variante mit Punkt statt
* Komma verwendet wird. </p>
*
* @param xml Is a XML-Schema-compatible output required?
* @return String
* @throws ChronoException if in xml-mode any special units shall be
* output, but units of type {@code CalendarUnit} will be
* translated to xml-compatible units if necessary
* @see #parse(String)
* @see IsoUnit#getSymbol()
*/
public String toString(boolean xml) {
if (this.isEmpty()) {
return (this.calendrical ? "P0D" : "PT0S");
}
StringBuilder sb = new StringBuilder();
if (this.isNegative()) {
sb.append('-');
}
sb.append('P');
boolean timeAppended = false;
long nanos = 0;
long seconds = 0;
for (
int index = 0, limit = this.getTotalLength().size();
index < limit;
index++
) {
Item<U> item = this.getTotalLength().get(index);
U unit = item.getUnit();
if (!timeAppended && !unit.isCalendrical()) {
sb.append('T');
timeAppended = true;
}
long amount = item.getAmount();
char symbol = unit.getSymbol();
if ((symbol > '0') && (symbol <= '9')) {
assert (symbol == '9');
nanos = amount;
} else if (symbol == 'S') {
seconds = amount;
} else {
if (xml) {
switch (symbol) {
case 'D':
case 'M':
case 'Y':
case 'H':
sb.append(amount);
break;
case 'W':
sb.append(MathUtils.safeMultiply(amount, 7));
symbol = 'D';
break;
case 'Q':
sb.append(MathUtils.safeMultiply(amount, 3));
symbol = 'M';
break;
case 'E':
sb.append(MathUtils.safeMultiply(amount, 10));
symbol = 'Y';
break;
case 'C':
sb.append(MathUtils.safeMultiply(amount, 100));
symbol = 'Y';
break;
case 'I':
sb.append(MathUtils.safeMultiply(amount, 1000));
symbol = 'Y';
break;
default:
throw new ChronoException(
"Special units cannot be output in xml-mode: "
+ this.toString(false));
}
} else {
sb.append(amount);
}
if (symbol == '\u0000') {
sb.append('{');
sb.append(unit);
sb.append('}');
} else {
sb.append(symbol);
}
}
}
if (nanos != 0) {
seconds = MathUtils.safeAdd(seconds, nanos / MRD);
sb.append(seconds);
sb.append(xml ? '.' : ISO_DECIMAL_SEPARATOR);
String f = String.valueOf(nanos % MRD);
for (int i = 0, len = 9 - f.length(); i < len; i++) {
sb.append('0');
}
sb.append(f);
sb.append('S');
} else if (seconds != 0) {
sb.append(seconds);
sb.append('S');
}
return sb.toString();
}
/**
* <p>Parst eine kanonische ISO-konforme Darstellung zu einer
* Zeitspanne. </p>
*
* <p>Syntax in RegExp-ähnlicher Notation: </p>
*
* <pre>
* amount := [0-9]+
* fraction := [,\.]{amount}
* years-months-days := ({amount}Y)?({amount}M)?({amount}D)?
* weeks := ({amount}W)?
* date := {years-months-days} | {weeks}
* time := ({amount}H)?({amount}M)?({amount}{fraction}?S)?
* duration := P{date}(T{time})? | PT{time}
* </pre>
*
* <p>Die in {@link CalendarUnit} definierten Zeiteinheiten MILLENNIA,
* CENTURIES, DECADES und QUARTERS werden mitsamt ihren Symbolen ebenfalls
* unterstützt. </p>
*
* <p>Weiterhin gilt die Einschränkung, daß die Symbole P und T
* mindestens ein Zeitfeld nach sich ziehen müssen. Alle Felder mit
* {@code 0}-Beträgen werden beim Parsen ignoriert. Das einzig erlaubte
* Dezimalfeld der Sekunden kann sowohl einen Punkt wie auch ein Komma
* als Dezimaltrennzeichen haben. Im ISO-Standard ist das Komma das
* bevorzugte Zeichen, in XML-Schema nur der Punkt zulässig. Speziell
* für die Verwendung in XML-Schema (Typ xs:duration) ist zu beachten,
* daß Wochenfelder anders als im ISO-Standard nicht vorkommen. Die
* Methode {@code toString(true)} berücksichtigt diese Besonderheiten
* von XML-Schema (abgesehen davon, daß XML-Schema potentiell
* unbegrenzt große Zahlen zuläßt, aber Time4J eine
* Zeitspanne nur im long-Bereich mit maximal Nanosekunden-Genauigkeit
* definiert). </p>
*
* <p>Beispiele für unterstützte Formate: </p>
*
* <pre>
* date := -P7Y4M3D (negativ: 7 Jahre, 4 Monate, 3 Tage)
* time := PT3H2M1,4S (positiv: 3 Stunden, 2 Minuten, 1400 Millisekunden)
* date-time := P1Y1M5DT15H59M10.400S (Punkt als Dezimaltrennzeichen)
* </pre>
*
* @param duration duration in ISO-8601-format
* @return parsed duration in all possible units of date and time
* @throws ParseException if parsing fails
* @see #parseCalendarPeriod(String)
* @see #parseClockPeriod(String)
* @see #toString()
* @see #toString(boolean)
*/
public static PlainDuration<IsoUnit> parse(String duration)
throws ParseException {
return parse(duration, IsoUnit.class);
}
/**
* <p>Parst eine kanonische ISO-konforme Darstellung nur mit
* Datumskomponenten zu einer Zeitspanne. </p>
*
* @param duration duration in ISO-8601-format
* @return parsed calendrical duration
* @throws ParseException if parsing fails
* @see #parse(String)
* @see #parseClockPeriod(String)
*/
public static
PlainDuration<CalendarUnit> parseCalendarPeriod(String duration)
throws ParseException {
return parse(duration, CalendarUnit.class);
}
/**
* <p>Parst eine kanonische ISO-konforme Darstellung nur mit
* Uhrzeitkomponenten zu einer Zeitspanne. </p>
*
* @param duration duration in ISO-8601-format
* @return parsed time-only duration
* @throws ParseException if parsing fails
* @see #parse(String)
* @see #parseCalendarPeriod(String)
*/
public static
PlainDuration<ClockUnit> parseClockPeriod(String duration)
throws ParseException {
return parse(duration, ClockUnit.class);
}
private int count() {
return this.getTotalLength().size();
}
// wildcard capture
private static <U> boolean isEmpty(TimeSpan<U> timespan) {
List<Item<U>> items = timespan.getTotalLength();
for (int i = 0, n = items.size(); i < n; i++) {
if (items.get(i).getAmount() > 0) {
return false;
}
}
return true;
}
private static <U extends IsoUnit> PlainDuration<U> add(
PlainDuration<U> duration,
TimeSpan<? extends U> timespan,
boolean inverse
) {
if (duration.isEmpty()) {
if (isEmpty(timespan)) {
return duration;
} else if (timespan instanceof PlainDuration) {
PlainDuration<U> result = cast(timespan);
return (inverse ? result.negate() : result);
}
}
boolean calendrical = duration.calendrical;
Map<U, Long> map = new HashMap<U, Long>();
for (int i = 0, n = duration.count(); i < n; i++) {
Item<U> item = duration.getTotalLength().get(i);
map.put(
item.getUnit(),
Long.valueOf(
MathUtils.safeMultiply(
item.getAmount(),
(duration.isNegative() ? -1 : 1)
)
)
);
}
boolean tsign = timespan.isNegative();
if (inverse) {
tsign = !tsign;
}
for (int i = 0, n = timespan.getTotalLength().size(); i < n; i++) {
TimeSpan.Item<? extends U> e = timespan.getTotalLength().get(i);
U unit = e.getUnit();
long amount = e.getAmount();
if (calendrical && !unit.isCalendrical()) {
calendrical = false;
}
// Millis, Micros und Weeks ersetzen
Item<U> item =
replaceItem(duration.getTotalLength(), amount, unit);
if (item != null) {
amount = item.getAmount();
unit = item.getUnit();
}
boolean overwrite = false;
if (unit.isCalendrical() && (unit != CalendarUnit.WEEKS)) {
overwrite = replaceWeeksForDays(map, unit);
}
if (!overwrite) {
overwrite = map.containsKey(unit);
}
// Items aktualisieren
if (overwrite) {
map.put(
unit,
Long.valueOf(
MathUtils.safeAdd(
map.get(unit).longValue(),
MathUtils.safeMultiply(amount, (tsign ? -1 : 1))
)
)
);
} else {
map.put(
unit,
MathUtils.safeMultiply(amount, (tsign ? -1 : 1))
);
}
}
Boolean neg = null;
if (duration.isNegative() == tsign) {
neg = Boolean.valueOf(duration.isNegative());
} else {
for (Map.Entry<U, Long> entry : map.entrySet()) {
boolean nsign = (entry.getValue().longValue() < 0);
if (neg == null) {
neg = Boolean.valueOf(nsign);
} else if (neg.booleanValue() != nsign) {
throw new IllegalStateException(
"Mixed signs in result time span not allowed: "
+ duration + " UNION " + timespan);
}
}
}
if (neg.booleanValue()) {
for (Map.Entry<U, Long> entry : map.entrySet()) {
long value = entry.getValue().longValue();
map.put(
entry.getKey(),
Long.valueOf(
(value < 0)
? MathUtils.safeNegate(value)
: value)
);
}
}
return PlainDuration.create(map, neg.booleanValue(), calendrical);
}
private static PlainDuration<CalendarUnit> ofCalendarUnits(
long years,
long months,
long days,
boolean negative
) {
List<Item<CalendarUnit>> items = new ArrayList<Item<CalendarUnit>>(3);
if (years != 0) {
items.add(new Item<CalendarUnit>(years, CalendarUnit.YEARS));
}
if (months != 0) {
items.add(new Item<CalendarUnit>(months, CalendarUnit.MONTHS));
}
if (days != 0) {
items.add(new Item<CalendarUnit>(days, CalendarUnit.DAYS));
}
return new PlainDuration<CalendarUnit>(items, negative, true);
}
private static PlainDuration<ClockUnit> ofClockUnits(
long hours,
long minutes,
long seconds,
long nanos,
boolean negative
) {
List<Item<ClockUnit>> items = new ArrayList<Item<ClockUnit>>(4);
if (hours != 0) {
items.add(new Item<ClockUnit>(hours, ClockUnit.HOURS));
}
if (minutes != 0) {
items.add(new Item<ClockUnit>(minutes, ClockUnit.MINUTES));
}
if (seconds != 0) {
items.add(new Item<ClockUnit>(seconds, ClockUnit.SECONDS));
}
if (nanos != 0) {
items.add(new Item<ClockUnit>(nanos, ClockUnit.NANOS));
}
return new PlainDuration<ClockUnit>(items, negative, false);
}
private static <U extends IsoUnit> PlainDuration<U> create(
Map<U, Long> map,
boolean negative,
boolean calendrical
) {
if (map.isEmpty()) {
return new PlainDuration<U>(calendrical);
}
List<Item<U>> temp = new ArrayList<Item<U>>(map.size());
long weeks = 0;
long days = 0;
long nanos = 0;
U weekUnit = null;
U dayUnit = null;
for (Map.Entry<U, Long> entry : map.entrySet()) {
long amount = entry.getValue().longValue();
U key = entry.getKey();
if (amount == 0) {
continue;
} else if (key == CalendarUnit.WEEKS) {
weeks = amount;
weekUnit = key;
} else if (key == CalendarUnit.DAYS) {
days = amount;
dayUnit = key;
} else if (key == ClockUnit.MILLIS) {
nanos =
MathUtils.safeAdd(
nanos,
MathUtils.safeMultiply(amount, MIO));
} else if (key == ClockUnit.MICROS) {
nanos =
MathUtils.safeAdd(
nanos,
MathUtils.safeMultiply(amount, 1000));
} else if (key == ClockUnit.NANOS) {
nanos = MathUtils.safeAdd(nanos, amount);
} else {
temp.add(new Item<U>(amount, key));
}
}
if (
(days != 0)
&& (weeks != 0)
) {
days =
MathUtils.safeAdd(
days,
MathUtils.safeMultiply(weeks, 7));
weeks = 0;
}
if (weeks != 0) {
temp.add(new Item<U>(weeks, weekUnit));
}
if (days != 0) {
temp.add(new Item<U>(days, dayUnit));
}
if (nanos != 0) {
U key = cast(ClockUnit.NANOS);
temp.add(new Item<U>(nanos, key));
}
return new PlainDuration<U>(temp, negative, calendrical);
}
// binäre Suche
private int getIndex(ChronoUnit unit) {
return getIndex(unit, this.getTotalLength());
}
// binäre Suche
private static <U extends ChronoUnit> int getIndex(
ChronoUnit unit,
List<Item<U>> list
) {
int low = 0;
int high = list.size() - 1;
while (low <= high) {
int mid = (low + high) >>> 1;
ChronoUnit midUnit = list.get(mid).getUnit();
int cmp = compare(midUnit, unit);
if (cmp < 0) {
low = mid + 1;
} else if (cmp > 0) {
high = mid - 1;
} else {
return mid; // gefunden
}
}
return -1;
}
private static int compare(
ChronoUnit u1,
ChronoUnit u2
) {
return Double.compare(u2.getLength(), u1.getLength());
}
private static <U extends IsoUnit> int replaceWeeksForDays(
List<Item<U>> temp,
U unit
) {
int weekIndex = getIndex(CalendarUnit.WEEKS, temp);
if (weekIndex >= 0) {
temp.set(
weekIndex,
new Item<U>(
MathUtils.safeMultiply(
temp.get(weekIndex).getAmount(), 7L),
PlainDuration.<U>cast(CalendarUnit.DAYS)
)
);
if (unit.equals(CalendarUnit.DAYS)) {
return weekIndex; // Summenbildung: oldDays + amount
}
}
return -1;
}
private static <U extends IsoUnit> boolean replaceWeeksForDays(
Map<U, Long> temp,
U unit
) {
Long amount = temp.get(CalendarUnit.WEEKS);
if (amount != null) {
temp.remove(CalendarUnit.WEEKS);
temp.put(
PlainDuration.<U>cast(CalendarUnit.DAYS),
Long.valueOf(MathUtils.safeMultiply(amount.longValue(), 7L))
);
if (unit.equals(CalendarUnit.DAYS)) {
return true;
}
}
return false;
}
// optional
private static <U extends IsoUnit> Item<U> replaceItem(
List<Item<U>> items,
long amount,
U unit
) {
if (unit.equals(ClockUnit.MILLIS)) {
amount = MathUtils.safeMultiply(amount, MIO);
unit = cast(ClockUnit.NANOS);
} else if (unit.equals(ClockUnit.MICROS)) {
amount = MathUtils.safeMultiply(amount, 1000L);
unit = cast(ClockUnit.NANOS);
} else if (unit.equals(CalendarUnit.WEEKS)) {
for (int i = 0, n = items.size(); i < n; i++) {
U test = items.get(i).getUnit();
if (test.isCalendrical() && (test != CalendarUnit.WEEKS)) {
amount = MathUtils.safeMultiply(amount, 7L);
unit = cast(CalendarUnit.DAYS);
break;
}
}
} else {
return null;
}
return new Item<U>(amount, unit);
}
private void checkUnit(ChronoUnit unit) {
if (unit == null) {
throw new NullPointerException("Missing chronological unit.");
}
}
private void throwMixedSignsException(
long amount,
ChronoUnit unit
) {
StringBuilder sb = new StringBuilder(128);
sb.append("Mixed signs in result time span not allowed: ");
sb.append(this);
sb.append(" + (");
sb.append(amount);
sb.append(' ');
sb.append(unit);
sb.append(')');
throw new IllegalStateException(sb.toString());
}
private static <U extends IsoUnit>
PlainDuration<U> convert(TimeSpan<U> timespan) {
if (timespan instanceof PlainDuration) {
return cast(timespan);
} else {
boolean calendrical = true;
for (Item<U> item : timespan.getTotalLength()) {
if (!item.getUnit().isCalendrical()) {
calendrical = false;
}
}
PlainDuration<U> zero = new PlainDuration<U>(calendrical);
return zero.plus(timespan);
}
}
private boolean isFractionUnit(IsoUnit unit) {
char symbol = unit.getSymbol();
return ((symbol >= '1') && (symbol <= '9'));
}
@SuppressWarnings("unchecked")
private static <T> T cast(Object obj) {
return (T) obj;
}
//~ Parse-Routinen ----------------------------------------------------
private static <U extends IsoUnit> PlainDuration<U> parse(
String duration,
Class<U> type
) throws ParseException {
int index = 0;
boolean negative = false;
if (duration.length() == 0) {
throw new ParseException("Empty duration string.", index);
} else if (duration.charAt(0) == '-') {
negative = true;
index = 1;
}
try {
if (duration.charAt(index) != 'P') {
throw new ParseException(
"Format symbol \'P\' expected: " + duration, index);
} else {
index++;
}
List<Item<U>> items = new ArrayList<Item<U>>();
int sep = duration.indexOf('T', index);
boolean calendrical = (sep == -1);
if (calendrical) {
if (type == ClockUnit.class) {
throw new ParseException(
"Format symbol \'T\' expected: " + duration, index);
} else {
parseItems(duration, index, duration.length(), true, items);
}
} else {
if (sep > index) {
if (type == ClockUnit.class) {
throw new ParseException(
"Unexpected date component found: " + duration,
index);
} else {
parseItems(duration, index, sep, true, items);
}
}
if (type == CalendarUnit.class) {
throw new ParseException(
"Unexpected time component found: " + duration, sep);
} else {
parseItems(
duration,
sep + 1,
duration.length(),
false,
items);
}
}
return new PlainDuration<U>(items, negative, calendrical);
} catch (IndexOutOfBoundsException ex) {
ParseException pe =
new ParseException(
"Unexpected termination of duration: " + duration, index);
pe.initCause(ex);
throw pe;
}
}
private static <U extends ChronoUnit> void parseItems(
String duration,
int from,
int to,
boolean date,
List<Item<U>> items
) throws ParseException {
if (from == to) {
throw new ParseException(duration, from);
}
StringBuilder num = null;
boolean endOfItem = false;
ChronoUnit last = null;
int index = from;
boolean decimal = false;
int dateElements = 0;
boolean weekSymbol = false;
for (int i = from; i < to; i++) {
char c = duration.charAt(i);
if ((c >= '0') && (c <= '9')) {
if (num == null) {
num = new StringBuilder();
endOfItem = false;
index = i;
}
num.append(c);
} else if ((c == ',') || (c == '.')) {
if ((num == null) || date) {
throw new ParseException(
"Decimal separator misplaced: " + duration, i);
} else {
endOfItem = true;
long amount = parseAmount(duration, num.toString(), index);
ChronoUnit unit = ClockUnit.SECONDS;
last =
addParsedItem(unit, last, amount, duration, i, items);
num = null;
decimal = true;
}
} else if (endOfItem) {
throw new ParseException(
"Unexpected char \'" + c + "\' found: " + duration, i);
} else if (decimal) {
if (c != 'S') {
throw new ParseException(
"Second symbol expected: " + duration, i);
} else if (num == null) {
throw new ParseException(
"Decimal separator misplaced: " + duration, i - 1);
} else if (num.length() > 9) {
num.delete(9, num.length());
}
for (int j = num.length(); j < 9; j++) {
num.append('0');
}
endOfItem = true;
long amount = parseAmount(duration, num.toString(), index);
ChronoUnit unit = ClockUnit.NANOS;
num = null;
last = addParsedItem(unit, last, amount, duration, i, items);
} else {
endOfItem = true;
long amount =
parseAmount(
duration,
(num == null) ? String.valueOf(c) : num.toString(),
index);
num = null;
ChronoUnit unit = (
date
? parseDateSymbol(c, dateElements, weekSymbol, duration, i)
: parseTimeSymbol(c, duration, i));
if (date) {
if (unit.equals(CalendarUnit.WEEKS)) {
weekSymbol = true;
}
dateElements++;
}
last = addParsedItem(unit, last, amount, duration, i, items);
}
}
if (!endOfItem) {
throw new ParseException("Unit symbol expected: " + duration, to);
}
}
private static CalendarUnit parseDateSymbol(
char c,
int dateElements,
boolean weekSymbol,
String duration,
int index
) throws ParseException {
switch (c) {
case 'I':
return CalendarUnit.MILLENNIA;
case 'C':
return CalendarUnit.CENTURIES;
case 'E':
return CalendarUnit.DECADES;
case 'Y':
return CalendarUnit.YEARS;
case 'Q':
return CalendarUnit.QUARTERS;
case 'M':
return CalendarUnit.MONTHS;
case 'W':
if (dateElements > 0) {
throw new ParseException(
"Mixed date symbols with weeks not supported: "
+ duration,
index);
} else {
return CalendarUnit.WEEKS;
}
case 'D':
if (weekSymbol) {
throw new ParseException(
"Mixed date symbols with weeks not supported: "
+ duration,
index);
} else {
return CalendarUnit.DAYS;
}
default:
throw new ParseException(
"Symbol \'" + c + "\' not supported: " + duration, index);
}
}
private static ClockUnit parseTimeSymbol(
char c,
String duration,
int index
) throws ParseException {
switch (c) {
case 'H':
return ClockUnit.HOURS;
case 'M':
return ClockUnit.MINUTES;
case 'S':
return ClockUnit.SECONDS;
default:
throw new ParseException(
"Symbol \'" + c + "\' not supported: " + duration, index);
}
}
private static <U extends ChronoUnit> ChronoUnit addParsedItem(
ChronoUnit unit,
ChronoUnit last, // optional
long amount,
String duration,
int index,
List<Item<U>> items
) throws ParseException {
if (
(last == null)
|| (Double.compare(unit.getLength(), last.getLength()) < 0)
) {
if (amount != 0) {
U reified = cast(unit);
items.add(new Item<U>(amount, reified));
}
return unit;
} else if (unit.getLength() == last.getLength()) {
throw new ParseException(
"Duplicate unit items: " + duration, index);
} else {
throw new ParseException(
"Wrong order of unit items: " + duration, index);
}
}
private static long parseAmount(
String duration,
String number,
int index
) throws ParseException {
try {
return Long.parseLong(number);
} catch (NumberFormatException nfe) {
ParseException pe = new ParseException(duration, index);
pe.initCause(nfe);
throw pe;
}
}
//~ Innere Klassen ----------------------------------------------------
/**
* <p>Hilfsobjekt zum Bauen einer ISO-konformen Zeitspanne bestehend aus
* Jahren, Monaten, Tagen und allen Uhrzeiteinheiten. </p>
*
* <p>Lediglich die Wocheneinheit ist ausgenommen, da eine wochenbasierte
* Zeitspanne nach dem ISO-Standard für sich alleine stehen sollte.
* Eine wochenbasierte Zeitspanne kann auf einfache Weise mit dem Ausdruck
* {@code PlainDuration.of(amount, CalendarUnit.WEEKS)} erzeugt werden. </p>
*
* <p>Eine Instanz wird mittels {@link PlainDuration#ofPositive()} oder
* {@link PlainDuration#ofNegative()} erzeugt. Diese Instanz ist nur zur
* lokalen Verwendung in einem Thread gedacht, da keine Thread-Sicherheit
* gegeben ist. </p>
*/
public static class Builder {
//~ Instanzvariablen ----------------------------------------------
private final List<Item<IsoUnit>> items;
private final boolean negative;
private Boolean calendrical = null;
private boolean millisSet = false;
private boolean microsSet = false;
private boolean nanosSet = false;
//~ Konstruktoren -------------------------------------------------
/**
* <p>Konstruiert ein Hilfsobjekt zum Bauen einer Zeitspanne. </p>
*
* @param negative Is a negative duration asked for?
*/
Builder(boolean negative) {
super();
this.items = new ArrayList<Item<IsoUnit>>(10);
this.negative = negative;
}
//~ Methoden ------------------------------------------------------
/**
* <p>Erzeugt eine Länge in Jahren. </p>
*
* @param num count of years {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder years(int num) {
this.set(num, CalendarUnit.YEARS);
if (this.calendrical == null) {
this.calendrical = Boolean.TRUE;
}
return this;
}
/**
* <p>Erzeugt eine Länge in Monaten. </p>
*
* @param num count of months {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder months(int num) {
this.set(num, CalendarUnit.MONTHS);
if (this.calendrical == null) {
this.calendrical = Boolean.TRUE;
}
return this;
}
/**
* <p>Erzeugt eine Länge in Tagen. </p>
*
* @param num count of days {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder days(int num) {
this.set(num, CalendarUnit.DAYS);
if (this.calendrical == null) {
this.calendrical = Boolean.TRUE;
}
return this;
}
/**
* <p>Erzeugt eine Länge in Stunden. </p>
*
* @param num count of hours {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder hours(int num) {
this.set(num, ClockUnit.HOURS);
this.calendrical = Boolean.FALSE;
return this;
}
/**
* <p>Erzeugt eine Länge in Minuten. </p>
*
* @param num count of minutes {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder minutes(int num) {
this.set(num, ClockUnit.MINUTES);
this.calendrical = Boolean.FALSE;
return this;
}
/**
* <p>Erzeugt eine Länge in Sekunden. </p>
*
* @param num count of seconds {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder seconds(int num) {
this.set(num, ClockUnit.SECONDS);
this.calendrical = Boolean.FALSE;
return this;
}
/**
* <p>Erzeugt eine Länge in Millisekunden. </p>
*
* <p>Es wird eine Normalisierung durchgeführt, indem das Argument
* mit dem Faktor {@code 1} Million multipliziert und in Nanosekunden
* gespeichert wird. </p>
*
* @param num count of milliseconds {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder millis(int num) {
this.millisCalled();
this.update(num, MIO);
return this;
}
/**
* <p>Erzeugt eine Länge in Mikrosekunden. </p>
*
* <p>Es wird eine Normalisierung durchgeführt, indem das Argument
* mit dem Faktor {@code 1000} multipliziert und in Nanosekunden
* gespeichert wird. </p>
*
* @param num count of microseconds {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder micros(int num) {
this.microsCalled();
this.update(num, 1000L);
return this;
}
/**
* <p>Erzeugt eine Länge in Nanosekunden. </p>
*
* @param num count of nanoseconds {@code >= 0}
* @return this instance for method chaining
* @throws IllegalArgumentException if the argument is negative
* @throws IllegalStateException if already called
*/
public Builder nanos(int num) {
this.nanosCalled();
this.update(num, 1L);
return this;
}
/**
* <p>Erzeugt eine neue ISO-konforme Zeitspanne. </p>
*
* @return new {@code PlainDuration}
*/
public PlainDuration<IsoUnit> build() {
if (this.calendrical == null) {
throw new IllegalStateException("Not set any amount and unit.");
}
return new PlainDuration<IsoUnit>(
this.items,
this.negative,
this.calendrical.booleanValue()
);
}
private Builder set(
long amount,
IsoUnit unit
) {
for (int i = 0, n = this.items.size(); i < n; i++) {
if (this.items.get(i).getUnit() == unit) {
throw new IllegalStateException(
"Already registered: " + unit);
}
}
if (amount != 0) {
Item<IsoUnit> item = new Item<IsoUnit>(amount, unit);
this.items.add(item);
}
return this;
}
private void update(
long amount,
long factor
) {
this.calendrical = Boolean.FALSE;
if (amount >= 0) {
for (int i = this.items.size() - 1; i >= 0; i--) {
Item<IsoUnit> item = this.items.get(i);
if (item.getUnit().equals(ClockUnit.NANOS)) {
this.items.set(
i,
new Item<IsoUnit>(
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, factor),
item.getAmount()
),
ClockUnit.NANOS
)
);
return;
}
}
if (amount != 0) {
this.items.add(
new Item<IsoUnit>(
MathUtils.safeMultiply(amount, factor),
ClockUnit.NANOS
)
);
}
} else {
throw new IllegalArgumentException(
"Illegal negative amount: " + amount);
}
}
private void millisCalled() {
if (this.millisSet) {
throw new IllegalStateException(
"Called twice for: " + ClockUnit.MILLIS.name());
}
this.millisSet = true;
}
private void microsCalled() {
if (this.microsSet) {
throw new IllegalStateException(
"Called twice for: " + ClockUnit.MICROS.name());
}
this.microsSet = true;
}
private void nanosCalled() {
if (this.nanosSet) {
throw new IllegalStateException(
"Called twice for: " + ClockUnit.NANOS.name());
}
this.nanosSet = true;
}
}
private static class TimestampNormalizer
implements Normalizer<IsoUnit> {
//~ Methoden ------------------------------------------------------
@Override
public PlainDuration<IsoUnit>
normalize(TimeSpan<? extends IsoUnit> timespan) {
int count = timespan.getTotalLength().size();
List<Item<IsoUnit>> items =
new ArrayList<Item<IsoUnit>>(count);
long years = 0, months = 0, weeks = 0, days = 0;
long hours = 0, minutes = 0, seconds = 0, nanos = 0;
for (int i = 0; i < count; i++) {
Item<? extends IsoUnit> item =
timespan.getTotalLength().get(i);
long amount = item.getAmount();
IsoUnit unit = item.getUnit();
if (unit instanceof CalendarUnit) {
switch ((CalendarUnit.class.cast(unit))) {
case MILLENNIA:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 1000),
years
);
break;
case CENTURIES:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 100),
years
);
break;
case DECADES:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 10),
years
);
break;
case YEARS:
years = MathUtils.safeAdd(amount, years);
break;
case QUARTERS:
months =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 3),
months
);
break;
case MONTHS:
months = MathUtils.safeAdd(amount, months);
break;
case WEEKS:
weeks = amount;
break;
case DAYS:
days = amount;
break;
default:
throw new UnsupportedOperationException(
unit.toString());
}
} else if (unit instanceof ClockUnit) {
switch ((ClockUnit.class.cast(unit))) {
case HOURS:
hours = amount;
break;
case MINUTES:
minutes = amount;
break;
case SECONDS:
seconds = amount;
break;
case MILLIS:
nanos =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, MIO),
nanos
);
break;
case MICROS:
nanos =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 1000L),
nanos
);
break;
case NANOS:
nanos = MathUtils.safeAdd(amount, nanos);
break;
default:
throw new UnsupportedOperationException(
unit.toString());
}
} else {
items.add(new Item<IsoUnit>(amount, unit));
}
}
long f = 0, s = 0, n = 0, h = 0;
if ((hours | minutes | seconds | nanos) != 0) {
f = nanos % MRD;
seconds = MathUtils.safeAdd(seconds, nanos / MRD);
s = seconds % 60;
minutes = MathUtils.safeAdd(minutes, seconds / 60);
n = minutes % 60;
hours = MathUtils.safeAdd(hours, minutes / 60);
h = hours % 24;
days = MathUtils.safeAdd(days, hours / 24);
}
if ((years | months | days) != 0) {
long y = MathUtils.safeAdd(years, months / 12);
long m = months % 12;
long d =
MathUtils.safeAdd(
MathUtils.safeMultiply(weeks, 7),
days
);
if (y != 0) {
items.add(new Item<IsoUnit>(y, CalendarUnit.YEARS));
}
if (m != 0) {
items.add(new Item<IsoUnit>(m, CalendarUnit.MONTHS));
}
if (d != 0) {
items.add(new Item<IsoUnit>(d, CalendarUnit.DAYS));
}
} else if (weeks != 0) {
items.add(new Item<IsoUnit>(weeks, CalendarUnit.WEEKS));
}
if (h != 0) {
items.add(new Item<IsoUnit>(h, ClockUnit.HOURS));
}
if (n != 0) {
items.add(new Item<IsoUnit>(n, ClockUnit.MINUTES));
}
if (s != 0) {
items.add(new Item<IsoUnit>(s, ClockUnit.SECONDS));
}
if (f != 0) {
items.add(new Item<IsoUnit>(f, ClockUnit.NANOS));
}
return new PlainDuration<IsoUnit>(
items,
timespan.isNegative(),
false
);
}
}
private static class DateNormalizer
implements Normalizer<CalendarUnit> {
//~ Methoden ------------------------------------------------------
@Override
public PlainDuration<CalendarUnit>
normalize(TimeSpan<? extends CalendarUnit> timespan) {
int count = timespan.getTotalLength().size();
long years = 0, months = 0, weeks = 0, days = 0;
for (int i = 0; i < count; i++) {
Item<? extends CalendarUnit> item =
timespan.getTotalLength().get(i);
long amount = item.getAmount();
CalendarUnit unit = item.getUnit();
switch (unit) {
case MILLENNIA:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 1000),
years
);
break;
case CENTURIES:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 100),
years
);
break;
case DECADES:
years =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 10),
years
);
break;
case YEARS:
years = MathUtils.safeAdd(amount, years);
break;
case QUARTERS:
months =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 3),
months
);
break;
case MONTHS:
months = MathUtils.safeAdd(amount, months);
break;
case WEEKS:
weeks = amount;
break;
case DAYS:
days = amount;
break;
default:
throw new UnsupportedOperationException(
unit.toString());
}
}
boolean negative = timespan.isNegative();
if ((years | months | days) != 0) {
long y = MathUtils.safeAdd(years, months / 12);
long m = months % 12;
long d =
MathUtils.safeAdd(
MathUtils.safeMultiply(weeks, 7),
days
);
return PlainDuration.ofCalendarUnits(y, m, d, negative);
} else if (weeks != 0) {
if (negative) {
weeks = MathUtils.safeNegate(weeks);
}
return PlainDuration.of(weeks, CalendarUnit.WEEKS);
}
return PlainDuration.of(0, CalendarUnit.DAYS);
}
}
private static class TimeNormalizer
implements Normalizer<ClockUnit> {
//~ Methoden ------------------------------------------------------
@Override
public PlainDuration<ClockUnit>
normalize(TimeSpan<? extends ClockUnit> timespan) {
int count = timespan.getTotalLength().size();
long hours = 0, minutes = 0, seconds = 0, nanos = 0;
for (int i = 0; i < count; i++) {
Item<? extends ClockUnit> item =
timespan.getTotalLength().get(i);
long amount = item.getAmount();
ClockUnit unit = item.getUnit();
switch (unit) {
case HOURS:
hours = amount;
break;
case MINUTES:
minutes = amount;
break;
case SECONDS:
seconds = amount;
break;
case MILLIS:
nanos =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, MIO),
nanos
);
break;
case MICROS:
nanos =
MathUtils.safeAdd(
MathUtils.safeMultiply(amount, 1000L),
nanos
);
break;
case NANOS:
nanos = MathUtils.safeAdd(amount, nanos);
break;
default:
throw new UnsupportedOperationException(unit.name());
}
}
long f = 0, s = 0, n = 0, h = 0;
if ((hours | minutes | seconds | nanos) != 0) {
f = nanos % MRD;
seconds = MathUtils.safeAdd(seconds, nanos / MRD);
s = seconds % 60;
minutes = MathUtils.safeAdd(minutes, seconds / 60);
n = minutes % 60;
hours = MathUtils.safeAdd(hours, minutes / 60);
h = hours;
}
return PlainDuration.ofClockUnits(
h,
n,
s,
f,
timespan.isNegative()
);
}
}
private static class Metric<U extends IsoUnit>
implements TimeMetric<U, PlainDuration<U>> {
//~ Instanzvariablen ----------------------------------------------
private final List<U> sortedUnits;
private final boolean calendrical;
private boolean normalizing;
//~ Konstruktoren -------------------------------------------------
private Metric(
boolean normalizing,
List<U> units
) {
super();
boolean c = true;
for (U unit : units) {
if (!unit.isCalendrical()) {
c = false;
break;
}
}
this.calendrical = c;
this.sortedUnits = Collections.unmodifiableList(units);
this.normalizing = normalizing;
}
//~ Methoden ------------------------------------------------------
@Override
public <T extends TimePoint<? super U, T>> PlainDuration<U> between(
T start,
T end
) {
if (end.equals(start)) {
return new PlainDuration<U>(this.calendrical);
}
T t1 = start;
T t2 = end;
boolean negative = false;
// Lage von Start und Ende bestimmen
if (t1.compareTo(t2) > 0) {
T temp = t1;
t1 = end;
t2 = temp;
negative = true;
}
List<TimeSpan.Item<U>> resultList =
new ArrayList<TimeSpan.Item<U>>(10);
TimeAxis<? super U, T> engine = start.getChronology();
U unit = null;
long amount = 0;
int index = 0;
int endIndex = this.sortedUnits.size();
while (index < endIndex) {
// Nächste Subtraktion vorbereiten
if (amount != 0) {
t1 = t1.plus(amount, unit);
}
// Aktuelle Zeiteinheit bestimmen
unit = resolve(this.sortedUnits.get(index));
if (
(this.getLength(engine, unit) < 1.0)
&& (index < endIndex - 1)
) {
amount = 0; // Millis oder Mikros vor Nanos nicht berechnen
} else {
// konvertierbare Einheiten zusammenfassen
int k = index + 1;
long factor = 1;
while (k < endIndex) {
U nextUnit = this.sortedUnits.get(k);
factor *= this.getFactor(engine, unit, nextUnit);
if (
!Double.isNaN(factor)
&& (factor < MIO)
&& engine.isConvertible(unit, nextUnit)
) {
unit = nextUnit;
} else {
break;
}
k++;
}
index = k - 1;
// Differenz in einer Einheit berechnen
amount = t1.until(t2, unit);
if (amount > 0) {
resultList.add(new TimeSpan.Item<U>(amount, unit));
} else if (amount < 0) {
throw new IllegalStateException(
"Implementation error: "
+ "Cannot compute timespan "
+ "due to illegal negative timespan amounts.");
}
}
index++;
}
if (this.normalizing) {
this.normalize(engine, this.sortedUnits, resultList);
}
return new PlainDuration<U>(resultList, negative, this.calendrical);
}
@SuppressWarnings("unchecked")
private static <U> U resolve(U unit) {
if (unit instanceof OverflowUnit) {
return (U) ((OverflowUnit) unit).getCalendarUnit();
}
return unit;
}
private <T extends TimePoint<? super U, T>> void normalize(
TimeAxis<? super U, T> engine,
List<U> sortedUnits,
List<TimeSpan.Item<U>> resultList
) {
for (int i = sortedUnits.size() - 1; i >= 0; i--) {
if (i > 0) {
U currentUnit = sortedUnits.get(i);
U nextUnit = sortedUnits.get(i - 1);
long factor = this.getFactor(engine, nextUnit, currentUnit);
if (
!Double.isNaN(factor)
&& (factor < MIO)
&& engine.isConvertible(nextUnit, currentUnit)
) {
TimeSpan.Item<U> currentItem =
getItem(resultList, currentUnit);
if (currentItem != null) {
long currentValue = currentItem.getAmount();
long overflow = currentValue / factor;
if (overflow > 0) {
long a = currentValue % factor;
if (a == 0) {
removeItem(resultList, currentUnit);
} else {
putItem(resultList, engine, a, currentUnit);
}
TimeSpan.Item<U> nextItem =
getItem(resultList, nextUnit);
if (nextItem == null) {
putItem(
resultList, engine, overflow, nextUnit);
} else {
putItem(
resultList,
engine,
MathUtils.safeAdd(
nextItem.getAmount(),
overflow),
nextUnit
);
}
}
}
}
}
}
}
private static <U> TimeSpan.Item<U> getItem(
List<TimeSpan.Item<U>> items,
U unit
) {
for (int i = 0, n = items.size(); i < n; i++) {
TimeSpan.Item<U> item = items.get(i);
if (item.getUnit().equals(unit)) {
return item;
}
}
return null;
}
private static <U> void putItem(
List<TimeSpan.Item<U>> items,
Comparator<? super U> comparator,
long amount,
U unit
) {
TimeSpan.Item<U> item = new TimeSpan.Item<U>(amount, unit);
int insert = 0;
for (int i = 0, n = items.size(); i < n; i++) {
U u = items.get(i).getUnit();
if (u.equals(unit)) {
items.set(i, item);
return;
} else if (
(insert == i)
&& (comparator.compare(u, unit) < 0)
) {
insert++;
}
}
items.add(insert, item);
}
private static <U> void removeItem(
List<TimeSpan.Item<U>> items,
U unit
) {
for (int i = 0, n = items.size(); i < n; i++) {
if (items.get(i).getUnit().equals(unit)) {
items.remove(i);
return;
}
}
}
private <T extends TimePoint<? super U, T>> long getFactor(
TimeAxis<? super U, T> engine,
U unit1,
U unit2
) {
double d1 = this.getLength(engine, unit1);
double d2 = this.getLength(engine, unit2);
return Math.round(d1 / d2);
}
private <T extends TimePoint<? super U, T>> double getLength(
TimeAxis<? super U, T> engine,
U unit
) {
return engine.getLength(unit);
}
}
private static class LengthComparator
<U extends IsoUnit, T extends TimePoint<? super U, T>>
implements Comparator<PlainDuration<U>> {
//~ Instanzvariablen ----------------------------------------------
private final T base;
//~ Konstruktoren -------------------------------------------------
private LengthComparator(T base) {
super();
if (base == null) {
throw new NullPointerException("Missing base time point.");
}
this.base = base;
}
//~ Methoden ------------------------------------------------------
@Override
public int compare(
PlainDuration<U> d1,
PlainDuration<U> d2
) {
boolean sign1 = d1.isNegative();
boolean sign2 = d2.isNegative();
if (sign1 && !sign2) {
return -1;
} else if (!sign1 && sign2) {
return 1;
} else if (d1.isEmpty() && d2.isEmpty()) {
return 0;
}
return this.base.plus(d1).compareTo(this.base.plus(d2));
}
}
}
| duration enhancements
a) fix for better error message in case of mixed signs
b) new earlier()- and later()-operators for standard time arithmetic on
Moment
| src/main/java/net/time4j/PlainDuration.java | duration enhancements |
|
Java | lgpl-2.1 | 9ac9f008b07e268564daa2898a524189edde0ef9 | 0 | ACS-Community/ACS,csrg-utfsm/acscb,jbarriosc/ACSUFRO,csrg-utfsm/acscb,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,ACS-Community/ACS,csrg-utfsm/acscb,jbarriosc/ACSUFRO,ACS-Community/ACS,ACS-Community/ACS,csrg-utfsm/acscb,csrg-utfsm/acscb,csrg-utfsm/acscb,ACS-Community/ACS,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,ACS-Community/ACS,csrg-utfsm/acscb,ACS-Community/ACS,csrg-utfsm/acscb,ACS-Community/ACS | /*
* ALMA - Atacama Large Millimiter Array
* (c) European Southern Observatory, 2002
* Copyright by ESO (in the framework of the ALMA collaboration),
* All rights reserved
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*/
package alma.acs.container;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Vector;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import org.omg.CosNaming.NamingContext;
import org.omg.CosNaming.NamingContextHelper;
import org.omg.PortableServer.POA;
import org.omg.PortableServer.Servant;
import com.cosylab.CDB.DAL;
import com.cosylab.CDB.DALHelper;
import si.ijs.maci.ComponentInfo;
import si.ijs.maci.ComponentSpec;
import alma.ACS.CBlong;
import alma.ACS.OffShoot;
import alma.ACS.OffShootHelper;
import alma.ACS.OffShootOperations;
import alma.ACSErrTypeCommon.wrappers.AcsJBadParameterEx;
import alma.JavaContainerError.wrappers.AcsJContainerEx;
import alma.JavaContainerError.wrappers.AcsJContainerServicesEx;
import alma.acs.callbacks.RequesterUtil;
import alma.acs.callbacks.ResponseReceiver;
import alma.acs.component.ComponentDescriptor;
import alma.acs.component.ComponentQueryDescriptor;
import alma.acs.component.ComponentStateManager;
import alma.acs.component.dynwrapper.ComponentInvocationHandler;
import alma.acs.component.dynwrapper.DynamicProxyFactory;
import alma.acs.container.archive.Range;
import alma.acs.container.archive.UIDLibrary;
import alma.acs.container.corba.AcsCorba;
import alma.acs.exceptions.AcsJException;
import alma.acs.logging.AcsLogLevel;
import alma.acs.logging.AcsLogger;
import alma.acs.logging.ClientLogManager;
import alma.acs.nc.AcsEventPublisher;
import alma.acs.nc.AcsEventSubscriber;
import alma.acsErrTypeAlarmSourceFactory.ACSASFactoryNotInitedEx;
import alma.acsErrTypeAlarmSourceFactory.FaultStateCreationErrorEx;
import alma.acsErrTypeAlarmSourceFactory.SourceCreationErrorEx;
import alma.alarmsystem.source.ACSAlarmSystemInterface;
import alma.alarmsystem.source.ACSAlarmSystemInterfaceFactory;
import alma.alarmsystem.source.ACSFaultState;
import alma.entities.commonentity.EntityT;
import alma.maciErrType.wrappers.AcsJComponentDeactivationFailedEx;
import alma.maciErrType.wrappers.AcsJComponentDeactivationUncleanEx;
import alma.maciErrType.wrappers.AcsJNoPermissionEx;
import alma.maciErrType.wrappers.AcsJmaciErrTypeEx;
import alma.xmlstore.Identifier;
import alma.xmlstore.IdentifierHelper;
import alma.xmlstore.IdentifierJ;
import alma.xmlstore.IdentifierOperations;
/**
* Implementation of the <code>ContainerServices</code> interface.
* To be used by ACS components, as well as any other clients that need access
* to components.
* <p>
* This class is "cheap" to instantiate because many resources it uses are singletons
* and or objects otherwise shared among instances.
* It should thus be ok to create one instance per component or other client.
* <p>
* This class has to be thread-safe, because a component's functional methods can be called from
* different ORB threads, or because the component could itself create threads, each of them
* accessing this object.
*
* @author hsommer Apr 1, 2003 2:28:01 PM
*/
public class ContainerServicesImpl implements ContainerServices
{
private AdvancedContainerServicesImpl advancedContainerServices;
// identifier archive and UID lib will created lazily
private volatile UIDLibrary uidLibrary;
private volatile IdentifierJ identifierArchive;
/** cheat property that allows testing without identifier archive present, because UIDs will be faked */
public static final String PROPERTYNAME_FAKE_UID_FOR_TESTING = "acs.container.fakeUIDsForTesting";
private final boolean fakeUIDsForTesting = Boolean.getBoolean(PROPERTYNAME_FAKE_UID_FOR_TESTING);
/**
* Holds and re-establishes the connection to the manager, and encapsulates the handle given by the manager at login.
*/
protected final AcsManagerProxy m_acsManagerProxy;
// logger used by this class
protected final AcsLogger m_logger;
// logger given to component
private volatile AcsLogger componentLogger;
// sync'd map, key=curl, value=corbaStub
private final Map<String, org.omg.CORBA.Object> m_usedComponentsMap;
private final Map<String, org.omg.CORBA.Object> m_usedNonStickyComponentsMap;
// sync'd map, key=curl, value=ComponentDescriptor
private final Map<String, ComponentDescriptor> m_componentDescriptorMap;
/**
* The handle that the manager has assigned to the component to whom this ContainerServices object belongs,
* or 0 if this ContainerServices object does not belong to a component,
* in which case m_acsManagerProxy's handle should be used.
*/
private final int m_componentHandle;
/**
* Name of the component or other client (client app or container etc)
*/
private final String m_clientName;
/**
* The externally provided instance of AcsCorba
*/
private final AcsCorba acsCorba;
private final POA m_clientPOA;
private Object m_componentXmlTranslatorProxy;
// sync'd map, key=offshot implementation object, value=servant
// (they can be the same, but help to keep track of the servants
// when activating offshoots of xyzJ type)
private Map<Object, Servant> m_activatedOffshootsMap;
private final ComponentStateManager m_componentStateManager;
private final ThreadFactory m_threadFactory;
private volatile String[] methodsExcludedFromInvocationLogging;
/**
* Optional callback object for component available/unavailable notification
*/
private ComponentListener compListener;
private final List<CleanUpCallback> cleanUpCallbacks;
private final Map<String, AcsEventSubscriber> m_subscribers;
private final Map<String, AcsEventPublisher> m_publishers;
final String CLASSNAME_NC_SUBSCRIBER = "alma.acs.nc.refactored.NCSubscriber";
final String CLASSNAME_NC_PUBLISHER = "alma.acs.nc.refactored.NCPublisher";
/**
* ctor.
* @param acsManagerProxy
* @param componentPOA the POA for the component. Can be the root POA or some other specialized POA.
* @param acsCorba Encapsulates the ORB and all POAs
* @param logger logger to be used by this class
* @param componentHandle handle to be used for identification when sending requests to the manager.
* For components, this should be the component handle assigned by the manager;
* for other clients, it should be 0 to indicate that the handle obtained at manager login should be used.
* @param clientCurl
* @param componentStateManager can be null if this class is instantiated
* for a component client outside of a container
* @param threadFactory to be used for <code>getThreadFactory</code>
*/
public ContainerServicesImpl(AcsManagerProxy acsManagerProxy, POA componentPOA, AcsCorba acsCorba,
AcsLogger logger, int componentHandle, String clientCurl,
ComponentStateManager componentStateManager,
ThreadFactory threadFactory)
{
// The following fields are final. This guarantees that they will be copied to main thread memory,
// and thus be seen by other threads after this ctor has terminated.
m_acsManagerProxy = acsManagerProxy;
m_clientPOA = componentPOA;
this.acsCorba = acsCorba;
m_logger = logger;
m_componentHandle = componentHandle;
m_clientName = clientCurl;
m_componentStateManager = componentStateManager;
// should do for thread-safety as long as we don't iterate over it
m_usedComponentsMap = Collections.synchronizedMap(new HashMap<String, org.omg.CORBA.Object>());
m_usedNonStickyComponentsMap = Collections.synchronizedMap(new HashMap<String, org.omg.CORBA.Object>());
m_componentDescriptorMap = Collections.synchronizedMap(new HashMap<String, ComponentDescriptor>());
m_activatedOffshootsMap = Collections.synchronizedMap(new HashMap<Object, Servant>());
m_subscribers = new HashMap<String, AcsEventSubscriber>();
m_publishers = new HashMap<String, AcsEventPublisher>();
m_threadFactory = threadFactory;
cleanUpCallbacks = new ArrayList<CleanUpCallback>();
if (fakeUIDsForTesting) {
m_logger.warning("Running in test mode where UIDs will be constructed randomly instead of being retrieved from the archive!");
}
}
void setComponentXmlTranslatorProxy(Object xmlTranslatorProxy) {
m_componentXmlTranslatorProxy = xmlTranslatorProxy;
}
/////////////////////////////////////////////////////////////
// Implementation of ContainerServices
/////////////////////////////////////////////////////////////
/**
* Gets the component name (which the component does not know statically)
* @see alma.acs.container.ContainerServices#getName()
*/
public String getName() {
return m_clientName;
}
/**
* {@inheritDoc}
*
* This method should only be called by a component that lives inside a container;
* a component client that is not a component itself should not call it,
* would result in a NPE!
*
* @see alma.acs.container.ContainerServices#getComponentStateManager()
*/
public ComponentStateManager getComponentStateManager()
{
if (m_componentStateManager == null)
{
// to make debugging easier if this ever happened on the container side
throw new NullPointerException("ComponentStateManager is null!");
}
return m_componentStateManager;
}
/**
* The component must retrieve its logger object from this interface
* (as opposed to using the <code>ClientLogManager</code> singleton)
* so that the container is free
* to give away loggers that are somehow tailored to the particular component.
* <p>
* The goal is to have "componentName" and other fields in all ALMA log entries,
* and have tool support for filtering logs by component, subsystem, user, ...
*
* @see alma.acs.container.ContainerServices#getLogger()
*/
public AcsLogger getLogger()
{
if (componentLogger == null) {
componentLogger = ClientLogManager.getAcsLogManager().getLoggerForComponent(m_clientName);
}
return componentLogger;
}
public void registerComponentListener(ComponentListener listener) {
compListener = listener;
}
/**
*/
public void fireComponentsAvailable (List<ComponentDescriptor> compDescs) {
if (compListener == null) {
return;
}
// find out which components are interesting for the client
List<ComponentDescriptor> interesting = null;
if (compListener.includeForeignComponents()) {
interesting = compDescs;
}
else {
interesting = new Vector<ComponentDescriptor>();
for (ComponentDescriptor cd : compDescs) {
if (m_usedComponentsMap.containsKey(cd.getName()) || m_usedNonStickyComponentsMap.containsKey(cd.getName())) {
interesting.add(cd);
}
}
}
if (interesting.size() > 0) {
try {
compListener.componentsAvailable(interesting);
} catch (Throwable thr) {
m_logger.log(Level.INFO, "componentsAvailable implementation of client " + m_clientName + " failed", thr);
}
}
}
/**
*/
public void fireComponentsUnavailable (List<String> compNames) {
if (compListener == null) {
return;
}
// find out which components are interesting for the client
List<String> interesting = null;
if (compListener.includeForeignComponents()) {
interesting = compNames;
}
else {
interesting = new Vector<String>();
for (String cn : compNames) {
if (m_usedComponentsMap.containsKey(cn) || m_usedNonStickyComponentsMap.containsKey(cn) ) {
interesting.add(cn);
}
}
}
if (interesting.size() > 0 && compListener != null) {
try {
compListener.componentsUnavailable(interesting);
} catch (Throwable thr) {
m_logger.log(Level.INFO, "componentsUnavailable implementation of client " + m_clientName + " failed", thr);
}
}
}
/**
* @see alma.acs.container.ContainerServices#assignUniqueEntityId(EntityT)
*/
public void assignUniqueEntityId(EntityT entity) throws AcsJContainerServicesEx
{
if (entity == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("entity");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
if (fakeUIDsForTesting) {
long localId = (new Random(System.currentTimeMillis())).nextLong();
String uid = Range.generateUID("testArchiveId", "testRangeId", localId);
entity.setEntityId(uid);
return;
}
try {
if (identifierArchive == null) {
Identifier identRaw = IdentifierHelper.narrow(getDefaultComponent("IDL:alma/xmlstore/Identifier:1.0"));
identifierArchive = getTransparentXmlWrapper(IdentifierJ.class, identRaw, IdentifierOperations.class);
}
if (uidLibrary == null) {
uidLibrary = new UIDLibrary(m_logger);
}
uidLibrary.assignUniqueEntityId(entity, identifierArchive);
}
catch (Throwable thr) {
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo("failed to assign a UID to entity of type " + entity.getEntityTypeName());
throw ex;
}
}
/**
* @see alma.acs.container.ContainerServices#findComponents(java.lang.String, java.lang.String)
*/
public String[] findComponents(String curlWildcard, String typeWildcard)
throws AcsJContainerServicesEx
{
if (curlWildcard == null) {
curlWildcard = "*";
}
if (typeWildcard == null) {
typeWildcard = "*";
}
String msgSpec = "curlWildcard='" + curlWildcard + "' and typeWildcard='" + typeWildcard + "'.";
if (m_logger.isLoggable(Level.FINER)) {
m_logger.finer("about to call Manager#get_component_info with " + msgSpec);
}
ComponentInfo[] components = null;
try {
components = m_acsManagerProxy.get_component_info(new int[0], curlWildcard, typeWildcard, false );
}
catch (AcsJNoPermissionEx ex) {
m_logger.log(Level.FINE, "No permission to find components with " + msgSpec, ex);
AcsJContainerServicesEx ex2 = new AcsJContainerServicesEx(ex);
ex2.setContextInfo(msgSpec);
throw ex2;
}
catch (Throwable thr) {
m_logger.log(Level.FINE, "Unexpected failure calling 'get_component_info' with " + msgSpec, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msgSpec);
throw ex;
}
ArrayList<String> curls = new ArrayList<String>();
if (components != null) {
for (int i = 0; i < components.length; i++) {
curls.add(components[i].name);
}
}
if (m_logger.isLoggable(Level.FINER)) {
m_logger.finer("received " + curls.size() + " curls from get_component_info.");
}
return curls.toArray(new String[curls.size()]);
}
/**
*
* @see alma.acs.container.ContainerServices#getComponentDescriptor(java.lang.String)
*/
public ComponentDescriptor getComponentDescriptor(String curl)
throws AcsJContainerServicesEx
{
if (curl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("curl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
ComponentDescriptor desc = m_componentDescriptorMap.get(curl);
if (desc == null) {
// try to get it from the manager
ComponentInfo[] compInfos;
try {
compInfos = m_acsManagerProxy.get_component_info(new int[0], curl, "*", false);
} catch (Throwable thr) {
m_logger.log(Level.FINE, "Unexpected failure calling 'get_component_info'.", thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo("CURL=" + curl);
throw ex;
}
if (compInfos.length == 1) {
desc = new ComponentDescriptor(compInfos[0]);
m_componentDescriptorMap.put(curl, desc);
}
else {
String msg = "failed to retrieve a unique component descriptor for the component instance " + curl;
m_logger.fine(msg);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw new AcsJContainerServicesEx();
}
}
return desc;
}
/**
* @see alma.acs.container.ContainerServices#getComponent(String)
*/
public org.omg.CORBA.Object getComponent(String curl) throws AcsJContainerServicesEx
{
if (curl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("curl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
// check if our component has requested the other component before
org.omg.CORBA.Object stub = m_usedComponentsMap.get(curl);
if (stub != null)
{
// reusing seems ok as long as there is one separate
// instance of ContainerServicesImpl for each component.
// This reuse does not cut off the manager from component access,
// since it only happens at second or further access attempts;
// the first time, the component reference is obtained through the manager
// (independently of whether the components are collocated inside the same container),
// so that the manager is aware of component dependencies.
m_logger.info("client '" + m_clientName + "' attempts to retrieve component '" +
curl + "' more than once; will return existing reference.");
}
else
{
m_logger.fine("will retrieve remote component '" + curl +
"' using ACS Manager#get_component with client handle " + getEffectiveClientHandle());
/// @todo: think about timeouts
try {
stub = m_acsManagerProxy.get_component(getEffectiveClientHandle(), curl, true);
m_logger.fine("component " + curl + " retrieved successfully.");
m_usedComponentsMap.put(curl, stub);
} catch (AcsJmaciErrTypeEx ex) {
String msg = "Failed to retrieve component " + curl;
m_logger.log(Level.FINE, msg, ex); // only a low-level log because the client component is supposed to log the exception which contains all context data
throw new AcsJContainerServicesEx(ex);
} catch (Throwable thr) {
String msg = "Failed to retrieve component " + curl + " for unexpected reasons.";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
}
return stub;
}
public org.omg.CORBA.Object getComponentNonSticky(String curl)
throws AcsJContainerServicesEx
{
if (curl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("curl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
org.omg.CORBA.Object stub = null;
try {
stub = m_acsManagerProxy.get_component_non_sticky(getEffectiveClientHandle(), curl);
m_logger.fine("Non-sticky reference to component '" + curl + "' retrieved successfully.");
m_usedNonStickyComponentsMap.put(curl, stub);
} catch (AcsJmaciErrTypeEx ex) {
String msg = "Failed to retrieve non-sticky reference to component " + curl;
m_logger.log(Level.FINE, msg, ex); // only a low-level log because the client component is supposed to log the exception which contains all context data
throw new AcsJContainerServicesEx(ex);
} catch (Throwable thr) {
String msg = "Failed to retrieve non-sticky reference to component '" + curl + "' for unexpected reasons.";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
return stub;
}
/**
* @see alma.acs.container.ContainerServices#getDefaultComponent(java.lang.String)
*/
public org.omg.CORBA.Object getDefaultComponent(String componentIDLType)
throws AcsJContainerServicesEx
{
if (componentIDLType == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("componentIDLType");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
ComponentInfo cInfo = null;
try
{
// the call
cInfo = m_acsManagerProxy.get_default_component(getEffectiveClientHandle(), componentIDLType);
}
catch (AcsJmaciErrTypeEx ex) {
String msg = "failed to retrieve default component for type " + componentIDLType;
m_logger.log(Level.FINE, msg, ex); // higher-level log should be produced by the calling client from the exception later
throw new AcsJContainerServicesEx(ex);
}
catch (Throwable thr) {
String msg = "failed to retrieve default component for type " + componentIDLType + " for unexpected reasons!";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
// cInfo.reference == null should no longer happen since the maci exception changes for ACS 6.0
// @todo check and remove this
if (cInfo.reference == null) {
String msg = "Default component for type '" + componentIDLType + "' could not be accessed. ";
m_logger.info(msg);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw ex;
}
m_usedComponentsMap.put(cInfo.name, cInfo.reference);
m_componentDescriptorMap.put(cInfo.name, new ComponentDescriptor(cInfo));
return cInfo.reference;
}
public org.omg.CORBA.Object getCollocatedComponent(String compUrl, String targetCompUrl) throws AcsJContainerServicesEx {
if (compUrl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("compUrl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
if (targetCompUrl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("targetCompUrl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
ComponentQueryDescriptor cqd = new ComponentQueryDescriptor(compUrl, null);
return getCollocatedComponent(cqd, false, targetCompUrl);
}
public org.omg.CORBA.Object getCollocatedComponent(ComponentQueryDescriptor spec, boolean markAsDefaul, String targetCompUrl) throws AcsJContainerServicesEx {
if (spec == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("ComponentQueryDescriptor");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
if (targetCompUrl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("targetCompUrl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
ComponentInfo cInfo = null;
try {
// the call
cInfo = m_acsManagerProxy.get_collocated_component(getEffectiveClientHandle(), spec.toComponentSpec(), false, targetCompUrl);
} catch (AcsJmaciErrTypeEx ex) {
String msg = "Failed to retrieve component '" + spec.getComponentName() + "' created such that it runs collocated with '"+ targetCompUrl + "'.";
m_logger.log(Level.FINE, msg, ex); // it's serious, but the caller is supposed to log this. Container only logs just in case.
throw new AcsJContainerServicesEx(ex);
}
catch (Throwable thr) {
String msg = "Unexpectedly failed to retrieve component '" + spec.getComponentName() + "' created such that it runs collocated with '"+ targetCompUrl + "'.";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
// cInfo.reference == null should no longer happen since the maci exception changes for ACS 6.0
// @todo check and remove this
if (cInfo.reference == null) {
String msg = "Failed to retrieve component '" + spec.getComponentName() + "' created such that it runs collocated with '"+ targetCompUrl + "'.";
m_logger.info(msg);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw ex;
}
m_usedComponentsMap.put(cInfo.name, cInfo.reference);
m_componentDescriptorMap.put(cInfo.name, new ComponentDescriptor(cInfo));
return cInfo.reference;
}
/**
* @see alma.acs.container.ContainerServices#getDynamicComponent(si.ijs.maci.ComponentSpec, boolean)
*/
public org.omg.CORBA.Object getDynamicComponent(ComponentQueryDescriptor compDesc, boolean markAsDefault)
throws AcsJContainerServicesEx
{
return getDynamicComponent(compDesc.toComponentSpec(), markAsDefault);
}
/**
* @see alma.acs.container.ContainerServices#getDynamicComponent(si.ijs.maci.ComponentSpec, boolean)
*/
public org.omg.CORBA.Object getDynamicComponent(ComponentSpec compSpec, boolean markAsDefault)
throws AcsJContainerServicesEx
{
String entryMsg = "getDynamicComponent called with" +
" compName=" + compSpec.component_name +
" compType=" + compSpec.component_type +
" compCode=" + compSpec.component_code +
" compContainer=" + compSpec.container_name +
" markAsDefault=" + markAsDefault;
m_logger.fine(entryMsg);
ComponentInfo cInfo = null;
try
{
// the call
cInfo = m_acsManagerProxy.get_dynamic_component(getEffectiveClientHandle(), compSpec, markAsDefault);
m_usedComponentsMap.put(cInfo.name, cInfo.reference);
m_componentDescriptorMap.put(cInfo.name, new ComponentDescriptor(cInfo));
} catch (AcsJmaciErrTypeEx ex) {
m_logger.log(Level.FINE, "Failed to create dynamic component", ex);
throw new AcsJContainerServicesEx(ex);
}
catch (Throwable thr) {
String msg = "Unexpectedly failed to create dynamic component for unexpected reasons!";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
return cInfo.reference;
}
public org.omg.CORBA.Object getReferenceWithCustomClientSideTimeout(org.omg.CORBA.Object originalCorbaRef, double timeoutSeconds)
throws AcsJContainerServicesEx {
return acsCorba.wrapForRoundtripTimeout(originalCorbaRef, timeoutSeconds);
}
/**
* @see alma.acs.container.ContainerServices#getCDB()
*/
public DAL getCDB() throws AcsJContainerServicesEx
{
DAL dal = null;
String errMsg = "Failed to get the reference to the CDB component/service.";
try
{
// manager's get_service contains get_component, so even if the CDB becomes a real component, we can leave this
org.omg.CORBA.Object dalObj = m_acsManagerProxy.get_service("CDB", true);
dal = DALHelper.narrow(dalObj);
} catch (AcsJmaciErrTypeEx ex) {
m_logger.log(Level.FINE, errMsg, ex);
throw new AcsJContainerServicesEx(ex);
}
catch (Throwable thr) {
String msg = "Unexpectedly failed to get the CDB reference!";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
return dal;
}
/**
* Releases the specified component reference. This involves notification of the manager,
* as well as calling <code>_release()</code> on the CORBA stub.
* If the curl is not known to the container, the request will be ignored.
* <p>
* Note that <i>references</i> to other components are released by this method,
* where the components hosted inside this container act as clients.
* These referenced components may run inside this or some other container/container.
*
* @see alma.acs.container.ContainerServices#releaseComponent(java.lang.String)
*/
public void releaseComponent(String curl) {
ComponentReleaseCallback callback = new ComponentReleaseCallback();
releaseComponent(curl, callback);
try {
callback.awaitComponentRelease(60, TimeUnit.SECONDS);
} catch (InterruptedException ex) {
}
}
/**
* Used by {@link ContainerServicesImpl#releaseComponent(String, alma.acs.container.ContainerServices.ComponentReleaseCallback)}
* to wrap the user-supplied <code>ComponentReleaseCallback</code> for usage over Corba and for cleaner exception dispatching.
*/
private class ComponentReleaseCallbackCorbaHandler extends ResponseReceiver<Integer> {
private final ComponentReleaseCallback delegate;
ComponentReleaseCallbackCorbaHandler(ComponentReleaseCallback delegate) {
this.delegate = delegate;
}
@Override
public void incomingException(AcsJException ex) {
try {
if (ex instanceof AcsJComponentDeactivationUncleanEx) {
delegate.componentReleased((AcsJComponentDeactivationUncleanEx)ex);
}
else if (ex instanceof AcsJComponentDeactivationFailedEx) {
delegate.errorComponentReleaseFailed((AcsJComponentDeactivationFailedEx)ex);
}
else {
m_logger.log(Level.WARNING, "Received unexpected exception from manager#release_component_async, please report to ACS developers.", ex);
delegate.errorCommunicationFailure(ex); // strictly speaking the wrong method, but better than nothing.
}
}
catch (RuntimeException handlerEx) {
m_logger.log(Level.FINE, "User-supplied handler threw an exception.", handlerEx);
}
finally {
delegate.callOver();
}
}
@Override
public void incomingResponse(Integer numberRemainingClients) {
// we do not expose numberRemainingClients in the CS API
try {
delegate.componentReleased(null);
}
catch (RuntimeException handlerEx) {
m_logger.log(Level.FINE, "User-supplied handler threw an exception.", handlerEx);
}
finally {
delegate.callOver();
}
}
}
@Override
public void releaseComponent(String curl, ComponentReleaseCallback callback) {
// we keep the "forceful" release option as a switch in the code.
// It was taken out for ACS 7.0, but may come back in the future.
final boolean forcibly = false;
if (curl == null) {
m_logger.info("Invalid curl 'null', nothing to release.");
return;
}
org.omg.CORBA.Object stub = null;
// This use of synchronized makes the code thread safe without locking across the remote call to manager#release_component etc
synchronized (m_usedComponentsMap) {
if (!m_usedComponentsMap.containsKey(curl))
{
if (m_usedNonStickyComponentsMap.containsKey(curl)) {
m_logger.info("ignoring request by client '" + m_clientName +
"' to release component '" + curl + "' because the reference is non-sticky and does not need to be released.");
}
else {
m_logger.info("ignoring request by client '" + m_clientName +
"' to release other component with unknown curl='" + curl + "'.");
}
return;
}
// the CURL is in the map and gets removed now
stub = m_usedComponentsMap.get(curl);
m_usedComponentsMap.remove(curl);
}
m_logger.fine("about to release component " + curl + (forcibly ? " forcibly" : ""));
try {
if (forcibly) {
m_acsManagerProxy.force_release_component(getEffectiveClientHandle(), curl);
}
else {
CBlong myCBlong = null;
if (callback != null) {
ComponentReleaseCallbackCorbaHandler callbackCorba = new ComponentReleaseCallbackCorbaHandler(callback);
myCBlong = RequesterUtil.giveCBLong(this, callbackCorba);
}
m_acsManagerProxy.release_component(getEffectiveClientHandle(), curl, myCBlong);
}
m_logger.info("client '" + m_clientName + "' has successfully released " + " a component with curl=" + curl);
stub._release();
}
catch (AcsJNoPermissionEx ex) {
AcsLogLevel level = ( callback == null ? AcsLogLevel.WARNING : AcsLogLevel.DEBUG );
m_logger.log(level, "client '" + m_clientName + "' (handle " + getEffectiveClientHandle() + ") cannot release " +
" with the manager the component with curl=" + curl, ex);
if (callback != null) {
callback.errorNoPermission(ex.getReason());
}
}
catch (Throwable thr) { // any org.omg.CORBA.SystemException, or whatever else can happen
AcsLogLevel level = ( callback == null ? AcsLogLevel.WARNING : AcsLogLevel.DEBUG );
m_logger.log(level, "client '" + m_clientName + "' (handle " + getEffectiveClientHandle() + ") failed to release " +
" with the manager the component with curl=" + curl, thr);
if (callback != null) {
callback.errorCommunicationFailure(thr);
}
}
}
/**
* @see alma.acs.container.ContainerServices#activateOffShoot(org.omg.PortableServer.Servant)
*/
@Override
public <T extends Servant & OffShootOperations> OffShoot activateOffShoot(T servant)
throws AcsJContainerServicesEx
{
return activateOffShoot(servant, null);
}
/**
* @see alma.acs.container.ContainerServices#activateOffShoot(org.omg.PortableServer.Servant)
*/
@Override
public <T extends OffShootOperations> OffShoot activateOffShoot(T offshootImpl, Class<T> idlOpInterface)
throws AcsJContainerServicesEx
{
Servant servant = null;
boolean isTie = false;
boolean haveToInject = false;
// Checks
checkOffShoot(offshootImpl);
// If we receive an object that is not a servant it means that it requires XML automatic bindings.
// We create the corresponding POATie object, the dynamic proxy binder,
// and set the offshoot implementation as the final delegate
if( !(offshootImpl instanceof Servant) ) {
if( idlOpInterface == null )
throw new AcsJContainerServicesEx(new NullPointerException("Received null idlOpInterface when asking to activate XML offshoot"));
if( !idlOpInterface.isAssignableFrom(offshootImpl.getClass()) ) {
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo("Received OffShoot of type '" + offshootImpl.getClass().getName() +
"' does not inherits from '" + idlOpInterface.getName() + "'");
throw ex;
}
// Guess the name of the xyzPOATie class, build it, and delegate
String poaTieClassName = null;
try {
m_logger.fine("Creating POATie servant for offshoot '" + offshootImpl.getClass().getName() + "'");
// Get the POATie class and the expected xyzOperations interface
String baseClassName = idlOpInterface.getName().substring(0, idlOpInterface.getName().length()-1);
poaTieClassName = baseClassName + "POATie";
Class<?> poaTieClazz = Class.forName( poaTieClassName );
Method implGetter = poaTieClazz.getMethod("_delegate", (Class[]) null);
Class<?> operationsIF = implGetter.getReturnType();
// Create the dynamic XML entities wrapper
Object proxy = DynamicProxyFactory.getDynamicProxyFactory(m_logger)
.createServerProxy(operationsIF, offshootImpl, idlOpInterface);
// Create the POATie object, give it the proxy, and set it as our servant
Constructor<?> c = poaTieClazz.getConstructor(new Class[]{operationsIF});
servant = (Servant)c.newInstance(proxy);
if( m_componentXmlTranslatorProxy != null )
haveToInject = true;
} catch (ClassNotFoundException e) {
String msg = "Failed to create servant for offshoot " + offshootImpl.getClass().getName() + ": class '" + poaTieClassName + "' cannot be found";
m_logger.log(AcsLogLevel.ERROR, msg, e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw ex;
} catch(Exception e) {
throw new AcsJContainerServicesEx(e);
}
}
else {
m_logger.fine("Don't need to create servant for offshoot '" + offshootImpl.getClass().getName() + "'");
servant = (Servant)offshootImpl;
}
// check if the servant is the Tie variant, which allows proxy-based call interception by the container
String servantName = servant.getClass().getName();
if (servantName.endsWith("POATie")) {
try {
// the _delegate getter method is mandated by the IDL-to-Java mapping spec
Method implGetter = servant.getClass().getMethod("_delegate", (Class[]) null);
isTie = true;
Class<?> operationsIF = implGetter.getReturnType();
java.lang.Object offshootTiedImpl = implGetter.invoke(servant, (java.lang.Object[]) null);
// now we insert the interceptor between the tie skeleton and the impl.
// Offshoots have no name, so we construct one from the component name and the offshoot interface name
//
String qualOffshootName = getName() + "/" + operationsIF.getName().substring(0, operationsIF.getName().length() - "Operations".length());
java.lang.Object interceptingOffshootImpl = ContainerSealant.createContainerSealant(
operationsIF, offshootTiedImpl, qualOffshootName, true, m_logger,
Thread.currentThread().getContextClassLoader(), methodsExcludedFromInvocationLogging);
Method implSetter = servant.getClass().getMethod("_delegate", new Class[]{operationsIF});
implSetter.invoke(servant, new java.lang.Object[]{interceptingOffshootImpl});
m_logger.fine("created sealant for offshoot " + qualOffshootName);
} catch (NoSuchMethodException e) {
// so this was not a Tie skeleton, even though its name ends misleadingly with "POATie"
} catch (Exception e) {
m_logger.log(Level.WARNING, "Failed to create interceptor for offshoot " + servantName, e);
}
}
if (!isTie) {
// TODO: perhaps require tie offshoots with ACS 5.0, and enable this warning log
// m_logger.warning("Offshoot servant '" + servantName + "' from component '" + getName() +
// "' does not follow the tie approach. Calls can thus not be intercepted by the container.");
}
OffShoot shoot = null;
try {
org.omg.CORBA.Object obj = acsCorba.activateOffShoot(servant, m_clientPOA);
m_activatedOffshootsMap.put(offshootImpl, servant);
shoot = OffShootHelper.narrow(obj);
}
catch (Throwable thr) {
String msg = "failed to activate offshoot object of type '" + servant.getClass().getName() +
"' for client '" + m_clientName + "'. ";
// flatten the exception chain by one level if possible
if (thr instanceof AcsJContainerServicesEx && thr.getCause() != null) {
msg += "(" + thr.getMessage() + ")";
thr = thr.getCause();
}
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
throw ex;
}
// finally, put the CORBA-object/implementation into the component's proxy invocation handler,
// so when requesting an offshoot into the component, we return the corresponding CORBA object
if( haveToInject ) {
m_logger.fine("Injecting offshoot '" + offshootImpl.getClass().getName() + "' to '" + m_clientName + "' component XML binder");
ComponentInvocationHandler handler = (ComponentInvocationHandler)Proxy.getInvocationHandler(m_componentXmlTranslatorProxy);
handler.addOffshoot(offshootImpl, shoot);
}
m_logger.fine("successfully activated offshoot of type " + offshootImpl.getClass().getName());
return shoot;
}
@Override
public void deactivateOffShoot(Object offshootImpl)
throws AcsJContainerServicesEx
{
checkOffShoot(offshootImpl);
try {
acsCorba.deactivateOffShoot(m_activatedOffshootsMap.get(offshootImpl), m_clientPOA);
m_activatedOffshootsMap.remove(offshootImpl);
m_logger.fine("successfully deactivated offshoot of type " + offshootImpl.getClass().getName());
} catch (AcsJContainerEx e) {
throw new AcsJContainerServicesEx(e);
}
}
/**
* @param cbServant
* @throws ContainerException
*/
private void checkOffShoot(Object servant) throws AcsJContainerServicesEx {
if (servant == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("servant");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
if (!(servant instanceof OffShootOperations)) {
String msg = "invalid offshoot servant provided. Must implement " + OffShootOperations.class.getName();
m_logger.fine(msg);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw ex;
}
}
/**
* @see alma.acs.container.ContainerServices#getAdvancedContainerServices()
*/
public synchronized AdvancedContainerServices getAdvancedContainerServices() {
if (advancedContainerServices == null) {
advancedContainerServices = new AdvancedContainerServicesImpl(this, m_logger);
// todo: once the legitimate cases of calling this method are settled, remove the log message.
m_logger.info("component '" + getName() + "' requested AdvancedContainerServices");
}
return advancedContainerServices;
}
/**
* {@inheritDoc}.
* <p>
* TODO: implement shortcutting of xml (de-)serialization for collocated component or offshoot:
* ask AcsContainer if it knows componentReference, and if it has transpXml-IF;
* if so, get component impl directly;
* check if respective component helper allows direct calls to transpXmlIF
* (by not implementing _getInterfaceTranslator, or some explicit flag);
* move intercepting layer (ContainerSealant) so that it's still in between the components.
*
* @see alma.acs.container.ContainerServices#getTransparentXmlComponent(java.lang.Class, org.omg.CORBA.Object, java.lang.Class)
*/
public <T, F> T getTransparentXmlWrapper(Class<T> transparentXmlIF, F flatXmlObject, Class<F> flatXmlIF)
throws AcsJContainerServicesEx
{
if (m_logger.isLoggable(Level.FINEST)) {
m_logger.finest("creating xml binding class aware wrapper around remote object " +
"implementing " + flatXmlIF.getName() + "...");
}
T wrapper = null;
try
{
wrapper = DynamicProxyFactory.getDynamicProxyFactory(m_logger).createClientProxy(
transparentXmlIF,
flatXmlObject,
flatXmlIF);
}
catch (Throwable thr)
{
String msg = "failed to create XML binding class wrapper for remote object implementing " + flatXmlIF.getName();
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex2 = new AcsJContainerServicesEx(thr);
ex2.setContextInfo(msg);
throw ex2;
}
return wrapper;
}
/////////////////////////////////////////////////////////////
// other
/////////////////////////////////////////////////////////////
public void releaseAllComponents()
{
// copy curls first to avoid deleting from m_usedComponentsMap
// while iterating over it (fail-fast iterator throws ConcurrentModificationException).
// synchronized just in case...
List<String> curls = new ArrayList<String>();
synchronized (m_usedComponentsMap) {
curls.addAll(m_usedComponentsMap.keySet());
}
for (String curl : curls ) {
releaseComponent(curl);
}
}
/**
* Gets the handle to be used toward the manager, which is
* <ul>
* <li> The handle obtained from the manager at login for normal clients
* <li> The component handle assigned by the manager at component activation time,
* if this ContainerServices instance is used for a component
* </ul>
* We don't cache the handle from acsManagerProxy because it may change after a re-login,
* and then we get errors if the stale handle would be used.
* @return The correct handle to be used to identify this client to the manager.
*/
private int getEffectiveClientHandle() {
return (m_componentHandle > 0 ? m_componentHandle : m_acsManagerProxy.getManagerHandle());
}
/**
* @see alma.acs.container.ContainerServices#getThreadFactory()
*/
public ThreadFactory getThreadFactory() {
return m_threadFactory;
}
AcsCorba getAcsCorba() {
return acsCorba;
}
/**
* With this optional call, automatic invocation logging for certain offshoot methods can be disabled.
* @param methodsExcludedFromInvocationLogging
* @see ComponentHelper#getComponentMethodsExcludedFromInvocationLogging()
*/
void setMethodsExcludedFromInvocationLogging(String[] methodsExcludedFromInvocationLogging) {
this.methodsExcludedFromInvocationLogging = methodsExcludedFromInvocationLogging;
}
/**
* Cleans up all the resources that need to be closed, like closing opened notification channels
*
* @since ACS 8.1.0
*/
public void cleanUp() {
/* Cleanup through externally registered callbacks */
for (CleanUpCallback cleanUpCallback : cleanUpCallbacks) {
try {
cleanUpCallback.containerServicesCleanUp();
}
catch (Throwable thr) {
m_logger.log(Level.WARNING, "Failed to clean up registered client object", thr);
}
}
/* Disconnect NC subscribers */
for(String channel: m_subscribers.keySet()) {
AcsEventSubscriber subscriber = m_subscribers.get(channel);
try {
subscriber.disconnect();
String tmp[] = channel.split("/");
m_logger.log(AcsLogLevel.NOTICE, "Automatically disconnected subscriber for NC '" + tmp[tmp.length - 1] + "'");
} catch (IllegalStateException e) {
// Silently ignore this exception, as the subscriber was already disconnected. Well done, developers! :)
}
}
/* Disconnect NC publishers */
for(String channel: m_publishers.keySet()) {
AcsEventPublisher subscriber = m_publishers.get(channel);
try {
subscriber.disconnect();
String tmp[] = channel.split("/");
m_logger.log(AcsLogLevel.NOTICE, "Automatically disconnected publisher for NC '" + tmp[tmp.length - 1] + "'");
} catch (IllegalStateException e) {
// Silently ignore this exception, as the subscriber was already disconnected. Well done, developers! :)
}
}
}
/**
* A hack, see {@link ContainerServicesImpl#registerCleanUpCallback(CleanUpCallback)}.
*/
public static interface CleanUpCallback {
public void containerServicesCleanUp();
}
/**
* This is a hack: NC classes can register themselves to be notified,
* in order to release remote Corba resources (and prevent crashes of Notify Service...).
* Note that without this hack, the lifecycle of NC classes is only managed by the application code,
* which means that ACS could not enforce the clean up.
* <p>
* @TODO remove this once the NC classes are properly integrated into container services
* @param cb
* @since ACS 8.1.0
*/
public void registerCleanUpCallback(ContainerServicesImpl.CleanUpCallback cb) {
cleanUpCallbacks.add(cb);
}
private NamingContext getNameService() throws AcsJContainerServicesEx {
NamingContext nameService = null;
try
{
org.omg.CORBA.Object nameServiceObj = m_acsManagerProxy.get_service("NameService", true);
nameService = NamingContextHelper.narrow(nameServiceObj);
} catch (AcsJmaciErrTypeEx ex) {
m_logger.log(Level.FINE, "Failed to get the reference to the NameService service", ex);
throw new AcsJContainerServicesEx(ex);
} catch (Throwable thr) {
String msg = "Unexpectedly failed to get the NameService reference!";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
return nameService;
}
/**
* @see alma.acs.container.ContainerServices#createNotificationChannelSubscriber(String)
*/
public AcsEventSubscriber createNotificationChannelSubscriber(String channelName) throws AcsJContainerServicesEx {
return createNotificationChannelSubscriber(channelName, null); //TODO (rtobar): Is this fine? I'm only 99% sure
}
/**
* @see alma.acs.container.ContainerServices#createNotificationChannelSubscriber(String, String)
*/
public AcsEventSubscriber createNotificationChannelSubscriber(String channelName, String channelNotifyServiceDomainName) throws AcsJContainerServicesEx {
AcsEventSubscriber subscriber = null;
try {
Object[] args = new Object[]{
channelName,
channelNotifyServiceDomainName,
this,
getNameService(),
m_clientName
};
Class<?> clazz = Class.forName(CLASSNAME_NC_SUBSCRIBER);
Constructor<?> constructor = clazz.getConstructor(String.class, String.class, ContainerServicesBase.class, NamingContext.class, String.class);
subscriber = (AcsEventSubscriber)constructor.newInstance(args);
} catch(ClassNotFoundException e) {
// TODO: maybe we could prevent future NCSubscriber creation tries, since the class isn't and will not be loaded
// The same applies for the next "catch" block
m_logger.log(AcsLogLevel.ERROR, "Cannot create NC subscriber because the 'NCSubscriber' class is not present in the classpath", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
ex.setContextInfo("'" + CLASSNAME_NC_SUBSCRIBER + "' class not present in the classpath");
throw ex;
} catch(ClassCastException e) {
m_logger.log(AcsLogLevel.ERROR, "Cannot create NC subscriber because loaded class is not of type 'AcsEventSubscriber", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
ex.setContextInfo("'" + CLASSNAME_NC_SUBSCRIBER + "' class does not extend 'AcsEventSubscriber'");
throw ex;
} catch(Throwable e) {
m_logger.log(AcsLogLevel.ERROR, "Unexpected error while creating new AcsEventSubscriber object", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
throw ex;
}
m_subscribers.put( (channelNotifyServiceDomainName == null ? "" : channelNotifyServiceDomainName) + "/" + channelName, subscriber);
return subscriber;
}
/**
* @see ContainerServices#createNotificationChannelPublisher(String)
*/
public AcsEventPublisher createNotificationChannelPublisher(String channelName) throws AcsJContainerServicesEx {
return createNotificationChannelPublisher(channelName, null); // TODO (rtobar): only 99% sure that this is right
}
/**
* @see ContainerServices#createNotificationChannelPublisher(String, String)
*/
public AcsEventPublisher createNotificationChannelPublisher(String channelName, String channelNotifyServiceDomainName) throws AcsJContainerServicesEx {
AcsEventPublisher publisher = null;
try {
Object[] args = new Object[]{
channelName,
channelNotifyServiceDomainName,
this
};
Class<?> clazz = Class.forName(CLASSNAME_NC_PUBLISHER);
Constructor<?> constructor = clazz.getConstructor(String.class, String.class, ContainerServicesBase.class);
publisher = (AcsEventPublisher)constructor.newInstance(args);
} catch(ClassNotFoundException e) {
// TODO: maybe we could prevent future NCPublisher creation tries, since the class isn't and will not be loaded
// The same applies for the next "catch" block
m_logger.log(AcsLogLevel.ERROR, "Cannot create NC publisher because the 'NCPublisher' class is not present in the classpath", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
ex.setContextInfo("'" + CLASSNAME_NC_PUBLISHER + "' class not present in the classpath");
throw ex;
} catch(ClassCastException e) {
m_logger.log(AcsLogLevel.ERROR, "Cannot create NC publisher because loaded class is not of type 'AcsEventPublisher", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
ex.setContextInfo("'" + CLASSNAME_NC_PUBLISHER + "' class does not extend 'AcsEventPublisher'");
throw ex;
} catch(Throwable e) {
m_logger.log(AcsLogLevel.ERROR, "Unexpected error while creating new AcsEventPublisher object", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
throw ex;
}
m_publishers.put( (channelNotifyServiceDomainName == null ? "" : channelNotifyServiceDomainName) + "/" + channelName, publisher);
return publisher;
}
private void submitAlarm(String faultFamily, String faultMember, int faultCode, boolean raise) throws AcsJContainerServicesEx {
try {
ACSAlarmSystemInterface source = null;
ACSFaultState faultState = null;
try {
// TODO: Store a map of the created sources if the AS supports more than one source in the future
// Also, don't have a default hardcoded source name here
source = ACSAlarmSystemInterfaceFactory.createSource("ALARM_SYSTEM_SOURCES");
} catch (SourceCreationErrorEx e) {
throw new AcsJContainerServicesEx(e);
}
try {
faultState = ACSAlarmSystemInterfaceFactory.createFaultState(faultFamily, faultMember, faultCode);
} catch (FaultStateCreationErrorEx e) {
e.printStackTrace();
// @TODO
// } catch (ACSASFactoryNotInitedEx ex) {
// log.severe("Alarm with FF=" + faultFamily + " FM=" + faultMember + " FC=" + faultCode
// + " could not be thrown. Message=" + ex.getMessage());
// } catch (SourceCreationErrorEx ex) {
// log.severe("Alarm with FF=" + faultFamily + " FM=" + faultMember + " FC=" + faultCode
// + " could not be thrown. Message=" + ex.getMessage());
// } catch (FaultStateCreationErrorEx ex) {
// log.severe("Alarm with FF=" + faultFamily + " FM=" + faultMember + " FC=" + faultCode
// + " could not be thrown. Message=" + ex.getMessage());
// }
}
if( raise )
faultState.setDescriptor(ACSFaultState.ACTIVE);
else
faultState.setDescriptor(ACSFaultState.TERMINATE);
faultState.setUserTimestamp(new Timestamp(System.currentTimeMillis()));
source.push(faultState);
} catch (ACSASFactoryNotInitedEx e) {
throw new AcsJContainerServicesEx(e);
}
}
/**
* @see ContainerServices#raiseAlarm(String, String, int)
*/
public void raiseAlarm(String faultFamily, String faultMember, int faultCode) throws AcsJContainerServicesEx {
submitAlarm(faultFamily, faultMember, faultCode, true);
}
/**
* @see ContainerServices#clearAlarm(String, String, int)
*/
public void clearAlarm(String faultFamily, String faultMember, int faultCode) throws AcsJContainerServicesEx {
submitAlarm(faultFamily, faultMember, faultCode, false);
}
} | LGPL/CommonSoftware/jcont/src/alma/acs/container/ContainerServicesImpl.java | /*
* ALMA - Atacama Large Millimiter Array
* (c) European Southern Observatory, 2002
* Copyright by ESO (in the framework of the ALMA collaboration),
* All rights reserved
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*/
package alma.acs.container;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Vector;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import org.omg.CosNaming.NamingContext;
import org.omg.CosNaming.NamingContextHelper;
import org.omg.PortableServer.POA;
import org.omg.PortableServer.Servant;
import com.cosylab.CDB.DAL;
import com.cosylab.CDB.DALHelper;
import si.ijs.maci.ComponentInfo;
import si.ijs.maci.ComponentSpec;
import alma.ACS.CBlong;
import alma.ACS.OffShoot;
import alma.ACS.OffShootHelper;
import alma.ACS.OffShootOperations;
import alma.ACSErrTypeCommon.wrappers.AcsJBadParameterEx;
import alma.JavaContainerError.wrappers.AcsJContainerEx;
import alma.JavaContainerError.wrappers.AcsJContainerServicesEx;
import alma.acs.callbacks.RequesterUtil;
import alma.acs.callbacks.ResponseReceiver;
import alma.acs.component.ComponentDescriptor;
import alma.acs.component.ComponentQueryDescriptor;
import alma.acs.component.ComponentStateManager;
import alma.acs.component.dynwrapper.ComponentInvocationHandler;
import alma.acs.component.dynwrapper.DynamicProxyFactory;
import alma.acs.container.archive.Range;
import alma.acs.container.archive.UIDLibrary;
import alma.acs.container.corba.AcsCorba;
import alma.acs.exceptions.AcsJException;
import alma.acs.logging.AcsLogLevel;
import alma.acs.logging.AcsLogger;
import alma.acs.logging.ClientLogManager;
import alma.acs.nc.AcsEventPublisher;
import alma.acs.nc.AcsEventSubscriber;
import alma.acsErrTypeAlarmSourceFactory.ACSASFactoryNotInitedEx;
import alma.acsErrTypeAlarmSourceFactory.FaultStateCreationErrorEx;
import alma.acsErrTypeAlarmSourceFactory.SourceCreationErrorEx;
import alma.alarmsystem.source.ACSAlarmSystemInterface;
import alma.alarmsystem.source.ACSAlarmSystemInterfaceFactory;
import alma.alarmsystem.source.ACSFaultState;
import alma.entities.commonentity.EntityT;
import alma.maciErrType.wrappers.AcsJComponentDeactivationFailedEx;
import alma.maciErrType.wrappers.AcsJComponentDeactivationUncleanEx;
import alma.maciErrType.wrappers.AcsJNoPermissionEx;
import alma.maciErrType.wrappers.AcsJmaciErrTypeEx;
import alma.xmlstore.Identifier;
import alma.xmlstore.IdentifierHelper;
import alma.xmlstore.IdentifierJ;
import alma.xmlstore.IdentifierOperations;
/**
* Implementation of the <code>ContainerServices</code> interface.
* To be used by ACS components, as well as any other clients that need access
* to components.
* <p>
* This class is "cheap" to instantiate because many resources it uses are singletons
* and or objects otherwise shared among instances.
* It should thus be ok to create one instance per component or other client.
* <p>
* This class has to be thread-safe, because a component's functional methods can be called from
* different ORB threads, or because the component could itself create threads, each of them
* accessing this object.
*
* @author hsommer Apr 1, 2003 2:28:01 PM
*/
public class ContainerServicesImpl implements ContainerServices
{
private AdvancedContainerServicesImpl advancedContainerServices;
// identifier archive and UID lib will created lazily
private volatile UIDLibrary uidLibrary;
private volatile IdentifierJ identifierArchive;
/** cheat property that allows testing without identifier archive present, because UIDs will be faked */
public static final String PROPERTYNAME_FAKE_UID_FOR_TESTING = "acs.container.fakeUIDsForTesting";
private final boolean fakeUIDsForTesting = Boolean.getBoolean(PROPERTYNAME_FAKE_UID_FOR_TESTING);
/**
* Holds and re-establishes the connection to the manager, and encapsulates the handle given by the manager at login.
*/
protected final AcsManagerProxy m_acsManagerProxy;
// logger used by this class
protected final AcsLogger m_logger;
// logger given to component
private volatile AcsLogger componentLogger;
// sync'd map, key=curl, value=corbaStub
private final Map<String, org.omg.CORBA.Object> m_usedComponentsMap;
private final Map<String, org.omg.CORBA.Object> m_usedNonStickyComponentsMap;
// sync'd map, key=curl, value=ComponentDescriptor
private final Map<String, ComponentDescriptor> m_componentDescriptorMap;
/**
* The handle that the manager has assigned to the component to whom this ContainerServices object belongs,
* or 0 if this ContainerServices object does not belong to a component,
* in which case m_acsManagerProxy's handle should be used.
*/
private final int m_componentHandle;
/**
* Name of the component or other client (client app or container etc)
*/
private final String m_clientName;
/**
* The externally provided instance of AcsCorba
*/
private final AcsCorba acsCorba;
private final POA m_clientPOA;
private Object m_componentXmlTranslatorProxy;
// sync'd map, key=offshot implementation object, value=servant
// (they can be the same, but help to keep track of the servants
// when activating offshoots of xyzJ type)
private Map<Object, Servant> m_activatedOffshootsMap;
private final ComponentStateManager m_componentStateManager;
private final ThreadFactory m_threadFactory;
private volatile String[] methodsExcludedFromInvocationLogging;
/**
* Optional callback object for component available/unavailable notification
*/
private ComponentListener compListener;
private final List<CleanUpCallback> cleanUpCallbacks;
private final Map<String, AcsEventSubscriber> m_subscribers;
private final Map<String, AcsEventPublisher> m_publishers;
final String CLASSNAME_NC_SUBSCRIBER = "alma.acs.nc.refactored.NCSubscriber";
final String CLASSNAME_NC_PUBLISHER = "alma.acs.nc.refactored.NCPublisher";
/**
* ctor.
* @param acsManagerProxy
* @param componentPOA the POA for the component. Can be the root POA or some other specialized POA.
* @param acsCorba Encapsulates the ORB and all POAs
* @param logger logger to be used by this class
* @param componentHandle handle to be used for identification when sending requests to the manager.
* For components, this should be the component handle assigned by the manager;
* for other clients, it should be 0 to indicate that the handle obtained at manager login should be used.
* @param clientCurl
* @param componentStateManager can be null if this class is instantiated
* for a component client outside of a container
* @param threadFactory to be used for <code>getThreadFactory</code>
*/
public ContainerServicesImpl(AcsManagerProxy acsManagerProxy, POA componentPOA, AcsCorba acsCorba,
AcsLogger logger, int componentHandle, String clientCurl,
ComponentStateManager componentStateManager,
ThreadFactory threadFactory)
{
// The following fields are final. This guarantees that they will be copied to main thread memory,
// and thus be seen by other threads after this ctor has terminated.
m_acsManagerProxy = acsManagerProxy;
m_clientPOA = componentPOA;
this.acsCorba = acsCorba;
m_logger = logger;
m_componentHandle = componentHandle;
m_clientName = clientCurl;
m_componentStateManager = componentStateManager;
// should do for thread-safety as long as we don't iterate over it
m_usedComponentsMap = Collections.synchronizedMap(new HashMap<String, org.omg.CORBA.Object>());
m_usedNonStickyComponentsMap = Collections.synchronizedMap(new HashMap<String, org.omg.CORBA.Object>());
m_componentDescriptorMap = Collections.synchronizedMap(new HashMap<String, ComponentDescriptor>());
m_activatedOffshootsMap = Collections.synchronizedMap(new HashMap<Object, Servant>());
m_subscribers = new HashMap<String, AcsEventSubscriber>();
m_publishers = new HashMap<String, AcsEventPublisher>();
m_threadFactory = threadFactory;
cleanUpCallbacks = new ArrayList<CleanUpCallback>();
if (fakeUIDsForTesting) {
m_logger.warning("Running in test mode where UIDs will be constructed randomly instead of being retrieved from the archive!");
}
}
void setComponentXmlTranslatorProxy(Object xmlTranslatorProxy) {
m_componentXmlTranslatorProxy = xmlTranslatorProxy;
}
/////////////////////////////////////////////////////////////
// Implementation of ContainerServices
/////////////////////////////////////////////////////////////
/**
* Gets the component name (which the component does not know statically)
* @see alma.acs.container.ContainerServices#getName()
*/
public String getName() {
return m_clientName;
}
/**
* {@inheritDoc}
*
* This method should only be called by a component that lives inside a container;
* a component client that is not a component itself should not call it,
* would result in a NPE!
*
* @see alma.acs.container.ContainerServices#getComponentStateManager()
*/
public ComponentStateManager getComponentStateManager()
{
if (m_componentStateManager == null)
{
// to make debugging easier if this ever happened on the container side
throw new NullPointerException("ComponentStateManager is null!");
}
return m_componentStateManager;
}
/**
* The component must retrieve its logger object from this interface
* (as opposed to using the <code>ClientLogManager</code> singleton)
* so that the container is free
* to give away loggers that are somehow tailored to the particular component.
* <p>
* The goal is to have "componentName" and other fields in all ALMA log entries,
* and have tool support for filtering logs by component, subsystem, user, ...
*
* @see alma.acs.container.ContainerServices#getLogger()
*/
public AcsLogger getLogger()
{
if (componentLogger == null) {
componentLogger = ClientLogManager.getAcsLogManager().getLoggerForComponent(m_clientName);
}
return componentLogger;
}
public void registerComponentListener(ComponentListener listener) {
compListener = listener;
}
/**
*/
public void fireComponentsAvailable (List<ComponentDescriptor> compDescs) {
if (compListener == null) {
return;
}
// find out which components are interesting for the client
List<ComponentDescriptor> interesting = null;
if (compListener.includeForeignComponents()) {
interesting = compDescs;
}
else {
interesting = new Vector<ComponentDescriptor>();
for (ComponentDescriptor cd : compDescs) {
if (m_usedComponentsMap.containsKey(cd.getName()) || m_usedNonStickyComponentsMap.containsKey(cd.getName())) {
interesting.add(cd);
}
}
}
if (interesting.size() > 0) {
try {
compListener.componentsAvailable(interesting);
} catch (Throwable thr) {
m_logger.log(Level.INFO, "componentsAvailable implementation of client " + m_clientName + " failed", thr);
}
}
}
/**
*/
public void fireComponentsUnavailable (List<String> compNames) {
if (compListener == null) {
return;
}
// find out which components are interesting for the client
List<String> interesting = null;
if (compListener.includeForeignComponents()) {
interesting = compNames;
}
else {
interesting = new Vector<String>();
for (String cn : compNames) {
if (m_usedComponentsMap.containsKey(cn) || m_usedNonStickyComponentsMap.containsKey(cn) ) {
interesting.add(cn);
}
}
}
if (interesting.size() > 0 && compListener != null) {
try {
compListener.componentsUnavailable(interesting);
} catch (Throwable thr) {
m_logger.log(Level.INFO, "componentsUnavailable implementation of client " + m_clientName + " failed", thr);
}
}
}
/**
* @see alma.acs.container.ContainerServices#assignUniqueEntityId(EntityT)
*/
public void assignUniqueEntityId(EntityT entity) throws AcsJContainerServicesEx
{
if (entity == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("entity");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
if (fakeUIDsForTesting) {
long localId = (new Random(System.currentTimeMillis())).nextLong();
String uid = Range.generateUID("testArchiveId", "testRangeId", localId);
entity.setEntityId(uid);
return;
}
try {
if (identifierArchive == null) {
Identifier identRaw = IdentifierHelper.narrow(getDefaultComponent("IDL:alma/xmlstore/Identifier:1.0"));
identifierArchive = getTransparentXmlWrapper(IdentifierJ.class, identRaw, IdentifierOperations.class);
}
if (uidLibrary == null) {
uidLibrary = new UIDLibrary(m_logger);
}
uidLibrary.assignUniqueEntityId(entity, identifierArchive);
}
catch (Throwable thr) {
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo("failed to assign a UID to entity of type " + entity.getEntityTypeName());
throw ex;
}
}
/**
* @see alma.acs.container.ContainerServices#findComponents(java.lang.String, java.lang.String)
*/
public String[] findComponents(String curlWildcard, String typeWildcard)
throws AcsJContainerServicesEx
{
if (curlWildcard == null) {
curlWildcard = "*";
}
if (typeWildcard == null) {
typeWildcard = "*";
}
String msgSpec = "curlWildcard='" + curlWildcard + "' and typeWildcard='" + typeWildcard + "'.";
if (m_logger.isLoggable(Level.FINER)) {
m_logger.finer("about to call Manager#get_component_info with " + msgSpec);
}
ComponentInfo[] components = null;
try {
components = m_acsManagerProxy.get_component_info(new int[0], curlWildcard, typeWildcard, false );
}
catch (AcsJNoPermissionEx ex) {
m_logger.log(Level.FINE, "No permission to find components with " + msgSpec, ex);
AcsJContainerServicesEx ex2 = new AcsJContainerServicesEx(ex);
ex2.setContextInfo(msgSpec);
throw ex2;
}
catch (Throwable thr) {
m_logger.log(Level.FINE, "Unexpected failure calling 'get_component_info' with " + msgSpec, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msgSpec);
throw ex;
}
ArrayList<String> curls = new ArrayList<String>();
if (components != null) {
for (int i = 0; i < components.length; i++) {
curls.add(components[i].name);
}
}
if (m_logger.isLoggable(Level.FINER)) {
m_logger.finer("received " + curls.size() + " curls from get_component_info.");
}
return curls.toArray(new String[curls.size()]);
}
/**
*
* @see alma.acs.container.ContainerServices#getComponentDescriptor(java.lang.String)
*/
public ComponentDescriptor getComponentDescriptor(String curl)
throws AcsJContainerServicesEx
{
if (curl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("curl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
ComponentDescriptor desc = m_componentDescriptorMap.get(curl);
if (desc == null) {
// try to get it from the manager
ComponentInfo[] compInfos;
try {
compInfos = m_acsManagerProxy.get_component_info(new int[0], curl, "*", false);
} catch (Throwable thr) {
m_logger.log(Level.FINE, "Unexpected failure calling 'get_component_info'.", thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo("CURL=" + curl);
throw ex;
}
if (compInfos.length == 1) {
desc = new ComponentDescriptor(compInfos[0]);
m_componentDescriptorMap.put(curl, desc);
}
else {
String msg = "failed to retrieve a unique component descriptor for the component instance " + curl;
m_logger.fine(msg);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw new AcsJContainerServicesEx();
}
}
return desc;
}
/**
* @see alma.acs.container.ContainerServices#getComponent(String)
*/
public org.omg.CORBA.Object getComponent(String curl) throws AcsJContainerServicesEx
{
if (curl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("curl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
// check if our component has requested the other component before
org.omg.CORBA.Object stub = m_usedComponentsMap.get(curl);
if (stub != null)
{
// reusing seems ok as long as there is one separate
// instance of ContainerServicesImpl for each component.
// This reuse does not cut off the manager from component access,
// since it only happens at second or further access attempts;
// the first time, the component reference is obtained through the manager
// (independently of whether the components are collocated inside the same container),
// so that the manager is aware of component dependencies.
m_logger.info("client '" + m_clientName + "' attempts to retrieve component '" +
curl + "' more than once; will return existing reference.");
}
else
{
m_logger.fine("will retrieve remote component '" + curl +
"' using ACS Manager#get_component with client handle " + getEffectiveClientHandle());
/// @todo: think about timeouts
try {
stub = m_acsManagerProxy.get_component(getEffectiveClientHandle(), curl, true);
m_logger.fine("component " + curl + " retrieved successfully.");
m_usedComponentsMap.put(curl, stub);
} catch (AcsJmaciErrTypeEx ex) {
String msg = "Failed to retrieve component " + curl;
m_logger.log(Level.FINE, msg, ex); // only a low-level log because the client component is supposed to log the exception which contains all context data
throw new AcsJContainerServicesEx(ex);
} catch (Throwable thr) {
String msg = "Failed to retrieve component " + curl + " for unexpected reasons.";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
}
return stub;
}
public org.omg.CORBA.Object getComponentNonSticky(String curl)
throws AcsJContainerServicesEx
{
if (curl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("curl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
org.omg.CORBA.Object stub = null;
try {
stub = m_acsManagerProxy.get_component_non_sticky(getEffectiveClientHandle(), curl);
m_logger.fine("Non-sticky reference to component '" + curl + "' retrieved successfully.");
m_usedNonStickyComponentsMap.put(curl, stub);
} catch (AcsJmaciErrTypeEx ex) {
String msg = "Failed to retrieve non-sticky reference to component " + curl;
m_logger.log(Level.FINE, msg, ex); // only a low-level log because the client component is supposed to log the exception which contains all context data
throw new AcsJContainerServicesEx(ex);
} catch (Throwable thr) {
String msg = "Failed to retrieve non-sticky reference to component '" + curl + "' for unexpected reasons.";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
return stub;
}
/**
* @see alma.acs.container.ContainerServices#getDefaultComponent(java.lang.String)
*/
public org.omg.CORBA.Object getDefaultComponent(String componentIDLType)
throws AcsJContainerServicesEx
{
if (componentIDLType == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("componentIDLType");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
ComponentInfo cInfo = null;
try
{
// the call
cInfo = m_acsManagerProxy.get_default_component(getEffectiveClientHandle(), componentIDLType);
}
catch (AcsJmaciErrTypeEx ex) {
String msg = "failed to retrieve default component for type " + componentIDLType;
m_logger.log(Level.FINE, msg, ex); // higher-level log should be produced by the calling client from the exception later
throw new AcsJContainerServicesEx(ex);
}
catch (Throwable thr) {
String msg = "failed to retrieve default component for type " + componentIDLType + " for unexpected reasons!";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
// cInfo.reference == null should no longer happen since the maci exception changes for ACS 6.0
// @todo check and remove this
if (cInfo.reference == null) {
String msg = "Default component for type '" + componentIDLType + "' could not be accessed. ";
m_logger.info(msg);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw ex;
}
m_usedComponentsMap.put(cInfo.name, cInfo.reference);
m_componentDescriptorMap.put(cInfo.name, new ComponentDescriptor(cInfo));
return cInfo.reference;
}
public org.omg.CORBA.Object getCollocatedComponent(String compUrl, String targetCompUrl) throws AcsJContainerServicesEx {
if (compUrl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("compUrl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
if (targetCompUrl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("targetCompUrl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
ComponentQueryDescriptor cqd = new ComponentQueryDescriptor(compUrl, null);
return getCollocatedComponent(cqd, false, targetCompUrl);
}
public org.omg.CORBA.Object getCollocatedComponent(ComponentQueryDescriptor spec, boolean markAsDefaul, String targetCompUrl) throws AcsJContainerServicesEx {
if (spec == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("ComponentQueryDescriptor");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
if (targetCompUrl == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("targetCompUrl");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
ComponentInfo cInfo = null;
try {
// the call
cInfo = m_acsManagerProxy.get_collocated_component(getEffectiveClientHandle(), spec.toComponentSpec(), false, targetCompUrl);
} catch (AcsJmaciErrTypeEx ex) {
String msg = "Failed to retrieve component '" + spec.getComponentName() + "' created such that it runs collocated with '"+ targetCompUrl + "'.";
m_logger.log(Level.FINE, msg, ex); // it's serious, but the caller is supposed to log this. Container only logs just in case.
throw new AcsJContainerServicesEx(ex);
}
catch (Throwable thr) {
String msg = "Unexpectedly failed to retrieve component '" + spec.getComponentName() + "' created such that it runs collocated with '"+ targetCompUrl + "'.";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
// cInfo.reference == null should no longer happen since the maci exception changes for ACS 6.0
// @todo check and remove this
if (cInfo.reference == null) {
String msg = "Failed to retrieve component '" + spec.getComponentName() + "' created such that it runs collocated with '"+ targetCompUrl + "'.";
m_logger.info(msg);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw ex;
}
m_usedComponentsMap.put(cInfo.name, cInfo.reference);
m_componentDescriptorMap.put(cInfo.name, new ComponentDescriptor(cInfo));
return cInfo.reference;
}
/**
* @see alma.acs.container.ContainerServices#getDynamicComponent(si.ijs.maci.ComponentSpec, boolean)
*/
public org.omg.CORBA.Object getDynamicComponent(ComponentQueryDescriptor compDesc, boolean markAsDefault)
throws AcsJContainerServicesEx
{
return getDynamicComponent(compDesc.toComponentSpec(), markAsDefault);
}
/**
* @see alma.acs.container.ContainerServices#getDynamicComponent(si.ijs.maci.ComponentSpec, boolean)
*/
public org.omg.CORBA.Object getDynamicComponent(ComponentSpec compSpec, boolean markAsDefault)
throws AcsJContainerServicesEx
{
String entryMsg = "getDynamicComponent called with" +
" compName=" + compSpec.component_name +
" compType=" + compSpec.component_type +
" compCode=" + compSpec.component_code +
" compContainer=" + compSpec.container_name +
" markAsDefault=" + markAsDefault;
m_logger.fine(entryMsg);
ComponentInfo cInfo = null;
try
{
// the call
cInfo = m_acsManagerProxy.get_dynamic_component(getEffectiveClientHandle(), compSpec, markAsDefault);
m_usedComponentsMap.put(cInfo.name, cInfo.reference);
m_componentDescriptorMap.put(cInfo.name, new ComponentDescriptor(cInfo));
} catch (AcsJmaciErrTypeEx ex) {
m_logger.log(Level.FINE, "Failed to create dynamic component", ex);
throw new AcsJContainerServicesEx(ex);
}
catch (Throwable thr) {
String msg = "Unexpectedly failed to create dynamic component for unexpected reasons!";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
return cInfo.reference;
}
public org.omg.CORBA.Object getReferenceWithCustomClientSideTimeout(org.omg.CORBA.Object originalCorbaRef, double timeoutSeconds)
throws AcsJContainerServicesEx {
return acsCorba.wrapForRoundtripTimeout(originalCorbaRef, timeoutSeconds);
}
/**
* @see alma.acs.container.ContainerServices#getCDB()
*/
public DAL getCDB() throws AcsJContainerServicesEx
{
DAL dal = null;
String errMsg = "Failed to get the reference to the CDB component/service.";
try
{
// manager's get_service contains get_component, so even if the CDB becomes a real component, we can leave this
org.omg.CORBA.Object dalObj = m_acsManagerProxy.get_service("CDB", true);
dal = DALHelper.narrow(dalObj);
} catch (AcsJmaciErrTypeEx ex) {
m_logger.log(Level.FINE, errMsg, ex);
throw new AcsJContainerServicesEx(ex);
}
catch (Throwable thr) {
String msg = "Unexpectedly failed to get the CDB reference!";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
return dal;
}
/**
* Releases the specified component reference. This involves notification of the manager,
* as well as calling <code>_release()</code> on the CORBA stub.
* If the curl is not known to the container, the request will be ignored.
* <p>
* Note that <i>references</i> to other components are released by this method,
* where the components hosted inside this container act as clients.
* These referenced components may run inside this or some other container/container.
*
* @see alma.acs.container.ContainerServices#releaseComponent(java.lang.String)
*/
public void releaseComponent(String curl) {
ComponentReleaseCallback callback = new ComponentReleaseCallback();
releaseComponent(curl, callback);
try {
callback.awaitComponentRelease(60, TimeUnit.SECONDS);
} catch (InterruptedException ex) {
}
}
/**
* Used by {@link ContainerServicesImpl#releaseComponent(String, alma.acs.container.ContainerServices.ComponentReleaseCallback)}
* to wrap the user-supplied <code>ComponentReleaseCallback</code> for usage over Corba and for cleaner exception dispatching.
*/
private class ComponentReleaseCallbackCorbaHandler extends ResponseReceiver<Integer> {
private final ComponentReleaseCallback delegate;
ComponentReleaseCallbackCorbaHandler(ComponentReleaseCallback delegate) {
this.delegate = delegate;
}
@Override
public void incomingException(AcsJException ex) {
try {
if (ex instanceof AcsJComponentDeactivationUncleanEx) {
delegate.componentReleased((AcsJComponentDeactivationUncleanEx)ex);
}
else if (ex instanceof AcsJComponentDeactivationFailedEx) {
delegate.errorComponentReleaseFailed((AcsJComponentDeactivationFailedEx)ex);
}
else {
m_logger.log(Level.WARNING, "Received unexpected exception from manager#release_component_async, please report to ACS developers.", ex);
delegate.errorCommunicationFailure(ex); // strictly speaking the wrong method, but better than nothing.
}
}
catch (RuntimeException handlerEx) {
m_logger.log(Level.FINE, "User-supplied handler threw an exception.", handlerEx);
}
finally {
delegate.callOver();
}
}
@Override
public void incomingResponse(Integer numberRemainingClients) {
// we do not expose numberRemainingClients in the CS API
try {
delegate.componentReleased(null);
}
catch (RuntimeException handlerEx) {
m_logger.log(Level.FINE, "User-supplied handler threw an exception.", handlerEx);
}
finally {
delegate.callOver();
}
}
}
@Override
public void releaseComponent(String curl, ComponentReleaseCallback callback) {
// we keep the "forceful" release option as a switch in the code.
// It was taken out for ACS 7.0, but may come back in the future.
final boolean forcibly = false;
if (curl == null) {
m_logger.info("Invalid curl 'null', nothing to release.");
return;
}
org.omg.CORBA.Object stub = null;
// This use of synchronized makes the code thread safe without locking across the remote call to manager#release_component etc
synchronized (m_usedComponentsMap) {
if (!m_usedComponentsMap.containsKey(curl))
{
if (m_usedNonStickyComponentsMap.containsKey(curl)) {
m_logger.info("ignoring request by client '" + m_clientName +
"' to release component '" + curl + "' because the reference is non-sticky and does not need to be released.");
}
else {
m_logger.info("ignoring request by client '" + m_clientName +
"' to release other component with unknown curl='" + curl + "'.");
}
return;
}
// the CURL is in the map and gets removed now
stub = m_usedComponentsMap.get(curl);
m_usedComponentsMap.remove(curl);
}
m_logger.fine("about to release component " + curl + (forcibly ? " forcibly" : ""));
try {
if (forcibly) {
m_acsManagerProxy.force_release_component(getEffectiveClientHandle(), curl);
}
else {
ComponentReleaseCallbackCorbaHandler callbackCorba = new ComponentReleaseCallbackCorbaHandler(callback);
CBlong myCBlong = RequesterUtil.giveCBLong(this, callbackCorba);
m_acsManagerProxy.release_component(getEffectiveClientHandle(), curl, myCBlong);
}
m_logger.info("client '" + m_clientName + "' has successfully released " + " a component with curl=" + curl);
stub._release();
}
catch (AcsJNoPermissionEx ex) {
AcsLogLevel level = ( callback == null ? AcsLogLevel.WARNING : AcsLogLevel.DEBUG );
m_logger.log(level, "client '" + m_clientName + "' (handle " + getEffectiveClientHandle() + ") cannot release " +
" with the manager the component with curl=" + curl, ex);
if (callback != null) {
callback.errorNoPermission(ex.getReason());
}
}
catch (Throwable thr) { // any org.omg.CORBA.SystemException, or whatever else can happen
AcsLogLevel level = ( callback == null ? AcsLogLevel.WARNING : AcsLogLevel.DEBUG );
m_logger.log(level, "client '" + m_clientName + "' (handle " + getEffectiveClientHandle() + ") failed to release " +
" with the manager the component with curl=" + curl, thr);
if (callback != null) {
callback.errorCommunicationFailure(thr);
}
}
}
/**
* @see alma.acs.container.ContainerServices#activateOffShoot(org.omg.PortableServer.Servant)
*/
@Override
public <T extends Servant & OffShootOperations> OffShoot activateOffShoot(T servant)
throws AcsJContainerServicesEx
{
return activateOffShoot(servant, null);
}
/**
* @see alma.acs.container.ContainerServices#activateOffShoot(org.omg.PortableServer.Servant)
*/
@Override
public <T extends OffShootOperations> OffShoot activateOffShoot(T offshootImpl, Class<T> idlOpInterface)
throws AcsJContainerServicesEx
{
Servant servant = null;
boolean isTie = false;
boolean haveToInject = false;
// Checks
checkOffShoot(offshootImpl);
// If we receive an object that is not a servant it means that it requires XML automatic bindings.
// We create the corresponding POATie object, the dynamic proxy binder,
// and set the offshoot implementation as the final delegate
if( !(offshootImpl instanceof Servant) ) {
if( idlOpInterface == null )
throw new AcsJContainerServicesEx(new NullPointerException("Received null idlOpInterface when asking to activate XML offshoot"));
if( !idlOpInterface.isAssignableFrom(offshootImpl.getClass()) ) {
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo("Received OffShoot of type '" + offshootImpl.getClass().getName() +
"' does not inherits from '" + idlOpInterface.getName() + "'");
throw ex;
}
// Guess the name of the xyzPOATie class, build it, and delegate
String poaTieClassName = null;
try {
m_logger.fine("Creating POATie servant for offshoot '" + offshootImpl.getClass().getName() + "'");
// Get the POATie class and the expected xyzOperations interface
String baseClassName = idlOpInterface.getName().substring(0, idlOpInterface.getName().length()-1);
poaTieClassName = baseClassName + "POATie";
Class<?> poaTieClazz = Class.forName( poaTieClassName );
Method implGetter = poaTieClazz.getMethod("_delegate", (Class[]) null);
Class<?> operationsIF = implGetter.getReturnType();
// Create the dynamic XML entities wrapper
Object proxy = DynamicProxyFactory.getDynamicProxyFactory(m_logger)
.createServerProxy(operationsIF, offshootImpl, idlOpInterface);
// Create the POATie object, give it the proxy, and set it as our servant
Constructor<?> c = poaTieClazz.getConstructor(new Class[]{operationsIF});
servant = (Servant)c.newInstance(proxy);
if( m_componentXmlTranslatorProxy != null )
haveToInject = true;
} catch (ClassNotFoundException e) {
String msg = "Failed to create servant for offshoot " + offshootImpl.getClass().getName() + ": class '" + poaTieClassName + "' cannot be found";
m_logger.log(AcsLogLevel.ERROR, msg, e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw ex;
} catch(Exception e) {
throw new AcsJContainerServicesEx(e);
}
}
else {
m_logger.fine("Don't need to create servant for offshoot '" + offshootImpl.getClass().getName() + "'");
servant = (Servant)offshootImpl;
}
// check if the servant is the Tie variant, which allows proxy-based call interception by the container
String servantName = servant.getClass().getName();
if (servantName.endsWith("POATie")) {
try {
// the _delegate getter method is mandated by the IDL-to-Java mapping spec
Method implGetter = servant.getClass().getMethod("_delegate", (Class[]) null);
isTie = true;
Class<?> operationsIF = implGetter.getReturnType();
java.lang.Object offshootTiedImpl = implGetter.invoke(servant, (java.lang.Object[]) null);
// now we insert the interceptor between the tie skeleton and the impl.
// Offshoots have no name, so we construct one from the component name and the offshoot interface name
//
String qualOffshootName = getName() + "/" + operationsIF.getName().substring(0, operationsIF.getName().length() - "Operations".length());
java.lang.Object interceptingOffshootImpl = ContainerSealant.createContainerSealant(
operationsIF, offshootTiedImpl, qualOffshootName, true, m_logger,
Thread.currentThread().getContextClassLoader(), methodsExcludedFromInvocationLogging);
Method implSetter = servant.getClass().getMethod("_delegate", new Class[]{operationsIF});
implSetter.invoke(servant, new java.lang.Object[]{interceptingOffshootImpl});
m_logger.fine("created sealant for offshoot " + qualOffshootName);
} catch (NoSuchMethodException e) {
// so this was not a Tie skeleton, even though its name ends misleadingly with "POATie"
} catch (Exception e) {
m_logger.log(Level.WARNING, "Failed to create interceptor for offshoot " + servantName, e);
}
}
if (!isTie) {
// TODO: perhaps require tie offshoots with ACS 5.0, and enable this warning log
// m_logger.warning("Offshoot servant '" + servantName + "' from component '" + getName() +
// "' does not follow the tie approach. Calls can thus not be intercepted by the container.");
}
OffShoot shoot = null;
try {
org.omg.CORBA.Object obj = acsCorba.activateOffShoot(servant, m_clientPOA);
m_activatedOffshootsMap.put(offshootImpl, servant);
shoot = OffShootHelper.narrow(obj);
}
catch (Throwable thr) {
String msg = "failed to activate offshoot object of type '" + servant.getClass().getName() +
"' for client '" + m_clientName + "'. ";
// flatten the exception chain by one level if possible
if (thr instanceof AcsJContainerServicesEx && thr.getCause() != null) {
msg += "(" + thr.getMessage() + ")";
thr = thr.getCause();
}
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
throw ex;
}
// finally, put the CORBA-object/implementation into the component's proxy invocation handler,
// so when requesting an offshoot into the component, we return the corresponding CORBA object
if( haveToInject ) {
m_logger.fine("Injecting offshoot '" + offshootImpl.getClass().getName() + "' to '" + m_clientName + "' component XML binder");
ComponentInvocationHandler handler = (ComponentInvocationHandler)Proxy.getInvocationHandler(m_componentXmlTranslatorProxy);
handler.addOffshoot(offshootImpl, shoot);
}
m_logger.fine("successfully activated offshoot of type " + offshootImpl.getClass().getName());
return shoot;
}
@Override
public void deactivateOffShoot(Object offshootImpl)
throws AcsJContainerServicesEx
{
checkOffShoot(offshootImpl);
try {
acsCorba.deactivateOffShoot(m_activatedOffshootsMap.get(offshootImpl), m_clientPOA);
m_activatedOffshootsMap.remove(offshootImpl);
m_logger.fine("successfully deactivated offshoot of type " + offshootImpl.getClass().getName());
} catch (AcsJContainerEx e) {
throw new AcsJContainerServicesEx(e);
}
}
/**
* @param cbServant
* @throws ContainerException
*/
private void checkOffShoot(Object servant) throws AcsJContainerServicesEx {
if (servant == null) {
AcsJBadParameterEx cause = new AcsJBadParameterEx();
cause.setParameter("servant");
cause.setParameterValue("null");
throw new AcsJContainerServicesEx(cause);
}
if (!(servant instanceof OffShootOperations)) {
String msg = "invalid offshoot servant provided. Must implement " + OffShootOperations.class.getName();
m_logger.fine(msg);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo(msg);
throw ex;
}
}
/**
* @see alma.acs.container.ContainerServices#getAdvancedContainerServices()
*/
public synchronized AdvancedContainerServices getAdvancedContainerServices() {
if (advancedContainerServices == null) {
advancedContainerServices = new AdvancedContainerServicesImpl(this, m_logger);
// todo: once the legitimate cases of calling this method are settled, remove the log message.
m_logger.info("component '" + getName() + "' requested AdvancedContainerServices");
}
return advancedContainerServices;
}
/**
* {@inheritDoc}.
* <p>
* TODO: implement shortcutting of xml (de-)serialization for collocated component or offshoot:
* ask AcsContainer if it knows componentReference, and if it has transpXml-IF;
* if so, get component impl directly;
* check if respective component helper allows direct calls to transpXmlIF
* (by not implementing _getInterfaceTranslator, or some explicit flag);
* move intercepting layer (ContainerSealant) so that it's still in between the components.
*
* @see alma.acs.container.ContainerServices#getTransparentXmlComponent(java.lang.Class, org.omg.CORBA.Object, java.lang.Class)
*/
public <T, F> T getTransparentXmlWrapper(Class<T> transparentXmlIF, F flatXmlObject, Class<F> flatXmlIF)
throws AcsJContainerServicesEx
{
if (m_logger.isLoggable(Level.FINEST)) {
m_logger.finest("creating xml binding class aware wrapper around remote object " +
"implementing " + flatXmlIF.getName() + "...");
}
T wrapper = null;
try
{
wrapper = DynamicProxyFactory.getDynamicProxyFactory(m_logger).createClientProxy(
transparentXmlIF,
flatXmlObject,
flatXmlIF);
}
catch (Throwable thr)
{
String msg = "failed to create XML binding class wrapper for remote object implementing " + flatXmlIF.getName();
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex2 = new AcsJContainerServicesEx(thr);
ex2.setContextInfo(msg);
throw ex2;
}
return wrapper;
}
/////////////////////////////////////////////////////////////
// other
/////////////////////////////////////////////////////////////
public void releaseAllComponents()
{
// copy curls first to avoid deleting from m_usedComponentsMap
// while iterating over it (fail-fast iterator throws ConcurrentModificationException).
// synchronized just in case...
List<String> curls = new ArrayList<String>();
synchronized (m_usedComponentsMap) {
curls.addAll(m_usedComponentsMap.keySet());
}
for (String curl : curls ) {
releaseComponent(curl);
}
}
/**
* Gets the handle to be used toward the manager, which is
* <ul>
* <li> The handle obtained from the manager at login for normal clients
* <li> The component handle assigned by the manager at component activation time,
* if this ContainerServices instance is used for a component
* </ul>
* We don't cache the handle from acsManagerProxy because it may change after a re-login,
* and then we get errors if the stale handle would be used.
* @return The correct handle to be used to identify this client to the manager.
*/
private int getEffectiveClientHandle() {
return (m_componentHandle > 0 ? m_componentHandle : m_acsManagerProxy.getManagerHandle());
}
/**
* @see alma.acs.container.ContainerServices#getThreadFactory()
*/
public ThreadFactory getThreadFactory() {
return m_threadFactory;
}
AcsCorba getAcsCorba() {
return acsCorba;
}
/**
* With this optional call, automatic invocation logging for certain offshoot methods can be disabled.
* @param methodsExcludedFromInvocationLogging
* @see ComponentHelper#getComponentMethodsExcludedFromInvocationLogging()
*/
void setMethodsExcludedFromInvocationLogging(String[] methodsExcludedFromInvocationLogging) {
this.methodsExcludedFromInvocationLogging = methodsExcludedFromInvocationLogging;
}
/**
* Cleans up all the resources that need to be closed, like closing opened notification channels
*
* @since ACS 8.1.0
*/
public void cleanUp() {
/* Cleanup through externally registered callbacks */
for (CleanUpCallback cleanUpCallback : cleanUpCallbacks) {
try {
cleanUpCallback.containerServicesCleanUp();
}
catch (Throwable thr) {
m_logger.log(Level.WARNING, "Failed to clean up registered client object", thr);
}
}
/* Disconnect NC subscribers */
for(String channel: m_subscribers.keySet()) {
AcsEventSubscriber subscriber = m_subscribers.get(channel);
try {
subscriber.disconnect();
String tmp[] = channel.split("/");
m_logger.log(AcsLogLevel.NOTICE, "Automatically disconnected subscriber for NC '" + tmp[tmp.length - 1] + "'");
} catch (IllegalStateException e) {
// Silently ignore this exception, as the subscriber was already disconnected. Well done, developers! :)
}
}
/* Disconnect NC publishers */
for(String channel: m_publishers.keySet()) {
AcsEventPublisher subscriber = m_publishers.get(channel);
try {
subscriber.disconnect();
String tmp[] = channel.split("/");
m_logger.log(AcsLogLevel.NOTICE, "Automatically disconnected publisher for NC '" + tmp[tmp.length - 1] + "'");
} catch (IllegalStateException e) {
// Silently ignore this exception, as the subscriber was already disconnected. Well done, developers! :)
}
}
}
/**
* A hack, see {@link ContainerServicesImpl#registerCleanUpCallback(CleanUpCallback)}.
*/
public static interface CleanUpCallback {
public void containerServicesCleanUp();
}
/**
* This is a hack: NC classes can register themselves to be notified,
* in order to release remote Corba resources (and prevent crashes of Notify Service...).
* Note that without this hack, the lifecycle of NC classes is only managed by the application code,
* which means that ACS could not enforce the clean up.
* <p>
* @TODO remove this once the NC classes are properly integrated into container services
* @param cb
* @since ACS 8.1.0
*/
public void registerCleanUpCallback(ContainerServicesImpl.CleanUpCallback cb) {
cleanUpCallbacks.add(cb);
}
private NamingContext getNameService() throws AcsJContainerServicesEx {
NamingContext nameService = null;
try
{
org.omg.CORBA.Object nameServiceObj = m_acsManagerProxy.get_service("NameService", true);
nameService = NamingContextHelper.narrow(nameServiceObj);
} catch (AcsJmaciErrTypeEx ex) {
m_logger.log(Level.FINE, "Failed to get the reference to the NameService service", ex);
throw new AcsJContainerServicesEx(ex);
} catch (Throwable thr) {
String msg = "Unexpectedly failed to get the NameService reference!";
m_logger.log(Level.FINE, msg, thr);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(thr);
ex.setContextInfo(msg);
throw ex;
}
return nameService;
}
/**
* @see alma.acs.container.ContainerServices#createNotificationChannelSubscriber(String)
*/
public AcsEventSubscriber createNotificationChannelSubscriber(String channelName) throws AcsJContainerServicesEx {
return createNotificationChannelSubscriber(channelName, null); //TODO (rtobar): Is this fine? I'm only 99% sure
}
/**
* @see alma.acs.container.ContainerServices#createNotificationChannelSubscriber(String, String)
*/
public AcsEventSubscriber createNotificationChannelSubscriber(String channelName, String channelNotifyServiceDomainName) throws AcsJContainerServicesEx {
AcsEventSubscriber subscriber = null;
try {
Object[] args = new Object[]{
channelName,
channelNotifyServiceDomainName,
this,
getNameService(),
m_clientName
};
Class<?> clazz = Class.forName(CLASSNAME_NC_SUBSCRIBER);
Constructor<?> constructor = clazz.getConstructor(String.class, String.class, ContainerServicesBase.class, NamingContext.class, String.class);
subscriber = (AcsEventSubscriber)constructor.newInstance(args);
} catch(ClassNotFoundException e) {
// TODO: maybe we could prevent future NCSubscriber creation tries, since the class isn't and will not be loaded
// The same applies for the next "catch" block
m_logger.log(AcsLogLevel.ERROR, "Cannot create NC subscriber because the 'NCSubscriber' class is not present in the classpath", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
ex.setContextInfo("'" + CLASSNAME_NC_SUBSCRIBER + "' class not present in the classpath");
throw ex;
} catch(ClassCastException e) {
m_logger.log(AcsLogLevel.ERROR, "Cannot create NC subscriber because loaded class is not of type 'AcsEventSubscriber", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
ex.setContextInfo("'" + CLASSNAME_NC_SUBSCRIBER + "' class does not extend 'AcsEventSubscriber'");
throw ex;
} catch(Throwable e) {
m_logger.log(AcsLogLevel.ERROR, "Unexpected error while creating new AcsEventSubscriber object", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
throw ex;
}
m_subscribers.put( (channelNotifyServiceDomainName == null ? "" : channelNotifyServiceDomainName) + "/" + channelName, subscriber);
return subscriber;
}
/**
* @see ContainerServices#createNotificationChannelPublisher(String)
*/
public AcsEventPublisher createNotificationChannelPublisher(String channelName) throws AcsJContainerServicesEx {
return createNotificationChannelPublisher(channelName, null); // TODO (rtobar): only 99% sure that this is right
}
/**
* @see ContainerServices#createNotificationChannelPublisher(String, String)
*/
public AcsEventPublisher createNotificationChannelPublisher(String channelName, String channelNotifyServiceDomainName) throws AcsJContainerServicesEx {
AcsEventPublisher publisher = null;
try {
Object[] args = new Object[]{
channelName,
channelNotifyServiceDomainName,
this
};
Class<?> clazz = Class.forName(CLASSNAME_NC_PUBLISHER);
Constructor<?> constructor = clazz.getConstructor(String.class, String.class, ContainerServicesBase.class);
publisher = (AcsEventPublisher)constructor.newInstance(args);
} catch(ClassNotFoundException e) {
// TODO: maybe we could prevent future NCPublisher creation tries, since the class isn't and will not be loaded
// The same applies for the next "catch" block
m_logger.log(AcsLogLevel.ERROR, "Cannot create NC publisher because the 'NCPublisher' class is not present in the classpath", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
ex.setContextInfo("'" + CLASSNAME_NC_PUBLISHER + "' class not present in the classpath");
throw ex;
} catch(ClassCastException e) {
m_logger.log(AcsLogLevel.ERROR, "Cannot create NC publisher because loaded class is not of type 'AcsEventPublisher", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
ex.setContextInfo("'" + CLASSNAME_NC_PUBLISHER + "' class does not extend 'AcsEventPublisher'");
throw ex;
} catch(Throwable e) {
m_logger.log(AcsLogLevel.ERROR, "Unexpected error while creating new AcsEventPublisher object", e);
AcsJContainerServicesEx ex = new AcsJContainerServicesEx(e);
throw ex;
}
m_publishers.put( (channelNotifyServiceDomainName == null ? "" : channelNotifyServiceDomainName) + "/" + channelName, publisher);
return publisher;
}
private void submitAlarm(String faultFamily, String faultMember, int faultCode, boolean raise) throws AcsJContainerServicesEx {
try {
ACSAlarmSystemInterface source = null;
ACSFaultState faultState = null;
try {
// TODO: Store a map of the created sources if the AS supports more than one source in the future
// Also, don't have a default hardcoded source name here
source = ACSAlarmSystemInterfaceFactory.createSource("ALARM_SYSTEM_SOURCES");
} catch (SourceCreationErrorEx e) {
throw new AcsJContainerServicesEx(e);
}
try {
faultState = ACSAlarmSystemInterfaceFactory.createFaultState(faultFamily, faultMember, faultCode);
} catch (FaultStateCreationErrorEx e) {
e.printStackTrace();
// @TODO
// } catch (ACSASFactoryNotInitedEx ex) {
// log.severe("Alarm with FF=" + faultFamily + " FM=" + faultMember + " FC=" + faultCode
// + " could not be thrown. Message=" + ex.getMessage());
// } catch (SourceCreationErrorEx ex) {
// log.severe("Alarm with FF=" + faultFamily + " FM=" + faultMember + " FC=" + faultCode
// + " could not be thrown. Message=" + ex.getMessage());
// } catch (FaultStateCreationErrorEx ex) {
// log.severe("Alarm with FF=" + faultFamily + " FM=" + faultMember + " FC=" + faultCode
// + " could not be thrown. Message=" + ex.getMessage());
// }
}
if( raise )
faultState.setDescriptor(ACSFaultState.ACTIVE);
else
faultState.setDescriptor(ACSFaultState.TERMINATE);
faultState.setUserTimestamp(new Timestamp(System.currentTimeMillis()));
source.push(faultState);
} catch (ACSASFactoryNotInitedEx e) {
throw new AcsJContainerServicesEx(e);
}
}
/**
* @see ContainerServices#raiseAlarm(String, String, int)
*/
public void raiseAlarm(String faultFamily, String faultMember, int faultCode) throws AcsJContainerServicesEx {
submitAlarm(faultFamily, faultMember, faultCode, true);
}
/**
* @see ContainerServices#clearAlarm(String, String, int)
*/
public void clearAlarm(String faultFamily, String faultMember, int faultCode) throws AcsJContainerServicesEx {
submitAlarm(faultFamily, faultMember, faultCode, false);
}
} | Fixed NPE in ComponentReleaseCallbackCorbaHandler caused by null callback passed to releaseComponent(...) and yet wrapped with Corba callback.
git-svn-id: afcf11d89342f630bd950d18a70234a9e277d909@151185 523d945c-050c-4681-91ec-863ad3bb968a
| LGPL/CommonSoftware/jcont/src/alma/acs/container/ContainerServicesImpl.java | Fixed NPE in ComponentReleaseCallbackCorbaHandler caused by null callback passed to releaseComponent(...) and yet wrapped with Corba callback. |
|
Java | apache-2.0 | bc0cd3f08769c4ae724155e3de5582140e7ec8bd | 0 | tuGithub/gobblin-1,zliu41/gobblin,jenniferzheng/gobblin,yukuai518/gobblin,linkedin/gobblin,Hanmourang/Gobblin,tuGithub/gobblin-1,jack-moseley/gobblin,jenniferzheng/gobblin,the100rabh/gobblin,ydai1124/gobblin-1,ibuenros/gobblin,jinhyukchang/gobblin,mwol/gobblin,linkedin/gobblin,sahilTakiar/gobblin,pcadabam/gobblin,winlinvip/gobblin,pldash/gobblin-1,pldash/gobblin-1,jinhyukchang/gobblin,aditya1105/gobblin,zliu41/gobblin,ydai1124/gobblin-1,arhik/gobblin,ydai1124/gobblin-1,slietz/gobblin,ydai1124/gobblin-1,jack-moseley/gobblin,chavdar/gobblin-1,ibuenros/gobblin,chavdar/gobblin-1,ydailinkedin/gobblin-1,arhik/gobblin,zliu41/gobblin,chavdar/gobblin-1,linkedin/gobblin,tuGithub/gobblin-1,arjun4084346/gobblin,PaytmLabs/gobblin,pldash/gobblin-1,sahooamit/bigdata,linkedin/gobblin,PaytmLabs/gobblin,pldash/gobblin-1,xkrogen/gobblin,aditya1105/gobblin,shirshanka/gobblin,linkedin/gobblin,jordancheah/gobblin,mwol/gobblin,chavdar/gobblin-1,ydailinkedin/gobblin-1,yukuai518/gobblin,pcadabam/gobblin,tuGithub/gobblin-1,abti/gobblin,jenniferzheng/gobblin,abti/gobblin,NerdWallet/gobblin,pldash/gobblin-1,lamborryan/gobblin,jinhyukchang/gobblin,ydailinkedin/gobblin-1,jack-moseley/gobblin,pcadabam/gobblin,ydailinkedin/gobblin-1,lbendig/gobblin,arjun4084346/gobblin,mwol/gobblin,slietz/gobblin,jack-moseley/gobblin,ydailinkedin/gobblin-1,yukuai518/gobblin,NerdWallet/gobblin,sahilTakiar/gobblin,mwol/gobblin,pcadabam/gobblin,winlinvip/gobblin,lbendig/gobblin,aditya1105/gobblin,jinhyukchang/gobblin,arjun4084346/gobblin,ydai1124/gobblin-1,xkrogen/gobblin,ibuenros/gobblin,shirshanka/gobblin,aditya1105/gobblin,chavdar/gobblin-1,zyq001/gobblin,jinhyukchang/gobblin,zliu41/gobblin,liyinan926/gobblin,sahilTakiar/gobblin,jenniferzheng/gobblin,sahilTakiar/gobblin,pcadabam/gobblin,lbendig/gobblin,abti/gobblin,abti/gobblin,ydai1124/gobblin-1,jinhyukchang/gobblin,jenniferzheng/gobblin,zliu41/gobblin,jordancheah/gobblin,jenniferzheng/gobblin,shirshanka/gobblin,dvenkateshappa/gobblin,abti/gobblin,yukuai518/gobblin,sahilTakiar/gobblin,the100rabh/gobblin,ibuenros/gobblin,pldash/gobblin-1,dvenkateshappa/gobblin,lamborryan/gobblin,aditya1105/gobblin,chavdar/gobblin-1,yukuai518/gobblin,arjun4084346/gobblin,aditya1105/gobblin,sahooamit/bigdata,lbendig/gobblin,shirshanka/gobblin,ibuenros/gobblin,liyinan926/gobblin,jack-moseley/gobblin,pcadabam/gobblin,lbendig/gobblin,Hanmourang/Gobblin,yukuai518/gobblin,tuGithub/gobblin-1,abti/gobblin,mwol/gobblin,arjun4084346/gobblin,sahilTakiar/gobblin,zyq001/gobblin,ibuenros/gobblin,linkedin/gobblin,mwol/gobblin,lbendig/gobblin,shirshanka/gobblin | /*
* Copyright (C) 2014-2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.util;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.Closer;
import gobblin.configuration.ConfigurationKeys;
import gobblin.configuration.State;
import gobblin.source.workunit.MultiWorkUnit;
import gobblin.source.workunit.WorkUnit;
/**
* Utility class for the job scheduler and job launchers.
*
* @author ynli
*/
public class JobLauncherUtils {
private static Map<String, FileSystem> ownerAndFs = Maps.newConcurrentMap();
/**
* Create a new job ID.
*
* @param jobName job name
* @return new job ID
*/
public static String newJobId(String jobName) {
// Job ID in the form of job_<job_id_suffix>
// <job_id_suffix> is in the form of <job_name>_<current_timestamp>
String jobIdSuffix = String.format("%s_%d", jobName, System.currentTimeMillis());
return "job_" + jobIdSuffix;
}
/**
* Create a new task ID for the job with the given job ID.
*
* @param jobId job ID
* @param sequence task sequence number
* @return new task ID
*/
public static String newTaskId(String jobId, int sequence) {
return String.format("task_%s_%d", jobId.substring(jobId.indexOf('_') + 1), sequence);
}
/**
* Create an ID for a new multi-task (corresponding to a {@link gobblin.source.workunit.MultiWorkUnit})
* for the job with the given job ID.
*
* @param jobId job ID
* @param sequence multi-task sequence number
* @return new multi-task ID
*/
public static String newMultiTaskId(String jobId, int sequence) {
return String.format("multitask_%s_%d", jobId.substring(jobId.indexOf('_') + 1), sequence);
}
/**
* Utility method that takes in a {@link List} of {@link WorkUnit}s, and flattens them. It builds up
* the flattened list by checking each element of the given list, and seeing if it is an instance of
* {@link MultiWorkUnit}. If it is then it calls itself on the {@link WorkUnit}s returned by
* {@link MultiWorkUnit#getWorkUnits()}. If not, then it simply adds the {@link WorkUnit} to the
* flattened list.
*
* @param workUnits is a {@link List} containing either {@link WorkUnit}s or {@link MultiWorkUnit}s
* @return a {@link List} of flattened {@link WorkUnit}s
*/
public static List<WorkUnit> flattenWorkUnits(List<WorkUnit> workUnits) {
List<WorkUnit> flattenedWorkUnits = Lists.newArrayList();
for (WorkUnit workUnit : workUnits) {
if (workUnit instanceof MultiWorkUnit) {
flattenedWorkUnits.addAll(flattenWorkUnits(((MultiWorkUnit) workUnit).getWorkUnits()));
} else {
flattenedWorkUnits.add(workUnit);
}
}
return flattenedWorkUnits;
}
/**
* Cleanup the staging data for a list of Gobblin tasks. This method calls the
* {@link #cleanStagingData(State, Logger)} method.
*
* @param states a {@link List} of {@link State}s that need their staging data cleaned
*/
public static void cleanStagingData(List<? extends State> states, Logger logger) throws IOException {
for (State state : states) {
JobLauncherUtils.cleanStagingData(state, logger);
}
}
/**
* Cleanup staging data of a Gobblin task.
*
* @param state workunit state
*/
public static void cleanStagingData(State state, Logger logger) throws IOException {
int numBranches = state.getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY, 1);
for (int branchId = 0; branchId < numBranches; branchId++) {
String writerFsUri =
state.getProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI,
numBranches, branchId), ConfigurationKeys.LOCAL_FS_URI);
FileSystem fs = getFsWithProxy(state, writerFsUri);
Path stagingPath = WriterUtils.getWriterStagingDir(state, numBranches, branchId);
if (fs.exists(stagingPath)) {
logger.info("Cleaning up staging directory " + stagingPath.toUri().getPath());
if (!fs.delete(stagingPath, true)) {
throw new IOException("Clean up staging directory " + stagingPath.toUri().getPath() + " failed");
}
}
Path outputPath = WriterUtils.getWriterOutputDir(state, numBranches, branchId);
if (fs.exists(outputPath)) {
logger.info("Cleaning up output directory " + outputPath.toUri().getPath());
if (!fs.delete(outputPath, true)) {
throw new IOException("Clean up output directory " + outputPath.toUri().getPath() + " failed");
}
}
}
}
/**
* Cleanup staging data of a Gobblin task using a {@link ParallelRunner}
*
* @param state workunit state
* @param closer a closer that registers the given map of ParallelRunners. The caller is responsible
* for closing the closer after the cleaning is done.
* @param parallelRunners a map from FileSystem URI to ParallelRunner.
* @throws IOException
*/
public static void cleanStagingData(State state, Logger logger, Closer closer,
Map<String, ParallelRunner> parallelRunners) throws IOException {
int numBranches = state.getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY, 1);
int parallelRunnerThreads =
state.getPropAsInt(ParallelRunner.PARALLEL_RUNNER_THREADS_KEY, ParallelRunner.DEFAULT_PARALLEL_RUNNER_THREADS);
for (int branchId = 0; branchId < numBranches; branchId++) {
String writerFsUri =
state.getProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI,
numBranches, branchId), ConfigurationKeys.LOCAL_FS_URI);
FileSystem fs = getFsWithProxy(state, writerFsUri);
ParallelRunner parallelRunner = getParallelRunner(fs, closer, parallelRunnerThreads, parallelRunners);
Path stagingPath = WriterUtils.getWriterStagingDir(state, numBranches, branchId);
if (fs.exists(stagingPath)) {
logger.info("Cleaning up staging directory " + stagingPath.toUri().getPath());
parallelRunner.deletePath(stagingPath, true);
}
Path outputPath = WriterUtils.getWriterOutputDir(state, numBranches, branchId);
if (fs.exists(outputPath)) {
logger.info("Cleaning up output directory " + outputPath.toUri().getPath());
parallelRunner.deletePath(outputPath, true);
}
}
}
private static FileSystem getFsWithProxy(State state, String writerFsUri)
throws IOException {
if (!state.getPropAsBoolean(ConfigurationKeys.SHOULD_FS_PROXY_AS_USER,
ConfigurationKeys.DEFAULT_SHOULD_FS_PROXY_AS_USER)) {
return FileSystem.get(URI.create(writerFsUri), new Configuration());
} else {
Preconditions.checkArgument(!Strings.isNullOrEmpty(state.getProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME)),
"State does not contain a proper proxy user name");
String owner = state.getProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME);
if (ownerAndFs.containsKey(owner)) {
return ownerAndFs.get(owner);
} else {
try {
FileSystem proxiedFs =
new ProxiedFileSystemWrapper().getProxiedFileSystem(state, ProxiedFileSystemWrapper.AuthType.KEYTAB,
state.getProp(ConfigurationKeys.SUPER_USER_KEY_TAB_LOCATION), writerFsUri);
ownerAndFs.put(owner, proxiedFs);
return proxiedFs;
} catch (InterruptedException e) {
throw new IOException(e);
} catch (URISyntaxException e) {
throw new IOException(e);
}
}
}
}
private static ParallelRunner getParallelRunner(FileSystem fs, Closer closer, int parallelRunnerThreads,
Map<String, ParallelRunner> parallelRunners) {
String uri = fs.getUri().toString();
if (!parallelRunners.containsKey(uri)) {
parallelRunners.put(uri, closer.register(new ParallelRunner(parallelRunnerThreads, fs)));
}
return parallelRunners.get(uri);
}
}
| gobblin-utility/src/main/java/gobblin/util/JobLauncherUtils.java | /*
* Copyright (C) 2014-2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.util;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import com.google.common.collect.Lists;
import com.google.common.io.Closer;
import gobblin.configuration.ConfigurationKeys;
import gobblin.configuration.State;
import gobblin.source.workunit.MultiWorkUnit;
import gobblin.source.workunit.WorkUnit;
/**
* Utility class for the job scheduler and job launchers.
*
* @author ynli
*/
public class JobLauncherUtils {
/**
* Create a new job ID.
*
* @param jobName job name
* @return new job ID
*/
public static String newJobId(String jobName) {
// Job ID in the form of job_<job_id_suffix>
// <job_id_suffix> is in the form of <job_name>_<current_timestamp>
String jobIdSuffix = String.format("%s_%d", jobName, System.currentTimeMillis());
return "job_" + jobIdSuffix;
}
/**
* Create a new task ID for the job with the given job ID.
*
* @param jobId job ID
* @param sequence task sequence number
* @return new task ID
*/
public static String newTaskId(String jobId, int sequence) {
return String.format("task_%s_%d", jobId.substring(jobId.indexOf('_') + 1), sequence);
}
/**
* Create an ID for a new multi-task (corresponding to a {@link gobblin.source.workunit.MultiWorkUnit})
* for the job with the given job ID.
*
* @param jobId job ID
* @param sequence multi-task sequence number
* @return new multi-task ID
*/
public static String newMultiTaskId(String jobId, int sequence) {
return String.format("multitask_%s_%d", jobId.substring(jobId.indexOf('_') + 1), sequence);
}
/**
* Utility method that takes in a {@link List} of {@link WorkUnit}s, and flattens them. It builds up
* the flattened list by checking each element of the given list, and seeing if it is an instance of
* {@link MultiWorkUnit}. If it is then it calls itself on the {@link WorkUnit}s returned by
* {@link MultiWorkUnit#getWorkUnits()}. If not, then it simply adds the {@link WorkUnit} to the
* flattened list.
*
* @param workUnits is a {@link List} containing either {@link WorkUnit}s or {@link MultiWorkUnit}s
* @return a {@link List} of flattened {@link WorkUnit}s
*/
public static List<WorkUnit> flattenWorkUnits(List<WorkUnit> workUnits) {
List<WorkUnit> flattenedWorkUnits = Lists.newArrayList();
for (WorkUnit workUnit : workUnits) {
if (workUnit instanceof MultiWorkUnit) {
flattenedWorkUnits.addAll(flattenWorkUnits(((MultiWorkUnit) workUnit).getWorkUnits()));
} else {
flattenedWorkUnits.add(workUnit);
}
}
return flattenedWorkUnits;
}
/**
* Cleanup the staging data for a list of Gobblin tasks. This method calls the
* {@link #cleanStagingData(State, Logger)} method.
*
* @param states a {@link List} of {@link State}s that need their staging data cleaned
*/
public static void cleanStagingData(List<? extends State> states, Logger logger) throws IOException {
for (State state : states) {
JobLauncherUtils.cleanStagingData(state, logger);
}
}
/**
* Cleanup staging data of a Gobblin task.
*
* @param state workunit state
*/
public static void cleanStagingData(State state, Logger logger) throws IOException {
int numBranches = state.getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY, 1);
for (int branchId = 0; branchId < numBranches; branchId++) {
String writerFsUri = state.getProp(
ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, numBranches, branchId),
ConfigurationKeys.LOCAL_FS_URI);
FileSystem fs = getFsWithProxy(state, writerFsUri);
Path stagingPath = WriterUtils.getWriterStagingDir(state, numBranches, branchId);
if (fs.exists(stagingPath)) {
logger.info("Cleaning up staging directory " + stagingPath.toUri().getPath());
if (!fs.delete(stagingPath, true)) {
throw new IOException("Clean up staging directory " + stagingPath.toUri().getPath() + " failed");
}
}
Path outputPath = WriterUtils.getWriterOutputDir(state, numBranches, branchId);
if (fs.exists(outputPath)) {
logger.info("Cleaning up output directory " + outputPath.toUri().getPath());
if (!fs.delete(outputPath, true)) {
throw new IOException("Clean up output directory " + outputPath.toUri().getPath() + " failed");
}
}
}
}
/**
* Cleanup staging data of a Gobblin task using a {@link ParallelRunner}
*
* @param state workunit state
* @param closer a closer that registers the given map of ParallelRunners. The caller is responsible
* for closing the closer after the cleaning is done.
* @param parallelRunners a map from FileSystem URI to ParallelRunner.
* @throws IOException
*/
public static void cleanStagingData(State state, Logger logger, Closer closer,
Map<String, ParallelRunner> parallelRunners) throws IOException {
int numBranches = state.getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY, 1);
int parallelRunnerThreads =
state.getPropAsInt(ParallelRunner.PARALLEL_RUNNER_THREADS_KEY, ParallelRunner.DEFAULT_PARALLEL_RUNNER_THREADS);
for (int branchId = 0; branchId < numBranches; branchId++) {
String writerFsUri = state.getProp(
ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, numBranches, branchId),
ConfigurationKeys.LOCAL_FS_URI);
FileSystem fs = getFsWithProxy(state, writerFsUri);
ParallelRunner parallelRunner = getParallelRunner(fs, closer, parallelRunnerThreads, parallelRunners);
Path stagingPath = WriterUtils.getWriterStagingDir(state, numBranches, branchId);
if (fs.exists(stagingPath)) {
logger.info("Cleaning up staging directory " + stagingPath.toUri().getPath());
parallelRunner.deletePath(stagingPath, true);
}
Path outputPath = WriterUtils.getWriterOutputDir(state, numBranches, branchId);
if (fs.exists(outputPath)) {
logger.info("Cleaning up output directory " + outputPath.toUri().getPath());
parallelRunner.deletePath(outputPath, true);
}
}
}
private static FileSystem getFsWithProxy(State state, String writerFsUri) throws IOException {
if (state.getPropAsBoolean(ConfigurationKeys.SHOULD_FS_PROXY_AS_USER,
ConfigurationKeys.DEFAULT_SHOULD_FS_PROXY_AS_USER)) {
try {
return new ProxiedFileSystemWrapper().getProxiedFileSystem(state, ProxiedFileSystemWrapper.AuthType.KEYTAB,
state.getProp(ConfigurationKeys.SUPER_USER_KEY_TAB_LOCATION), writerFsUri);
} catch (InterruptedException e) {
throw new IOException(e);
} catch (URISyntaxException e) {
throw new IOException(e);
}
} else {
return FileSystem.get(URI.create(writerFsUri), new Configuration());
}
}
private static ParallelRunner getParallelRunner(FileSystem fs, Closer closer, int parallelRunnerThreads,
Map<String, ParallelRunner> parallelRunners) {
String uri = fs.getUri().toString();
if (!parallelRunners.containsKey(uri)) {
parallelRunners.put(uri, closer.register(new ParallelRunner(parallelRunnerThreads, fs)));
}
return parallelRunners.get(uri);
}
}
| Avoid duplciate keytab login in cleaning staging data
| gobblin-utility/src/main/java/gobblin/util/JobLauncherUtils.java | Avoid duplciate keytab login in cleaning staging data |
|
Java | apache-2.0 | 08452c607bad1af80630b90d83072a91c5509cd8 | 0 | kyroskoh/jmeter,etnetera/jmeter,ra0077/jmeter,vherilier/jmeter,max3163/jmeter,kschroeder/jmeter,fj11/jmeter,ThiagoGarciaAlves/jmeter,ubikfsabbe/jmeter,ra0077/jmeter,vherilier/jmeter,ThiagoGarciaAlves/jmeter,hizhangqi/jmeter-1,etnetera/jmeter,ra0077/jmeter,irfanah/jmeter,max3163/jmeter,thomsonreuters/jmeter,etnetera/jmeter,tuanhq/jmeter,DoctorQ/jmeter,liwangbest/jmeter,liwangbest/jmeter,d0k1/jmeter,d0k1/jmeter,max3163/jmeter,hemikak/jmeter,d0k1/jmeter,ubikfsabbe/jmeter,ubikfsabbe/jmeter,max3163/jmeter,ubikloadpack/jmeter,kyroskoh/jmeter,kschroeder/jmeter,ubikloadpack/jmeter,thomsonreuters/jmeter,DoctorQ/jmeter,kschroeder/jmeter,etnetera/jmeter,hemikak/jmeter,ubikloadpack/jmeter,liwangbest/jmeter,irfanah/jmeter,ubikfsabbe/jmeter,fj11/jmeter,hizhangqi/jmeter-1,ra0077/jmeter,vherilier/jmeter,ubikloadpack/jmeter,tuanhq/jmeter,d0k1/jmeter,DoctorQ/jmeter,thomsonreuters/jmeter,hemikak/jmeter,fj11/jmeter,kyroskoh/jmeter,tuanhq/jmeter,hemikak/jmeter,hizhangqi/jmeter-1,irfanah/jmeter,ThiagoGarciaAlves/jmeter,vherilier/jmeter,etnetera/jmeter | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter;
import java.awt.event.ActionEvent;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import java.net.Authenticator;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.SocketException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicInteger;
import javax.swing.JTree;
import javax.swing.tree.TreePath;
import org.apache.commons.cli.avalon.CLArgsParser;
import org.apache.commons.cli.avalon.CLOption;
import org.apache.commons.cli.avalon.CLOptionDescriptor;
import org.apache.commons.cli.avalon.CLUtil;
import org.apache.jmeter.control.ReplaceableController;
import org.apache.jmeter.engine.ClientJMeterEngine;
import org.apache.jmeter.engine.JMeterEngine;
import org.apache.jmeter.engine.RemoteJMeterEngineImpl;
import org.apache.jmeter.engine.StandardJMeterEngine;
import org.apache.jmeter.exceptions.IllegalUserActionException;
import org.apache.jmeter.gui.GuiPackage;
import org.apache.jmeter.gui.MainFrame;
import org.apache.jmeter.gui.action.ActionNames;
import org.apache.jmeter.gui.action.ActionRouter;
import org.apache.jmeter.gui.action.Load;
import org.apache.jmeter.gui.action.LoadRecentProject;
import org.apache.jmeter.gui.tree.JMeterTreeListener;
import org.apache.jmeter.gui.tree.JMeterTreeModel;
import org.apache.jmeter.gui.tree.JMeterTreeNode;
import org.apache.jmeter.gui.util.FocusRequester;
import org.apache.jmeter.plugin.JMeterPlugin;
import org.apache.jmeter.plugin.PluginManager;
import org.apache.jmeter.reporters.ResultCollector;
import org.apache.jmeter.reporters.Summariser;
import org.apache.jmeter.samplers.Remoteable;
import org.apache.jmeter.samplers.SampleEvent;
import org.apache.jmeter.save.SaveService;
import org.apache.jmeter.services.FileServer;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.testelement.TestStateListener;
import org.apache.jmeter.util.BeanShellInterpreter;
import org.apache.jmeter.util.BeanShellServer;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.collections.HashTree;
import org.apache.jorphan.collections.SearchByClass;
import org.apache.jorphan.gui.ComponentUtil;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.jorphan.reflect.ClassTools;
import org.apache.jorphan.util.HeapDumper;
import org.apache.jorphan.util.JMeterException;
import org.apache.jorphan.util.JOrphanUtils;
import org.apache.log.Logger;
import com.thoughtworks.xstream.converters.ConversionException;
/**
* Main JMeter class; processes options and starts the GUI, non-GUI or server as appropriate.
*/
public class JMeter implements JMeterPlugin {
private static final Logger log = LoggingManager.getLoggerForClass();
public static final int UDP_PORT_DEFAULT = 4445; // needed for ShutdownClient
public static final String HTTP_PROXY_PASS = "http.proxyPass"; // $NON-NLS-1$
public static final String HTTP_PROXY_USER = "http.proxyUser"; // $NON-NLS-1$
public static final String JMETER_NON_GUI = "JMeter.NonGui"; // $NON-NLS-1$
// If the -t flag is to "LAST", then the last loaded file (if any) is used
private static final String USE_LAST_JMX = "LAST";
// If the -j or -l flag is set to LAST or LAST.log|LAST.jtl, then the last loaded file name is used to
// generate the log file name by removing .JMX and replacing it with .log|.jtl
private static final int PROXY_PASSWORD = 'a';// $NON-NLS-1$
private static final int JMETER_HOME_OPT = 'd';// $NON-NLS-1$
private static final int HELP_OPT = 'h';// $NON-NLS-1$
// jmeter.log
private static final int JMLOGFILE_OPT = 'j';// $NON-NLS-1$
// sample result log file
private static final int LOGFILE_OPT = 'l';// $NON-NLS-1$
private static final int NONGUI_OPT = 'n';// $NON-NLS-1$
private static final int PROPFILE_OPT = 'p';// $NON-NLS-1$
private static final int PROPFILE2_OPT = 'q';// $NON-NLS-1$
private static final int REMOTE_OPT = 'r';// $NON-NLS-1$
private static final int SERVER_OPT = 's';// $NON-NLS-1$
private static final int TESTFILE_OPT = 't';// $NON-NLS-1$
private static final int PROXY_USERNAME = 'u';// $NON-NLS-1$
private static final int VERSION_OPT = 'v';// $NON-NLS-1$
private static final int SYSTEM_PROPERTY = 'D';// $NON-NLS-1$
private static final int JMETER_GLOBAL_PROP = 'G';// $NON-NLS-1$
private static final int PROXY_HOST = 'H';// $NON-NLS-1$
private static final int JMETER_PROPERTY = 'J';// $NON-NLS-1$
private static final int LOGLEVEL = 'L';// $NON-NLS-1$
private static final int NONPROXY_HOSTS = 'N';// $NON-NLS-1$
private static final int PROXY_PORT = 'P';// $NON-NLS-1$
private static final int REMOTE_OPT_PARAM = 'R';// $NON-NLS-1$
private static final int SYSTEM_PROPFILE = 'S';// $NON-NLS-1$
private static final int REMOTE_STOP = 'X';// $NON-NLS-1$
/**
* Define the understood options. Each CLOptionDescriptor contains:
* <ul>
* <li>The "long" version of the option. Eg, "help" means that "--help"
* will be recognised.</li>
* <li>The option flags, governing the option's argument(s).</li>
* <li>The "short" version of the option. Eg, 'h' means that "-h" will be
* recognised.</li>
* <li>A description of the option.</li>
* </ul>
*/
private static final CLOptionDescriptor[] options = new CLOptionDescriptor[] {
new CLOptionDescriptor("help", CLOptionDescriptor.ARGUMENT_DISALLOWED, HELP_OPT,
"print usage information and exit"),
new CLOptionDescriptor("version", CLOptionDescriptor.ARGUMENT_DISALLOWED, VERSION_OPT,
"print the version information and exit"),
new CLOptionDescriptor("propfile", CLOptionDescriptor.ARGUMENT_REQUIRED, PROPFILE_OPT,
"the jmeter property file to use"),
new CLOptionDescriptor("addprop", CLOptionDescriptor.ARGUMENT_REQUIRED
| CLOptionDescriptor.DUPLICATES_ALLOWED, PROPFILE2_OPT,
"additional JMeter property file(s)"),
new CLOptionDescriptor("testfile", CLOptionDescriptor.ARGUMENT_REQUIRED, TESTFILE_OPT,
"the jmeter test(.jmx) file to run"),
new CLOptionDescriptor("logfile", CLOptionDescriptor.ARGUMENT_REQUIRED, LOGFILE_OPT,
"the file to log samples to"),
new CLOptionDescriptor("jmeterlogfile", CLOptionDescriptor.ARGUMENT_REQUIRED, JMLOGFILE_OPT,
"jmeter run log file (jmeter.log)"),
new CLOptionDescriptor("nongui", CLOptionDescriptor.ARGUMENT_DISALLOWED, NONGUI_OPT,
"run JMeter in nongui mode"),
new CLOptionDescriptor("server", CLOptionDescriptor.ARGUMENT_DISALLOWED, SERVER_OPT,
"run the JMeter server"),
new CLOptionDescriptor("proxyHost", CLOptionDescriptor.ARGUMENT_REQUIRED, PROXY_HOST,
"Set a proxy server for JMeter to use"),
new CLOptionDescriptor("proxyPort", CLOptionDescriptor.ARGUMENT_REQUIRED, PROXY_PORT,
"Set proxy server port for JMeter to use"),
new CLOptionDescriptor("nonProxyHosts", CLOptionDescriptor.ARGUMENT_REQUIRED, NONPROXY_HOSTS,
"Set nonproxy host list (e.g. *.apache.org|localhost)"),
new CLOptionDescriptor("username", CLOptionDescriptor.ARGUMENT_REQUIRED, PROXY_USERNAME,
"Set username for proxy server that JMeter is to use"),
new CLOptionDescriptor("password", CLOptionDescriptor.ARGUMENT_REQUIRED, PROXY_PASSWORD,
"Set password for proxy server that JMeter is to use"),
new CLOptionDescriptor("jmeterproperty", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENTS_REQUIRED_2, JMETER_PROPERTY,
"Define additional JMeter properties"),
new CLOptionDescriptor("globalproperty", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENTS_REQUIRED_2, JMETER_GLOBAL_PROP,
"Define Global properties (sent to servers)\n\t\te.g. -Gport=123 or -Gglobal.properties"),
new CLOptionDescriptor("systemproperty", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENTS_REQUIRED_2, SYSTEM_PROPERTY,
"Define additional system properties"),
new CLOptionDescriptor("systemPropertyFile", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENT_REQUIRED, SYSTEM_PROPFILE,
"additional system property file(s)"),
new CLOptionDescriptor("loglevel", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENTS_REQUIRED_2, LOGLEVEL,
"[category=]level e.g. jorphan=INFO or jmeter.util=DEBUG"),
new CLOptionDescriptor("runremote", CLOptionDescriptor.ARGUMENT_DISALLOWED, REMOTE_OPT,
"Start remote servers (as defined in remote_hosts)"),
new CLOptionDescriptor("remotestart", CLOptionDescriptor.ARGUMENT_REQUIRED, REMOTE_OPT_PARAM,
"Start these remote servers (overrides remote_hosts)"),
new CLOptionDescriptor("homedir", CLOptionDescriptor.ARGUMENT_REQUIRED, JMETER_HOME_OPT,
"the jmeter home directory to use"),
new CLOptionDescriptor("remoteexit", CLOptionDescriptor.ARGUMENT_DISALLOWED, REMOTE_STOP,
"Exit the remote servers at end of test (non-GUI)"),
};
public JMeter() {
}
// Hack to allow automated tests to find when test has ended
//transient boolean testEnded = false;
private JMeter parent;
private Properties remoteProps; // Properties to be sent to remote servers
private boolean remoteStop; // should remote engines be stopped at end of non-GUI test?
/**
* Starts up JMeter in GUI mode
*/
private void startGui(String testFile) {
PluginManager.install(this, true);
JMeterTreeModel treeModel = new JMeterTreeModel();
JMeterTreeListener treeLis = new JMeterTreeListener(treeModel);
treeLis.setActionHandler(ActionRouter.getInstance());
// NOTUSED: GuiPackage guiPack =
GuiPackage.getInstance(treeLis, treeModel);
MainFrame main = new MainFrame(ActionRouter.getInstance(), treeModel, treeLis);
ComponentUtil.centerComponentInWindow(main, 80);
main.setVisible(true);
ActionRouter.getInstance().actionPerformed(new ActionEvent(main, 1, ActionNames.ADD_ALL));
if (testFile != null) {
FileInputStream reader = null;
try {
File f = new File(testFile);
log.info("Loading file: " + f);
FileServer.getFileServer().setBaseForScript(f);
reader = new FileInputStream(f);
HashTree tree = SaveService.loadTree(reader);
GuiPackage.getInstance().setTestPlanFile(f.getAbsolutePath());
Load.insertLoadedTree(1, tree);
} catch (ConversionException e) {
log.error("Failure loading test file", e);
JMeterUtils.reportErrorToUser(SaveService.CEtoString(e));
} catch (Exception e) {
log.error("Failure loading test file", e);
JMeterUtils.reportErrorToUser(e.toString());
} finally {
JOrphanUtils.closeQuietly(reader);
}
} else {
JTree jTree = GuiPackage.getInstance().getMainFrame().getTree();
TreePath path = jTree.getPathForRow(0);
jTree.setSelectionPath(path);
new FocusRequester(jTree);
}
}
/**
* Takes the command line arguments and uses them to determine how to
* startup JMeter.
*
* Called reflectively by {@link NewDriver#main(String[])}
*/
public void start(String[] args) {
CLArgsParser parser = new CLArgsParser(args, options);
String error = parser.getErrorString();
if (error == null){// Check option combinations
boolean gui = parser.getArgumentById(NONGUI_OPT)==null;
boolean nonGuiOnly = parser.getArgumentById(REMOTE_OPT)!=null
|| parser.getArgumentById(REMOTE_OPT_PARAM)!=null
|| parser.getArgumentById(REMOTE_STOP)!=null;
if (gui && nonGuiOnly) {
error = "-r and -R and -X are only valid in non-GUI mode";
}
}
if (null != error) {
System.err.println("Error: " + error);
System.out.println("Usage");
System.out.println(CLUtil.describeOptions(options).toString());
return;
}
try {
initializeProperties(parser); // Also initialises JMeter logging
/*
* The following is needed for HTTPClient.
* (originally tried doing this in HTTPSampler2,
* but it appears that it was done too late when running in GUI mode)
* Set the commons logging default to Avalon Logkit, if not already defined
*/
if (System.getProperty("org.apache.commons.logging.Log") == null) { // $NON-NLS-1$
System.setProperty("org.apache.commons.logging.Log" // $NON-NLS-1$
, "org.apache.commons.logging.impl.LogKitLogger"); // $NON-NLS-1$
}
Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {
public void uncaughtException(Thread t, Throwable e) {
if (!(e instanceof ThreadDeath)) {
log.error("Uncaught exception: ", e);
System.err.println("Uncaught Exception " + e + ". See log file for details.");
}
}
});
log.info(JMeterUtils.getJMeterCopyright());
log.info("Version " + JMeterUtils.getJMeterVersion());
logProperty("java.version"); //$NON-NLS-1$
logProperty("java.vm.name"); //$NON-NLS-1$
logProperty("os.name"); //$NON-NLS-1$
logProperty("os.arch"); //$NON-NLS-1$
logProperty("os.version"); //$NON-NLS-1$
logProperty("file.encoding"); // $NON-NLS-1$
log.info("Default Locale=" + Locale.getDefault().getDisplayName());
log.info("JMeter Locale=" + JMeterUtils.getLocale().getDisplayName());
log.info("JMeterHome=" + JMeterUtils.getJMeterHome());
logProperty("user.dir"," ="); //$NON-NLS-1$
log.info("PWD ="+new File(".").getCanonicalPath());//$NON-NLS-1$
log.info("IP: "+JMeterUtils.getLocalHostIP()
+" Name: "+JMeterUtils.getLocalHostName()
+" FullName: "+JMeterUtils.getLocalHostFullName());
setProxy(parser);
updateClassLoader();
if (log.isDebugEnabled())
{
String jcp=System.getProperty("java.class.path");// $NON-NLS-1$
String bits[] =jcp.split(File.pathSeparator);
log.debug("ClassPath");
for(String bit : bits){
log.debug(bit);
}
log.debug(jcp);
}
// Set some (hopefully!) useful properties
long now=System.currentTimeMillis();
JMeterUtils.setProperty("START.MS",Long.toString(now));// $NON-NLS-1$
Date today=new Date(now); // so it agrees with above
// TODO perhaps should share code with __time() function for this...
JMeterUtils.setProperty("START.YMD",new SimpleDateFormat("yyyyMMdd").format(today));// $NON-NLS-1$ $NON-NLS-2$
JMeterUtils.setProperty("START.HMS",new SimpleDateFormat("HHmmss").format(today));// $NON-NLS-1$ $NON-NLS-2$
if (parser.getArgumentById(VERSION_OPT) != null) {
System.out.println(JMeterUtils.getJMeterCopyright());
System.out.println("Version " + JMeterUtils.getJMeterVersion());
} else if (parser.getArgumentById(HELP_OPT) != null) {
System.out.println(JMeterUtils.getResourceFileAsText("org/apache/jmeter/help.txt"));// $NON-NLS-1$
} else if (parser.getArgumentById(SERVER_OPT) != null) {
// Start the server
try {
RemoteJMeterEngineImpl.startServer(JMeterUtils.getPropDefault("server_port", 0)); // $NON-NLS-1$
} catch (Exception ex) {
System.err.println("Server failed to start: "+ex);
log.error("Giving up, as server failed with:", ex);
throw ex;
}
startOptionalServers();
} else {
String testFile=null;
CLOption testFileOpt = parser.getArgumentById(TESTFILE_OPT);
if (testFileOpt != null){
testFile = testFileOpt.getArgument();
if (USE_LAST_JMX.equals(testFile)) {
testFile = LoadRecentProject.getRecentFile(0);// most recent
}
}
if (parser.getArgumentById(NONGUI_OPT) == null) {
startGui(testFile);
startOptionalServers();
} else {
CLOption rem=parser.getArgumentById(REMOTE_OPT_PARAM);
if (rem==null) { rem=parser.getArgumentById(REMOTE_OPT); }
CLOption jtl = parser.getArgumentById(LOGFILE_OPT);
String jtlFile = null;
if (jtl != null){
jtlFile=processLAST(jtl.getArgument(), ".jtl"); // $NON-NLS-1$
}
startNonGui(testFile, jtlFile, rem);
startOptionalServers();
}
}
} catch (IllegalUserActionException e) {
System.out.println(e.getMessage());
System.out.println("Incorrect Usage");
System.out.println(CLUtil.describeOptions(options).toString());
} catch (Throwable e) {
log.fatalError("An error occurred: ",e);
System.out.println("An error occurred: " + e.getMessage());
System.exit(1); // TODO - could this be return?
}
}
// Update classloader if necessary
private void updateClassLoader() {
updatePath("search_paths",";"); //$NON-NLS-1$//$NON-NLS-2$
updatePath("user.classpath",File.pathSeparator);//$NON-NLS-1$
}
private void updatePath(String property, String sep) {
String userpath= JMeterUtils.getPropDefault(property,"");// $NON-NLS-1$
if (userpath.length() <= 0) { return; }
log.info(property+"="+userpath); //$NON-NLS-1$
StringTokenizer tok = new StringTokenizer(userpath, sep);
while(tok.hasMoreTokens()) {
String path=tok.nextToken();
File f=new File(path);
if (!f.canRead() && !f.isDirectory()) {
log.warn("Can't read "+path);
} else {
log.info("Adding to classpath: "+path);
try {
NewDriver.addPath(path);
} catch (MalformedURLException e) {
log.warn("Error adding: "+path+" "+e.getLocalizedMessage());
}
}
}
}
/**
*
*/
private void startOptionalServers() {
int bshport = JMeterUtils.getPropDefault("beanshell.server.port", 0);// $NON-NLS-1$
String bshfile = JMeterUtils.getPropDefault("beanshell.server.file", "");// $NON-NLS-1$ $NON-NLS-2$
if (bshport > 0) {
log.info("Starting Beanshell server (" + bshport + "," + bshfile + ")");
Runnable t = new BeanShellServer(bshport, bshfile);
t.run();
}
// Should we run a beanshell script on startup?
String bshinit = JMeterUtils.getProperty("beanshell.init.file");// $NON-NLS-1$
if (bshinit != null){
log.info("Run Beanshell on file: "+bshinit);
try {
BeanShellInterpreter bsi = new BeanShellInterpreter();//bshinit,log);
bsi.source(bshinit);
} catch (ClassNotFoundException e) {
log.warn("Could not start Beanshell: "+e.getLocalizedMessage());
} catch (JMeterException e) {
log.warn("Could not process Beanshell file: "+e.getLocalizedMessage());
}
}
int mirrorPort=JMeterUtils.getPropDefault("mirror.server.port", 0);// $NON-NLS-1$
if (mirrorPort > 0){
log.info("Starting Mirror server (" + mirrorPort + ")");
try {
Object instance = ClassTools.construct(
"org.apache.jmeter.protocol.http.control.HttpMirrorControl",// $NON-NLS-1$
mirrorPort);
ClassTools.invoke(instance,"startHttpMirror");
} catch (JMeterException e) {
log.warn("Could not start Mirror server",e);
}
}
}
/**
* Sets a proxy server for the JVM if the command line arguments are
* specified.
*/
private void setProxy(CLArgsParser parser) throws IllegalUserActionException {
if (parser.getArgumentById(PROXY_USERNAME) != null) {
Properties jmeterProps = JMeterUtils.getJMeterProperties();
if (parser.getArgumentById(PROXY_PASSWORD) != null) {
String u, p;
Authenticator.setDefault(new ProxyAuthenticator(u = parser.getArgumentById(PROXY_USERNAME)
.getArgument(), p = parser.getArgumentById(PROXY_PASSWORD).getArgument()));
log.info("Set Proxy login: " + u + "/" + p);
jmeterProps.setProperty(HTTP_PROXY_USER, u);//for Httpclient
jmeterProps.setProperty(HTTP_PROXY_PASS, p);//for Httpclient
} else {
String u;
Authenticator.setDefault(new ProxyAuthenticator(u = parser.getArgumentById(PROXY_USERNAME)
.getArgument(), ""));
log.info("Set Proxy login: " + u);
jmeterProps.setProperty(HTTP_PROXY_USER, u);
}
}
if (parser.getArgumentById(PROXY_HOST) != null && parser.getArgumentById(PROXY_PORT) != null) {
String h = parser.getArgumentById(PROXY_HOST).getArgument();
String p = parser.getArgumentById(PROXY_PORT).getArgument();
System.setProperty("http.proxyHost", h );// $NON-NLS-1$
System.setProperty("https.proxyHost", h);// $NON-NLS-1$
System.setProperty("http.proxyPort", p);// $NON-NLS-1$
System.setProperty("https.proxyPort", p);// $NON-NLS-1$
log.info("Set http[s].proxyHost: " + h + " Port: " + p);
} else if (parser.getArgumentById(PROXY_HOST) != null || parser.getArgumentById(PROXY_PORT) != null) {
throw new IllegalUserActionException(JMeterUtils.getResString("proxy_cl_error"));// $NON-NLS-1$
}
if (parser.getArgumentById(NONPROXY_HOSTS) != null) {
String n = parser.getArgumentById(NONPROXY_HOSTS).getArgument();
System.setProperty("http.nonProxyHosts", n );// $NON-NLS-1$
System.setProperty("https.nonProxyHosts", n );// $NON-NLS-1$
log.info("Set http[s].nonProxyHosts: "+n);
}
}
private void initializeProperties(CLArgsParser parser) {
if (parser.getArgumentById(PROPFILE_OPT) != null) {
JMeterUtils.loadJMeterProperties(parser.getArgumentById(PROPFILE_OPT).getArgument());
} else {
JMeterUtils.loadJMeterProperties(NewDriver.getJMeterDir() + File.separator
+ "bin" + File.separator // $NON-NLS-1$
+ "jmeter.properties");// $NON-NLS-1$
}
if (parser.getArgumentById(JMLOGFILE_OPT) != null){
String jmlogfile=parser.getArgumentById(JMLOGFILE_OPT).getArgument();
jmlogfile = processLAST(jmlogfile, ".log");// $NON-NLS-1$
JMeterUtils.setProperty(LoggingManager.LOG_FILE,jmlogfile);
}
JMeterUtils.initLogging();
JMeterUtils.initLocale();
// Bug 33845 - allow direct override of Home dir
if (parser.getArgumentById(JMETER_HOME_OPT) == null) {
JMeterUtils.setJMeterHome(NewDriver.getJMeterDir());
} else {
JMeterUtils.setJMeterHome(parser.getArgumentById(JMETER_HOME_OPT).getArgument());
}
Properties jmeterProps = JMeterUtils.getJMeterProperties();
remoteProps = new Properties();
// Add local JMeter properties, if the file is found
String userProp = JMeterUtils.getPropDefault("user.properties",""); //$NON-NLS-1$
if (userProp.length() > 0){ //$NON-NLS-1$
FileInputStream fis=null;
try {
File file = JMeterUtils.findFile(userProp);
if (file.canRead()){
log.info("Loading user properties from: "+file.getCanonicalPath());
fis = new FileInputStream(file);
Properties tmp = new Properties();
tmp.load(fis);
jmeterProps.putAll(tmp);
LoggingManager.setLoggingLevels(tmp);//Do what would be done earlier
}
} catch (IOException e) {
log.warn("Error loading user property file: " + userProp, e);
} finally {
JOrphanUtils.closeQuietly(fis);
}
}
// Add local system properties, if the file is found
String sysProp = JMeterUtils.getPropDefault("system.properties",""); //$NON-NLS-1$
if (sysProp.length() > 0){
FileInputStream fis=null;
try {
File file = JMeterUtils.findFile(sysProp);
if (file.canRead()){
log.info("Loading system properties from: "+file.getCanonicalPath());
fis = new FileInputStream(file);
System.getProperties().load(fis);
}
} catch (IOException e) {
log.warn("Error loading system property file: " + sysProp, e);
} finally {
JOrphanUtils.closeQuietly(fis);
}
}
// Process command line property definitions
// These can potentially occur multiple times
List<CLOption> clOptions = parser.getArguments();
int size = clOptions.size();
for (int i = 0; i < size; i++) {
CLOption option = clOptions.get(i);
String name = option.getArgument(0);
String value = option.getArgument(1);
FileInputStream fis = null;
switch (option.getDescriptor().getId()) {
// Should not have any text arguments
case CLOption.TEXT_ARGUMENT:
throw new IllegalArgumentException("Unknown arg: "+option.getArgument());
case PROPFILE2_OPT: // Bug 33920 - allow multiple props
try {
fis = new FileInputStream(new File(name));
Properties tmp = new Properties();
tmp.load(fis);
jmeterProps.putAll(tmp);
LoggingManager.setLoggingLevels(tmp);//Do what would be done earlier
} catch (FileNotFoundException e) {
log.warn("Can't find additional property file: " + name, e);
} catch (IOException e) {
log.warn("Error loading additional property file: " + name, e);
} finally {
JOrphanUtils.closeQuietly(fis);
}
break;
case SYSTEM_PROPFILE:
log.info("Setting System properties from file: " + name);
try {
fis = new FileInputStream(new File(name));
System.getProperties().load(fis);
} catch (IOException e) {
log.warn("Cannot find system property file "+e.getLocalizedMessage());
} finally {
JOrphanUtils.closeQuietly(fis);
}
break;
case SYSTEM_PROPERTY:
if (value.length() > 0) { // Set it
log.info("Setting System property: " + name + "=" + value);
System.getProperties().setProperty(name, value);
} else { // Reset it
log.warn("Removing System property: " + name);
System.getProperties().remove(name);
}
break;
case JMETER_PROPERTY:
if (value.length() > 0) { // Set it
log.info("Setting JMeter property: " + name + "=" + value);
jmeterProps.setProperty(name, value);
} else { // Reset it
log.warn("Removing JMeter property: " + name);
jmeterProps.remove(name);
}
break;
case JMETER_GLOBAL_PROP:
if (value.length() > 0) { // Set it
log.info("Setting Global property: " + name + "=" + value);
remoteProps.setProperty(name, value);
} else {
File propFile = new File(name);
if (propFile.canRead()) {
log.info("Setting Global properties from the file "+name);
try {
fis = new FileInputStream(propFile);
remoteProps.load(fis);
} catch (FileNotFoundException e) {
log.warn("Could not find properties file: "+e.getLocalizedMessage());
} catch (IOException e) {
log.warn("Could not load properties file: "+e.getLocalizedMessage());
} finally {
JOrphanUtils.closeQuietly(fis);
}
}
}
break;
case LOGLEVEL:
if (value.length() > 0) { // Set category
log.info("LogLevel: " + name + "=" + value);
LoggingManager.setPriority(value, name);
} else { // Set root level
log.warn("LogLevel: " + name);
LoggingManager.setPriority(name);
}
break;
case REMOTE_STOP:
remoteStop = true;
break;
default:
// ignored
break;
}
}
String sample_variables = (String) jmeterProps.get(SampleEvent.SAMPLE_VARIABLES);
if (sample_variables != null){
remoteProps.put(SampleEvent.SAMPLE_VARIABLES, sample_variables);
}
}
/*
* Checks for LAST or LASTsuffix.
* Returns the LAST name with .JMX replaced by suffix.
*/
private String processLAST(String jmlogfile, String suffix) {
if (USE_LAST_JMX.equals(jmlogfile) || USE_LAST_JMX.concat(suffix).equals(jmlogfile)){
String last = LoadRecentProject.getRecentFile(0);// most recent
final String JMX_SUFFIX = ".JMX"; // $NON-NLS-1$
if (last.toUpperCase(Locale.ENGLISH).endsWith(JMX_SUFFIX)){
jmlogfile=last.substring(0, last.length() - JMX_SUFFIX.length()).concat(suffix);
}
}
return jmlogfile;
}
private void startNonGui(String testFile, String logFile, CLOption remoteStart)
throws IllegalUserActionException {
// add a system property so samplers can check to see if JMeter
// is running in NonGui mode
System.setProperty(JMETER_NON_GUI, "true");// $NON-NLS-1$
JMeter driver = new JMeter();// TODO - why does it create a new instance?
driver.remoteProps = this.remoteProps;
driver.remoteStop = this.remoteStop;
driver.parent = this;
PluginManager.install(this, false);
String remote_hosts_string = null;
if (remoteStart != null) {
remote_hosts_string = remoteStart.getArgument();
if (remote_hosts_string == null) {
remote_hosts_string = JMeterUtils.getPropDefault(
"remote_hosts", //$NON-NLS-1$
"127.0.0.1");//$NON-NLS-1$
}
}
if (testFile == null) {
throw new IllegalUserActionException("Non-GUI runs require a test plan");
}
driver.runNonGui(testFile, logFile, remoteStart != null, remote_hosts_string);
}
// run test in batch mode
private void runNonGui(String testFile, String logFile, boolean remoteStart, String remote_hosts_string) {
FileInputStream reader = null;
try {
File f = new File(testFile);
if (!f.exists() || !f.isFile()) {
println("Could not open " + testFile);
return;
}
FileServer.getFileServer().setBaseForScript(f);
reader = new FileInputStream(f);
log.info("Loading file: " + f);
HashTree tree = SaveService.loadTree(reader);
@SuppressWarnings("deprecation") // Deliberate use of deprecated ctor
JMeterTreeModel treeModel = new JMeterTreeModel(new Object());// Create non-GUI version to avoid headless problems
JMeterTreeNode root = (JMeterTreeNode) treeModel.getRoot();
treeModel.addSubTree(tree, root);
// Hack to resolve ModuleControllers in non GUI mode
SearchByClass<ReplaceableController> replaceableControllers = new SearchByClass<ReplaceableController>(ReplaceableController.class);
tree.traverse(replaceableControllers);
Collection<ReplaceableController> replaceableControllersRes = replaceableControllers.getSearchResults();
for (Iterator<ReplaceableController> iter = replaceableControllersRes.iterator(); iter.hasNext();) {
ReplaceableController replaceableController = iter.next();
replaceableController.resolveReplacementSubTree(root);
}
// Remove the disabled items
// For GUI runs this is done in Start.java
convertSubTree(tree);
Summariser summer = null;
String summariserName = JMeterUtils.getPropDefault("summariser.name", "");//$NON-NLS-1$
if (summariserName.length() > 0) {
log.info("Creating summariser <" + summariserName + ">");
println("Creating summariser <" + summariserName + ">");
summer = new Summariser(summariserName);
}
if (logFile != null) {
ResultCollector logger = new ResultCollector(summer);
logger.setFilename(logFile);
tree.add(tree.getArray()[0], logger);
}
else {
// only add Summariser if it can not be shared with the ResultCollector
if (summer != null) {
tree.add(tree.getArray()[0], summer);
}
}
List<JMeterEngine> engines = new LinkedList<JMeterEngine>();
tree.add(tree.getArray()[0], new ListenToTest(parent, (remoteStart && remoteStop) ? engines : null));
println("Created the tree successfully using "+testFile);
if (!remoteStart) {
JMeterEngine engine = new StandardJMeterEngine();
engine.configure(tree);
long now=System.currentTimeMillis();
println("Starting the test @ "+new Date(now)+" ("+now+")");
engine.runTest();
engines.add(engine);
} else {
java.util.StringTokenizer st = new java.util.StringTokenizer(remote_hosts_string, ",");//$NON-NLS-1$
while (st.hasMoreElements()) {
String el = (String) st.nextElement();
println("Configuring remote engine for " + el);
log.info("Configuring remote engine for " + el);
JMeterEngine eng = doRemoteInit(el.trim(), tree);
if (null != eng) {
engines.add(eng);
} else {
println("Failed to configure "+el);
}
}
if (engines.isEmpty()) {
println("No remote engines were started.");
return;
}
println("Starting remote engines");
log.info("Starting remote engines");
long now=System.currentTimeMillis();
println("Starting the test @ "+new Date(now)+" ("+now+")");
for (JMeterEngine engine : engines) {
engine.runTest();
}
println("Remote engines have been started");
log.info("Remote engines have been started");
}
startUdpDdaemon(engines);
} catch (Exception e) {
System.out.println("Error in NonGUIDriver " + e.toString());
log.error("Error in NonGUIDriver", e);
} finally {
JOrphanUtils.closeQuietly(reader);
}
}
/**
* Refactored from AbstractAction.java
*
* @param tree
*/
public static void convertSubTree(HashTree tree) {
LinkedList<Object> copyList = new LinkedList<Object>(tree.list());
for (Object o : copyList) {
if (o instanceof TestElement) {
TestElement item = (TestElement) o;
if (item.isEnabled()) {
if (item instanceof ReplaceableController) {
ReplaceableController rc;
// TODO this bit of code needs to be tidied up
// Unfortunately ModuleController is in components, not core
if (item.getClass().getName().equals("org.apache.jmeter.control.ModuleController")){ // Bug 47165
rc = (ReplaceableController) item;
} else {
// HACK: force the controller to load its tree
rc = (ReplaceableController) item.clone();
}
HashTree subTree = tree.getTree(item);
if (subTree != null) {
HashTree replacementTree = rc.getReplacementSubTree();
if (replacementTree != null) {
convertSubTree(replacementTree);
tree.replace(item, rc);
tree.set(rc, replacementTree);
}
} else { // null subTree
convertSubTree(tree.getTree(item));
}
} else { // not Replaceable Controller
convertSubTree(tree.getTree(item));
}
} else { // Not enabled
tree.remove(item);
}
} else { // Not a TestElement
JMeterTreeNode item = (JMeterTreeNode) o;
if (item.isEnabled()) {
// Replacement only needs to occur when starting the engine
// @see StandardJMeterEngine.run()
if (item.getUserObject() instanceof ReplaceableController) {
ReplaceableController rc =
(ReplaceableController) item.getTestElement();
HashTree subTree = tree.getTree(item);
if (subTree != null) {
HashTree replacementTree = rc.getReplacementSubTree();
if (replacementTree != null) {
convertSubTree(replacementTree);
tree.replace(item, rc);
tree.set(rc, replacementTree);
}
}
} else { // Not a ReplaceableController
convertSubTree(tree.getTree(item));
TestElement testElement = item.getTestElement();
tree.replace(item, testElement);
}
} else { // Not enabled
tree.remove(item);
}
}
}
}
private JMeterEngine doRemoteInit(String hostName, HashTree testTree) {
JMeterEngine engine = null;
try {
engine = new ClientJMeterEngine(hostName);
} catch (Exception e) {
log.fatalError("Failure connecting to remote host: "+hostName, e);
System.err.println("Failure connecting to remote host: "+hostName+" "+e);
return null;
}
engine.configure(testTree);
if (!remoteProps.isEmpty()) {
engine.setProperties(remoteProps);
}
return engine;
}
/*
* Listen to test and handle tidyup after non-GUI test completes.
* If running a remote test, then after waiting a few seconds for listeners to finish files,
* it calls ClientJMeterEngine.tidyRMI() to deal with the Naming Timer Thread.
*/
private static class ListenToTest implements TestStateListener, Runnable, Remoteable {
private AtomicInteger started = new AtomicInteger(0); // keep track of remote tests
//NOT YET USED private JMeter _parent;
private final List<JMeterEngine> engines;
public ListenToTest(JMeter parent, List<JMeterEngine> engines) {
//_parent = parent;
this.engines=engines;
}
public void testEnded(String host) {
long now=System.currentTimeMillis();
log.info("Finished remote host: " + host + " ("+now+")");
if (started.decrementAndGet() <= 0) {
Thread stopSoon = new Thread(this);
stopSoon.start();
}
}
public void testEnded() {
long now = System.currentTimeMillis();
println("Tidying up ... @ "+new Date(now)+" ("+now+")");
println("... end of run");
checkForRemainingThreads();
}
public void testStarted(String host) {
started.incrementAndGet();
long now=System.currentTimeMillis();
log.info("Started remote host: " + host + " ("+now+")");
}
public void testStarted() {
long now=System.currentTimeMillis();
log.info(JMeterUtils.getResString("running_test")+" ("+now+")");//$NON-NLS-1$
}
/**
* This is a hack to allow listeners a chance to close their files. Must
* implement a queue for sample responses tied to the engine, and the
* engine won't deliver testEnded signal till all sample responses have
* been delivered. Should also improve performance of remote JMeter
* testing.
*/
public void run() {
long now = System.currentTimeMillis();
println("Tidying up remote @ "+new Date(now)+" ("+now+")");
if (engines!=null){ // it will be null unless remoteStop = true
println("Exitting remote servers");
for (JMeterEngine e : engines){
e.exit();
}
}
try {
Thread.sleep(5000); // Allow listeners to close files
} catch (InterruptedException ignored) {
}
ClientJMeterEngine.tidyRMI(log);
println("... end of run");
checkForRemainingThreads();
}
/**
* Runs daemon thread which waits a short while;
* if JVM does not exit, lists remaining non-daemon threads on stdout.
*/
private void checkForRemainingThreads() {
// This cannot be a JMeter class variable, because properties
// are not initialised until later.
final int REMAIN_THREAD_PAUSE =
JMeterUtils.getPropDefault("jmeter.exit.check.pause", 2000); // $NON-NLS-1$
if (REMAIN_THREAD_PAUSE > 0) {
Thread daemon = new Thread(){
@Override
public void run(){
try {
Thread.sleep(REMAIN_THREAD_PAUSE); // Allow enough time for JVM to exit
} catch (InterruptedException ignored) {
}
// This is a daemon thread, which should only reach here if there are other
// non-daemon threads still active
System.out.println("The JVM should have exitted but did not.");
System.out.println("The following non-daemon threads are still running (DestroyJavaVM is OK):");
JOrphanUtils.displayThreads(false);
}
};
daemon.setDaemon(true);
daemon.start();
}
}
}
private static void println(String str) {
System.out.println(str);
}
private static final String[][] DEFAULT_ICONS = {
{ "org.apache.jmeter.control.gui.TestPlanGui", "org/apache/jmeter/images/beaker.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.timers.gui.AbstractTimerGui", "org/apache/jmeter/images/timer.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.threads.gui.ThreadGroupGui", "org/apache/jmeter/images/thread.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.visualizers.gui.AbstractListenerGui", "org/apache/jmeter/images/meter.png" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.config.gui.AbstractConfigGui", "org/apache/jmeter/images/testtubes.png" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.processor.gui.AbstractPreProcessorGui", "org/apache/jmeter/images/leafnode.gif"}, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.processor.gui.AbstractPostProcessorGui","org/apache/jmeter/images/leafnodeflip.gif"},//$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.control.gui.AbstractControllerGui", "org/apache/jmeter/images/knob.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.control.gui.WorkBenchGui", "org/apache/jmeter/images/clipboard.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.samplers.gui.AbstractSamplerGui", "org/apache/jmeter/images/pipet.png" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.assertions.gui.AbstractAssertionGui", "org/apache/jmeter/images/question.gif"} //$NON-NLS-1$ $NON-NLS-2$
};
public String[][] getIconMappings() {
final String defaultIconProp = "org/apache/jmeter/images/icon.properties"; //$NON-NLS-1$
String iconProp = JMeterUtils.getPropDefault("jmeter.icons", defaultIconProp);//$NON-NLS-1$
Properties p = JMeterUtils.loadProperties(iconProp);
if (p == null && !iconProp.equals(defaultIconProp)) {
log.info(iconProp + " not found - using " + defaultIconProp);
iconProp = defaultIconProp;
p = JMeterUtils.loadProperties(iconProp);
}
if (p == null) {
log.info(iconProp + " not found - using inbuilt icon set");
return DEFAULT_ICONS;
}
log.info("Loaded icon properties from " + iconProp);
String[][] iconlist = new String[p.size()][3];
Enumeration<?> pe = p.keys();
int i = 0;
while (pe.hasMoreElements()) {
String key = (String) pe.nextElement();
String icons[] = JOrphanUtils.split(p.getProperty(key), " ");//$NON-NLS-1$
iconlist[i][0] = key;
iconlist[i][1] = icons[0];
if (icons.length > 1) {
iconlist[i][2] = icons[1];
}
i++;
}
return iconlist;
}
public String[][] getResourceBundles() {
return new String[0][];
}
/**
* Check if JMeter is running in non-GUI mode.
*
* @return true if JMeter is running in non-GUI mode.
*/
public static boolean isNonGUI(){
return "true".equals(System.getProperty(JMeter.JMETER_NON_GUI)); //$NON-NLS-1$
}
private void logProperty(String prop){
log.info(prop+"="+System.getProperty(prop));//$NON-NLS-1$
}
private void logProperty(String prop,String separator){
log.info(prop+separator+System.getProperty(prop));//$NON-NLS-1$
}
private static void startUdpDdaemon(final List<JMeterEngine> engines) {
int port = JMeterUtils.getPropDefault("jmeterengine.nongui.port", UDP_PORT_DEFAULT); // $NON-NLS-1$
int maxPort = JMeterUtils.getPropDefault("jmeterengine.nongui.maxport", 4455); // $NON-NLS-1$
if (port > 1000){
final DatagramSocket socket = getSocket(port, maxPort);
if (socket != null) {
Thread waiter = new Thread("UDP Listener"){
@Override
public void run() {
waitForSignals(engines, socket);
}
};
waiter.setDaemon(true);
waiter.start();
} else {
System.out.println("Failed to create UDP port");
}
}
}
private static void waitForSignals(final List<JMeterEngine> engines, DatagramSocket socket) {
byte[] buf = new byte[80];
System.out.println("Waiting for possible shutdown message on port "+socket.getLocalPort());
DatagramPacket request = new DatagramPacket(buf, buf.length);
try {
while(true) {
socket.receive(request);
InetAddress address = request.getAddress();
// Only accept commands from the local host
if (address.isLoopbackAddress()){
String command = new String(request.getData(), request.getOffset(), request.getLength(),"ASCII");
System.out.println("Command: "+command+" received from "+address);
log.info("Command: "+command+" received from "+address);
if (command.equals("StopTestNow")){
for(JMeterEngine engine : engines) {
engine.stopTest(true);
}
} else if (command.equals("Shutdown")) {
for(JMeterEngine engine : engines) {
engine.stopTest(false);
}
} else if (command.equals("HeapDump")) {
HeapDumper.dumpHeap();
} else {
System.out.println("Command: "+command+" not recognised ");
}
}
}
} catch (Exception e) {
System.out.println(e);
} finally {
socket.close();
}
}
private static DatagramSocket getSocket(int udpPort, int udpPortMax) {
DatagramSocket socket = null;
int i = udpPort;
while (i<= udpPortMax) {
try {
socket = new DatagramSocket(i);
break;
} catch (SocketException e) {
i++;
}
}
return socket;
}
} | src/core/org/apache/jmeter/JMeter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter;
import java.awt.event.ActionEvent;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import java.net.Authenticator;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.SocketException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicInteger;
import javax.swing.JTree;
import javax.swing.tree.TreePath;
import org.apache.commons.cli.avalon.CLArgsParser;
import org.apache.commons.cli.avalon.CLOption;
import org.apache.commons.cli.avalon.CLOptionDescriptor;
import org.apache.commons.cli.avalon.CLUtil;
import org.apache.jmeter.control.ReplaceableController;
import org.apache.jmeter.engine.ClientJMeterEngine;
import org.apache.jmeter.engine.JMeterEngine;
import org.apache.jmeter.engine.RemoteJMeterEngineImpl;
import org.apache.jmeter.engine.StandardJMeterEngine;
import org.apache.jmeter.exceptions.IllegalUserActionException;
import org.apache.jmeter.gui.GuiPackage;
import org.apache.jmeter.gui.MainFrame;
import org.apache.jmeter.gui.action.ActionNames;
import org.apache.jmeter.gui.action.ActionRouter;
import org.apache.jmeter.gui.action.Load;
import org.apache.jmeter.gui.action.LoadRecentProject;
import org.apache.jmeter.gui.tree.JMeterTreeListener;
import org.apache.jmeter.gui.tree.JMeterTreeModel;
import org.apache.jmeter.gui.tree.JMeterTreeNode;
import org.apache.jmeter.gui.util.FocusRequester;
import org.apache.jmeter.plugin.JMeterPlugin;
import org.apache.jmeter.plugin.PluginManager;
import org.apache.jmeter.reporters.ResultCollector;
import org.apache.jmeter.reporters.Summariser;
import org.apache.jmeter.samplers.Remoteable;
import org.apache.jmeter.samplers.SampleEvent;
import org.apache.jmeter.save.SaveService;
import org.apache.jmeter.services.FileServer;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.testelement.TestStateListener;
import org.apache.jmeter.util.BeanShellInterpreter;
import org.apache.jmeter.util.BeanShellServer;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.collections.HashTree;
import org.apache.jorphan.collections.SearchByClass;
import org.apache.jorphan.gui.ComponentUtil;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.jorphan.reflect.ClassTools;
import org.apache.jorphan.util.HeapDumper;
import org.apache.jorphan.util.JMeterException;
import org.apache.jorphan.util.JOrphanUtils;
import org.apache.log.Logger;
import com.thoughtworks.xstream.converters.ConversionException;
/**
* Main JMeter class; processes options and starts the GUI, non-GUI or server as appropriate.
*/
public class JMeter implements JMeterPlugin {
private static final Logger log = LoggingManager.getLoggerForClass();
public static final int UDP_PORT_DEFAULT = 4445; // needed for ShutdownClient
public static final String HTTP_PROXY_PASS = "http.proxyPass"; // $NON-NLS-1$
public static final String HTTP_PROXY_USER = "http.proxyUser"; // $NON-NLS-1$
public static final String JMETER_NON_GUI = "JMeter.NonGui"; // $NON-NLS-1$
// If the -t flag is to "LAST", then the last loaded file (if any) is used
private static final String USE_LAST_JMX = "LAST";
// If the -j or -l flag is set to LAST or LAST.log|LAST.jtl, then the last loaded file name is used to
// generate the log file name by removing .JMX and replacing it with .log|.jtl
private static final int PROXY_PASSWORD = 'a';// $NON-NLS-1$
private static final int JMETER_HOME_OPT = 'd';// $NON-NLS-1$
private static final int HELP_OPT = 'h';// $NON-NLS-1$
// jmeter.log
private static final int JMLOGFILE_OPT = 'j';// $NON-NLS-1$
// sample result log file
private static final int LOGFILE_OPT = 'l';// $NON-NLS-1$
private static final int NONGUI_OPT = 'n';// $NON-NLS-1$
private static final int PROPFILE_OPT = 'p';// $NON-NLS-1$
private static final int PROPFILE2_OPT = 'q';// $NON-NLS-1$
private static final int REMOTE_OPT = 'r';// $NON-NLS-1$
private static final int SERVER_OPT = 's';// $NON-NLS-1$
private static final int TESTFILE_OPT = 't';// $NON-NLS-1$
private static final int PROXY_USERNAME = 'u';// $NON-NLS-1$
private static final int VERSION_OPT = 'v';// $NON-NLS-1$
private static final int SYSTEM_PROPERTY = 'D';// $NON-NLS-1$
private static final int JMETER_GLOBAL_PROP = 'G';// $NON-NLS-1$
private static final int PROXY_HOST = 'H';// $NON-NLS-1$
private static final int JMETER_PROPERTY = 'J';// $NON-NLS-1$
private static final int LOGLEVEL = 'L';// $NON-NLS-1$
private static final int NONPROXY_HOSTS = 'N';// $NON-NLS-1$
private static final int PROXY_PORT = 'P';// $NON-NLS-1$
private static final int REMOTE_OPT_PARAM = 'R';// $NON-NLS-1$
private static final int SYSTEM_PROPFILE = 'S';// $NON-NLS-1$
private static final int REMOTE_STOP = 'X';// $NON-NLS-1$
/**
* Define the understood options. Each CLOptionDescriptor contains:
* <ul>
* <li>The "long" version of the option. Eg, "help" means that "--help"
* will be recognised.</li>
* <li>The option flags, governing the option's argument(s).</li>
* <li>The "short" version of the option. Eg, 'h' means that "-h" will be
* recognised.</li>
* <li>A description of the option.</li>
* </ul>
*/
private static final CLOptionDescriptor[] options = new CLOptionDescriptor[] {
new CLOptionDescriptor("help", CLOptionDescriptor.ARGUMENT_DISALLOWED, HELP_OPT,
"print usage information and exit"),
new CLOptionDescriptor("version", CLOptionDescriptor.ARGUMENT_DISALLOWED, VERSION_OPT,
"print the version information and exit"),
new CLOptionDescriptor("propfile", CLOptionDescriptor.ARGUMENT_REQUIRED, PROPFILE_OPT,
"the jmeter property file to use"),
new CLOptionDescriptor("addprop", CLOptionDescriptor.ARGUMENT_REQUIRED
| CLOptionDescriptor.DUPLICATES_ALLOWED, PROPFILE2_OPT,
"additional JMeter property file(s)"),
new CLOptionDescriptor("testfile", CLOptionDescriptor.ARGUMENT_REQUIRED, TESTFILE_OPT,
"the jmeter test(.jmx) file to run"),
new CLOptionDescriptor("logfile", CLOptionDescriptor.ARGUMENT_REQUIRED, LOGFILE_OPT,
"the file to log samples to"),
new CLOptionDescriptor("jmeterlogfile", CLOptionDescriptor.ARGUMENT_REQUIRED, JMLOGFILE_OPT,
"jmeter run log file (jmeter.log)"),
new CLOptionDescriptor("nongui", CLOptionDescriptor.ARGUMENT_DISALLOWED, NONGUI_OPT,
"run JMeter in nongui mode"),
new CLOptionDescriptor("server", CLOptionDescriptor.ARGUMENT_DISALLOWED, SERVER_OPT,
"run the JMeter server"),
new CLOptionDescriptor("proxyHost", CLOptionDescriptor.ARGUMENT_REQUIRED, PROXY_HOST,
"Set a proxy server for JMeter to use"),
new CLOptionDescriptor("proxyPort", CLOptionDescriptor.ARGUMENT_REQUIRED, PROXY_PORT,
"Set proxy server port for JMeter to use"),
new CLOptionDescriptor("nonProxyHosts", CLOptionDescriptor.ARGUMENT_REQUIRED, NONPROXY_HOSTS,
"Set nonproxy host list (e.g. *.apache.org|localhost)"),
new CLOptionDescriptor("username", CLOptionDescriptor.ARGUMENT_REQUIRED, PROXY_USERNAME,
"Set username for proxy server that JMeter is to use"),
new CLOptionDescriptor("password", CLOptionDescriptor.ARGUMENT_REQUIRED, PROXY_PASSWORD,
"Set password for proxy server that JMeter is to use"),
new CLOptionDescriptor("jmeterproperty", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENTS_REQUIRED_2, JMETER_PROPERTY,
"Define additional JMeter properties"),
new CLOptionDescriptor("globalproperty", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENTS_REQUIRED_2, JMETER_GLOBAL_PROP,
"Define Global properties (sent to servers)\n\t\te.g. -Gport=123 or -Gglobal.properties"),
new CLOptionDescriptor("systemproperty", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENTS_REQUIRED_2, SYSTEM_PROPERTY,
"Define additional system properties"),
new CLOptionDescriptor("systemPropertyFile", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENT_REQUIRED, SYSTEM_PROPFILE,
"additional system property file(s)"),
new CLOptionDescriptor("loglevel", CLOptionDescriptor.DUPLICATES_ALLOWED
| CLOptionDescriptor.ARGUMENTS_REQUIRED_2, LOGLEVEL,
"[category=]level e.g. jorphan=INFO or jmeter.util=DEBUG"),
new CLOptionDescriptor("runremote", CLOptionDescriptor.ARGUMENT_DISALLOWED, REMOTE_OPT,
"Start remote servers (as defined in remote_hosts)"),
new CLOptionDescriptor("remotestart", CLOptionDescriptor.ARGUMENT_REQUIRED, REMOTE_OPT_PARAM,
"Start these remote servers (overrides remote_hosts)"),
new CLOptionDescriptor("homedir", CLOptionDescriptor.ARGUMENT_REQUIRED, JMETER_HOME_OPT,
"the jmeter home directory to use"),
new CLOptionDescriptor("remoteexit", CLOptionDescriptor.ARGUMENT_DISALLOWED, REMOTE_STOP,
"Exit the remote servers at end of test (non-GUI)"),
};
public JMeter() {
}
// Hack to allow automated tests to find when test has ended
//transient boolean testEnded = false;
private JMeter parent;
private Properties remoteProps; // Properties to be sent to remote servers
private boolean remoteStop; // should remote engines be stopped at end of non-GUI test?
/**
* Starts up JMeter in GUI mode
*/
private void startGui(String testFile) {
PluginManager.install(this, true);
JMeterTreeModel treeModel = new JMeterTreeModel();
JMeterTreeListener treeLis = new JMeterTreeListener(treeModel);
treeLis.setActionHandler(ActionRouter.getInstance());
// NOTUSED: GuiPackage guiPack =
GuiPackage.getInstance(treeLis, treeModel);
MainFrame main = new MainFrame(ActionRouter.getInstance(), treeModel, treeLis);
ComponentUtil.centerComponentInWindow(main, 80);
main.setVisible(true);
ActionRouter.getInstance().actionPerformed(new ActionEvent(main, 1, ActionNames.ADD_ALL));
if (testFile != null) {
FileInputStream reader = null;
try {
File f = new File(testFile);
log.info("Loading file: " + f);
FileServer.getFileServer().setBaseForScript(f);
reader = new FileInputStream(f);
HashTree tree = SaveService.loadTree(reader);
GuiPackage.getInstance().setTestPlanFile(f.getAbsolutePath());
Load.insertLoadedTree(1, tree);
} catch (ConversionException e) {
log.error("Failure loading test file", e);
JMeterUtils.reportErrorToUser(SaveService.CEtoString(e));
} catch (Exception e) {
log.error("Failure loading test file", e);
JMeterUtils.reportErrorToUser(e.toString());
} finally {
JOrphanUtils.closeQuietly(reader);
}
} else {
JTree jTree = GuiPackage.getInstance().getMainFrame().getTree();
TreePath path = jTree.getPathForRow(0);
jTree.setSelectionPath(path);
new FocusRequester(jTree);
}
}
/**
* Takes the command line arguments and uses them to determine how to
* startup JMeter.
*
* Called reflectively by {@link NewDriver#main(String[])}
*/
public void start(String[] args) {
CLArgsParser parser = new CLArgsParser(args, options);
String error = parser.getErrorString();
if (error == null){// Check option combinations
boolean gui = parser.getArgumentById(NONGUI_OPT)==null;
boolean nonGuiOnly = parser.getArgumentById(REMOTE_OPT)!=null
|| parser.getArgumentById(REMOTE_OPT_PARAM)!=null
|| parser.getArgumentById(REMOTE_STOP)!=null;
if (gui && nonGuiOnly) {
error = "-r and -R and -X are only valid in non-GUI mode";
}
}
if (null != error) {
System.err.println("Error: " + error);
System.out.println("Usage");
System.out.println(CLUtil.describeOptions(options).toString());
return;
}
try {
initializeProperties(parser); // Also initialises JMeter logging
/*
* The following is needed for HTTPClient.
* (originally tried doing this in HTTPSampler2,
* but it appears that it was done too late when running in GUI mode)
* Set the commons logging default to Avalon Logkit, if not already defined
*/
if (System.getProperty("org.apache.commons.logging.Log") == null) { // $NON-NLS-1$
System.setProperty("org.apache.commons.logging.Log" // $NON-NLS-1$
, "org.apache.commons.logging.impl.LogKitLogger"); // $NON-NLS-1$
}
Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {
public void uncaughtException(Thread t, Throwable e) {
if (!(e instanceof ThreadDeath)) {
log.error("Uncaught exception: ", e);
System.err.println("Uncaught Exception " + e + ". See log file for details.");
}
}
});
log.info(JMeterUtils.getJMeterCopyright());
log.info("Version " + JMeterUtils.getJMeterVersion());
logProperty("java.version"); //$NON-NLS-1$
logProperty("java.vm.name"); //$NON-NLS-1$
logProperty("os.name"); //$NON-NLS-1$
logProperty("os.arch"); //$NON-NLS-1$
logProperty("os.version"); //$NON-NLS-1$
logProperty("file.encoding"); // $NON-NLS-1$
log.info("Default Locale=" + Locale.getDefault().getDisplayName());
log.info("JMeter Locale=" + JMeterUtils.getLocale().getDisplayName());
log.info("JMeterHome=" + JMeterUtils.getJMeterHome());
logProperty("user.dir"," ="); //$NON-NLS-1$
log.info("PWD ="+new File(".").getCanonicalPath());//$NON-NLS-1$
log.info("IP: "+JMeterUtils.getLocalHostIP()
+" Name: "+JMeterUtils.getLocalHostName()
+" FullName: "+JMeterUtils.getLocalHostFullName());
setProxy(parser);
updateClassLoader();
if (log.isDebugEnabled())
{
String jcp=System.getProperty("java.class.path");// $NON-NLS-1$
String bits[] =jcp.split(File.pathSeparator);
log.debug("ClassPath");
for(String bit : bits){
log.debug(bit);
}
log.debug(jcp);
}
// Set some (hopefully!) useful properties
long now=System.currentTimeMillis();
JMeterUtils.setProperty("START.MS",Long.toString(now));// $NON-NLS-1$
Date today=new Date(now); // so it agrees with above
// TODO perhaps should share code with __time() function for this...
JMeterUtils.setProperty("START.YMD",new SimpleDateFormat("yyyyMMdd").format(today));// $NON-NLS-1$ $NON-NLS-2$
JMeterUtils.setProperty("START.HMS",new SimpleDateFormat("HHmmss").format(today));// $NON-NLS-1$ $NON-NLS-2$
if (parser.getArgumentById(VERSION_OPT) != null) {
System.out.println(JMeterUtils.getJMeterCopyright());
System.out.println("Version " + JMeterUtils.getJMeterVersion());
} else if (parser.getArgumentById(HELP_OPT) != null) {
System.out.println(JMeterUtils.getResourceFileAsText("org/apache/jmeter/help.txt"));// $NON-NLS-1$
} else if (parser.getArgumentById(SERVER_OPT) != null) {
// Start the server
try {
RemoteJMeterEngineImpl.startServer(JMeterUtils.getPropDefault("server_port", 0)); // $NON-NLS-1$
} catch (Exception ex) {
System.err.println("Server failed to start: "+ex);
log.error("Giving up, as server failed with:", ex);
throw ex;
}
startOptionalServers();
} else {
String testFile=null;
CLOption testFileOpt = parser.getArgumentById(TESTFILE_OPT);
if (testFileOpt != null){
testFile = testFileOpt.getArgument();
if (USE_LAST_JMX.equals(testFile)) {
testFile = LoadRecentProject.getRecentFile(0);// most recent
}
}
if (parser.getArgumentById(NONGUI_OPT) == null) {
startGui(testFile);
startOptionalServers();
} else {
CLOption rem=parser.getArgumentById(REMOTE_OPT_PARAM);
if (rem==null) { rem=parser.getArgumentById(REMOTE_OPT); }
CLOption jtl = parser.getArgumentById(LOGFILE_OPT);
String jtlFile = null;
if (jtl != null){
jtlFile=processLAST(jtl.getArgument(), ".jtl"); // $NON-NLS-1$
}
startNonGui(testFile, jtlFile, rem);
startOptionalServers();
}
}
} catch (IllegalUserActionException e) {
System.out.println(e.getMessage());
System.out.println("Incorrect Usage");
System.out.println(CLUtil.describeOptions(options).toString());
} catch (Throwable e) {
if (log != null){
log.fatalError("An error occurred: ",e);
} else {
e.printStackTrace();
}
System.out.println("An error occurred: " + e.getMessage());
System.exit(1); // TODO - could this be return?
}
}
// Update classloader if necessary
private void updateClassLoader() {
updatePath("search_paths",";"); //$NON-NLS-1$//$NON-NLS-2$
updatePath("user.classpath",File.pathSeparator);//$NON-NLS-1$
}
private void updatePath(String property, String sep) {
String userpath= JMeterUtils.getPropDefault(property,"");// $NON-NLS-1$
if (userpath.length() <= 0) { return; }
log.info(property+"="+userpath); //$NON-NLS-1$
StringTokenizer tok = new StringTokenizer(userpath, sep);
while(tok.hasMoreTokens()) {
String path=tok.nextToken();
File f=new File(path);
if (!f.canRead() && !f.isDirectory()) {
log.warn("Can't read "+path);
} else {
log.info("Adding to classpath: "+path);
try {
NewDriver.addPath(path);
} catch (MalformedURLException e) {
log.warn("Error adding: "+path+" "+e.getLocalizedMessage());
}
}
}
}
/**
*
*/
private void startOptionalServers() {
int bshport = JMeterUtils.getPropDefault("beanshell.server.port", 0);// $NON-NLS-1$
String bshfile = JMeterUtils.getPropDefault("beanshell.server.file", "");// $NON-NLS-1$ $NON-NLS-2$
if (bshport > 0) {
log.info("Starting Beanshell server (" + bshport + "," + bshfile + ")");
Runnable t = new BeanShellServer(bshport, bshfile);
t.run();
}
// Should we run a beanshell script on startup?
String bshinit = JMeterUtils.getProperty("beanshell.init.file");// $NON-NLS-1$
if (bshinit != null){
log.info("Run Beanshell on file: "+bshinit);
try {
BeanShellInterpreter bsi = new BeanShellInterpreter();//bshinit,log);
bsi.source(bshinit);
} catch (ClassNotFoundException e) {
log.warn("Could not start Beanshell: "+e.getLocalizedMessage());
} catch (JMeterException e) {
log.warn("Could not process Beanshell file: "+e.getLocalizedMessage());
}
}
int mirrorPort=JMeterUtils.getPropDefault("mirror.server.port", 0);// $NON-NLS-1$
if (mirrorPort > 0){
log.info("Starting Mirror server (" + mirrorPort + ")");
try {
Object instance = ClassTools.construct(
"org.apache.jmeter.protocol.http.control.HttpMirrorControl",// $NON-NLS-1$
mirrorPort);
ClassTools.invoke(instance,"startHttpMirror");
} catch (JMeterException e) {
log.warn("Could not start Mirror server",e);
}
}
}
/**
* Sets a proxy server for the JVM if the command line arguments are
* specified.
*/
private void setProxy(CLArgsParser parser) throws IllegalUserActionException {
if (parser.getArgumentById(PROXY_USERNAME) != null) {
Properties jmeterProps = JMeterUtils.getJMeterProperties();
if (parser.getArgumentById(PROXY_PASSWORD) != null) {
String u, p;
Authenticator.setDefault(new ProxyAuthenticator(u = parser.getArgumentById(PROXY_USERNAME)
.getArgument(), p = parser.getArgumentById(PROXY_PASSWORD).getArgument()));
log.info("Set Proxy login: " + u + "/" + p);
jmeterProps.setProperty(HTTP_PROXY_USER, u);//for Httpclient
jmeterProps.setProperty(HTTP_PROXY_PASS, p);//for Httpclient
} else {
String u;
Authenticator.setDefault(new ProxyAuthenticator(u = parser.getArgumentById(PROXY_USERNAME)
.getArgument(), ""));
log.info("Set Proxy login: " + u);
jmeterProps.setProperty(HTTP_PROXY_USER, u);
}
}
if (parser.getArgumentById(PROXY_HOST) != null && parser.getArgumentById(PROXY_PORT) != null) {
String h = parser.getArgumentById(PROXY_HOST).getArgument();
String p = parser.getArgumentById(PROXY_PORT).getArgument();
System.setProperty("http.proxyHost", h );// $NON-NLS-1$
System.setProperty("https.proxyHost", h);// $NON-NLS-1$
System.setProperty("http.proxyPort", p);// $NON-NLS-1$
System.setProperty("https.proxyPort", p);// $NON-NLS-1$
log.info("Set http[s].proxyHost: " + h + " Port: " + p);
} else if (parser.getArgumentById(PROXY_HOST) != null || parser.getArgumentById(PROXY_PORT) != null) {
throw new IllegalUserActionException(JMeterUtils.getResString("proxy_cl_error"));// $NON-NLS-1$
}
if (parser.getArgumentById(NONPROXY_HOSTS) != null) {
String n = parser.getArgumentById(NONPROXY_HOSTS).getArgument();
System.setProperty("http.nonProxyHosts", n );// $NON-NLS-1$
System.setProperty("https.nonProxyHosts", n );// $NON-NLS-1$
log.info("Set http[s].nonProxyHosts: "+n);
}
}
private void initializeProperties(CLArgsParser parser) {
if (parser.getArgumentById(PROPFILE_OPT) != null) {
JMeterUtils.loadJMeterProperties(parser.getArgumentById(PROPFILE_OPT).getArgument());
} else {
JMeterUtils.loadJMeterProperties(NewDriver.getJMeterDir() + File.separator
+ "bin" + File.separator // $NON-NLS-1$
+ "jmeter.properties");// $NON-NLS-1$
}
if (parser.getArgumentById(JMLOGFILE_OPT) != null){
String jmlogfile=parser.getArgumentById(JMLOGFILE_OPT).getArgument();
jmlogfile = processLAST(jmlogfile, ".log");// $NON-NLS-1$
JMeterUtils.setProperty(LoggingManager.LOG_FILE,jmlogfile);
}
JMeterUtils.initLogging();
JMeterUtils.initLocale();
// Bug 33845 - allow direct override of Home dir
if (parser.getArgumentById(JMETER_HOME_OPT) == null) {
JMeterUtils.setJMeterHome(NewDriver.getJMeterDir());
} else {
JMeterUtils.setJMeterHome(parser.getArgumentById(JMETER_HOME_OPT).getArgument());
}
Properties jmeterProps = JMeterUtils.getJMeterProperties();
remoteProps = new Properties();
// Add local JMeter properties, if the file is found
String userProp = JMeterUtils.getPropDefault("user.properties",""); //$NON-NLS-1$
if (userProp.length() > 0){ //$NON-NLS-1$
FileInputStream fis=null;
try {
File file = JMeterUtils.findFile(userProp);
if (file.canRead()){
log.info("Loading user properties from: "+file.getCanonicalPath());
fis = new FileInputStream(file);
Properties tmp = new Properties();
tmp.load(fis);
jmeterProps.putAll(tmp);
LoggingManager.setLoggingLevels(tmp);//Do what would be done earlier
}
} catch (IOException e) {
log.warn("Error loading user property file: " + userProp, e);
} finally {
JOrphanUtils.closeQuietly(fis);
}
}
// Add local system properties, if the file is found
String sysProp = JMeterUtils.getPropDefault("system.properties",""); //$NON-NLS-1$
if (sysProp.length() > 0){
FileInputStream fis=null;
try {
File file = JMeterUtils.findFile(sysProp);
if (file.canRead()){
log.info("Loading system properties from: "+file.getCanonicalPath());
fis = new FileInputStream(file);
System.getProperties().load(fis);
}
} catch (IOException e) {
log.warn("Error loading system property file: " + sysProp, e);
} finally {
JOrphanUtils.closeQuietly(fis);
}
}
// Process command line property definitions
// These can potentially occur multiple times
List<CLOption> clOptions = parser.getArguments();
int size = clOptions.size();
for (int i = 0; i < size; i++) {
CLOption option = clOptions.get(i);
String name = option.getArgument(0);
String value = option.getArgument(1);
FileInputStream fis = null;
switch (option.getDescriptor().getId()) {
// Should not have any text arguments
case CLOption.TEXT_ARGUMENT:
throw new IllegalArgumentException("Unknown arg: "+option.getArgument());
case PROPFILE2_OPT: // Bug 33920 - allow multiple props
try {
fis = new FileInputStream(new File(name));
Properties tmp = new Properties();
tmp.load(fis);
jmeterProps.putAll(tmp);
LoggingManager.setLoggingLevels(tmp);//Do what would be done earlier
} catch (FileNotFoundException e) {
log.warn("Can't find additional property file: " + name, e);
} catch (IOException e) {
log.warn("Error loading additional property file: " + name, e);
} finally {
JOrphanUtils.closeQuietly(fis);
}
break;
case SYSTEM_PROPFILE:
log.info("Setting System properties from file: " + name);
try {
fis = new FileInputStream(new File(name));
System.getProperties().load(fis);
} catch (IOException e) {
log.warn("Cannot find system property file "+e.getLocalizedMessage());
} finally {
JOrphanUtils.closeQuietly(fis);
}
break;
case SYSTEM_PROPERTY:
if (value.length() > 0) { // Set it
log.info("Setting System property: " + name + "=" + value);
System.getProperties().setProperty(name, value);
} else { // Reset it
log.warn("Removing System property: " + name);
System.getProperties().remove(name);
}
break;
case JMETER_PROPERTY:
if (value.length() > 0) { // Set it
log.info("Setting JMeter property: " + name + "=" + value);
jmeterProps.setProperty(name, value);
} else { // Reset it
log.warn("Removing JMeter property: " + name);
jmeterProps.remove(name);
}
break;
case JMETER_GLOBAL_PROP:
if (value.length() > 0) { // Set it
log.info("Setting Global property: " + name + "=" + value);
remoteProps.setProperty(name, value);
} else {
File propFile = new File(name);
if (propFile.canRead()) {
log.info("Setting Global properties from the file "+name);
try {
fis = new FileInputStream(propFile);
remoteProps.load(fis);
} catch (FileNotFoundException e) {
log.warn("Could not find properties file: "+e.getLocalizedMessage());
} catch (IOException e) {
log.warn("Could not load properties file: "+e.getLocalizedMessage());
} finally {
JOrphanUtils.closeQuietly(fis);
}
}
}
break;
case LOGLEVEL:
if (value.length() > 0) { // Set category
log.info("LogLevel: " + name + "=" + value);
LoggingManager.setPriority(value, name);
} else { // Set root level
log.warn("LogLevel: " + name);
LoggingManager.setPriority(name);
}
break;
case REMOTE_STOP:
remoteStop = true;
break;
default:
// ignored
break;
}
}
String sample_variables = (String) jmeterProps.get(SampleEvent.SAMPLE_VARIABLES);
if (sample_variables != null){
remoteProps.put(SampleEvent.SAMPLE_VARIABLES, sample_variables);
}
}
/*
* Checks for LAST or LASTsuffix.
* Returns the LAST name with .JMX replaced by suffix.
*/
private String processLAST(String jmlogfile, String suffix) {
if (USE_LAST_JMX.equals(jmlogfile) || USE_LAST_JMX.concat(suffix).equals(jmlogfile)){
String last = LoadRecentProject.getRecentFile(0);// most recent
final String JMX_SUFFIX = ".JMX"; // $NON-NLS-1$
if (last.toUpperCase(Locale.ENGLISH).endsWith(JMX_SUFFIX)){
jmlogfile=last.substring(0, last.length() - JMX_SUFFIX.length()).concat(suffix);
}
}
return jmlogfile;
}
private void startNonGui(String testFile, String logFile, CLOption remoteStart)
throws IllegalUserActionException {
// add a system property so samplers can check to see if JMeter
// is running in NonGui mode
System.setProperty(JMETER_NON_GUI, "true");// $NON-NLS-1$
JMeter driver = new JMeter();// TODO - why does it create a new instance?
driver.remoteProps = this.remoteProps;
driver.remoteStop = this.remoteStop;
driver.parent = this;
PluginManager.install(this, false);
String remote_hosts_string = null;
if (remoteStart != null) {
remote_hosts_string = remoteStart.getArgument();
if (remote_hosts_string == null) {
remote_hosts_string = JMeterUtils.getPropDefault(
"remote_hosts", //$NON-NLS-1$
"127.0.0.1");//$NON-NLS-1$
}
}
if (testFile == null) {
throw new IllegalUserActionException("Non-GUI runs require a test plan");
}
driver.runNonGui(testFile, logFile, remoteStart != null, remote_hosts_string);
}
// run test in batch mode
private void runNonGui(String testFile, String logFile, boolean remoteStart, String remote_hosts_string) {
FileInputStream reader = null;
try {
File f = new File(testFile);
if (!f.exists() || !f.isFile()) {
println("Could not open " + testFile);
return;
}
FileServer.getFileServer().setBaseForScript(f);
reader = new FileInputStream(f);
log.info("Loading file: " + f);
HashTree tree = SaveService.loadTree(reader);
@SuppressWarnings("deprecation") // Deliberate use of deprecated ctor
JMeterTreeModel treeModel = new JMeterTreeModel(new Object());// Create non-GUI version to avoid headless problems
JMeterTreeNode root = (JMeterTreeNode) treeModel.getRoot();
treeModel.addSubTree(tree, root);
// Hack to resolve ModuleControllers in non GUI mode
SearchByClass<ReplaceableController> replaceableControllers = new SearchByClass<ReplaceableController>(ReplaceableController.class);
tree.traverse(replaceableControllers);
Collection<ReplaceableController> replaceableControllersRes = replaceableControllers.getSearchResults();
for (Iterator<ReplaceableController> iter = replaceableControllersRes.iterator(); iter.hasNext();) {
ReplaceableController replaceableController = iter.next();
replaceableController.resolveReplacementSubTree(root);
}
// Remove the disabled items
// For GUI runs this is done in Start.java
convertSubTree(tree);
Summariser summer = null;
String summariserName = JMeterUtils.getPropDefault("summariser.name", "");//$NON-NLS-1$
if (summariserName.length() > 0) {
log.info("Creating summariser <" + summariserName + ">");
println("Creating summariser <" + summariserName + ">");
summer = new Summariser(summariserName);
}
if (logFile != null) {
ResultCollector logger = new ResultCollector(summer);
logger.setFilename(logFile);
tree.add(tree.getArray()[0], logger);
}
else {
// only add Summariser if it can not be shared with the ResultCollector
if (summer != null) {
tree.add(tree.getArray()[0], summer);
}
}
List<JMeterEngine> engines = new LinkedList<JMeterEngine>();
tree.add(tree.getArray()[0], new ListenToTest(parent, (remoteStart && remoteStop) ? engines : null));
println("Created the tree successfully using "+testFile);
if (!remoteStart) {
JMeterEngine engine = new StandardJMeterEngine();
engine.configure(tree);
long now=System.currentTimeMillis();
println("Starting the test @ "+new Date(now)+" ("+now+")");
engine.runTest();
engines.add(engine);
} else {
java.util.StringTokenizer st = new java.util.StringTokenizer(remote_hosts_string, ",");//$NON-NLS-1$
while (st.hasMoreElements()) {
String el = (String) st.nextElement();
println("Configuring remote engine for " + el);
log.info("Configuring remote engine for " + el);
JMeterEngine eng = doRemoteInit(el.trim(), tree);
if (null != eng) {
engines.add(eng);
} else {
println("Failed to configure "+el);
}
}
if (engines.isEmpty()) {
println("No remote engines were started.");
return;
}
println("Starting remote engines");
log.info("Starting remote engines");
long now=System.currentTimeMillis();
println("Starting the test @ "+new Date(now)+" ("+now+")");
for (JMeterEngine engine : engines) {
engine.runTest();
}
println("Remote engines have been started");
log.info("Remote engines have been started");
}
startUdpDdaemon(engines);
} catch (Exception e) {
System.out.println("Error in NonGUIDriver " + e.toString());
log.error("Error in NonGUIDriver", e);
} finally {
JOrphanUtils.closeQuietly(reader);
}
}
/**
* Refactored from AbstractAction.java
*
* @param tree
*/
public static void convertSubTree(HashTree tree) {
LinkedList<Object> copyList = new LinkedList<Object>(tree.list());
for (Object o : copyList) {
if (o instanceof TestElement) {
TestElement item = (TestElement) o;
if (item.isEnabled()) {
if (item instanceof ReplaceableController) {
ReplaceableController rc;
// TODO this bit of code needs to be tidied up
// Unfortunately ModuleController is in components, not core
if (item.getClass().getName().equals("org.apache.jmeter.control.ModuleController")){ // Bug 47165
rc = (ReplaceableController) item;
} else {
// HACK: force the controller to load its tree
rc = (ReplaceableController) item.clone();
}
HashTree subTree = tree.getTree(item);
if (subTree != null) {
HashTree replacementTree = rc.getReplacementSubTree();
if (replacementTree != null) {
convertSubTree(replacementTree);
tree.replace(item, rc);
tree.set(rc, replacementTree);
}
} else { // null subTree
convertSubTree(tree.getTree(item));
}
} else { // not Replaceable Controller
convertSubTree(tree.getTree(item));
}
} else { // Not enabled
tree.remove(item);
}
} else { // Not a TestElement
JMeterTreeNode item = (JMeterTreeNode) o;
if (item.isEnabled()) {
// Replacement only needs to occur when starting the engine
// @see StandardJMeterEngine.run()
if (item.getUserObject() instanceof ReplaceableController) {
ReplaceableController rc =
(ReplaceableController) item.getTestElement();
HashTree subTree = tree.getTree(item);
if (subTree != null) {
HashTree replacementTree = rc.getReplacementSubTree();
if (replacementTree != null) {
convertSubTree(replacementTree);
tree.replace(item, rc);
tree.set(rc, replacementTree);
}
}
} else { // Not a ReplaceableController
convertSubTree(tree.getTree(item));
TestElement testElement = item.getTestElement();
tree.replace(item, testElement);
}
} else { // Not enabled
tree.remove(item);
}
}
}
}
private JMeterEngine doRemoteInit(String hostName, HashTree testTree) {
JMeterEngine engine = null;
try {
engine = new ClientJMeterEngine(hostName);
} catch (Exception e) {
log.fatalError("Failure connecting to remote host: "+hostName, e);
System.err.println("Failure connecting to remote host: "+hostName+" "+e);
return null;
}
engine.configure(testTree);
if (!remoteProps.isEmpty()) {
engine.setProperties(remoteProps);
}
return engine;
}
/*
* Listen to test and handle tidyup after non-GUI test completes.
* If running a remote test, then after waiting a few seconds for listeners to finish files,
* it calls ClientJMeterEngine.tidyRMI() to deal with the Naming Timer Thread.
*/
private static class ListenToTest implements TestStateListener, Runnable, Remoteable {
private AtomicInteger started = new AtomicInteger(0); // keep track of remote tests
//NOT YET USED private JMeter _parent;
private final List<JMeterEngine> engines;
public ListenToTest(JMeter parent, List<JMeterEngine> engines) {
//_parent = parent;
this.engines=engines;
}
public void testEnded(String host) {
long now=System.currentTimeMillis();
log.info("Finished remote host: " + host + " ("+now+")");
if (started.decrementAndGet() <= 0) {
Thread stopSoon = new Thread(this);
stopSoon.start();
}
}
public void testEnded() {
long now = System.currentTimeMillis();
println("Tidying up ... @ "+new Date(now)+" ("+now+")");
println("... end of run");
checkForRemainingThreads();
}
public void testStarted(String host) {
started.incrementAndGet();
long now=System.currentTimeMillis();
log.info("Started remote host: " + host + " ("+now+")");
}
public void testStarted() {
long now=System.currentTimeMillis();
log.info(JMeterUtils.getResString("running_test")+" ("+now+")");//$NON-NLS-1$
}
/**
* This is a hack to allow listeners a chance to close their files. Must
* implement a queue for sample responses tied to the engine, and the
* engine won't deliver testEnded signal till all sample responses have
* been delivered. Should also improve performance of remote JMeter
* testing.
*/
public void run() {
long now = System.currentTimeMillis();
println("Tidying up remote @ "+new Date(now)+" ("+now+")");
if (engines!=null){ // it will be null unless remoteStop = true
println("Exitting remote servers");
for (JMeterEngine e : engines){
e.exit();
}
}
try {
Thread.sleep(5000); // Allow listeners to close files
} catch (InterruptedException ignored) {
}
ClientJMeterEngine.tidyRMI(log);
println("... end of run");
checkForRemainingThreads();
}
/**
* Runs daemon thread which waits a short while;
* if JVM does not exit, lists remaining non-daemon threads on stdout.
*/
private void checkForRemainingThreads() {
// This cannot be a JMeter class variable, because properties
// are not initialised until later.
final int REMAIN_THREAD_PAUSE =
JMeterUtils.getPropDefault("jmeter.exit.check.pause", 2000); // $NON-NLS-1$
if (REMAIN_THREAD_PAUSE > 0) {
Thread daemon = new Thread(){
@Override
public void run(){
try {
Thread.sleep(REMAIN_THREAD_PAUSE); // Allow enough time for JVM to exit
} catch (InterruptedException ignored) {
}
// This is a daemon thread, which should only reach here if there are other
// non-daemon threads still active
System.out.println("The JVM should have exitted but did not.");
System.out.println("The following non-daemon threads are still running (DestroyJavaVM is OK):");
JOrphanUtils.displayThreads(false);
}
};
daemon.setDaemon(true);
daemon.start();
}
}
}
private static void println(String str) {
System.out.println(str);
}
private static final String[][] DEFAULT_ICONS = {
{ "org.apache.jmeter.control.gui.TestPlanGui", "org/apache/jmeter/images/beaker.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.timers.gui.AbstractTimerGui", "org/apache/jmeter/images/timer.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.threads.gui.ThreadGroupGui", "org/apache/jmeter/images/thread.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.visualizers.gui.AbstractListenerGui", "org/apache/jmeter/images/meter.png" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.config.gui.AbstractConfigGui", "org/apache/jmeter/images/testtubes.png" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.processor.gui.AbstractPreProcessorGui", "org/apache/jmeter/images/leafnode.gif"}, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.processor.gui.AbstractPostProcessorGui","org/apache/jmeter/images/leafnodeflip.gif"},//$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.control.gui.AbstractControllerGui", "org/apache/jmeter/images/knob.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.control.gui.WorkBenchGui", "org/apache/jmeter/images/clipboard.gif" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.samplers.gui.AbstractSamplerGui", "org/apache/jmeter/images/pipet.png" }, //$NON-NLS-1$ $NON-NLS-2$
{ "org.apache.jmeter.assertions.gui.AbstractAssertionGui", "org/apache/jmeter/images/question.gif"} //$NON-NLS-1$ $NON-NLS-2$
};
public String[][] getIconMappings() {
final String defaultIconProp = "org/apache/jmeter/images/icon.properties"; //$NON-NLS-1$
String iconProp = JMeterUtils.getPropDefault("jmeter.icons", defaultIconProp);//$NON-NLS-1$
Properties p = JMeterUtils.loadProperties(iconProp);
if (p == null && !iconProp.equals(defaultIconProp)) {
log.info(iconProp + " not found - using " + defaultIconProp);
iconProp = defaultIconProp;
p = JMeterUtils.loadProperties(iconProp);
}
if (p == null) {
log.info(iconProp + " not found - using inbuilt icon set");
return DEFAULT_ICONS;
}
log.info("Loaded icon properties from " + iconProp);
String[][] iconlist = new String[p.size()][3];
Enumeration<?> pe = p.keys();
int i = 0;
while (pe.hasMoreElements()) {
String key = (String) pe.nextElement();
String icons[] = JOrphanUtils.split(p.getProperty(key), " ");//$NON-NLS-1$
iconlist[i][0] = key;
iconlist[i][1] = icons[0];
if (icons.length > 1) {
iconlist[i][2] = icons[1];
}
i++;
}
return iconlist;
}
public String[][] getResourceBundles() {
return new String[0][];
}
/**
* Check if JMeter is running in non-GUI mode.
*
* @return true if JMeter is running in non-GUI mode.
*/
public static boolean isNonGUI(){
return "true".equals(System.getProperty(JMeter.JMETER_NON_GUI)); //$NON-NLS-1$
}
private void logProperty(String prop){
log.info(prop+"="+System.getProperty(prop));//$NON-NLS-1$
}
private void logProperty(String prop,String separator){
log.info(prop+separator+System.getProperty(prop));//$NON-NLS-1$
}
private static void startUdpDdaemon(final List<JMeterEngine> engines) {
int port = JMeterUtils.getPropDefault("jmeterengine.nongui.port", UDP_PORT_DEFAULT); // $NON-NLS-1$
int maxPort = JMeterUtils.getPropDefault("jmeterengine.nongui.maxport", 4455); // $NON-NLS-1$
if (port > 1000){
final DatagramSocket socket = getSocket(port, maxPort);
if (socket != null) {
Thread waiter = new Thread("UDP Listener"){
@Override
public void run() {
waitForSignals(engines, socket);
}
};
waiter.setDaemon(true);
waiter.start();
} else {
System.out.println("Failed to create UDP port");
}
}
}
private static void waitForSignals(final List<JMeterEngine> engines, DatagramSocket socket) {
byte[] buf = new byte[80];
System.out.println("Waiting for possible shutdown message on port "+socket.getLocalPort());
DatagramPacket request = new DatagramPacket(buf, buf.length);
try {
while(true) {
socket.receive(request);
InetAddress address = request.getAddress();
// Only accept commands from the local host
if (address.isLoopbackAddress()){
String command = new String(request.getData(), request.getOffset(), request.getLength(),"ASCII");
System.out.println("Command: "+command+" received from "+address);
log.info("Command: "+command+" received from "+address);
if (command.equals("StopTestNow")){
for(JMeterEngine engine : engines) {
engine.stopTest(true);
}
} else if (command.equals("Shutdown")) {
for(JMeterEngine engine : engines) {
engine.stopTest(false);
}
} else if (command.equals("HeapDump")) {
HeapDumper.dumpHeap();
} else {
System.out.println("Command: "+command+" not recognised ");
}
}
}
} catch (Exception e) {
System.out.println(e);
} finally {
socket.close();
}
}
private static DatagramSocket getSocket(int udpPort, int udpPortMax) {
DatagramSocket socket = null;
int i = udpPort;
while (i<= udpPortMax) {
try {
socket = new DatagramSocket(i);
break;
} catch (SocketException e) {
i++;
}
}
return socket;
}
} | log cannot be null
git-svn-id: 5ccfe34f605a6c2f9041ff2965ab60012c62539a@1379969 13f79535-47bb-0310-9956-ffa450edef68
| src/core/org/apache/jmeter/JMeter.java | log cannot be null |
|
Java | apache-2.0 | 40e9efc0a4a9543a9c480cb5460905c20f7671e6 | 0 | wbond/subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion | package org.tigris.subversion.lib;
/**
* ====================================================================
* Copyright (c) 2000-2001 CollabNet. All rights reserved.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at http://subversion.tigris.org/license-1.html.
* If newer versions of this license are posted there, you may use a
* newer version instead, at your option.
*
* This software consists of voluntary contributions made by many
* individuals. For exact contribution history, see the revision
* history and logs, available at http://subversion.tigris.org/.
* ====================================================================
**/
import java.util.Date;
import java.util.Hashtable;
public class Entry {
public final static int SCHEDULE_NORMAL=0;
public final static int SCHEDULE_ADD=1;
public final static int SCHEDULE_DELETE=2;
public final static int SCHEDULE_REPLACE=3;
public final static int SCHEDULE_UNADD=4;
public final static int SCHEDULE_UNDELETE=5;
public final static int NODEKIND_NONE = 0;
public final static int NODEKIND_FILE = 1;
public final static int NODEKIND_DIR = 2;
public final static int NODEKIND_UNKNOWN = 3;
private long revision = 0;
private String url = null;
private int nodekind = NODEKIND_NONE;
private int schedule = SCHEDULE_NORMAL;
private boolean conflicted = false;
private boolean copied = false;
private Date texttime = null;
private Date proptime = null;
private Hashtable attributes = new Hashtable();
public Entry()
{
super();
}
public void setRevision(long _revision)
{
revision = _revision;
}
public long getRevision()
{
return revision;
}
public void setUrl(String _url)
{
url = _url;
}
public String getUrl()
{
return url;
}
public void setNodekind(int _nodekind)
{
nodekind = _nodekind;
}
public int getNodekind()
{
return nodekind;
}
public void setSchedule(int _schedule)
{
schedule = _schedule;
}
public int getSchedule()
{
return schedule;
}
public void setConflicted(boolean _conflicted)
{
conflicted = _conflicted;
}
public boolean getConflicted()
{
return conflicted;
}
public void setCopied(boolean _copied)
{
copied = _copied;
}
public boolean getCopied()
{
return copied;
}
public void setTexttime(Date _texttime)
{
texttime = _texttime;
}
public Date getTexttime()
{
return texttime;
}
public void setProptime(Date _proptime)
{
proptime = _proptime;
}
public Date getProptime()
{
return proptime;
}
public void setAttributes(Hashtable _attributes)
{
attributes = _attributes;
}
public Hashtable getAttributes()
{
return attributes;
}
}
/*
* local variables:
* eval: (load-file "../../../../../../../svn-dev.el")
* end:
*/
| subversion/bindings/java/jni/org/tigris/subversion/lib/Entry.java | package org.tigris.subversion.lib;
/**
* ====================================================================
* Copyright (c) 2000-2001 CollabNet. All rights reserved.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at http://subversion.tigris.org/license-1.html.
* If newer versions of this license are posted there, you may use a
* newer version instead, at your option.
*
* This software consists of voluntary contributions made by many
* individuals. For exact contribution history, see the revision
* history and logs, available at http://subversion.tigris.org/.
* ====================================================================
**/
import java.util.Date;
import java.util.Hashtable;
public class Entry {
public final static int SCHEDULE_NORMAL=0;
public final static int SCHEDULE_ADD=1;
public final static int SCHEDULE_DELETE=2;
public final static int SCHEDULE_REPLACE=3;
public final static int SCHEDULE_UNADD=4;
public final static int SCHEDULE_UNDELETE=5;
public final static int NODEKIND_NONE = 0;
public final static int NODEKIND_FILE = 1;
public final static int NODEKIND_DIR = 2;
public final static int NODEKIND_UNKNOWN = 3;
private long revision = 0;
private String url = null;
private int nodekind = NODEKIND_NONE;
private int schedule = SCHEDULE_NORMAL;
private boolean conflicted = false;
private boolean copied = false;
private Date texttime = null;
private Date proptime = null;
private Hashtable attributes = new Hashtable();
public Entry()
{
}
public void setRevision(long _revision)
{
revision = _revision;
}
public long getRevision()
{
return revision;
}
public void setUrl(String _url)
{
url = _url;
}
public String getUrl()
{
return url;
}
public void setNodekind(int _nodekind)
{
nodekind = _nodekind;
}
public int getNodekind()
{
return nodekind;
}
public void setSchedule(int _schedule)
{
schedule = _schedule;
}
public int getSchedule()
{
return schedule;
}
public void setConflicted(boolean _conflicted)
{
conflicted = _conflicted;
}
public boolean getConflicted()
{
return conflicted;
}
public void setCopied(boolean _copied)
{
copied = _copied;
}
public boolean getCopied()
{
return copied;
}
public void setTexttime(Date _texttime)
{
texttime = _texttime;
}
public Date getTexttime()
{
return texttime;
}
public void setProptime(Date _proptime)
{
proptime = _proptime;
}
public Date getProptime()
{
return proptime;
}
public void setAttributes(Hashtable _attributes)
{
attributes = _attributes;
}
public Hashtable getAttributes()
{
return attributes;
}
}
/*
* local variables:
* eval: (load-file "../../../../../../../svn-dev.el")
* end:
*/
| Added call to superclass.
* org/tigris/subversion/lib/Entry.java(Entry)
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@841021 13f79535-47bb-0310-9956-ffa450edef68
| subversion/bindings/java/jni/org/tigris/subversion/lib/Entry.java | Added call to superclass. |
|
Java | apache-2.0 | 5f47fe4c5a0519e93f7758121e234c74f6039f4b | 0 | MSG134/IVCT_Framework,MSG134/IVCT,MSG134/IVCT_Framework,MSG134/IVCT_Framework | package de.fraunhofer.iosb.tc_lib;
import hla.rti1516e.AttributeHandle;
import hla.rti1516e.AttributeHandleFactory;
import hla.rti1516e.AttributeHandleSet;
import hla.rti1516e.AttributeHandleSetFactory;
import hla.rti1516e.AttributeHandleValueMap;
import hla.rti1516e.AttributeHandleValueMapFactory;
import hla.rti1516e.AttributeSetRegionSetPairList;
import hla.rti1516e.AttributeSetRegionSetPairListFactory;
import hla.rti1516e.CallbackModel;
import hla.rti1516e.DimensionHandle;
import hla.rti1516e.DimensionHandleFactory;
import hla.rti1516e.DimensionHandleSet;
import hla.rti1516e.DimensionHandleSetFactory;
import hla.rti1516e.FederateAmbassador;
import hla.rti1516e.FederateHandle;
import hla.rti1516e.FederateHandleFactory;
import hla.rti1516e.FederateHandleSet;
import hla.rti1516e.FederateHandleSetFactory;
import hla.rti1516e.InteractionClassHandle;
import hla.rti1516e.InteractionClassHandleFactory;
import hla.rti1516e.LogicalTime;
import hla.rti1516e.LogicalTimeFactory;
import hla.rti1516e.LogicalTimeInterval;
import hla.rti1516e.MessageRetractionHandle;
import hla.rti1516e.MessageRetractionReturn;
import hla.rti1516e.ObjectClassHandle;
import hla.rti1516e.ObjectClassHandleFactory;
import hla.rti1516e.ObjectInstanceHandle;
import hla.rti1516e.ObjectInstanceHandleFactory;
import hla.rti1516e.OrderType;
import hla.rti1516e.ParameterHandle;
import hla.rti1516e.ParameterHandleFactory;
import hla.rti1516e.ParameterHandleValueMap;
import hla.rti1516e.ParameterHandleValueMapFactory;
import hla.rti1516e.RTIambassador;
import hla.rti1516e.RangeBounds;
import hla.rti1516e.RegionHandle;
import hla.rti1516e.RegionHandleSet;
import hla.rti1516e.RegionHandleSetFactory;
import hla.rti1516e.ResignAction;
import hla.rti1516e.ServiceGroup;
import hla.rti1516e.TimeQueryReturn;
import hla.rti1516e.TransportationTypeHandle;
import hla.rti1516e.TransportationTypeHandleFactory;
import hla.rti1516e.encoding.EncoderFactory;
import hla.rti1516e.exceptions.AlreadyConnected;
import hla.rti1516e.exceptions.AsynchronousDeliveryAlreadyDisabled;
import hla.rti1516e.exceptions.AsynchronousDeliveryAlreadyEnabled;
import hla.rti1516e.exceptions.AttributeAcquisitionWasNotRequested;
import hla.rti1516e.exceptions.AttributeAlreadyBeingAcquired;
import hla.rti1516e.exceptions.AttributeAlreadyBeingChanged;
import hla.rti1516e.exceptions.AttributeAlreadyBeingDivested;
import hla.rti1516e.exceptions.AttributeAlreadyOwned;
import hla.rti1516e.exceptions.AttributeDivestitureWasNotRequested;
import hla.rti1516e.exceptions.AttributeNotDefined;
import hla.rti1516e.exceptions.AttributeNotOwned;
import hla.rti1516e.exceptions.AttributeNotPublished;
import hla.rti1516e.exceptions.AttributeRelevanceAdvisorySwitchIsOff;
import hla.rti1516e.exceptions.AttributeRelevanceAdvisorySwitchIsOn;
import hla.rti1516e.exceptions.AttributeScopeAdvisorySwitchIsOff;
import hla.rti1516e.exceptions.AttributeScopeAdvisorySwitchIsOn;
import hla.rti1516e.exceptions.CallNotAllowedFromWithinCallback;
import hla.rti1516e.exceptions.ConnectionFailed;
import hla.rti1516e.exceptions.CouldNotCreateLogicalTimeFactory;
import hla.rti1516e.exceptions.CouldNotOpenFDD;
import hla.rti1516e.exceptions.CouldNotOpenMIM;
import hla.rti1516e.exceptions.DeletePrivilegeNotHeld;
import hla.rti1516e.exceptions.DesignatorIsHLAstandardMIM;
import hla.rti1516e.exceptions.ErrorReadingFDD;
import hla.rti1516e.exceptions.ErrorReadingMIM;
import hla.rti1516e.exceptions.FederateAlreadyExecutionMember;
import hla.rti1516e.exceptions.FederateHandleNotKnown;
import hla.rti1516e.exceptions.FederateHasNotBegunSave;
import hla.rti1516e.exceptions.FederateIsExecutionMember;
import hla.rti1516e.exceptions.FederateNameAlreadyInUse;
import hla.rti1516e.exceptions.FederateNotExecutionMember;
import hla.rti1516e.exceptions.FederateOwnsAttributes;
import hla.rti1516e.exceptions.FederateServiceInvocationsAreBeingReportedViaMOM;
import hla.rti1516e.exceptions.FederateUnableToUseTime;
import hla.rti1516e.exceptions.FederatesCurrentlyJoined;
import hla.rti1516e.exceptions.FederationExecutionAlreadyExists;
import hla.rti1516e.exceptions.FederationExecutionDoesNotExist;
import hla.rti1516e.exceptions.IllegalName;
import hla.rti1516e.exceptions.InTimeAdvancingState;
import hla.rti1516e.exceptions.InconsistentFDD;
import hla.rti1516e.exceptions.InteractionClassAlreadyBeingChanged;
import hla.rti1516e.exceptions.InteractionClassNotDefined;
import hla.rti1516e.exceptions.InteractionClassNotPublished;
import hla.rti1516e.exceptions.InteractionParameterNotDefined;
import hla.rti1516e.exceptions.InteractionRelevanceAdvisorySwitchIsOff;
import hla.rti1516e.exceptions.InteractionRelevanceAdvisorySwitchIsOn;
import hla.rti1516e.exceptions.InvalidAttributeHandle;
import hla.rti1516e.exceptions.InvalidDimensionHandle;
import hla.rti1516e.exceptions.InvalidFederateHandle;
import hla.rti1516e.exceptions.InvalidInteractionClassHandle;
import hla.rti1516e.exceptions.InvalidLocalSettingsDesignator;
import hla.rti1516e.exceptions.InvalidLogicalTime;
import hla.rti1516e.exceptions.InvalidLookahead;
import hla.rti1516e.exceptions.InvalidMessageRetractionHandle;
import hla.rti1516e.exceptions.InvalidObjectClassHandle;
import hla.rti1516e.exceptions.InvalidOrderName;
import hla.rti1516e.exceptions.InvalidOrderType;
import hla.rti1516e.exceptions.InvalidParameterHandle;
import hla.rti1516e.exceptions.InvalidRangeBound;
import hla.rti1516e.exceptions.InvalidRegion;
import hla.rti1516e.exceptions.InvalidRegionContext;
import hla.rti1516e.exceptions.InvalidResignAction;
import hla.rti1516e.exceptions.InvalidServiceGroup;
import hla.rti1516e.exceptions.InvalidTransportationName;
import hla.rti1516e.exceptions.InvalidTransportationType;
import hla.rti1516e.exceptions.InvalidUpdateRateDesignator;
import hla.rti1516e.exceptions.LogicalTimeAlreadyPassed;
import hla.rti1516e.exceptions.MessageCanNoLongerBeRetracted;
import hla.rti1516e.exceptions.NameNotFound;
import hla.rti1516e.exceptions.NameSetWasEmpty;
import hla.rti1516e.exceptions.NoAcquisitionPending;
import hla.rti1516e.exceptions.NotConnected;
import hla.rti1516e.exceptions.ObjectClassNotDefined;
import hla.rti1516e.exceptions.ObjectClassNotPublished;
import hla.rti1516e.exceptions.ObjectClassRelevanceAdvisorySwitchIsOff;
import hla.rti1516e.exceptions.ObjectClassRelevanceAdvisorySwitchIsOn;
import hla.rti1516e.exceptions.ObjectInstanceNameInUse;
import hla.rti1516e.exceptions.ObjectInstanceNameNotReserved;
import hla.rti1516e.exceptions.ObjectInstanceNotKnown;
import hla.rti1516e.exceptions.OwnershipAcquisitionPending;
import hla.rti1516e.exceptions.RTIinternalError;
import hla.rti1516e.exceptions.RegionDoesNotContainSpecifiedDimension;
import hla.rti1516e.exceptions.RegionInUseForUpdateOrSubscription;
import hla.rti1516e.exceptions.RegionNotCreatedByThisFederate;
import hla.rti1516e.exceptions.RequestForTimeConstrainedPending;
import hla.rti1516e.exceptions.RequestForTimeRegulationPending;
import hla.rti1516e.exceptions.RestoreInProgress;
import hla.rti1516e.exceptions.RestoreNotInProgress;
import hla.rti1516e.exceptions.RestoreNotRequested;
import hla.rti1516e.exceptions.SaveInProgress;
import hla.rti1516e.exceptions.SaveNotInProgress;
import hla.rti1516e.exceptions.SaveNotInitiated;
import hla.rti1516e.exceptions.SynchronizationPointLabelNotAnnounced;
import hla.rti1516e.exceptions.TimeConstrainedAlreadyEnabled;
import hla.rti1516e.exceptions.TimeConstrainedIsNotEnabled;
import hla.rti1516e.exceptions.TimeRegulationAlreadyEnabled;
import hla.rti1516e.exceptions.TimeRegulationIsNotEnabled;
import hla.rti1516e.exceptions.UnsupportedCallbackModel;
import java.net.URL;
import java.util.Arrays;
import java.util.Set;
import org.slf4j.Logger;
/**
* Provide functions to give added-value rti calls e.g. add logging to each rti
* call e.g. combine connect, create and join within one call e.g. combine
* resign and destroy within one call
*
* @author Johannes Mulder
*/
public class IVCT_RTIambassador implements RTIambassador {
private RTIambassador _rtiAmbassador;
private EncoderFactory encoderFactory;
private Logger logger;
/**
* @param theRTIAmbassador reference to the rti ambassador
* @param encoderFactory encoder factory
* @param logger reference to the logger
*/
public IVCT_RTIambassador(final RTIambassador theRTIAmbassador, final EncoderFactory encoderFactory, final Logger logger) {
this._rtiAmbassador = theRTIAmbassador;
this.encoderFactory = encoderFactory;
this.logger = logger;
}
/**
* @return the encoder factory
*/
public EncoderFactory getEncoderFactory() {
return this.encoderFactory;
}
// 4.2
/**
* @param federateReference a reference to a user programmed callback
* @param callbackModel the type of callback
* @param localSettingsDesignator the settings for the rti
*/
public void connect(final FederateAmbassador federateReference, final CallbackModel callbackModel, final String localSettingsDesignator) throws ConnectionFailed, InvalidLocalSettingsDesignator, UnsupportedCallbackModel, AlreadyConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("connect federateReference=" + federateReference.toString() + ", callbackModel=" + callbackModel.toString() + ", localSettingsDesignator=" + localSettingsDesignator);
try {
this._rtiAmbassador.connect(federateReference, callbackModel, localSettingsDesignator);
} catch (ConnectionFailed | InvalidLocalSettingsDesignator | UnsupportedCallbackModel | AlreadyConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("connect exception=" + e.getMessage());
throw e;
}
}
// 4.2
/**
* @param federateReference a reference to a user programmed callback
* @param callbackModel the type of callback
*/
public void connect(final FederateAmbassador federateReference, final CallbackModel callbackModel) throws ConnectionFailed, InvalidLocalSettingsDesignator, UnsupportedCallbackModel, AlreadyConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("connect federateReference=" + federateReference.toString() + ", callbackModel=" + callbackModel.toString());
try {
this._rtiAmbassador.connect(federateReference, callbackModel);
} catch (ConnectionFailed | InvalidLocalSettingsDesignator | UnsupportedCallbackModel | AlreadyConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("connect exception=" + e.getMessage());
throw e;
}
}
// 4.3
/**
* @throws FederateIsExecutionMember federate is still joined
* @throws CallNotAllowedFromWithinCallback a callback is in process
* @throws RTIinternalError some rti internal error
*/
public void disconnect() throws FederateIsExecutionMember, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("disconnect");
try {
this._rtiAmbassador.disconnect();
} catch (FederateIsExecutionMember | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("disconnect exception=" + e.getMessage());
throw e;
}
}
//4.5
/**
* @param federationExecutionName federation execution name
* @param fomModules fom modules
* @param mimModule mim module
* @param logicalTimeImplementationName logical time implementation name
* @throws CouldNotCreateLogicalTimeFactory could not create logical time factory
* @throws InconsistentFDD inconsistent fdd
* @throws ErrorReadingFDD error reading fdd
* @throws CouldNotOpenFDD could not open fdd
* @throws ErrorReadingMIM error reading mim
* @throws CouldNotOpenMIM could not open mim
* @throws DesignatorIsHLAstandardMIM designator is hla standard mim
* @throws FederationExecutionAlreadyExists federation already exists
* @throws NotConnected not connected
* @throws RTIinternalError rti internal error
*/
public void createFederationExecution(final String federationExecutionName, final URL[] fomModules, final URL mimModule, final String logicalTimeImplementationName) throws CouldNotCreateLogicalTimeFactory, InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, ErrorReadingMIM, CouldNotOpenMIM, DesignatorIsHLAstandardMIM, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution federationExecutionName=" + federationExecutionName + ", fomModules=" + Arrays.toString(fomModules) + ", mimModule=" + mimModule.toString() + ", logicalTimeImplementationName=" + logicalTimeImplementationName);
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModules, mimModule, logicalTimeImplementationName);
} catch (CouldNotCreateLogicalTimeFactory | InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | ErrorReadingMIM | CouldNotOpenMIM | DesignatorIsHLAstandardMIM | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.5
/**
* @param federationExecutionName federation execution name
* @param fomModules fom modules
* @param logicalTimeImplementationName logical time implementation name
* @throws CouldNotCreateLogicalTimeFactory could not createLogical time factory
* @throws InconsistentFDD inconsistent fdd
* @throws ErrorReadingFDD error reading fdd
* @throws CouldNotOpenFDD could not open fdd
* @throws FederationExecutionAlreadyExists federation execution already exists
* @throws NotConnected not connected
* @throws RTIinternalError rti internal error
*/
public void createFederationExecution(final String federationExecutionName, final URL[] fomModules, final String logicalTimeImplementationName) throws CouldNotCreateLogicalTimeFactory, InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution federationExecutionName=" + federationExecutionName + ", fomModules=" + Arrays.toString(fomModules) + ", logicalTimeImplementationName=" + logicalTimeImplementationName);
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModules, logicalTimeImplementationName);
} catch (CouldNotCreateLogicalTimeFactory | InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.5
public void createFederationExecution(final String federationExecutionName, final URL[] fomModules, final URL mimModule) throws InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, ErrorReadingMIM, CouldNotOpenMIM, DesignatorIsHLAstandardMIM, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution federationExecutionName=" + federationExecutionName + ", fomModules=" + Arrays.toString(fomModules) + ", mimModule=" + mimModule.toString());
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModules, mimModule);
} catch (InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | ErrorReadingMIM | CouldNotOpenMIM | DesignatorIsHLAstandardMIM | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.5
public void createFederationExecution(final String federationExecutionName, final URL[] fomModules) throws InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution federationExecutionName=" + federationExecutionName + ", fomModules=" + Arrays.toString(fomModules));
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModules);
} catch (InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.5
public void createFederationExecution(final String federationExecutionName, final URL fomModule) throws InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution federationExecutionName=" + federationExecutionName + ", fomModule=" + fomModule.toString());
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModule);
} catch (InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.6
public void destroyFederationExecution(final String federationExecutionName) throws FederatesCurrentlyJoined, FederationExecutionDoesNotExist, NotConnected, RTIinternalError {
this.logger.info("destroyFederationExecution federationExecutionName=" + federationExecutionName);
try {
this._rtiAmbassador.destroyFederationExecution(federationExecutionName);
} catch (FederatesCurrentlyJoined | FederationExecutionDoesNotExist | NotConnected | RTIinternalError e) {
this.logger.error("destroyFederationExecution exception=" + e.getMessage());
throw e;
}
}
// 4.7
public void listFederationExecutions() throws NotConnected, RTIinternalError {
this.logger.info("listFederationExecutions");
try {
this._rtiAmbassador.listFederationExecutions();
} catch (NotConnected | RTIinternalError e) {
this.logger.error("listFederationExecutions exception=" + e.getMessage());
throw e;
}
}
//4.9
public FederateHandle joinFederationExecution(final String federateName, final String federateType, final String federationExecutionName, final URL[] additionalFomModules) throws CouldNotCreateLogicalTimeFactory, FederateNameAlreadyInUse, FederationExecutionDoesNotExist, InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, SaveInProgress, RestoreInProgress, FederateAlreadyExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("joinFederationExecution federateName=" + federateName + ", federateType=" + federateType + ", federationExecutionName=" + federationExecutionName + ", additionalFomModules=" + Arrays.toString(additionalFomModules));
try {
FederateHandle myFederateHandle;
myFederateHandle = this._rtiAmbassador.joinFederationExecution(federateName, federateType, federationExecutionName, additionalFomModules);
this.logger.info("joinFederationExecution return " + myFederateHandle.toString());
return myFederateHandle;
} catch (CouldNotCreateLogicalTimeFactory | FederateNameAlreadyInUse | FederationExecutionDoesNotExist | InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | SaveInProgress | RestoreInProgress | FederateAlreadyExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("joinFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.9
public FederateHandle joinFederationExecution(final String federateType, final String federationExecutionName, final URL[] additionalFomModules) throws CouldNotCreateLogicalTimeFactory, FederationExecutionDoesNotExist, InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, SaveInProgress, RestoreInProgress, FederateAlreadyExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("joinFederationExecution federateType=" + federateType + ", federationExecutionName=" + federationExecutionName + ", additionalFomModules=" + Arrays.toString(additionalFomModules));
try {
final FederateHandle federateHandle = this._rtiAmbassador.joinFederationExecution(federateType, federationExecutionName, additionalFomModules);
this.logger.info("joinFederationExecution return " + federateHandle.toString());
return federateHandle;
} catch (CouldNotCreateLogicalTimeFactory | FederationExecutionDoesNotExist | InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | SaveInProgress | RestoreInProgress | FederateAlreadyExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("joinFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.9
public FederateHandle joinFederationExecution(final String federateName, final String federateType, final String federationExecutionName) throws CouldNotCreateLogicalTimeFactory, FederateNameAlreadyInUse, FederationExecutionDoesNotExist, SaveInProgress, RestoreInProgress, FederateAlreadyExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("joinFederationExecution federateName=" + federateName + ", federateType=" + federateType + ", federationExecutionName=" + federationExecutionName);
try {
final FederateHandle federateHandle = this._rtiAmbassador.joinFederationExecution(federateName, federateType, federationExecutionName);
this.logger.info("joinFederationExecution return " + federateHandle.toString());
return federateHandle;
} catch (CouldNotCreateLogicalTimeFactory | FederateNameAlreadyInUse | FederationExecutionDoesNotExist | SaveInProgress | RestoreInProgress | FederateAlreadyExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("joinFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.9
public FederateHandle joinFederationExecution(final String federateType, final String federationExecutionName) throws CouldNotCreateLogicalTimeFactory, FederationExecutionDoesNotExist, SaveInProgress, RestoreInProgress, FederateAlreadyExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("joinFederationExecution federateType=" + federateType + ", federationExecutionName=" + federationExecutionName);
try {
final FederateHandle federateHandle = this._rtiAmbassador.joinFederationExecution(federateType, federationExecutionName);
this.logger.info("joinFederationExecution return " + federateHandle.toString());
return federateHandle;
} catch (CouldNotCreateLogicalTimeFactory | FederationExecutionDoesNotExist | SaveInProgress | RestoreInProgress | FederateAlreadyExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("joinFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.10
public void resignFederationExecution(final ResignAction resignAction) throws InvalidResignAction, OwnershipAcquisitionPending, FederateOwnsAttributes, FederateNotExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("resignFederationExecution resignAction=" + resignAction.toString());
try {
this._rtiAmbassador.resignFederationExecution(resignAction);
} catch (InvalidResignAction | OwnershipAcquisitionPending | FederateOwnsAttributes | FederateNotExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("resignFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.11
public void registerFederationSynchronizationPoint(final String synchronizationPointLabel, final byte[] userSuppliedTag) throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerFederationSynchronizationPoint synchronizationPointLabel=" + synchronizationPointLabel + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.registerFederationSynchronizationPoint(synchronizationPointLabel, userSuppliedTag);
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerFederationSynchronizationPoint exception=" + e.getMessage());
throw e;
}
}
//4.11
public void registerFederationSynchronizationPoint(final String synchronizationPointLabel, final byte[] userSuppliedTag, final FederateHandleSet synchronizationSet) throws InvalidFederateHandle, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerFederationSynchronizationPoint synchronizationPointLabel=" + synchronizationPointLabel + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag) + ", synchronizationSet=" + synchronizationSet.toString());
try {
this._rtiAmbassador.registerFederationSynchronizationPoint(synchronizationPointLabel, userSuppliedTag, synchronizationSet);
} catch (InvalidFederateHandle | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerFederationSynchronizationPoint exception=" + e.getMessage());
throw e;
}
}
//4.14
public void synchronizationPointAchieved(final String synchronizationPointLabel) throws SynchronizationPointLabelNotAnnounced, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("synchronizationPointAchieved synchronizationPointLabel=" + synchronizationPointLabel);
try {
this._rtiAmbassador.synchronizationPointAchieved(synchronizationPointLabel);
} catch (SynchronizationPointLabelNotAnnounced | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("synchronizationPointAchieved exception=" + e.getMessage());
throw e;
}
}
//4.14
public void synchronizationPointAchieved(final String synchronizationPointLabel, final boolean successIndicator) throws SynchronizationPointLabelNotAnnounced, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("synchronizationPointAchieved synchronizationPointLabel=" + synchronizationPointLabel + ", successIndicator= " + successIndicator);
try {
this._rtiAmbassador.synchronizationPointAchieved(synchronizationPointLabel, successIndicator);
} catch (SynchronizationPointLabelNotAnnounced | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("synchronizationPointAchieved exception=" + e.getMessage());
throw e;
}
}
// 4.16
public void requestFederationSave(final String label) throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestFederationSave label=" + label);
try {
this._rtiAmbassador.requestFederationSave(label);
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestFederationSave exception=" + e.getMessage());
throw e;
}
}
// 4.16
public void requestFederationSave(final String label, final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, FederateUnableToUseTime, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestFederationSave label=" + label + ", theTime=" + theTime.toString());
try {
this._rtiAmbassador.requestFederationSave(label, theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | FederateUnableToUseTime | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestFederationSave exception=" + e.getMessage());
throw e;
}
}
// 4.18
public void federateSaveBegun() throws SaveNotInitiated, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateSaveBegun");
try {
this._rtiAmbassador.federateSaveBegun();
} catch (SaveNotInitiated | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateSaveBegun exception=" + e.getMessage());
throw e;
}
}
// 4.19
public void federateSaveComplete() throws FederateHasNotBegunSave, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateSaveComplete");
try {
this._rtiAmbassador.federateSaveComplete();
} catch (FederateHasNotBegunSave | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateSaveComplete exception=" + e.getMessage());
throw e;
}
}
// 4.19
public void federateSaveNotComplete() throws FederateHasNotBegunSave, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateSaveNotComplete");
try {
this._rtiAmbassador.federateSaveNotComplete();
} catch (FederateHasNotBegunSave | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateSaveNotComplete exception=" + e.getMessage());
throw e;
}
}
// 4.21
public void abortFederationSave() throws SaveNotInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("abortFederationSave");
try {
this._rtiAmbassador.abortFederationSave();
} catch (SaveNotInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("abortFederationSave exception=" + e.getMessage());
throw e;
}
}
// 4.22
public void queryFederationSaveStatus() throws RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryFederationSaveStatus");
try {
this._rtiAmbassador.queryFederationSaveStatus();
} catch (RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryFederationSaveStatus exception=" + e.getMessage());
throw e;
}
}
// 4.24
public void requestFederationRestore(final String label) throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestFederationRestore label=" + label);
try {
this._rtiAmbassador.requestFederationRestore(label);
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestFederationRestore exception=" + e.getMessage());
throw e;
}
}
// 4.28
public void federateRestoreComplete() throws RestoreNotRequested, SaveInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateRestoreComplete");
try {
this._rtiAmbassador.federateRestoreComplete();
} catch (RestoreNotRequested | SaveInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateRestoreComplete exception=" + e.getMessage());
throw e;
}
}
// 4.28
public void federateRestoreNotComplete() throws RestoreNotRequested, SaveInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateRestoreNotComplete");
try {
this._rtiAmbassador.federateRestoreNotComplete();
} catch (RestoreNotRequested | SaveInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateRestoreNotComplete exception=" + e.getMessage());
throw e;
}
}
// 4.30
public void abortFederationRestore() throws RestoreNotInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("abortFederationRestore");
try {
this._rtiAmbassador.abortFederationRestore();
} catch (RestoreNotInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("abortFederationRestore exception=" + e.getMessage());
throw e;
}
}
// 4.31
public void queryFederationRestoreStatus() throws SaveInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryFederationRestoreStatus");
try {
this._rtiAmbassador.queryFederationRestoreStatus();
} catch (SaveInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryFederationRestoreStatus exception=" + e.getMessage());
throw e;
}
}
/////////////////////////////////////
// Declaration Management Services //
/////////////////////////////////////
// 5.2
public void publishObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("publishObjectClassAttributes theClass=" + theClass.toString() + ", attributeList=" + attributeList.toString());
try {
this._rtiAmbassador.publishObjectClassAttributes(theClass, attributeList);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("publishObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.3
public void unpublishObjectClass(final ObjectClassHandle theClass) throws OwnershipAcquisitionPending, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unpublishObjectClass theClass=" + theClass.toString());
try {
this._rtiAmbassador.unpublishObjectClass(theClass);
} catch (OwnershipAcquisitionPending | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unpublishObjectClass exception=" + e.getMessage());
throw e;
}
}
// 5.3
public void unpublishObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws OwnershipAcquisitionPending, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unpublishObjectClassAttributes theClass=" + theClass.toString() + ", attributeList=" + attributeList.toString());
try {
this._rtiAmbassador.unpublishObjectClassAttributes(theClass, attributeList);
} catch (OwnershipAcquisitionPending | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unpublishObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.4
public void publishInteractionClass(final InteractionClassHandle theInteraction) throws InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("publishInteractionClass theInteraction=" + theInteraction.toString());
try {
this._rtiAmbassador.publishInteractionClass(theInteraction);
} catch (InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("publishInteractionClass exception=" + e.getMessage());
throw e;
}
}
// 5.5
public void unpublishInteractionClass(final InteractionClassHandle theInteraction) throws InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unpublishInteractionClass theInteraction=" + theInteraction.toString());
try {
this._rtiAmbassador.unpublishInteractionClass(theInteraction);
} catch (InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unpublishInteractionClass exception=" + e.getMessage());
throw e;
}
}
// 5.6
public void subscribeObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributes theClass=" + theClass.toString() + ", attributeList=" + attributeList.toString());
try {
this._rtiAmbassador.subscribeObjectClassAttributes(theClass, attributeList);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.6
public void subscribeObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList, final String updateRateDesignator) throws AttributeNotDefined, ObjectClassNotDefined, InvalidUpdateRateDesignator, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributes theClass=" + theClass.toString() + ", attributeList=" + attributeList.toString() + ", updateRateDesignator=" + updateRateDesignator);
try {
this._rtiAmbassador.subscribeObjectClassAttributes(theClass, attributeList, updateRateDesignator);
} catch (AttributeNotDefined | ObjectClassNotDefined | InvalidUpdateRateDesignator | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.6
public void subscribeObjectClassAttributesPassively(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesPassively theClass=" + theClass.toString() + ", attributeList=" + attributeList.toString());
try {
this._rtiAmbassador.subscribeObjectClassAttributesPassively(theClass, attributeList);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesPassively exception=" + e.getMessage());
throw e;
}
}
// 5.6
public void subscribeObjectClassAttributesPassively(final ObjectClassHandle theClass, final AttributeHandleSet attributeList, final String updateRateDesignator) throws AttributeNotDefined, ObjectClassNotDefined, InvalidUpdateRateDesignator, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesPassively theClass=" + theClass.toString() + ", attributeList=" + attributeList.toString() + ", updateRateDesignator=" + updateRateDesignator);
try {
this._rtiAmbassador.subscribeObjectClassAttributesPassively(theClass, attributeList, updateRateDesignator);
} catch (AttributeNotDefined | ObjectClassNotDefined | InvalidUpdateRateDesignator | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesPassively exception=" + e.getMessage());
throw e;
}
}
// 5.7
public void unsubscribeObjectClass(final ObjectClassHandle theClass) throws ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeObjectClass theClass=" + theClass.toString());
try {
this._rtiAmbassador.unsubscribeObjectClass(theClass);
} catch (ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeObjectClass exception=" + e.getMessage());
throw e;
}
}
// 5.7
public void unsubscribeObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeObjectClassAttributes theClass=" + theClass.toString() + ", attributeList=" + attributeList.toString());
try {
this._rtiAmbassador.unsubscribeObjectClassAttributes(theClass, attributeList);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.8
public void subscribeInteractionClass(final InteractionClassHandle theClass) throws FederateServiceInvocationsAreBeingReportedViaMOM, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeInteractionClass theClass=" + theClass.toString());
try {
this._rtiAmbassador.subscribeInteractionClass(theClass);
} catch (FederateServiceInvocationsAreBeingReportedViaMOM | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeInteractionClass exception=" + e.getMessage());
throw e;
}
}
// 5.8
public void subscribeInteractionClassPassively(final InteractionClassHandle theClass) throws FederateServiceInvocationsAreBeingReportedViaMOM, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeInteractionClassPassively theClass=" + theClass.toString());
try {
this._rtiAmbassador.subscribeInteractionClassPassively(theClass);
} catch (FederateServiceInvocationsAreBeingReportedViaMOM | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeInteractionClassPassively exception=" + e.getMessage());
throw e;
}
}
// 5.9
public void unsubscribeInteractionClass(final InteractionClassHandle theClass) throws InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeInteractionClass theClass=" + theClass.toString());
try {
this._rtiAmbassador.unsubscribeInteractionClass(theClass);
} catch (InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeInteractionClass exception=" + e.getMessage());
throw e;
}
}
////////////////////////////////
// Object Management Services //
////////////////////////////////
// 6.2
public void reserveObjectInstanceName(final String theObjectName) throws IllegalName, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("reserveObjectInstanceName theObjectName=" + theObjectName);
try {
this._rtiAmbassador.reserveObjectInstanceName(theObjectName);
} catch (IllegalName | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("reserveObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 6.4
public void releaseObjectInstanceName(final String theObjectInstanceName) throws ObjectInstanceNameNotReserved, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("releaseObjectInstanceName theObjectInstanceName=" + theObjectInstanceName);
try {
this._rtiAmbassador.releaseObjectInstanceName(theObjectInstanceName);
} catch (ObjectInstanceNameNotReserved | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("releaseObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 6.5
public void reserveMultipleObjectInstanceName(final Set<String> theObjectNames) throws IllegalName, NameSetWasEmpty, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("reserveMultipleObjectInstanceName theObjectNames=" + theObjectNames.toString());
try {
this._rtiAmbassador.reserveMultipleObjectInstanceName(theObjectNames);
} catch (IllegalName | NameSetWasEmpty | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("reserveMultipleObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 6.7
public void releaseMultipleObjectInstanceName(final Set<String> theObjectNames) throws ObjectInstanceNameNotReserved, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("releaseMultipleObjectInstanceName theObjectNames=" + theObjectNames.toString());
try {
this._rtiAmbassador.releaseMultipleObjectInstanceName(theObjectNames);
} catch (ObjectInstanceNameNotReserved | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("releaseMultipleObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 6.8
public ObjectInstanceHandle registerObjectInstance(final ObjectClassHandle theClass) throws ObjectClassNotPublished, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerObjectInstance theClass=" + theClass.toString());
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.registerObjectInstance(theClass);
this.logger.info("registerObjectInstance return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (ObjectClassNotPublished | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.8
public ObjectInstanceHandle registerObjectInstance(final ObjectClassHandle theClass, final String theObjectName) throws ObjectInstanceNameInUse, ObjectInstanceNameNotReserved, ObjectClassNotPublished, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerObjectInstance theClass=" + theClass.toString() + ", theObjectName=" + theObjectName);
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.registerObjectInstance(theClass, theObjectName);
this.logger.info("registerObjectInstance return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (ObjectInstanceNameInUse | ObjectInstanceNameNotReserved | ObjectClassNotPublished | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.10
public void updateAttributeValues(final ObjectInstanceHandle theObject, final AttributeHandleValueMap theAttributes, final byte[] userSuppliedTag) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("updateAttributeValues theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.updateAttributeValues(theObject, theAttributes, userSuppliedTag);
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("updateAttributeValues exception=" + e.getMessage());
throw e;
}
}
// 6.10
public MessageRetractionReturn updateAttributeValues(final ObjectInstanceHandle theObject, final AttributeHandleValueMap theAttributes, final byte[] userSuppliedTag, final LogicalTime theTime) throws InvalidLogicalTime, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("updateAttributeValues theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag) + ", theTime=" + theTime.toString());
try {
final MessageRetractionReturn messageRetractionReturn = this._rtiAmbassador.updateAttributeValues(theObject, theAttributes, userSuppliedTag, theTime);
this.logger.info("updateAttributeValues return " + messageRetractionReturn.toString());
return messageRetractionReturn;
} catch (InvalidLogicalTime | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("updateAttributeValues exception=" + e.getMessage());
throw e;
}
}
// 6.12
public void sendInteraction(final InteractionClassHandle theInteraction, final ParameterHandleValueMap theParameters, final byte[] userSuppliedTag) throws InteractionClassNotPublished, InteractionParameterNotDefined, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("sendInteraction theInteraction=" + theInteraction.toString() + ", theParameters=" + theParameters.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.sendInteraction(theInteraction, theParameters, userSuppliedTag);
} catch (InteractionClassNotPublished | InteractionParameterNotDefined | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("sendInteraction exception=" + e.getMessage());
throw e;
}
}
// 6.12
public MessageRetractionReturn sendInteraction(final InteractionClassHandle theInteraction, final ParameterHandleValueMap theParameters, final byte[] userSuppliedTag, final LogicalTime theTime) throws InvalidLogicalTime, InteractionClassNotPublished, InteractionParameterNotDefined, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("sendInteraction theInteraction=" + theInteraction.toString() + ", theParameters=" + theParameters.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag) + ", theTime=" + theTime.toString());
try {
final MessageRetractionReturn messageRetractionReturn = this._rtiAmbassador.sendInteraction(theInteraction, theParameters, userSuppliedTag, theTime);
this.logger.info("sendInteraction return " + messageRetractionReturn.toString());
return messageRetractionReturn;
} catch (InvalidLogicalTime | InteractionClassNotPublished | InteractionParameterNotDefined | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("sendInteraction exception=" + e.getMessage());
throw e;
}
}
// 6.14
public void deleteObjectInstance(final ObjectInstanceHandle objectHandle, final byte[] userSuppliedTag) throws DeletePrivilegeNotHeld, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("deleteObjectInstance objectHandle=" + objectHandle.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.deleteObjectInstance(objectHandle, userSuppliedTag);
} catch (DeletePrivilegeNotHeld | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("deleteObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.14
public MessageRetractionReturn deleteObjectInstance(final ObjectInstanceHandle objectHandle, final byte[] userSuppliedTag, final LogicalTime theTime) throws InvalidLogicalTime, DeletePrivilegeNotHeld, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("deleteObjectInstance objectHandle=" + objectHandle.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag) + ", theTime=" + theTime.toString());
try {
final MessageRetractionReturn messageRetractionReturn = this._rtiAmbassador.deleteObjectInstance(objectHandle, userSuppliedTag, theTime);
this.logger.info("deleteObjectInstance return " + messageRetractionReturn.toString());
return messageRetractionReturn;
} catch (InvalidLogicalTime | DeletePrivilegeNotHeld | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("deleteObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.16
public void localDeleteObjectInstance(final ObjectInstanceHandle objectHandle) throws OwnershipAcquisitionPending, FederateOwnsAttributes, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("localDeleteObjectInstance objectHandle=" + objectHandle.toString());
try {
this._rtiAmbassador.localDeleteObjectInstance(objectHandle);
} catch (OwnershipAcquisitionPending | FederateOwnsAttributes | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("localDeleteObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.19
public void requestAttributeValueUpdate(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final byte[] userSuppliedTag) throws AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestAttributeValueUpdate theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.requestAttributeValueUpdate(theObject, theAttributes, userSuppliedTag);
} catch (AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestAttributeValueUpdate exception=" + e.getMessage());
throw e;
}
}
// 6.19
public void requestAttributeValueUpdate(final ObjectClassHandle theClass, final AttributeHandleSet theAttributes, final byte[] userSuppliedTag) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestAttributeValueUpdate theClass=" + theClass.toString() + ", theAttributes=" + theAttributes.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.requestAttributeValueUpdate(theClass, theAttributes, userSuppliedTag);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestAttributeValueUpdate exception=" + e.getMessage());
throw e;
}
}
// 6.23
public void requestAttributeTransportationTypeChange(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final TransportationTypeHandle theType) throws AttributeAlreadyBeingChanged, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, InvalidTransportationType, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestAttributeTransportationTypeChange theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString() + ", theType=" + theType.toString());
try {
this._rtiAmbassador.requestAttributeTransportationTypeChange(theObject, theAttributes, theType);
} catch (AttributeAlreadyBeingChanged | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | InvalidTransportationType | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestAttributeTransportationTypeChange exception=" + e.getMessage());
throw e;
}
}
// 6.25
public void queryAttributeTransportationType(final ObjectInstanceHandle theObject, final AttributeHandle theAttribute) throws AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryAttributeTransportationType theObject=" + theObject.toString() + ", theAttribute=" + theAttribute.toString());
try {
this._rtiAmbassador.queryAttributeTransportationType(theObject, theAttribute);
} catch (AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryAttributeTransportationType exception=" + e.getMessage());
throw e;
}
}
// 6.27
public void requestInteractionTransportationTypeChange(final InteractionClassHandle theClass, final TransportationTypeHandle theType) throws InteractionClassAlreadyBeingChanged, InteractionClassNotPublished, InteractionClassNotDefined, InvalidTransportationType, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestInteractionTransportationTypeChange theClass=" + theClass.toString() + ", theType=" + theType.toString());
try {
this._rtiAmbassador.requestInteractionTransportationTypeChange(theClass, theType);
} catch (InteractionClassAlreadyBeingChanged | InteractionClassNotPublished | InteractionClassNotDefined | InvalidTransportationType | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestInteractionTransportationTypeChange exception=" + e.getMessage());
throw e;
}
}
// 6.29
public void queryInteractionTransportationType(final FederateHandle theFederate, final InteractionClassHandle theInteraction) throws InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryInteractionTransportationType theFederate=" + theFederate.toString() + ", theInteraction=" + theInteraction.toString());
try {
this._rtiAmbassador.queryInteractionTransportationType(theFederate, theInteraction);
} catch (InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryInteractionTransportationType exception=" + e.getMessage());
throw e;
}
}
///////////////////////////////////
// Ownership Management Services //
///////////////////////////////////
// 7.2
public void unconditionalAttributeOwnershipDivestiture(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unconditionalAttributeOwnershipDivestiture theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString());
try {
this._rtiAmbassador.unconditionalAttributeOwnershipDivestiture(theObject, theAttributes);
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unconditionalAttributeOwnershipDivestiture exception=" + e.getMessage());
throw e;
}
}
// 7.3
public void negotiatedAttributeOwnershipDivestiture(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final byte[] userSuppliedTag) throws AttributeAlreadyBeingDivested, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("negotiatedAttributeOwnershipDivestiture theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.negotiatedAttributeOwnershipDivestiture(theObject, theAttributes, userSuppliedTag);
} catch (AttributeAlreadyBeingDivested | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("negotiatedAttributeOwnershipDivestiture exception=" + e.getMessage());
throw e;
}
}
// 7.6
public void confirmDivestiture(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final byte[] userSuppliedTag) throws NoAcquisitionPending, AttributeDivestitureWasNotRequested, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("confirmDivestiture theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.confirmDivestiture(theObject, theAttributes, userSuppliedTag);
} catch (NoAcquisitionPending | AttributeDivestitureWasNotRequested | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("confirmDivestiture exception=" + e.getMessage());
throw e;
}
}
// 7.8
public void attributeOwnershipAcquisition(final ObjectInstanceHandle theObject, final AttributeHandleSet desiredAttributes, final byte[] userSuppliedTag) throws AttributeNotPublished, ObjectClassNotPublished, FederateOwnsAttributes, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("attributeOwnershipAcquisition theObject=" + theObject.toString() + ", desiredAttributes=" + desiredAttributes.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.attributeOwnershipAcquisition(theObject, desiredAttributes, userSuppliedTag);
} catch (AttributeNotPublished | ObjectClassNotPublished | FederateOwnsAttributes | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("attributeOwnershipAcquisition exception=" + e.getMessage());
throw e;
}
}
// 7.9
public void attributeOwnershipAcquisitionIfAvailable(final ObjectInstanceHandle theObject, final AttributeHandleSet desiredAttributes) throws AttributeAlreadyBeingAcquired, AttributeNotPublished, ObjectClassNotPublished, FederateOwnsAttributes, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("attributeOwnershipAcquisitionIfAvailable theObject=" + theObject.toString() + ", desiredAttributes=" + desiredAttributes.toString());
try {
this._rtiAmbassador.attributeOwnershipAcquisitionIfAvailable(theObject, desiredAttributes);
} catch (AttributeAlreadyBeingAcquired | AttributeNotPublished | ObjectClassNotPublished | FederateOwnsAttributes | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("attributeOwnershipAcquisitionIfAvailable exception=" + e.getMessage());
throw e;
}
}
// 7.12
public void attributeOwnershipReleaseDenied(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("attributeOwnershipReleaseDenied theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString());
try {
this._rtiAmbassador.attributeOwnershipReleaseDenied(theObject, theAttributes);
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("attributeOwnershipReleaseDenied exception=" + e.getMessage());
throw e;
}
}
// 7.13
public AttributeHandleSet attributeOwnershipDivestitureIfWanted(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("attributeOwnershipDivestitureIfWanted theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString());
try {
final AttributeHandleSet attributeHandleSet = this._rtiAmbassador.attributeOwnershipDivestitureIfWanted(theObject, theAttributes);
this.logger.info("attributeOwnershipDivestitureIfWanted return " + attributeHandleSet.toString());
return attributeHandleSet;
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("attributeOwnershipDivestitureIfWanted exception=" + e.getMessage());
throw e;
}
}
// 7.14
public void cancelNegotiatedAttributeOwnershipDivestiture(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeDivestitureWasNotRequested, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("cancelNegotiatedAttributeOwnershipDivestiture theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString());
try {
this._rtiAmbassador.cancelNegotiatedAttributeOwnershipDivestiture(theObject, theAttributes);
} catch (AttributeDivestitureWasNotRequested | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("cancelNegotiatedAttributeOwnershipDivestiture exception=" + e.getMessage());
throw e;
}
}
// 7.15
public void cancelAttributeOwnershipAcquisition(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeAcquisitionWasNotRequested, AttributeAlreadyOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("cancelAttributeOwnershipAcquisition theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString());
try {
this._rtiAmbassador.cancelAttributeOwnershipAcquisition(theObject, theAttributes);
} catch (AttributeAcquisitionWasNotRequested | AttributeAlreadyOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("cancelAttributeOwnershipAcquisition exception=" + e.getMessage());
throw e;
}
}
// 7.17
public void queryAttributeOwnership(final ObjectInstanceHandle theObject, final AttributeHandle theAttribute) throws AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryAttributeOwnership theObject=" + theObject.toString() + ", theAttribute=" + theAttribute.toString());
try {
this._rtiAmbassador.queryAttributeOwnership(theObject, theAttribute);
} catch (AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryAttributeOwnership exception=" + e.getMessage());
throw e;
}
}
// 7.19
public boolean isAttributeOwnedByFederate(final ObjectInstanceHandle theObject, final AttributeHandle theAttribute) throws AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("isAttributeOwnedByFederate theObject=" + theObject.toString() + ", theAttribute=" + theAttribute.toString());
try {
final boolean bool = this._rtiAmbassador.isAttributeOwnedByFederate(theObject, theAttribute);
this.logger.info("isAttributeOwnedByFederate return " + bool);
return bool;
} catch (AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("isAttributeOwnedByFederate exception=" + e.getMessage());
throw e;
}
}
//////////////////////////////
// Time Management Services //
//////////////////////////////
// 8.2
public void enableTimeRegulation(final LogicalTimeInterval theLookahead) throws InvalidLookahead, InTimeAdvancingState, RequestForTimeRegulationPending, TimeRegulationAlreadyEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableTimeRegulation theLookahead=" + theLookahead.toString());
try {
this._rtiAmbassador.enableTimeRegulation(theLookahead);
} catch (InvalidLookahead | InTimeAdvancingState | RequestForTimeRegulationPending | TimeRegulationAlreadyEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableTimeRegulation exception=" + e.getMessage());
throw e;
}
}
// 8.4
public void disableTimeRegulation() throws TimeRegulationIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableTimeRegulation");
try {
this._rtiAmbassador.disableTimeRegulation();
} catch (TimeRegulationIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableTimeRegulation exception=" + e.getMessage());
throw e;
}
}
// 8.5
public void enableTimeConstrained() throws InTimeAdvancingState, RequestForTimeConstrainedPending, TimeConstrainedAlreadyEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableTimeConstrained");
try {
this._rtiAmbassador.enableTimeConstrained();
} catch (InTimeAdvancingState | RequestForTimeConstrainedPending | TimeConstrainedAlreadyEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableTimeConstrained exception=" + e.getMessage());
throw e;
}
}
// 8.7
public void disableTimeConstrained() throws TimeConstrainedIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableTimeConstrained");
try {
this._rtiAmbassador.disableTimeConstrained();
} catch (TimeConstrainedIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableTimeConstrained exception=" + e.getMessage());
throw e;
}
}
// 8.8
public void timeAdvanceRequest(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("timeAdvanceRequest theTime=" + theTime.toString());
try {
this._rtiAmbassador.timeAdvanceRequest(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("timeAdvanceRequest exception=" + e.getMessage());
throw e;
}
}
// 8.9
public void timeAdvanceRequestAvailable(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("timeAdvanceRequestAvailable theTime=" + theTime.toString());
try {
this._rtiAmbassador.timeAdvanceRequestAvailable(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("timeAdvanceRequestAvailable exception=" + e.getMessage());
throw e;
}
}
// 8.10
public void nextMessageRequest(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("nextMessageRequest theTime=" + theTime.toString());
try {
this._rtiAmbassador.nextMessageRequest(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("nextMessageRequest exception=" + e.getMessage());
throw e;
}
}
// 8.11
public void nextMessageRequestAvailable(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("nextMessageRequestAvailable theTime=" + theTime.toString());
try {
this._rtiAmbassador.nextMessageRequestAvailable(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("nextMessageRequestAvailable exception=" + e.getMessage());
throw e;
}
}
// 8.12
public void flushQueueRequest(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("flushQueueRequest theTime=" + theTime.toString());
try {
this._rtiAmbassador.flushQueueRequest(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("flushQueueRequest exception=" + e.getMessage());
throw e;
}
}
// 8.14
public void enableAsynchronousDelivery() throws AsynchronousDeliveryAlreadyEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableAsynchronousDelivery");
try {
this._rtiAmbassador.enableAsynchronousDelivery();
} catch (AsynchronousDeliveryAlreadyEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableAsynchronousDelivery exception=" + e.getMessage());
throw e;
}
}
// 8.15
public void disableAsynchronousDelivery() throws AsynchronousDeliveryAlreadyDisabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableAsynchronousDelivery");
try {
this._rtiAmbassador.disableAsynchronousDelivery();
} catch (AsynchronousDeliveryAlreadyDisabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableAsynchronousDelivery exception=" + e.getMessage());
throw e;
}
}
// 8.16
public TimeQueryReturn queryGALT() throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryGALT");
try {
final TimeQueryReturn timeQueryReturn = this._rtiAmbassador.queryGALT();
this.logger.info("queryGALT return " + timeQueryReturn.toString());
return timeQueryReturn;
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryGALT exception=" + e.getMessage());
throw e;
}
}
// 8.17
public LogicalTime queryLogicalTime() throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryLogicalTime");
try {
final LogicalTime logicalTime = this._rtiAmbassador.queryLogicalTime();
this.logger.info("queryLogicalTime return " + logicalTime.toString());
return logicalTime;
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryLogicalTime exception=" + e.getMessage());
throw e;
}
}
// 8.18
public TimeQueryReturn queryLITS() throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryLITS");
try {
final TimeQueryReturn timeQueryReturn = this._rtiAmbassador.queryLITS();
this.logger.info("queryLITS return " + timeQueryReturn.toString());
return timeQueryReturn;
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryLITS exception=" + e.getMessage());
throw e;
}
}
// 8.19
public void modifyLookahead(final LogicalTimeInterval theLookahead) throws InvalidLookahead, InTimeAdvancingState, TimeRegulationIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("modifyLookahead theLookahead=" + theLookahead.toString());
try {
this._rtiAmbassador.modifyLookahead(theLookahead);
} catch (InvalidLookahead | InTimeAdvancingState | TimeRegulationIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("modifyLookahead exception=" + e.getMessage());
throw e;
}
}
// 8.20
public LogicalTimeInterval queryLookahead() throws TimeRegulationIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryLookahead");
try {
final LogicalTimeInterval logicalTimeInterval = this._rtiAmbassador.queryLookahead();
this.logger.info("queryLookahead return " + logicalTimeInterval.toString());
return logicalTimeInterval;
} catch (TimeRegulationIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryLookahead exception=" + e.getMessage());
throw e;
}
}
// 8.21
public void retract(final MessageRetractionHandle theHandle) throws MessageCanNoLongerBeRetracted, InvalidMessageRetractionHandle, TimeRegulationIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("retract theHandle=" + theHandle.toString());
try {
this._rtiAmbassador.retract(theHandle);
} catch (MessageCanNoLongerBeRetracted | InvalidMessageRetractionHandle | TimeRegulationIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("retract exception=" + e.getMessage());
throw e;
}
}
// 8.23
public void changeAttributeOrderType(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final OrderType theType) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("changeAttributeOrderType theObject=" + theObject.toString() + ", theAttributes=" + theAttributes.toString() + ", theType=" + theType.toString());
try {
this._rtiAmbassador.changeAttributeOrderType(theObject, theAttributes, theType);
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("changeAttributeOrderType exception=" + e.getMessage());
throw e;
}
}
// 8.24
public void changeInteractionOrderType(final InteractionClassHandle theClass, final OrderType theType) throws InteractionClassNotPublished, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("changeInteractionOrderType theClass=" + theClass.toString() + ", theType=" + theType.toString());
try {
this._rtiAmbassador.changeInteractionOrderType(theClass, theType);
} catch (InteractionClassNotPublished | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("changeInteractionOrderType exception=" + e.getMessage());
throw e;
}
}
//////////////////////////////////
// Data Distribution Management //
//////////////////////////////////
// 9.2
public RegionHandle createRegion(final DimensionHandleSet dimensions) throws InvalidDimensionHandle, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("createRegion dimensions=" + dimensions.toString());
try {
final RegionHandle regionHandle = this._rtiAmbassador.createRegion(dimensions);
this.logger.info("createRegion return " + regionHandle.toString());
return regionHandle;
} catch (InvalidDimensionHandle | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("createRegion exception=" + e.getMessage());
throw e;
}
}
// 9.3
public void commitRegionModifications(final RegionHandleSet regions) throws RegionNotCreatedByThisFederate, InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("commitRegionModifications regions=" + regions.toString());
try {
this._rtiAmbassador.commitRegionModifications(regions);
} catch (RegionNotCreatedByThisFederate | InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("commitRegionModifications exception=" + e.getMessage());
throw e;
}
}
// 9.4
public void deleteRegion(final RegionHandle theRegion) throws RegionInUseForUpdateOrSubscription, RegionNotCreatedByThisFederate, InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("deleteRegion theRegion=" + theRegion.toString());
try {
this._rtiAmbassador.deleteRegion(theRegion);
} catch (RegionInUseForUpdateOrSubscription | RegionNotCreatedByThisFederate | InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("deleteRegion exception=" + e.getMessage());
throw e;
}
}
//9.5
public ObjectInstanceHandle registerObjectInstanceWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotPublished, ObjectClassNotPublished, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerObjectInstanceWithRegions theClass=" + theClass.toString() + ", attributesAndRegions=" + attributesAndRegions.toString());
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.registerObjectInstanceWithRegions(theClass, attributesAndRegions);
this.logger.info("registerObjectInstanceWithRegions return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotPublished | ObjectClassNotPublished | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerObjectInstanceWithRegions exception=" + e.getMessage());
throw e;
}
}
//9.5
public ObjectInstanceHandle registerObjectInstanceWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions, final String theObject) throws ObjectInstanceNameInUse, ObjectInstanceNameNotReserved, InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotPublished, ObjectClassNotPublished, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerObjectInstanceWithRegions theClass=" + theClass.toString() + ", attributesAndRegions=" + attributesAndRegions.toString() + ", theObject=" + theObject.toString());
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.registerObjectInstanceWithRegions(theClass, attributesAndRegions, theObject);
this.logger.info("registerObjectInstanceWithRegions return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (ObjectInstanceNameInUse | ObjectInstanceNameNotReserved | InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotPublished | ObjectClassNotPublished | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerObjectInstanceWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.6
public void associateRegionsForUpdates(final ObjectInstanceHandle theObject, final AttributeSetRegionSetPairList attributesAndRegions) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("associateRegionsForUpdates theObject=" + theObject.toString() + ", attributesAndRegions=" + attributesAndRegions.toString());
try {
this._rtiAmbassador.associateRegionsForUpdates(theObject, attributesAndRegions);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("associateRegionsForUpdates exception=" + e.getMessage());
throw e;
}
}
// 9.7
public void unassociateRegionsForUpdates(final ObjectInstanceHandle theObject, final AttributeSetRegionSetPairList attributesAndRegions) throws RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unassociateRegionsForUpdates theObject=" + theObject.toString() + ", attributesAndRegions=" + attributesAndRegions.toString());
try {
this._rtiAmbassador.unassociateRegionsForUpdates(theObject, attributesAndRegions);
} catch (RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unassociateRegionsForUpdates exception=" + e.getMessage());
throw e;
}
}
// 9.8
public void subscribeObjectClassAttributesWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesWithRegions theClass=" + theClass.toString() + ", attributesAndRegions=" + attributesAndRegions.toString());
try {
this._rtiAmbassador.subscribeObjectClassAttributesWithRegions(theClass, attributesAndRegions);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.8
public void subscribeObjectClassAttributesWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions, final String updateRateDesignator) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, InvalidUpdateRateDesignator, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesWithRegions theClass=" + theClass.toString() + ", attributesAndRegions=" + attributesAndRegions.toString() + ", updateRateDesignator=" + updateRateDesignator);
try {
this._rtiAmbassador.subscribeObjectClassAttributesWithRegions(theClass, attributesAndRegions, updateRateDesignator);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | InvalidUpdateRateDesignator | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.8
public void subscribeObjectClassAttributesPassivelyWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesPassivelyWithRegions theClass=" + theClass.toString() + ", attributesAndRegions=" + attributesAndRegions.toString());
try {
this._rtiAmbassador.subscribeObjectClassAttributesPassivelyWithRegions(theClass, attributesAndRegions);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesPassivelyWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.8
public void subscribeObjectClassAttributesPassivelyWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions, final String updateRateDesignator) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, InvalidUpdateRateDesignator, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesPassivelyWithRegions theClass=" + theClass.toString() + ", attributesAndRegions=" + attributesAndRegions.toString() + ", updateRateDesignator=" + updateRateDesignator);
try {
this._rtiAmbassador.subscribeObjectClassAttributesPassivelyWithRegions(theClass, attributesAndRegions, updateRateDesignator);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | InvalidUpdateRateDesignator | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesPassivelyWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.9
public void unsubscribeObjectClassAttributesWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions) throws RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeObjectClassAttributesWithRegions theClass=" + theClass.toString() + ", attributesAndRegions=" + attributesAndRegions.toString());
try {
this._rtiAmbassador.unsubscribeObjectClassAttributesWithRegions(theClass, attributesAndRegions);
} catch (RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeObjectClassAttributesWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.10
public void subscribeInteractionClassWithRegions(final InteractionClassHandle theClass, final RegionHandleSet regions) throws FederateServiceInvocationsAreBeingReportedViaMOM, InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeInteractionClassWithRegions theClass=" + theClass.toString() + ", regions=" + regions.toString());
try {
this._rtiAmbassador.subscribeInteractionClassWithRegions(theClass, regions);
} catch (FederateServiceInvocationsAreBeingReportedViaMOM | InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeInteractionClassWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.10
public void subscribeInteractionClassPassivelyWithRegions(final InteractionClassHandle theClass, final RegionHandleSet regions) throws FederateServiceInvocationsAreBeingReportedViaMOM, InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeInteractionClassPassivelyWithRegions theClass=" + theClass.toString() + ", regions=" + regions.toString());
try {
this._rtiAmbassador.subscribeInteractionClassPassivelyWithRegions(theClass, regions);
} catch (FederateServiceInvocationsAreBeingReportedViaMOM | InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeInteractionClassPassivelyWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.11
public void unsubscribeInteractionClassWithRegions(final InteractionClassHandle theClass, final RegionHandleSet regions) throws RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeInteractionClassWithRegions theClass=" + theClass.toString() + ", regions=" + regions.toString());
try {
this._rtiAmbassador.unsubscribeInteractionClassWithRegions(theClass, regions);
} catch (RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeInteractionClassWithRegions exception=" + e.getMessage());
throw e;
}
}
//9.12
public void sendInteractionWithRegions(final InteractionClassHandle theInteraction, final ParameterHandleValueMap theParameters, final RegionHandleSet regions, final byte[] userSuppliedTag) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotPublished, InteractionParameterNotDefined, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("sendInteractionWithRegions theInteraction=" + theInteraction.toString() + ", theParameters=" + theParameters.toString() + ", regions=" + regions.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.sendInteractionWithRegions(theInteraction, theParameters, regions, userSuppliedTag);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotPublished | InteractionParameterNotDefined | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("sendInteractionWithRegions exception=" + e.getMessage());
throw e;
}
}
//9.12
public MessageRetractionReturn sendInteractionWithRegions(final InteractionClassHandle theInteraction, final ParameterHandleValueMap theParameters, final RegionHandleSet regions, final byte[] userSuppliedTag, final LogicalTime theTime) throws InvalidLogicalTime, InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotPublished, InteractionParameterNotDefined, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("sendInteractionWithRegions theInteraction=" + theInteraction.toString() + ", theParameters=" + theParameters.toString() + ", regions=" + regions.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag) + ", theTime=" + theTime.toString());
try {
final MessageRetractionReturn messageRetractionReturn = this._rtiAmbassador.sendInteractionWithRegions(theInteraction, theParameters, regions, userSuppliedTag, theTime);
this.logger.info("sendInteractionWithRegions return " + messageRetractionReturn.toString());
return messageRetractionReturn;
} catch (InvalidLogicalTime | InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotPublished | InteractionParameterNotDefined | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("sendInteractionWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.13
public void requestAttributeValueUpdateWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions, final byte[] userSuppliedTag) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestAttributeValueUpdateWithRegions theClass=" + theClass.toString() + ", attributesAndRegions=" + attributesAndRegions.toString() + ", userSuppliedTag=" + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.requestAttributeValueUpdateWithRegions(theClass, attributesAndRegions, userSuppliedTag);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestAttributeValueUpdateWithRegions exception=" + e.getMessage());
throw e;
}
}
//////////////////////////
// RTI Support Services //
//////////////////////////
// 10.2
public ResignAction getAutomaticResignDirective() throws FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAutomaticResignDirective");
try {
final ResignAction resignAction = this._rtiAmbassador.getAutomaticResignDirective();
this.logger.info("getAutomaticResignDirective return " + resignAction.toString());
return resignAction;
} catch (FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAutomaticResignDirective exception=" + e.getMessage());
throw e;
}
}
// 10.3
public void setAutomaticResignDirective(final ResignAction resignAction) throws InvalidResignAction, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("setAutomaticResignDirective resignAction=" + resignAction.toString());
try {
this._rtiAmbassador.setAutomaticResignDirective(resignAction);
} catch (InvalidResignAction | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("setAutomaticResignDirective exception=" + e.getMessage());
throw e;
}
}
// 10.4
public FederateHandle getFederateHandle(final String theName) throws NameNotFound, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getFederateHandle theName=" + theName);
try {
final FederateHandle federateHandle = this._rtiAmbassador.getFederateHandle(theName);
this.logger.info("getFederateHandle return " + federateHandle.toString());
return federateHandle;
} catch (NameNotFound | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getFederateHandle exception=" + e.getMessage());
throw e;
}
}
// 10.5
public String getFederateName(final FederateHandle theHandle) throws InvalidFederateHandle, FederateHandleNotKnown, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getFederateName theHandle=" + theHandle.toString());
try {
final String str = this._rtiAmbassador.getFederateName(theHandle);
this.logger.info("getFederateName return " + str);
return str;
} catch (InvalidFederateHandle | FederateHandleNotKnown | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getFederateName exception=" + e.getMessage());
throw e;
}
}
// 10.6
public ObjectClassHandle getObjectClassHandle(final String theName) throws NameNotFound, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getObjectClassHandle theName=" + theName);
try {
final ObjectClassHandle objectClassHandle = this._rtiAmbassador.getObjectClassHandle(theName);
this.logger.info("getObjectClassHandle return " + objectClassHandle.toString());
return objectClassHandle;
} catch (NameNotFound | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getObjectClassHandle exception=" + e.getMessage());
throw e;
}
}
// 10.7
public String getObjectClassName(final ObjectClassHandle theHandle) throws InvalidObjectClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getObjectClassName theHandle=" + theHandle.toString());
try {
final String str = this._rtiAmbassador.getObjectClassName(theHandle);
this.logger.info("getObjectClassName return " + str);
return str;
} catch (InvalidObjectClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getObjectClassName exception=" + e.getMessage());
throw e;
}
}
// 10.8
public ObjectClassHandle getKnownObjectClassHandle(final ObjectInstanceHandle theObject) throws ObjectInstanceNotKnown, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getKnownObjectClassHandle theObject=" + theObject.toString());
try {
final ObjectClassHandle objectClassHandle = this._rtiAmbassador.getKnownObjectClassHandle(theObject);
this.logger.info("getKnownObjectClassHandle return " + objectClassHandle.toString());
return objectClassHandle;
} catch (ObjectInstanceNotKnown | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getKnownObjectClassHandle exception=" + e.getMessage());
throw e;
}
}
// 10.9
public ObjectInstanceHandle getObjectInstanceHandle(final String theName) throws ObjectInstanceNotKnown, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getObjectInstanceHandle theName=" + theName);
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.getObjectInstanceHandle(theName);
this.logger.info("getObjectInstanceHandle return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (ObjectInstanceNotKnown | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getObjectInstanceHandle exception=" + e.getMessage());
throw e;
}
}
// 10.10
public String getObjectInstanceName(final ObjectInstanceHandle theHandle) throws ObjectInstanceNotKnown, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getObjectInstanceName theHandle=" + theHandle.toString());
try {
final String str = this._rtiAmbassador.getObjectInstanceName(theHandle);
this.logger.info("getObjectInstanceName return " + str);
return str;
} catch (ObjectInstanceNotKnown | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 10.11
public AttributeHandle getAttributeHandle(final ObjectClassHandle whichClass, final String theName) throws NameNotFound, InvalidObjectClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAttributeHandle whichClass=" + whichClass.toString() + ", theName=" + theName);
try {
final AttributeHandle attributeHandle = this._rtiAmbassador.getAttributeHandle(whichClass, theName);
this.logger.info("getAttributeHandle return " + attributeHandle.toString());
return attributeHandle;
} catch (NameNotFound | InvalidObjectClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAttributeHandle exception=" + e.getMessage());
throw e;
}
}
// 10.12
public String getAttributeName(final ObjectClassHandle whichClass, final AttributeHandle theHandle) throws AttributeNotDefined, InvalidAttributeHandle, InvalidObjectClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAttributeName whichClass=" + whichClass.toString() + ", theHandle=" + theHandle.toString());
try {
final String str = this._rtiAmbassador.getAttributeName(whichClass, theHandle);
this.logger.info("getAttributeName return " + str);
return str;
} catch (AttributeNotDefined | InvalidAttributeHandle | InvalidObjectClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAttributeName exception=" + e.getMessage());
throw e;
}
}
// 10.13
public double getUpdateRateValue(final String updateRateDesignator) throws InvalidUpdateRateDesignator, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getUpdateRateValue updateRateDesignator=" + updateRateDesignator);
try {
final double d = this._rtiAmbassador.getUpdateRateValue(updateRateDesignator);
this.logger.info("getUpdateRateValue return " + d);
return d;
} catch (InvalidUpdateRateDesignator | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getUpdateRateValue exception=" + e.getMessage());
throw e;
}
}
// 10.14
public double getUpdateRateValueForAttribute(final ObjectInstanceHandle theObject, final AttributeHandle theAttribute) throws ObjectInstanceNotKnown, AttributeNotDefined, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getUpdateRateValueForAttribute theObject=" + theObject.toString() + ", theAttribute=" + theAttribute.toString());
try {
final double d = this._rtiAmbassador.getUpdateRateValueForAttribute(theObject, theAttribute);
this.logger.info("getUpdateRateValueForAttribute return " + d);
return d;
} catch (ObjectInstanceNotKnown | AttributeNotDefined | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getUpdateRateValueForAttribute exception=" + e.getMessage());
throw e;
}
}
// 10.15
public InteractionClassHandle getInteractionClassHandle(final String theName) throws NameNotFound, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getInteractionClassHandle theName=" + theName);
try {
final InteractionClassHandle interactionClassHandle = this._rtiAmbassador.getInteractionClassHandle(theName);
this.logger.info("getInteractionClassHandle return " + interactionClassHandle.toString());
return interactionClassHandle;
} catch (NameNotFound | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getInteractionClassHandle exception=" + e.getMessage());
throw e;
}
}
// 10.16
public String getInteractionClassName(final InteractionClassHandle theHandle) throws InvalidInteractionClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getInteractionClassName theHandle=" + theHandle.toString());
try {
final String str = this._rtiAmbassador.getInteractionClassName(theHandle);
this.logger.info("getInteractionClassName return " + str);
return str;
} catch (InvalidInteractionClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getInteractionClassName exception=" + e.getMessage());
throw e;
}
}
// 10.17
public ParameterHandle getParameterHandle(final InteractionClassHandle whichClass, final String theName) throws NameNotFound, InvalidInteractionClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getParameterHandle whichClass=" + whichClass.toString() + ", theName=" + theName);
try {
final ParameterHandle parameterHandle = this._rtiAmbassador.getParameterHandle(whichClass, theName);
this.logger.info("getParameterHandle return " + parameterHandle.toString());
return parameterHandle;
} catch (NameNotFound | InvalidInteractionClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getParameterHandle exception=" + e.getMessage());
throw e;
}
}
// 10.18
public String getParameterName(final InteractionClassHandle whichClass, final ParameterHandle theHandle) throws InteractionParameterNotDefined, InvalidParameterHandle, InvalidInteractionClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getParameterName whichClass=" + whichClass.toString() + ", theHandle=" + theHandle.toString());
try {
String str = this._rtiAmbassador.getParameterName(whichClass, theHandle);
this.logger.info("getParameterName return " + str);
return str;
} catch (InteractionParameterNotDefined | InvalidParameterHandle | InvalidInteractionClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getParameterName exception=" + e.getMessage());
throw e;
}
}
// 10.19
public OrderType getOrderType(final String theName) throws InvalidOrderName, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getOrderType theName=" + theName);
try {
OrderType orderType = this._rtiAmbassador.getOrderType(theName);
this.logger.info("getOrderType return " + orderType.toString());
return orderType;
} catch (InvalidOrderName | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getOrderType exception=" + e.getMessage());
throw e;
}
}
// 10.20
public String getOrderName(final OrderType theType) throws InvalidOrderType, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getOrderName theType=" + theType.toString());
try {
String str = this._rtiAmbassador.getOrderName(theType);
this.logger.info("getOrderName return " + str);
return str;
} catch (InvalidOrderType | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getOrderName exception=" + e.getMessage());
throw e;
}
}
// 10.21
public TransportationTypeHandle getTransportationTypeHandle(final String theName) throws InvalidTransportationName, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getTransportationTypeHandle theName=" + theName);
try {
TransportationTypeHandle transportationTypeHandle = this._rtiAmbassador.getTransportationTypeHandle(theName);
this.logger.info("getTransportationTypeHandle return " + transportationTypeHandle.toString());
return transportationTypeHandle;
} catch (InvalidTransportationName | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getTransportationTypeHandle exception=" + e.getMessage());
throw e;
}
}
// 10.22
public String getTransportationTypeName(final TransportationTypeHandle theHandle) throws InvalidTransportationType, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getTransportationTypeName theHandle=" + theHandle.toString());
try {
String str = this._rtiAmbassador.getTransportationTypeName(theHandle);
this.logger.info("getTransportationTypeName return " + str);
return str;
} catch (InvalidTransportationType | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getTransportationTypeName exception=" + e.getMessage());
throw e;
}
}
// 10.23
public DimensionHandleSet getAvailableDimensionsForClassAttribute(final ObjectClassHandle whichClass, final AttributeHandle theHandle) throws AttributeNotDefined, InvalidAttributeHandle, InvalidObjectClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAvailableDimensionsForClassAttribute whichClass=" + whichClass.toString() + ", theHandle=" + theHandle.toString());
try {
DimensionHandleSet dimensionHandleSet = this._rtiAmbassador.getAvailableDimensionsForClassAttribute(whichClass, theHandle);
this.logger.info("getAvailableDimensionsForClassAttribute return " + dimensionHandleSet.toString());
return dimensionHandleSet;
} catch (AttributeNotDefined | InvalidAttributeHandle | InvalidObjectClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAvailableDimensionsForClassAttribute exception=" + e.getMessage());
throw e;
}
}
// 10.24
public DimensionHandleSet getAvailableDimensionsForInteractionClass(final InteractionClassHandle theHandle) throws InvalidInteractionClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAvailableDimensionsForInteractionClass theHandle=" + theHandle.toString());
try {
DimensionHandleSet dimensionHandleSet = this._rtiAmbassador.getAvailableDimensionsForInteractionClass(theHandle);
this.logger.info("getAvailableDimensionsForInteractionClass return " + dimensionHandleSet.toString());
return dimensionHandleSet;
} catch (InvalidInteractionClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAvailableDimensionsForInteractionClass exception=" + e.getMessage());
throw e;
}
}
// 10.25
public DimensionHandle getDimensionHandle(final String theName) throws NameNotFound, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getDimensionHandle theName=" + theName);
try {
DimensionHandle dimensionHandle = this._rtiAmbassador.getDimensionHandle(theName);
this.logger.info("getDimensionHandle return " + dimensionHandle.toString());
return dimensionHandle;
} catch (NameNotFound | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getDimensionHandle exception=" + e.getMessage());
throw e;
}
}
// 10.26
public String getDimensionName(final DimensionHandle theHandle) throws InvalidDimensionHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getDimensionName theHandle=" + theHandle.toString());
try {
String str = this._rtiAmbassador.getDimensionName(theHandle);
this.logger.info("getDimensionName return " + str);
return str;
} catch (InvalidDimensionHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getDimensionName exception=" + e.getMessage());
throw e;
}
}
// 10.27
public long getDimensionUpperBound(final DimensionHandle theHandle) throws InvalidDimensionHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getDimensionUpperBound theHandle=" + theHandle.toString());
try {
long upperBound = this._rtiAmbassador.getDimensionUpperBound(theHandle);
this.logger.info("getDimensionUpperBound return " + upperBound);
return upperBound;
} catch (InvalidDimensionHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getDimensionUpperBound exception=" + e.getMessage());
throw e;
}
}
// 10.28
public DimensionHandleSet getDimensionHandleSet(final RegionHandle region) throws InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getDimensionHandleSet region=" + region.toString());
try {
DimensionHandleSet dimensionHandleSet = this._rtiAmbassador.getDimensionHandleSet(region);
this.logger.info("getDimensionHandleSet return " + dimensionHandleSet.toString());
return dimensionHandleSet;
} catch (InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getDimensionHandleSet exception=" + e.getMessage());
throw e;
}
}
// 10.29
public RangeBounds getRangeBounds(final RegionHandle region, final DimensionHandle dimension) throws RegionDoesNotContainSpecifiedDimension, InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getRangeBounds region=" + region.toString() + ", dimension=" + dimension.toString());
try {
RangeBounds rangeBounds = this._rtiAmbassador.getRangeBounds(region, dimension);
this.logger.info("getRangeBounds return " + rangeBounds.toString());
return rangeBounds;
} catch (RegionDoesNotContainSpecifiedDimension | InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getRangeBounds exception=" + e.getMessage());
throw e;
}
}
// 10.30
public void setRangeBounds(final RegionHandle region, final DimensionHandle dimension, final RangeBounds bounds) throws InvalidRangeBound, RegionDoesNotContainSpecifiedDimension, RegionNotCreatedByThisFederate, InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("setRangeBounds region=" + region.toString() + ", dimension=" + dimension.toString() + ", bounds=" + bounds.toString());
try {
this._rtiAmbassador.setRangeBounds(region, dimension, bounds);
} catch (InvalidRangeBound | RegionDoesNotContainSpecifiedDimension | RegionNotCreatedByThisFederate | InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("setRangeBounds exception=" + e.getMessage());
throw e;
}
}
// 10.31
public long normalizeFederateHandle(final FederateHandle federateHandle) throws InvalidFederateHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("normalizeFederateHandle federateHandle=" + federateHandle.toString());
try {
long normalizedFederateHandle = this._rtiAmbassador.normalizeFederateHandle(federateHandle);
this.logger.info("normalizeFederateHandle return " + normalizedFederateHandle);
return normalizedFederateHandle;
} catch (InvalidFederateHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("normalizeFederateHandle exception=" + e.getMessage());
throw e;
}
}
// 10.32
public long normalizeServiceGroup(final ServiceGroup group) throws InvalidServiceGroup, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("normalizeServiceGroup group=" + group.toString());
try {
long normalizedServiceGroup = this._rtiAmbassador.normalizeServiceGroup(group);
this.logger.info("normalizeServiceGroup return " + normalizedServiceGroup);
return normalizedServiceGroup;
} catch (InvalidServiceGroup | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("normalizeServiceGroup exception=" + e.getMessage());
throw e;
}
}
// 10.33
public void enableObjectClassRelevanceAdvisorySwitch() throws ObjectClassRelevanceAdvisorySwitchIsOn, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableObjectClassRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.enableObjectClassRelevanceAdvisorySwitch();
} catch (ObjectClassRelevanceAdvisorySwitchIsOn | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableObjectClassRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.34
public void disableObjectClassRelevanceAdvisorySwitch() throws ObjectClassRelevanceAdvisorySwitchIsOff, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableObjectClassRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.disableObjectClassRelevanceAdvisorySwitch();
} catch (ObjectClassRelevanceAdvisorySwitchIsOff | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableObjectClassRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.35
public void enableAttributeRelevanceAdvisorySwitch() throws AttributeRelevanceAdvisorySwitchIsOn, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableAttributeRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.enableAttributeRelevanceAdvisorySwitch();
} catch (AttributeRelevanceAdvisorySwitchIsOn | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableAttributeRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.36
public void disableAttributeRelevanceAdvisorySwitch() throws AttributeRelevanceAdvisorySwitchIsOff, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableAttributeRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.disableAttributeRelevanceAdvisorySwitch();
} catch (AttributeRelevanceAdvisorySwitchIsOff | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableAttributeRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.37
public void enableAttributeScopeAdvisorySwitch() throws AttributeScopeAdvisorySwitchIsOn, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableAttributeScopeAdvisorySwitch");
try {
this._rtiAmbassador.enableAttributeScopeAdvisorySwitch();
} catch (AttributeScopeAdvisorySwitchIsOn | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableAttributeScopeAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.38
public void disableAttributeScopeAdvisorySwitch() throws AttributeScopeAdvisorySwitchIsOff, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableAttributeScopeAdvisorySwitch");
try {
this._rtiAmbassador.disableAttributeScopeAdvisorySwitch();
} catch (AttributeScopeAdvisorySwitchIsOff | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableAttributeScopeAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.39
public void enableInteractionRelevanceAdvisorySwitch() throws InteractionRelevanceAdvisorySwitchIsOn, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableInteractionRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.enableInteractionRelevanceAdvisorySwitch();
} catch (InteractionRelevanceAdvisorySwitchIsOn | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableInteractionRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.40
public void disableInteractionRelevanceAdvisorySwitch() throws InteractionRelevanceAdvisorySwitchIsOff, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableInteractionRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.disableInteractionRelevanceAdvisorySwitch();
} catch (InteractionRelevanceAdvisorySwitchIsOff | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableInteractionRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.41
public boolean evokeCallback(final double approximateMinimumTimeInSeconds) throws CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("evokeCallback approximateMinimumTimeInSeconds=" + approximateMinimumTimeInSeconds);
try {
boolean b = this._rtiAmbassador.evokeCallback(approximateMinimumTimeInSeconds);
this.logger.info("evokeCallback return " + b);
return b;
} catch (CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("evokeCallback exception=" + e.getMessage());
throw e;
}
}
// 10.42
public boolean evokeMultipleCallbacks(final double approximateMinimumTimeInSeconds, final double approximateMaximumTimeInSeconds) throws CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("evokeMultipleCallbacks approximateMinimumTimeInSeconds=" + approximateMinimumTimeInSeconds + ", approximateMaximumTimeInSeconds=" + approximateMaximumTimeInSeconds);
try {
boolean b = this._rtiAmbassador.evokeMultipleCallbacks(approximateMinimumTimeInSeconds, approximateMaximumTimeInSeconds);
this.logger.info("evokeMultipleCallbacks return " + b);
return b;
} catch (CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("evokeMultipleCallbacks exception=" + e.getMessage());
throw e;
}
}
// 10.43
public void enableCallbacks() throws SaveInProgress, RestoreInProgress, RTIinternalError {
this.logger.info("enableCallbacks");
try {
this._rtiAmbassador.enableCallbacks();
} catch (SaveInProgress | RestoreInProgress | RTIinternalError e) {
this.logger.error("enableCallbacks exception=" + e.getMessage());
throw e;
}
}
// 10.44
public void disableCallbacks() throws SaveInProgress, RestoreInProgress, RTIinternalError {
this.logger.info("disableCallbacks");
try {
this._rtiAmbassador.disableCallbacks();
} catch (SaveInProgress | RestoreInProgress | RTIinternalError e) {
this.logger.error("disableCallbacks exception=" + e.getMessage());
throw e;
}
}
//API-specific services
public AttributeHandleFactory getAttributeHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getAttributeHandleFactory");
try {
return this._rtiAmbassador.getAttributeHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getAttributeHandleFactory exception=" + e.getMessage());
throw e;
}
}
public AttributeHandleSetFactory getAttributeHandleSetFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getAttributeHandleSetFactory");
try {
return this._rtiAmbassador.getAttributeHandleSetFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getAttributeHandleSetFactory exception=" + e.getMessage());
throw e;
}
}
public AttributeHandleValueMapFactory getAttributeHandleValueMapFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getAttributeHandleValueMapFactory");
try {
return this._rtiAmbassador.getAttributeHandleValueMapFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getAttributeHandleValueMapFactory exception=" + e.getMessage());
throw e;
}
}
public AttributeSetRegionSetPairListFactory getAttributeSetRegionSetPairListFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getAttributeSetRegionSetPairListFactory");
try {
return this._rtiAmbassador.getAttributeSetRegionSetPairListFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getAttributeSetRegionSetPairListFactory exception=" + e.getMessage());
throw e;
}
}
public DimensionHandleFactory getDimensionHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getDimensionHandleFactory");
try {
return this._rtiAmbassador.getDimensionHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getDimensionHandleFactory exception=" + e.getMessage());
throw e;
}
}
public DimensionHandleSetFactory getDimensionHandleSetFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getDimensionHandleSetFactory");
try {
return this._rtiAmbassador.getDimensionHandleSetFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getDimensionHandleSetFactory exception=" + e.getMessage());
throw e;
}
}
public FederateHandleFactory getFederateHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getFederateHandleFactory");
try {
return this._rtiAmbassador.getFederateHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getFederateHandleFactory exception=" + e.getMessage());
throw e;
}
}
public FederateHandleSetFactory getFederateHandleSetFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getFederateHandleSetFactory");
try {
return this._rtiAmbassador.getFederateHandleSetFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getFederateHandleSetFactory exception=" + e.getMessage());
throw e;
}
}
public InteractionClassHandleFactory getInteractionClassHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getInteractionClassHandleFactory");
try {
return this._rtiAmbassador.getInteractionClassHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getInteractionClassHandleFactory exception=" + e.getMessage());
throw e;
}
}
public ObjectClassHandleFactory getObjectClassHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getObjectClassHandleFactory");
try {
return this._rtiAmbassador.getObjectClassHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getObjectClassHandleFactory exception=" + e.getMessage());
throw e;
}
}
public ObjectInstanceHandleFactory getObjectInstanceHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getObjectInstanceHandleFactory");
try {
return this._rtiAmbassador.getObjectInstanceHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getObjectInstanceHandleFactory exception=" + e.getMessage());
throw e;
}
}
public ParameterHandleFactory getParameterHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getParameterHandleFactory");
try {
return this._rtiAmbassador.getParameterHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getParameterHandleFactory exception=" + e.getMessage());
throw e;
}
}
public ParameterHandleValueMapFactory getParameterHandleValueMapFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getParameterHandleValueMapFactory");
try {
return this._rtiAmbassador.getParameterHandleValueMapFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getParameterHandleValueMapFactory exception=" + e.getMessage());
throw e;
}
}
public RegionHandleSetFactory getRegionHandleSetFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getRegionHandleSetFactory");
try {
return this._rtiAmbassador.getRegionHandleSetFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getRegionHandleSetFactory exception=" + e.getMessage());
throw e;
}
}
public TransportationTypeHandleFactory getTransportationTypeHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getTransportationTypeHandleFactory");
try {
return this._rtiAmbassador.getTransportationTypeHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getTransportationTypeHandleFactory exception=" + e.getMessage());
throw e;
}
}
public String getHLAversion() {
this.logger.info("getHLAversion");
String str = this._rtiAmbassador.getHLAversion();
this.logger.error("getHLAversion return " + str);
return str;
}
public LogicalTimeFactory getTimeFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getTimeFactory");
try {
return this._rtiAmbassador.getTimeFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getTimeFactory exception=" + e.getMessage());
throw e;
}
}
}
| TC/src/main/java/de/fraunhofer/iosb/tc_lib/IVCT_RTIambassador.java | package de.fraunhofer.iosb.tc_lib;
import hla.rti1516e.AttributeHandle;
import hla.rti1516e.AttributeHandleFactory;
import hla.rti1516e.AttributeHandleSet;
import hla.rti1516e.AttributeHandleSetFactory;
import hla.rti1516e.AttributeHandleValueMap;
import hla.rti1516e.AttributeHandleValueMapFactory;
import hla.rti1516e.AttributeSetRegionSetPairList;
import hla.rti1516e.AttributeSetRegionSetPairListFactory;
import hla.rti1516e.CallbackModel;
import hla.rti1516e.DimensionHandle;
import hla.rti1516e.DimensionHandleFactory;
import hla.rti1516e.DimensionHandleSet;
import hla.rti1516e.DimensionHandleSetFactory;
import hla.rti1516e.FederateAmbassador;
import hla.rti1516e.FederateHandle;
import hla.rti1516e.FederateHandleFactory;
import hla.rti1516e.FederateHandleSet;
import hla.rti1516e.FederateHandleSetFactory;
import hla.rti1516e.InteractionClassHandle;
import hla.rti1516e.InteractionClassHandleFactory;
import hla.rti1516e.LogicalTime;
import hla.rti1516e.LogicalTimeFactory;
import hla.rti1516e.LogicalTimeInterval;
import hla.rti1516e.MessageRetractionHandle;
import hla.rti1516e.MessageRetractionReturn;
import hla.rti1516e.ObjectClassHandle;
import hla.rti1516e.ObjectClassHandleFactory;
import hla.rti1516e.ObjectInstanceHandle;
import hla.rti1516e.ObjectInstanceHandleFactory;
import hla.rti1516e.OrderType;
import hla.rti1516e.ParameterHandle;
import hla.rti1516e.ParameterHandleFactory;
import hla.rti1516e.ParameterHandleValueMap;
import hla.rti1516e.ParameterHandleValueMapFactory;
import hla.rti1516e.RTIambassador;
import hla.rti1516e.RangeBounds;
import hla.rti1516e.RegionHandle;
import hla.rti1516e.RegionHandleSet;
import hla.rti1516e.RegionHandleSetFactory;
import hla.rti1516e.ResignAction;
import hla.rti1516e.ServiceGroup;
import hla.rti1516e.TimeQueryReturn;
import hla.rti1516e.TransportationTypeHandle;
import hla.rti1516e.TransportationTypeHandleFactory;
import hla.rti1516e.encoding.EncoderFactory;
import hla.rti1516e.exceptions.AlreadyConnected;
import hla.rti1516e.exceptions.AsynchronousDeliveryAlreadyDisabled;
import hla.rti1516e.exceptions.AsynchronousDeliveryAlreadyEnabled;
import hla.rti1516e.exceptions.AttributeAcquisitionWasNotRequested;
import hla.rti1516e.exceptions.AttributeAlreadyBeingAcquired;
import hla.rti1516e.exceptions.AttributeAlreadyBeingChanged;
import hla.rti1516e.exceptions.AttributeAlreadyBeingDivested;
import hla.rti1516e.exceptions.AttributeAlreadyOwned;
import hla.rti1516e.exceptions.AttributeDivestitureWasNotRequested;
import hla.rti1516e.exceptions.AttributeNotDefined;
import hla.rti1516e.exceptions.AttributeNotOwned;
import hla.rti1516e.exceptions.AttributeNotPublished;
import hla.rti1516e.exceptions.AttributeRelevanceAdvisorySwitchIsOff;
import hla.rti1516e.exceptions.AttributeRelevanceAdvisorySwitchIsOn;
import hla.rti1516e.exceptions.AttributeScopeAdvisorySwitchIsOff;
import hla.rti1516e.exceptions.AttributeScopeAdvisorySwitchIsOn;
import hla.rti1516e.exceptions.CallNotAllowedFromWithinCallback;
import hla.rti1516e.exceptions.ConnectionFailed;
import hla.rti1516e.exceptions.CouldNotCreateLogicalTimeFactory;
import hla.rti1516e.exceptions.CouldNotOpenFDD;
import hla.rti1516e.exceptions.CouldNotOpenMIM;
import hla.rti1516e.exceptions.DeletePrivilegeNotHeld;
import hla.rti1516e.exceptions.DesignatorIsHLAstandardMIM;
import hla.rti1516e.exceptions.ErrorReadingFDD;
import hla.rti1516e.exceptions.ErrorReadingMIM;
import hla.rti1516e.exceptions.FederateAlreadyExecutionMember;
import hla.rti1516e.exceptions.FederateHandleNotKnown;
import hla.rti1516e.exceptions.FederateHasNotBegunSave;
import hla.rti1516e.exceptions.FederateIsExecutionMember;
import hla.rti1516e.exceptions.FederateNameAlreadyInUse;
import hla.rti1516e.exceptions.FederateNotExecutionMember;
import hla.rti1516e.exceptions.FederateOwnsAttributes;
import hla.rti1516e.exceptions.FederateServiceInvocationsAreBeingReportedViaMOM;
import hla.rti1516e.exceptions.FederateUnableToUseTime;
import hla.rti1516e.exceptions.FederatesCurrentlyJoined;
import hla.rti1516e.exceptions.FederationExecutionAlreadyExists;
import hla.rti1516e.exceptions.FederationExecutionDoesNotExist;
import hla.rti1516e.exceptions.IllegalName;
import hla.rti1516e.exceptions.InTimeAdvancingState;
import hla.rti1516e.exceptions.InconsistentFDD;
import hla.rti1516e.exceptions.InteractionClassAlreadyBeingChanged;
import hla.rti1516e.exceptions.InteractionClassNotDefined;
import hla.rti1516e.exceptions.InteractionClassNotPublished;
import hla.rti1516e.exceptions.InteractionParameterNotDefined;
import hla.rti1516e.exceptions.InteractionRelevanceAdvisorySwitchIsOff;
import hla.rti1516e.exceptions.InteractionRelevanceAdvisorySwitchIsOn;
import hla.rti1516e.exceptions.InvalidAttributeHandle;
import hla.rti1516e.exceptions.InvalidDimensionHandle;
import hla.rti1516e.exceptions.InvalidFederateHandle;
import hla.rti1516e.exceptions.InvalidInteractionClassHandle;
import hla.rti1516e.exceptions.InvalidLocalSettingsDesignator;
import hla.rti1516e.exceptions.InvalidLogicalTime;
import hla.rti1516e.exceptions.InvalidLookahead;
import hla.rti1516e.exceptions.InvalidMessageRetractionHandle;
import hla.rti1516e.exceptions.InvalidObjectClassHandle;
import hla.rti1516e.exceptions.InvalidOrderName;
import hla.rti1516e.exceptions.InvalidOrderType;
import hla.rti1516e.exceptions.InvalidParameterHandle;
import hla.rti1516e.exceptions.InvalidRangeBound;
import hla.rti1516e.exceptions.InvalidRegion;
import hla.rti1516e.exceptions.InvalidRegionContext;
import hla.rti1516e.exceptions.InvalidResignAction;
import hla.rti1516e.exceptions.InvalidServiceGroup;
import hla.rti1516e.exceptions.InvalidTransportationName;
import hla.rti1516e.exceptions.InvalidTransportationType;
import hla.rti1516e.exceptions.InvalidUpdateRateDesignator;
import hla.rti1516e.exceptions.LogicalTimeAlreadyPassed;
import hla.rti1516e.exceptions.MessageCanNoLongerBeRetracted;
import hla.rti1516e.exceptions.NameNotFound;
import hla.rti1516e.exceptions.NameSetWasEmpty;
import hla.rti1516e.exceptions.NoAcquisitionPending;
import hla.rti1516e.exceptions.NotConnected;
import hla.rti1516e.exceptions.ObjectClassNotDefined;
import hla.rti1516e.exceptions.ObjectClassNotPublished;
import hla.rti1516e.exceptions.ObjectClassRelevanceAdvisorySwitchIsOff;
import hla.rti1516e.exceptions.ObjectClassRelevanceAdvisorySwitchIsOn;
import hla.rti1516e.exceptions.ObjectInstanceNameInUse;
import hla.rti1516e.exceptions.ObjectInstanceNameNotReserved;
import hla.rti1516e.exceptions.ObjectInstanceNotKnown;
import hla.rti1516e.exceptions.OwnershipAcquisitionPending;
import hla.rti1516e.exceptions.RTIinternalError;
import hla.rti1516e.exceptions.RegionDoesNotContainSpecifiedDimension;
import hla.rti1516e.exceptions.RegionInUseForUpdateOrSubscription;
import hla.rti1516e.exceptions.RegionNotCreatedByThisFederate;
import hla.rti1516e.exceptions.RequestForTimeConstrainedPending;
import hla.rti1516e.exceptions.RequestForTimeRegulationPending;
import hla.rti1516e.exceptions.RestoreInProgress;
import hla.rti1516e.exceptions.RestoreNotInProgress;
import hla.rti1516e.exceptions.RestoreNotRequested;
import hla.rti1516e.exceptions.SaveInProgress;
import hla.rti1516e.exceptions.SaveNotInProgress;
import hla.rti1516e.exceptions.SaveNotInitiated;
import hla.rti1516e.exceptions.SynchronizationPointLabelNotAnnounced;
import hla.rti1516e.exceptions.TimeConstrainedAlreadyEnabled;
import hla.rti1516e.exceptions.TimeConstrainedIsNotEnabled;
import hla.rti1516e.exceptions.TimeRegulationAlreadyEnabled;
import hla.rti1516e.exceptions.TimeRegulationIsNotEnabled;
import hla.rti1516e.exceptions.UnsupportedCallbackModel;
import java.net.URL;
import java.util.Arrays;
import java.util.Set;
import org.slf4j.Logger;
/**
* Provide functions to give added-value rti calls e.g. add logging to each rti
* call e.g. combine connect, create and join within one call e.g. combine
* resign and destroy within one call
*
* @author Johannes Mulder
*/
public class IVCT_RTIambassador implements RTIambassador {
private RTIambassador _rtiAmbassador;
private EncoderFactory encoderFactory;
private Logger logger;
/**
* @param theRTIAmbassador reference to the rti ambassador
* @param encoderFactory encoder factory
* @param logger reference to the logger
*/
public IVCT_RTIambassador(final RTIambassador theRTIAmbassador, final EncoderFactory encoderFactory, final Logger logger) {
this._rtiAmbassador = theRTIAmbassador;
this.encoderFactory = encoderFactory;
this.logger = logger;
}
/**
* @return the encoder factory
*/
public EncoderFactory getEncoderFactory() {
return this.encoderFactory;
}
// 4.2
/**
* @param federateReference a reference to a user programmed callback
* @param callbackModel the type of callback
* @param localSettingsDesignator the settings for the rti
*/
public void connect(final FederateAmbassador federateReference, final CallbackModel callbackModel, final String localSettingsDesignator) throws ConnectionFailed, InvalidLocalSettingsDesignator, UnsupportedCallbackModel, AlreadyConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("connect " + federateReference.toString() + " " + callbackModel.toString() + " " + localSettingsDesignator);
try {
this._rtiAmbassador.connect(federateReference, callbackModel, localSettingsDesignator);
} catch (ConnectionFailed | InvalidLocalSettingsDesignator | UnsupportedCallbackModel | AlreadyConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("connect exception=" + e.getMessage());
throw e;
}
}
// 4.2
/**
* @param federateReference a reference to a user programmed callback
* @param callbackModel the type of callback
*/
public void connect(final FederateAmbassador federateReference, final CallbackModel callbackModel) throws ConnectionFailed, InvalidLocalSettingsDesignator, UnsupportedCallbackModel, AlreadyConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("connect " + federateReference.toString() + " " + callbackModel.toString());
try {
this._rtiAmbassador.connect(federateReference, callbackModel);
} catch (ConnectionFailed | InvalidLocalSettingsDesignator | UnsupportedCallbackModel | AlreadyConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("connect exception=" + e.getMessage());
throw e;
}
}
// 4.3
/**
* @throws FederateIsExecutionMember federate is still joined
* @throws CallNotAllowedFromWithinCallback a callback is in process
* @throws RTIinternalError some rti internal error
*/
public void disconnect() throws FederateIsExecutionMember, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("disconnect");
try {
this._rtiAmbassador.disconnect();
} catch (FederateIsExecutionMember | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("disconnect exception=" + e.getMessage());
throw e;
}
}
//4.5
/**
* @param federationExecutionName federation execution name
* @param fomModules fom modules
* @param mimModule mim module
* @param logicalTimeImplementationName logical time implementation name
* @throws CouldNotCreateLogicalTimeFactory could not create logical time factory
* @throws InconsistentFDD inconsistent fdd
* @throws ErrorReadingFDD error reading fdd
* @throws CouldNotOpenFDD could not open fdd
* @throws ErrorReadingMIM error reading mim
* @throws CouldNotOpenMIM could not open mim
* @throws DesignatorIsHLAstandardMIM designator is hla standard mim
* @throws FederationExecutionAlreadyExists federation already exists
* @throws NotConnected not connected
* @throws RTIinternalError rti internal error
*/
public void createFederationExecution(final String federationExecutionName, final URL[] fomModules, final URL mimModule, final String logicalTimeImplementationName) throws CouldNotCreateLogicalTimeFactory, InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, ErrorReadingMIM, CouldNotOpenMIM, DesignatorIsHLAstandardMIM, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution " + federationExecutionName + " " + Arrays.toString(fomModules) + " " + mimModule.toString() + " " + logicalTimeImplementationName);
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModules, mimModule, logicalTimeImplementationName);
} catch (CouldNotCreateLogicalTimeFactory | InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | ErrorReadingMIM | CouldNotOpenMIM | DesignatorIsHLAstandardMIM | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.5
/**
* @param federationExecutionName federation execution name
* @param fomModules fom modules
* @param logicalTimeImplementationName logical time implementation name
* @throws CouldNotCreateLogicalTimeFactory could not createLogical time factory
* @throws InconsistentFDD inconsistent fdd
* @throws ErrorReadingFDD error reading fdd
* @throws CouldNotOpenFDD could not open fdd
* @throws FederationExecutionAlreadyExists federation execution already exists
* @throws NotConnected not connected
* @throws RTIinternalError rti internal error
*/
public void createFederationExecution(final String federationExecutionName, final URL[] fomModules, final String logicalTimeImplementationName) throws CouldNotCreateLogicalTimeFactory, InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution " + federationExecutionName + " " + Arrays.toString(fomModules) + " " + logicalTimeImplementationName);
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModules, logicalTimeImplementationName);
} catch (CouldNotCreateLogicalTimeFactory | InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.5
public void createFederationExecution(final String federationExecutionName, final URL[] fomModules, final URL mimModule) throws InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, ErrorReadingMIM, CouldNotOpenMIM, DesignatorIsHLAstandardMIM, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution " + federationExecutionName + " " + Arrays.toString(fomModules) + " " + mimModule.toString());
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModules, mimModule);
} catch (InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | ErrorReadingMIM | CouldNotOpenMIM | DesignatorIsHLAstandardMIM | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.5
public void createFederationExecution(final String federationExecutionName, final URL[] fomModules) throws InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution " + federationExecutionName + " " + Arrays.toString(fomModules));
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModules);
} catch (InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.5
public void createFederationExecution(final String federationExecutionName, final URL fomModule) throws InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, FederationExecutionAlreadyExists, NotConnected, RTIinternalError {
this.logger.info("createFederationExecution " + federationExecutionName + " " + fomModule.toString());
try {
this._rtiAmbassador.createFederationExecution(federationExecutionName, fomModule);
} catch (InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | FederationExecutionAlreadyExists | NotConnected | RTIinternalError e) {
this.logger.error("createFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.6
public void destroyFederationExecution(final String federationExecutionName) throws FederatesCurrentlyJoined, FederationExecutionDoesNotExist, NotConnected, RTIinternalError {
this.logger.info("destroyFederationExecution " + federationExecutionName);
try {
this._rtiAmbassador.destroyFederationExecution(federationExecutionName);
} catch (FederatesCurrentlyJoined | FederationExecutionDoesNotExist | NotConnected | RTIinternalError e) {
this.logger.error("destroyFederationExecution exception=" + e.getMessage());
throw e;
}
}
// 4.7
public void listFederationExecutions() throws NotConnected, RTIinternalError {
this.logger.info("listFederationExecutions");
try {
this._rtiAmbassador.listFederationExecutions();
} catch (NotConnected | RTIinternalError e) {
this.logger.error("listFederationExecutions exception=" + e.getMessage());
throw e;
}
}
//4.9
public FederateHandle joinFederationExecution(final String federateName, final String federateType, final String federationExecutionName, final URL[] additionalFomModules) throws CouldNotCreateLogicalTimeFactory, FederateNameAlreadyInUse, FederationExecutionDoesNotExist, InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, SaveInProgress, RestoreInProgress, FederateAlreadyExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("joinFederationExecution " + federateName + " " + federateType + " " + federationExecutionName + " " + Arrays.toString(additionalFomModules));
try {
FederateHandle myFederateHandle;
myFederateHandle = this._rtiAmbassador.joinFederationExecution(federateName, federateType, federationExecutionName, additionalFomModules);
this.logger.info("joinFederationExecution return " + myFederateHandle.toString());
return myFederateHandle;
} catch (CouldNotCreateLogicalTimeFactory | FederateNameAlreadyInUse | FederationExecutionDoesNotExist | InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | SaveInProgress | RestoreInProgress | FederateAlreadyExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("joinFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.9
public FederateHandle joinFederationExecution(final String federateType, final String federationExecutionName, final URL[] additionalFomModules) throws CouldNotCreateLogicalTimeFactory, FederationExecutionDoesNotExist, InconsistentFDD, ErrorReadingFDD, CouldNotOpenFDD, SaveInProgress, RestoreInProgress, FederateAlreadyExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("joinFederationExecution " + federateType + " " + federationExecutionName + " " + Arrays.toString(additionalFomModules));
try {
final FederateHandle federateHandle = this._rtiAmbassador.joinFederationExecution(federateType, federationExecutionName, additionalFomModules);
this.logger.info("joinFederationExecution return " + federateHandle.toString());
return federateHandle;
} catch (CouldNotCreateLogicalTimeFactory | FederationExecutionDoesNotExist | InconsistentFDD | ErrorReadingFDD | CouldNotOpenFDD | SaveInProgress | RestoreInProgress | FederateAlreadyExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("joinFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.9
public FederateHandle joinFederationExecution(final String federateName, final String federateType, final String federationExecutionName) throws CouldNotCreateLogicalTimeFactory, FederateNameAlreadyInUse, FederationExecutionDoesNotExist, SaveInProgress, RestoreInProgress, FederateAlreadyExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("joinFederationExecution " + federateName + " " + federateType + " " + federationExecutionName);
try {
final FederateHandle federateHandle = this._rtiAmbassador.joinFederationExecution(federateName, federateType, federationExecutionName);
this.logger.info("joinFederationExecution return " + federateHandle.toString());
return federateHandle;
} catch (CouldNotCreateLogicalTimeFactory | FederateNameAlreadyInUse | FederationExecutionDoesNotExist | SaveInProgress | RestoreInProgress | FederateAlreadyExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("joinFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.9
public FederateHandle joinFederationExecution(final String federateType, final String federationExecutionName) throws CouldNotCreateLogicalTimeFactory, FederationExecutionDoesNotExist, SaveInProgress, RestoreInProgress, FederateAlreadyExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("joinFederationExecution " + federateType + " " + federationExecutionName);
try {
final FederateHandle federateHandle = this._rtiAmbassador.joinFederationExecution(federateType, federationExecutionName);
this.logger.info("joinFederationExecution return " + federateHandle.toString());
return federateHandle;
} catch (CouldNotCreateLogicalTimeFactory | FederationExecutionDoesNotExist | SaveInProgress | RestoreInProgress | FederateAlreadyExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("joinFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.10
public void resignFederationExecution(final ResignAction resignAction) throws InvalidResignAction, OwnershipAcquisitionPending, FederateOwnsAttributes, FederateNotExecutionMember, NotConnected, CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("resignFederationExecution " + resignAction.toString());
try {
this._rtiAmbassador.resignFederationExecution(resignAction);
} catch (InvalidResignAction | OwnershipAcquisitionPending | FederateOwnsAttributes | FederateNotExecutionMember | NotConnected | CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("resignFederationExecution exception=" + e.getMessage());
throw e;
}
}
//4.11
public void registerFederationSynchronizationPoint(final String synchronizationPointLabel, final byte[] userSuppliedTag) throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerFederationSynchronizationPoint " + synchronizationPointLabel + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.registerFederationSynchronizationPoint(synchronizationPointLabel, userSuppliedTag);
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerFederationSynchronizationPoint exception=" + e.getMessage());
throw e;
}
}
//4.11
public void registerFederationSynchronizationPoint(final String synchronizationPointLabel, final byte[] userSuppliedTag, final FederateHandleSet synchronizationSet) throws InvalidFederateHandle, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerFederationSynchronizationPoint " + synchronizationPointLabel + " " + Arrays.toString(userSuppliedTag) + " " + synchronizationSet.toString());
try {
this._rtiAmbassador.registerFederationSynchronizationPoint(synchronizationPointLabel, userSuppliedTag, synchronizationSet);
} catch (InvalidFederateHandle | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerFederationSynchronizationPoint exception=" + e.getMessage());
throw e;
}
}
//4.14
public void synchronizationPointAchieved(final String synchronizationPointLabel) throws SynchronizationPointLabelNotAnnounced, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("synchronizationPointAchieved " + synchronizationPointLabel);
try {
this._rtiAmbassador.synchronizationPointAchieved(synchronizationPointLabel);
} catch (SynchronizationPointLabelNotAnnounced | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("synchronizationPointAchieved exception=" + e.getMessage());
throw e;
}
}
//4.14
public void synchronizationPointAchieved(final String synchronizationPointLabel, final boolean successIndicator) throws SynchronizationPointLabelNotAnnounced, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("synchronizationPointAchieved " + synchronizationPointLabel + " " + successIndicator);
try {
this._rtiAmbassador.synchronizationPointAchieved(synchronizationPointLabel, successIndicator);
} catch (SynchronizationPointLabelNotAnnounced | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("synchronizationPointAchieved exception=" + e.getMessage());
throw e;
}
}
// 4.16
public void requestFederationSave(final String label) throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestFederationSave " + label);
try {
this._rtiAmbassador.requestFederationSave(label);
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestFederationSave exception=" + e.getMessage());
throw e;
}
}
// 4.16
public void requestFederationSave(final String label, final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, FederateUnableToUseTime, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestFederationSave " + label + " " + theTime.toString());
try {
this._rtiAmbassador.requestFederationSave(label, theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | FederateUnableToUseTime | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestFederationSave exception=" + e.getMessage());
throw e;
}
}
// 4.18
public void federateSaveBegun() throws SaveNotInitiated, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateSaveBegun");
try {
this._rtiAmbassador.federateSaveBegun();
} catch (SaveNotInitiated | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateSaveBegun exception=" + e.getMessage());
throw e;
}
}
// 4.19
public void federateSaveComplete() throws FederateHasNotBegunSave, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateSaveComplete");
try {
this._rtiAmbassador.federateSaveComplete();
} catch (FederateHasNotBegunSave | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateSaveComplete exception=" + e.getMessage());
throw e;
}
}
// 4.19
public void federateSaveNotComplete() throws FederateHasNotBegunSave, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateSaveNotComplete");
try {
this._rtiAmbassador.federateSaveNotComplete();
} catch (FederateHasNotBegunSave | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateSaveNotComplete exception=" + e.getMessage());
throw e;
}
}
// 4.21
public void abortFederationSave() throws SaveNotInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("abortFederationSave");
try {
this._rtiAmbassador.abortFederationSave();
} catch (SaveNotInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("abortFederationSave exception=" + e.getMessage());
throw e;
}
}
// 4.22
public void queryFederationSaveStatus() throws RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryFederationSaveStatus");
try {
this._rtiAmbassador.queryFederationSaveStatus();
} catch (RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryFederationSaveStatus exception=" + e.getMessage());
throw e;
}
}
// 4.24
public void requestFederationRestore(final String label) throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestFederationRestore " + label);
try {
this._rtiAmbassador.requestFederationRestore(label);
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestFederationRestore exception=" + e.getMessage());
throw e;
}
}
// 4.28
public void federateRestoreComplete() throws RestoreNotRequested, SaveInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateRestoreComplete");
try {
this._rtiAmbassador.federateRestoreComplete();
} catch (RestoreNotRequested | SaveInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateRestoreComplete exception=" + e.getMessage());
throw e;
}
}
// 4.28
public void federateRestoreNotComplete() throws RestoreNotRequested, SaveInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("federateRestoreNotComplete");
try {
this._rtiAmbassador.federateRestoreNotComplete();
} catch (RestoreNotRequested | SaveInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("federateRestoreNotComplete exception=" + e.getMessage());
throw e;
}
}
// 4.30
public void abortFederationRestore() throws RestoreNotInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("abortFederationRestore");
try {
this._rtiAmbassador.abortFederationRestore();
} catch (RestoreNotInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("abortFederationRestore exception=" + e.getMessage());
throw e;
}
}
// 4.31
public void queryFederationRestoreStatus() throws SaveInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryFederationRestoreStatus");
try {
this._rtiAmbassador.queryFederationRestoreStatus();
} catch (SaveInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryFederationRestoreStatus exception=" + e.getMessage());
throw e;
}
}
/////////////////////////////////////
// Declaration Management Services //
/////////////////////////////////////
// 5.2
public void publishObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("publishObjectClassAttributes " + theClass.toString() + " " + attributeList.toString());
try {
this._rtiAmbassador.publishObjectClassAttributes(theClass, attributeList);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("publishObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.3
public void unpublishObjectClass(final ObjectClassHandle theClass) throws OwnershipAcquisitionPending, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unpublishObjectClass " + theClass.toString());
try {
this._rtiAmbassador.unpublishObjectClass(theClass);
} catch (OwnershipAcquisitionPending | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unpublishObjectClass exception=" + e.getMessage());
throw e;
}
}
// 5.3
public void unpublishObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws OwnershipAcquisitionPending, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unpublishObjectClassAttributes " + theClass.toString() + " " + attributeList.toString());
try {
this._rtiAmbassador.unpublishObjectClassAttributes(theClass, attributeList);
} catch (OwnershipAcquisitionPending | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unpublishObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.4
public void publishInteractionClass(final InteractionClassHandle theInteraction) throws InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("publishInteractionClass " + theInteraction.toString());
try {
this._rtiAmbassador.publishInteractionClass(theInteraction);
} catch (InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("publishInteractionClass exception=" + e.getMessage());
throw e;
}
}
// 5.5
public void unpublishInteractionClass(final InteractionClassHandle theInteraction) throws InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unpublishInteractionClass " + " " + theInteraction.toString());
try {
this._rtiAmbassador.unpublishInteractionClass(theInteraction);
} catch (InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unpublishInteractionClass exception=" + e.getMessage());
throw e;
}
}
// 5.6
public void subscribeObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributes " + theClass.toString() + attributeList.toString());
try {
this._rtiAmbassador.subscribeObjectClassAttributes(theClass, attributeList);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.6
public void subscribeObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList, final String updateRateDesignator) throws AttributeNotDefined, ObjectClassNotDefined, InvalidUpdateRateDesignator, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributes " + theClass.toString() + attributeList.toString() + " " + updateRateDesignator);
try {
this._rtiAmbassador.subscribeObjectClassAttributes(theClass, attributeList, updateRateDesignator);
} catch (AttributeNotDefined | ObjectClassNotDefined | InvalidUpdateRateDesignator | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.6
public void subscribeObjectClassAttributesPassively(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesPassively " + theClass.toString() + attributeList.toString());
try {
this._rtiAmbassador.subscribeObjectClassAttributesPassively(theClass, attributeList);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesPassively exception=" + e.getMessage());
throw e;
}
}
// 5.6
public void subscribeObjectClassAttributesPassively(final ObjectClassHandle theClass, final AttributeHandleSet attributeList, final String updateRateDesignator) throws AttributeNotDefined, ObjectClassNotDefined, InvalidUpdateRateDesignator, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesPassively " + theClass.toString() + attributeList.toString() + " " + updateRateDesignator);
try {
this._rtiAmbassador.subscribeObjectClassAttributesPassively(theClass, attributeList, updateRateDesignator);
} catch (AttributeNotDefined | ObjectClassNotDefined | InvalidUpdateRateDesignator | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesPassively exception=" + e.getMessage());
throw e;
}
}
// 5.7
public void unsubscribeObjectClass(final ObjectClassHandle theClass) throws ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeObjectClass " + theClass.toString());
try {
this._rtiAmbassador.unsubscribeObjectClass(theClass);
} catch (ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeObjectClass exception=" + e.getMessage());
throw e;
}
}
// 5.7
public void unsubscribeObjectClassAttributes(final ObjectClassHandle theClass, final AttributeHandleSet attributeList) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeObjectClassAttributes " + theClass.toString() + " " + attributeList.toString());
try {
this._rtiAmbassador.unsubscribeObjectClassAttributes(theClass, attributeList);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeObjectClassAttributes exception=" + e.getMessage());
throw e;
}
}
// 5.8
public void subscribeInteractionClass(final InteractionClassHandle theClass) throws FederateServiceInvocationsAreBeingReportedViaMOM, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeInteractionClass " + theClass.toString());
try {
this._rtiAmbassador.subscribeInteractionClass(theClass);
} catch (FederateServiceInvocationsAreBeingReportedViaMOM | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeInteractionClass exception=" + e.getMessage());
throw e;
}
}
// 5.8
public void subscribeInteractionClassPassively(final InteractionClassHandle theClass) throws FederateServiceInvocationsAreBeingReportedViaMOM, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeInteractionClassPassively " + theClass.toString());
try {
this._rtiAmbassador.subscribeInteractionClassPassively(theClass);
} catch (FederateServiceInvocationsAreBeingReportedViaMOM | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeInteractionClassPassively exception=" + e.getMessage());
throw e;
}
}
// 5.9
public void unsubscribeInteractionClass(final InteractionClassHandle theClass) throws InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeInteractionClass " + theClass.toString());
try {
this._rtiAmbassador.unsubscribeInteractionClass(theClass);
} catch (InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeInteractionClass exception=" + e.getMessage());
throw e;
}
}
////////////////////////////////
// Object Management Services //
////////////////////////////////
// 6.2
public void reserveObjectInstanceName(final String theObjectName) throws IllegalName, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("reserveObjectInstanceName " + theObjectName);
try {
this._rtiAmbassador.reserveObjectInstanceName(theObjectName);
} catch (IllegalName | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("reserveObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 6.4
public void releaseObjectInstanceName(final String theObjectInstanceName) throws ObjectInstanceNameNotReserved, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("releaseObjectInstanceName " + theObjectInstanceName);
try {
this._rtiAmbassador.releaseObjectInstanceName(theObjectInstanceName);
} catch (ObjectInstanceNameNotReserved | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("releaseObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 6.5
public void reserveMultipleObjectInstanceName(final Set<String> theObjectNames) throws IllegalName, NameSetWasEmpty, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("reserveMultipleObjectInstanceName " + theObjectNames.toString());
try {
this._rtiAmbassador.reserveMultipleObjectInstanceName(theObjectNames);
} catch (IllegalName | NameSetWasEmpty | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("reserveMultipleObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 6.7
public void releaseMultipleObjectInstanceName(final Set<String> theObjectNames) throws ObjectInstanceNameNotReserved, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("releaseMultipleObjectInstanceName " + theObjectNames.toString());
try {
this._rtiAmbassador.releaseMultipleObjectInstanceName(theObjectNames);
} catch (ObjectInstanceNameNotReserved | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("releaseMultipleObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 6.8
public ObjectInstanceHandle registerObjectInstance(final ObjectClassHandle theClass) throws ObjectClassNotPublished, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerObjectInstance " + theClass.toString());
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.registerObjectInstance(theClass);
this.logger.info("registerObjectInstance return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (ObjectClassNotPublished | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.8
public ObjectInstanceHandle registerObjectInstance(final ObjectClassHandle theClass, final String theObjectName) throws ObjectInstanceNameInUse, ObjectInstanceNameNotReserved, ObjectClassNotPublished, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerObjectInstance " + theClass.toString() + " " + theObjectName);
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.registerObjectInstance(theClass, theObjectName);
this.logger.info("registerObjectInstance return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (ObjectInstanceNameInUse | ObjectInstanceNameNotReserved | ObjectClassNotPublished | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.10
public void updateAttributeValues(final ObjectInstanceHandle theObject, final AttributeHandleValueMap theAttributes, final byte[] userSuppliedTag) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("updateAttributeValues " + theObject.toString() + " " + theAttributes.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.updateAttributeValues(theObject, theAttributes, userSuppliedTag);
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("updateAttributeValues exception=" + e.getMessage());
throw e;
}
}
// 6.10
public MessageRetractionReturn updateAttributeValues(final ObjectInstanceHandle theObject, final AttributeHandleValueMap theAttributes, final byte[] userSuppliedTag, final LogicalTime theTime) throws InvalidLogicalTime, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("updateAttributeValues " + theObject.toString() + " " + theAttributes.toString() + " " + Arrays.toString(userSuppliedTag) + " " + theTime.toString());
try {
final MessageRetractionReturn messageRetractionReturn = this._rtiAmbassador.updateAttributeValues(theObject, theAttributes, userSuppliedTag, theTime);
this.logger.info("updateAttributeValues return " + messageRetractionReturn.toString());
return messageRetractionReturn;
} catch (InvalidLogicalTime | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("updateAttributeValues exception=" + e.getMessage());
throw e;
}
}
// 6.12
public void sendInteraction(final InteractionClassHandle theInteraction, final ParameterHandleValueMap theParameters, final byte[] userSuppliedTag) throws InteractionClassNotPublished, InteractionParameterNotDefined, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("sendInteraction " + theInteraction.toString() + " " + theParameters.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.sendInteraction(theInteraction, theParameters, userSuppliedTag);
} catch (InteractionClassNotPublished | InteractionParameterNotDefined | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("sendInteraction exception=" + e.getMessage());
throw e;
}
}
// 6.12
public MessageRetractionReturn sendInteraction(final InteractionClassHandle theInteraction, final ParameterHandleValueMap theParameters, final byte[] userSuppliedTag, final LogicalTime theTime) throws InvalidLogicalTime, InteractionClassNotPublished, InteractionParameterNotDefined, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("sendInteraction " + theInteraction.toString() + " " + theParameters.toString() + " " + Arrays.toString(userSuppliedTag) + " " + theTime.toString());
try {
final MessageRetractionReturn messageRetractionReturn = this._rtiAmbassador.sendInteraction(theInteraction, theParameters, userSuppliedTag, theTime);
this.logger.info("sendInteraction return " + messageRetractionReturn.toString());
return messageRetractionReturn;
} catch (InvalidLogicalTime | InteractionClassNotPublished | InteractionParameterNotDefined | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("sendInteraction exception=" + e.getMessage());
throw e;
}
}
// 6.14
public void deleteObjectInstance(final ObjectInstanceHandle objectHandle, final byte[] userSuppliedTag) throws DeletePrivilegeNotHeld, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("deleteObjectInstance " + objectHandle.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.deleteObjectInstance(objectHandle, userSuppliedTag);
} catch (DeletePrivilegeNotHeld | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("deleteObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.14
public MessageRetractionReturn deleteObjectInstance(final ObjectInstanceHandle objectHandle, final byte[] userSuppliedTag, final LogicalTime theTime) throws InvalidLogicalTime, DeletePrivilegeNotHeld, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("deleteObjectInstance " + objectHandle.toString() + " " + Arrays.toString(userSuppliedTag) + " " + theTime.toString());
try {
final MessageRetractionReturn messageRetractionReturn = this._rtiAmbassador.deleteObjectInstance(objectHandle, userSuppliedTag, theTime);
this.logger.info("deleteObjectInstance return " + messageRetractionReturn.toString());
return messageRetractionReturn;
} catch (InvalidLogicalTime | DeletePrivilegeNotHeld | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("deleteObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.16
public void localDeleteObjectInstance(final ObjectInstanceHandle objectHandle) throws OwnershipAcquisitionPending, FederateOwnsAttributes, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("localDeleteObjectInstance " + objectHandle.toString());
try {
this._rtiAmbassador.localDeleteObjectInstance(objectHandle);
} catch (OwnershipAcquisitionPending | FederateOwnsAttributes | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("localDeleteObjectInstance exception=" + e.getMessage());
throw e;
}
}
// 6.19
public void requestAttributeValueUpdate(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final byte[] userSuppliedTag) throws AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestAttributeValueUpdate " + theObject.toString() + " " + theAttributes.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.requestAttributeValueUpdate(theObject, theAttributes, userSuppliedTag);
} catch (AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestAttributeValueUpdate exception=" + e.getMessage());
throw e;
}
}
// 6.19
public void requestAttributeValueUpdate(final ObjectClassHandle theClass, final AttributeHandleSet theAttributes, final byte[] userSuppliedTag) throws AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestAttributeValueUpdate " + theClass.toString() + " " + theAttributes.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.requestAttributeValueUpdate(theClass, theAttributes, userSuppliedTag);
} catch (AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestAttributeValueUpdate exception=" + e.getMessage());
throw e;
}
}
// 6.23
public void requestAttributeTransportationTypeChange(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final TransportationTypeHandle theType) throws AttributeAlreadyBeingChanged, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, InvalidTransportationType, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestAttributeTransportationTypeChange " + theObject.toString() + " " + theAttributes.toString() + " " + theType.toString());
try {
this._rtiAmbassador.requestAttributeTransportationTypeChange(theObject, theAttributes, theType);
} catch (AttributeAlreadyBeingChanged | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | InvalidTransportationType | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestAttributeTransportationTypeChange exception=" + e.getMessage());
throw e;
}
}
// 6.25
public void queryAttributeTransportationType(final ObjectInstanceHandle theObject, final AttributeHandle theAttribute) throws AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryAttributeTransportationType " + theObject.toString() + " " + theAttribute.toString());
try {
this._rtiAmbassador.queryAttributeTransportationType(theObject, theAttribute);
} catch (AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryAttributeTransportationType exception=" + e.getMessage());
throw e;
}
}
// 6.27
public void requestInteractionTransportationTypeChange(final InteractionClassHandle theClass, final TransportationTypeHandle theType) throws InteractionClassAlreadyBeingChanged, InteractionClassNotPublished, InteractionClassNotDefined, InvalidTransportationType, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestInteractionTransportationTypeChange " + theClass.toString() + " " + theType.toString());
try {
this._rtiAmbassador.requestInteractionTransportationTypeChange(theClass, theType);
} catch (InteractionClassAlreadyBeingChanged | InteractionClassNotPublished | InteractionClassNotDefined | InvalidTransportationType | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestInteractionTransportationTypeChange exception=" + e.getMessage());
throw e;
}
}
// 6.29
public void queryInteractionTransportationType(final FederateHandle theFederate, final InteractionClassHandle theInteraction) throws InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryInteractionTransportationType " + theFederate.toString() + " " + theInteraction.toString());
try {
this._rtiAmbassador.queryInteractionTransportationType(theFederate, theInteraction);
} catch (InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryInteractionTransportationType exception=" + e.getMessage());
throw e;
}
}
///////////////////////////////////
// Ownership Management Services //
///////////////////////////////////
// 7.2
public void unconditionalAttributeOwnershipDivestiture(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unconditionalAttributeOwnershipDivestiture " + theObject.toString() + " " + theAttributes.toString());
try {
this._rtiAmbassador.unconditionalAttributeOwnershipDivestiture(theObject, theAttributes);
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unconditionalAttributeOwnershipDivestiture exception=" + e.getMessage());
throw e;
}
}
// 7.3
public void negotiatedAttributeOwnershipDivestiture(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final byte[] userSuppliedTag) throws AttributeAlreadyBeingDivested, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("negotiatedAttributeOwnershipDivestiture " + theObject.toString() + " " + theAttributes.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.negotiatedAttributeOwnershipDivestiture(theObject, theAttributes, userSuppliedTag);
} catch (AttributeAlreadyBeingDivested | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("negotiatedAttributeOwnershipDivestiture exception=" + e.getMessage());
throw e;
}
}
// 7.6
public void confirmDivestiture(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final byte[] userSuppliedTag) throws NoAcquisitionPending, AttributeDivestitureWasNotRequested, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("confirmDivestiture " + theObject.toString() + " " + theAttributes.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.confirmDivestiture(theObject, theAttributes, userSuppliedTag);
} catch (NoAcquisitionPending | AttributeDivestitureWasNotRequested | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("confirmDivestiture exception=" + e.getMessage());
throw e;
}
}
// 7.8
public void attributeOwnershipAcquisition(final ObjectInstanceHandle theObject, final AttributeHandleSet desiredAttributes, final byte[] userSuppliedTag) throws AttributeNotPublished, ObjectClassNotPublished, FederateOwnsAttributes, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("attributeOwnershipAcquisition " + theObject.toString() + " " + desiredAttributes.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.attributeOwnershipAcquisition(theObject, desiredAttributes, userSuppliedTag);
} catch (AttributeNotPublished | ObjectClassNotPublished | FederateOwnsAttributes | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("attributeOwnershipAcquisition exception=" + e.getMessage());
throw e;
}
}
// 7.9
public void attributeOwnershipAcquisitionIfAvailable(final ObjectInstanceHandle theObject, final AttributeHandleSet desiredAttributes) throws AttributeAlreadyBeingAcquired, AttributeNotPublished, ObjectClassNotPublished, FederateOwnsAttributes, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("attributeOwnershipAcquisitionIfAvailable " + theObject.toString() + " " + desiredAttributes.toString());
try {
this._rtiAmbassador.attributeOwnershipAcquisitionIfAvailable(theObject, desiredAttributes);
} catch (AttributeAlreadyBeingAcquired | AttributeNotPublished | ObjectClassNotPublished | FederateOwnsAttributes | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("attributeOwnershipAcquisitionIfAvailable exception=" + e.getMessage());
throw e;
}
}
// 7.12
public void attributeOwnershipReleaseDenied(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("attributeOwnershipReleaseDenied " + theObject.toString() + " " + theAttributes.toString());
try {
this._rtiAmbassador.attributeOwnershipReleaseDenied(theObject, theAttributes);
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("attributeOwnershipReleaseDenied exception=" + e.getMessage());
throw e;
}
}
// 7.13
public AttributeHandleSet attributeOwnershipDivestitureIfWanted(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("attributeOwnershipDivestitureIfWanted " + theObject.toString() + " " + theAttributes.toString());
try {
final AttributeHandleSet attributeHandleSet = this._rtiAmbassador.attributeOwnershipDivestitureIfWanted(theObject, theAttributes);
this.logger.info("attributeOwnershipDivestitureIfWanted return " + attributeHandleSet.toString());
return attributeHandleSet;
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("attributeOwnershipDivestitureIfWanted exception=" + e.getMessage());
throw e;
}
}
// 7.14
public void cancelNegotiatedAttributeOwnershipDivestiture(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeDivestitureWasNotRequested, AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("cancelNegotiatedAttributeOwnershipDivestiture " + theObject.toString() + " " + theAttributes.toString());
try {
this._rtiAmbassador.cancelNegotiatedAttributeOwnershipDivestiture(theObject, theAttributes);
} catch (AttributeDivestitureWasNotRequested | AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("cancelNegotiatedAttributeOwnershipDivestiture exception=" + e.getMessage());
throw e;
}
}
// 7.15
public void cancelAttributeOwnershipAcquisition(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes) throws AttributeAcquisitionWasNotRequested, AttributeAlreadyOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("cancelAttributeOwnershipAcquisition " + theObject.toString() + " " + theAttributes.toString());
try {
this._rtiAmbassador.cancelAttributeOwnershipAcquisition(theObject, theAttributes);
} catch (AttributeAcquisitionWasNotRequested | AttributeAlreadyOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("cancelAttributeOwnershipAcquisition exception=" + e.getMessage());
throw e;
}
}
// 7.17
public void queryAttributeOwnership(final ObjectInstanceHandle theObject, final AttributeHandle theAttribute) throws AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryAttributeOwnership " + theObject.toString() + " " + theAttribute.toString());
try {
this._rtiAmbassador.queryAttributeOwnership(theObject, theAttribute);
} catch (AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryAttributeOwnership exception=" + e.getMessage());
throw e;
}
}
// 7.19
public boolean isAttributeOwnedByFederate(final ObjectInstanceHandle theObject, final AttributeHandle theAttribute) throws AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("isAttributeOwnedByFederate " + theObject.toString() + " " + theAttribute.toString());
try {
final boolean bool = this._rtiAmbassador.isAttributeOwnedByFederate(theObject, theAttribute);
this.logger.info("isAttributeOwnedByFederate return " + bool);
return bool;
} catch (AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("isAttributeOwnedByFederate exception=" + e.getMessage());
throw e;
}
}
//////////////////////////////
// Time Management Services //
//////////////////////////////
// 8.2
public void enableTimeRegulation(final LogicalTimeInterval theLookahead) throws InvalidLookahead, InTimeAdvancingState, RequestForTimeRegulationPending, TimeRegulationAlreadyEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableTimeRegulation " + theLookahead.toString());
try {
this._rtiAmbassador.enableTimeRegulation(theLookahead);
} catch (InvalidLookahead | InTimeAdvancingState | RequestForTimeRegulationPending | TimeRegulationAlreadyEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableTimeRegulation exception=" + e.getMessage());
throw e;
}
}
// 8.4
public void disableTimeRegulation() throws TimeRegulationIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableTimeRegulation");
try {
this._rtiAmbassador.disableTimeRegulation();
} catch (TimeRegulationIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableTimeRegulation exception=" + e.getMessage());
throw e;
}
}
// 8.5
public void enableTimeConstrained() throws InTimeAdvancingState, RequestForTimeConstrainedPending, TimeConstrainedAlreadyEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableTimeConstrained");
try {
this._rtiAmbassador.enableTimeConstrained();
} catch (InTimeAdvancingState | RequestForTimeConstrainedPending | TimeConstrainedAlreadyEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableTimeConstrained exception=" + e.getMessage());
throw e;
}
}
// 8.7
public void disableTimeConstrained() throws TimeConstrainedIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableTimeConstrained");
try {
this._rtiAmbassador.disableTimeConstrained();
} catch (TimeConstrainedIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableTimeConstrained exception=" + e.getMessage());
throw e;
}
}
// 8.8
public void timeAdvanceRequest(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("timeAdvanceRequest " + theTime.toString());
try {
this._rtiAmbassador.timeAdvanceRequest(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("timeAdvanceRequest exception=" + e.getMessage());
throw e;
}
}
// 8.9
public void timeAdvanceRequestAvailable(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("timeAdvanceRequestAvailable " + theTime.toString());
try {
this._rtiAmbassador.timeAdvanceRequestAvailable(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("timeAdvanceRequestAvailable exception=" + e.getMessage());
throw e;
}
}
// 8.10
public void nextMessageRequest(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("nextMessageRequest " + theTime.toString());
try {
this._rtiAmbassador.nextMessageRequest(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("nextMessageRequest exception=" + e.getMessage());
throw e;
}
}
// 8.11
public void nextMessageRequestAvailable(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("nextMessageRequestAvailable " + theTime.toString());
try {
this._rtiAmbassador.nextMessageRequestAvailable(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("nextMessageRequestAvailable exception=" + e.getMessage());
throw e;
}
}
// 8.12
public void flushQueueRequest(final LogicalTime theTime) throws LogicalTimeAlreadyPassed, InvalidLogicalTime, InTimeAdvancingState, RequestForTimeRegulationPending, RequestForTimeConstrainedPending, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("flushQueueRequest " + theTime.toString());
try {
this._rtiAmbassador.flushQueueRequest(theTime);
} catch (LogicalTimeAlreadyPassed | InvalidLogicalTime | InTimeAdvancingState | RequestForTimeRegulationPending | RequestForTimeConstrainedPending | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("flushQueueRequest exception=" + e.getMessage());
throw e;
}
}
// 8.14
public void enableAsynchronousDelivery() throws AsynchronousDeliveryAlreadyEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableAsynchronousDelivery");
try {
this._rtiAmbassador.enableAsynchronousDelivery();
} catch (AsynchronousDeliveryAlreadyEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableAsynchronousDelivery exception=" + e.getMessage());
throw e;
}
}
// 8.15
public void disableAsynchronousDelivery() throws AsynchronousDeliveryAlreadyDisabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableAsynchronousDelivery");
try {
this._rtiAmbassador.disableAsynchronousDelivery();
} catch (AsynchronousDeliveryAlreadyDisabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableAsynchronousDelivery exception=" + e.getMessage());
throw e;
}
}
// 8.16
public TimeQueryReturn queryGALT() throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryGALT");
try {
final TimeQueryReturn timeQueryReturn = this._rtiAmbassador.queryGALT();
this.logger.info("queryGALT return " + timeQueryReturn.toString());
return timeQueryReturn;
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryGALT exception=" + e.getMessage());
throw e;
}
}
// 8.17
public LogicalTime queryLogicalTime() throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryLogicalTime");
try {
final LogicalTime logicalTime = this._rtiAmbassador.queryLogicalTime();
this.logger.info("queryLogicalTime return " + logicalTime.toString());
return logicalTime;
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryLogicalTime exception=" + e.getMessage());
throw e;
}
}
// 8.18
public TimeQueryReturn queryLITS() throws SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryLITS");
try {
final TimeQueryReturn timeQueryReturn = this._rtiAmbassador.queryLITS();
this.logger.info("queryLITS return " + timeQueryReturn.toString());
return timeQueryReturn;
} catch (SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryLITS exception=" + e.getMessage());
throw e;
}
}
// 8.19
public void modifyLookahead(final LogicalTimeInterval theLookahead) throws InvalidLookahead, InTimeAdvancingState, TimeRegulationIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("modifyLookahead " + theLookahead.toString());
try {
this._rtiAmbassador.modifyLookahead(theLookahead);
} catch (InvalidLookahead | InTimeAdvancingState | TimeRegulationIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("modifyLookahead exception=" + e.getMessage());
throw e;
}
}
// 8.20
public LogicalTimeInterval queryLookahead() throws TimeRegulationIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("queryLookahead");
try {
final LogicalTimeInterval logicalTimeInterval = this._rtiAmbassador.queryLookahead();
this.logger.info("queryLookahead return " + logicalTimeInterval.toString());
return logicalTimeInterval;
} catch (TimeRegulationIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("queryLookahead exception=" + e.getMessage());
throw e;
}
}
// 8.21
public void retract(final MessageRetractionHandle theHandle) throws MessageCanNoLongerBeRetracted, InvalidMessageRetractionHandle, TimeRegulationIsNotEnabled, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("retract " + theHandle.toString());
try {
this._rtiAmbassador.retract(theHandle);
} catch (MessageCanNoLongerBeRetracted | InvalidMessageRetractionHandle | TimeRegulationIsNotEnabled | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("retract exception=" + e.getMessage());
throw e;
}
}
// 8.23
public void changeAttributeOrderType(final ObjectInstanceHandle theObject, final AttributeHandleSet theAttributes, final OrderType theType) throws AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("changeAttributeOrderType " + theObject.toString() + " " + theAttributes.toString() + " " + theType.toString());
try {
this._rtiAmbassador.changeAttributeOrderType(theObject, theAttributes, theType);
} catch (AttributeNotOwned | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("changeAttributeOrderType exception=" + e.getMessage());
throw e;
}
}
// 8.24
public void changeInteractionOrderType(final InteractionClassHandle theClass, final OrderType theType) throws InteractionClassNotPublished, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("changeInteractionOrderType " + theClass.toString() + " " + theType.toString());
try {
this._rtiAmbassador.changeInteractionOrderType(theClass, theType);
} catch (InteractionClassNotPublished | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("changeInteractionOrderType exception=" + e.getMessage());
throw e;
}
}
//////////////////////////////////
// Data Distribution Management //
//////////////////////////////////
// 9.2
public RegionHandle createRegion(final DimensionHandleSet dimensions) throws InvalidDimensionHandle, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("createRegion " + dimensions.toString());
try {
final RegionHandle regionHandle = this._rtiAmbassador.createRegion(dimensions);
this.logger.info("createRegion return " + regionHandle.toString());
return regionHandle;
} catch (InvalidDimensionHandle | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("createRegion exception=" + e.getMessage());
throw e;
}
}
// 9.3
public void commitRegionModifications(final RegionHandleSet regions) throws RegionNotCreatedByThisFederate, InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("commitRegionModifications " + regions.toString());
try {
this._rtiAmbassador.commitRegionModifications(regions);
} catch (RegionNotCreatedByThisFederate | InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("commitRegionModifications exception=" + e.getMessage());
throw e;
}
}
// 9.4
public void deleteRegion(final RegionHandle theRegion) throws RegionInUseForUpdateOrSubscription, RegionNotCreatedByThisFederate, InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("deleteRegion " + theRegion.toString());
try {
this._rtiAmbassador.deleteRegion(theRegion);
} catch (RegionInUseForUpdateOrSubscription | RegionNotCreatedByThisFederate | InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("deleteRegion exception=" + e.getMessage());
throw e;
}
}
//9.5
public ObjectInstanceHandle registerObjectInstanceWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotPublished, ObjectClassNotPublished, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerObjectInstanceWithRegions " + theClass.toString() + " " + attributesAndRegions.toString());
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.registerObjectInstanceWithRegions(theClass, attributesAndRegions);
this.logger.info("registerObjectInstanceWithRegions return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotPublished | ObjectClassNotPublished | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerObjectInstanceWithRegions exception=" + e.getMessage());
throw e;
}
}
//9.5
public ObjectInstanceHandle registerObjectInstanceWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions, final String theObject) throws ObjectInstanceNameInUse, ObjectInstanceNameNotReserved, InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotPublished, ObjectClassNotPublished, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("registerObjectInstanceWithRegions " + theClass.toString() + " " + attributesAndRegions.toString() + " " + theObject.toString());
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.registerObjectInstanceWithRegions(theClass, attributesAndRegions, theObject);
this.logger.info("registerObjectInstanceWithRegions return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (ObjectInstanceNameInUse | ObjectInstanceNameNotReserved | InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotPublished | ObjectClassNotPublished | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("registerObjectInstanceWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.6
public void associateRegionsForUpdates(final ObjectInstanceHandle theObject, final AttributeSetRegionSetPairList attributesAndRegions) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("associateRegionsForUpdates " + theObject.toString() + " " + attributesAndRegions.toString());
try {
this._rtiAmbassador.associateRegionsForUpdates(theObject, attributesAndRegions);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("associateRegionsForUpdates exception=" + e.getMessage());
throw e;
}
}
// 9.7
public void unassociateRegionsForUpdates(final ObjectInstanceHandle theObject, final AttributeSetRegionSetPairList attributesAndRegions) throws RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectInstanceNotKnown, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unassociateRegionsForUpdates " + theObject.toString() + " " + attributesAndRegions.toString());
try {
this._rtiAmbassador.unassociateRegionsForUpdates(theObject, attributesAndRegions);
} catch (RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectInstanceNotKnown | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unassociateRegionsForUpdates exception=" + e.getMessage());
throw e;
}
}
// 9.8
public void subscribeObjectClassAttributesWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesWithRegions " + theClass.toString() + " " + attributesAndRegions.toString());
try {
this._rtiAmbassador.subscribeObjectClassAttributesWithRegions(theClass, attributesAndRegions);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.8
public void subscribeObjectClassAttributesWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions, final String updateRateDesignator) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, InvalidUpdateRateDesignator, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesWithRegions " + theClass.toString() + " " + attributesAndRegions.toString() + " " + updateRateDesignator);
try {
this._rtiAmbassador.subscribeObjectClassAttributesWithRegions(theClass, attributesAndRegions, updateRateDesignator);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | InvalidUpdateRateDesignator | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.8
public void subscribeObjectClassAttributesPassivelyWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesPassivelyWithRegions " + theClass.toString() + " " + attributesAndRegions.toString());
try {
this._rtiAmbassador.subscribeObjectClassAttributesPassivelyWithRegions(theClass, attributesAndRegions);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesPassivelyWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.8
public void subscribeObjectClassAttributesPassivelyWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions, final String updateRateDesignator) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, InvalidUpdateRateDesignator, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeObjectClassAttributesPassivelyWithRegions " + theClass.toString() + " " + attributesAndRegions.toString() + " " + updateRateDesignator);
try {
this._rtiAmbassador.subscribeObjectClassAttributesPassivelyWithRegions(theClass, attributesAndRegions, updateRateDesignator);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | InvalidUpdateRateDesignator | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeObjectClassAttributesPassivelyWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.9
public void unsubscribeObjectClassAttributesWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions) throws RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeObjectClassAttributesWithRegions " + theClass.toString() + " " + attributesAndRegions.toString());
try {
this._rtiAmbassador.unsubscribeObjectClassAttributesWithRegions(theClass, attributesAndRegions);
} catch (RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeObjectClassAttributesWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.10
public void subscribeInteractionClassWithRegions(final InteractionClassHandle theClass, final RegionHandleSet regions) throws FederateServiceInvocationsAreBeingReportedViaMOM, InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeInteractionClassWithRegions " + theClass.toString() + " " + regions.toString());
try {
this._rtiAmbassador.subscribeInteractionClassWithRegions(theClass, regions);
} catch (FederateServiceInvocationsAreBeingReportedViaMOM | InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeInteractionClassWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.10
public void subscribeInteractionClassPassivelyWithRegions(final InteractionClassHandle theClass, final RegionHandleSet regions) throws FederateServiceInvocationsAreBeingReportedViaMOM, InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("subscribeInteractionClassPassivelyWithRegions " + theClass.toString() + " " + regions.toString());
try {
this._rtiAmbassador.subscribeInteractionClassPassivelyWithRegions(theClass, regions);
} catch (FederateServiceInvocationsAreBeingReportedViaMOM | InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("subscribeInteractionClassPassivelyWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.11
public void unsubscribeInteractionClassWithRegions(final InteractionClassHandle theClass, final RegionHandleSet regions) throws RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("unsubscribeInteractionClassWithRegions " + theClass.toString() + " " + regions.toString());
try {
this._rtiAmbassador.unsubscribeInteractionClassWithRegions(theClass, regions);
} catch (RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("unsubscribeInteractionClassWithRegions exception=" + e.getMessage());
throw e;
}
}
//9.12
public void sendInteractionWithRegions(final InteractionClassHandle theInteraction, final ParameterHandleValueMap theParameters, final RegionHandleSet regions, final byte[] userSuppliedTag) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotPublished, InteractionParameterNotDefined, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("sendInteractionWithRegions " + theInteraction.toString() + " " + theParameters.toString() + " " + regions.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.sendInteractionWithRegions(theInteraction, theParameters, regions, userSuppliedTag);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotPublished | InteractionParameterNotDefined | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("sendInteractionWithRegions exception=" + e.getMessage());
throw e;
}
}
//9.12
public MessageRetractionReturn sendInteractionWithRegions(final InteractionClassHandle theInteraction, final ParameterHandleValueMap theParameters, final RegionHandleSet regions, final byte[] userSuppliedTag, final LogicalTime theTime) throws InvalidLogicalTime, InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, InteractionClassNotPublished, InteractionParameterNotDefined, InteractionClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("sendInteractionWithRegions " + theInteraction.toString() + " " + theParameters.toString() + " " + regions.toString() + " " + Arrays.toString(userSuppliedTag) + " " + theTime.toString());
try {
final MessageRetractionReturn messageRetractionReturn = this._rtiAmbassador.sendInteractionWithRegions(theInteraction, theParameters, regions, userSuppliedTag, theTime);
this.logger.info("sendInteractionWithRegions return " + messageRetractionReturn.toString());
return messageRetractionReturn;
} catch (InvalidLogicalTime | InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | InteractionClassNotPublished | InteractionParameterNotDefined | InteractionClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("sendInteractionWithRegions exception=" + e.getMessage());
throw e;
}
}
// 9.13
public void requestAttributeValueUpdateWithRegions(final ObjectClassHandle theClass, final AttributeSetRegionSetPairList attributesAndRegions, final byte[] userSuppliedTag) throws InvalidRegionContext, RegionNotCreatedByThisFederate, InvalidRegion, AttributeNotDefined, ObjectClassNotDefined, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("requestAttributeValueUpdateWithRegions " + theClass.toString() + " " + attributesAndRegions.toString() + " " + Arrays.toString(userSuppliedTag));
try {
this._rtiAmbassador.requestAttributeValueUpdateWithRegions(theClass, attributesAndRegions, userSuppliedTag);
} catch (InvalidRegionContext | RegionNotCreatedByThisFederate | InvalidRegion | AttributeNotDefined | ObjectClassNotDefined | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("requestAttributeValueUpdateWithRegions exception=" + e.getMessage());
throw e;
}
}
//////////////////////////
// RTI Support Services //
//////////////////////////
// 10.2
public ResignAction getAutomaticResignDirective() throws FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAutomaticResignDirective");
try {
final ResignAction resignAction = this._rtiAmbassador.getAutomaticResignDirective();
this.logger.info("getAutomaticResignDirective return " + resignAction.toString());
return resignAction;
} catch (FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAutomaticResignDirective exception=" + e.getMessage());
throw e;
}
}
// 10.3
public void setAutomaticResignDirective(final ResignAction resignAction) throws InvalidResignAction, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("setAutomaticResignDirective " + resignAction.toString());
try {
this._rtiAmbassador.setAutomaticResignDirective(resignAction);
} catch (InvalidResignAction | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("setAutomaticResignDirective exception=" + e.getMessage());
throw e;
}
}
// 10.4
public FederateHandle getFederateHandle(final String theName) throws NameNotFound, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getFederateHandle " + theName);
try {
final FederateHandle federateHandle = this._rtiAmbassador.getFederateHandle(theName);
this.logger.info("getFederateHandle return " + federateHandle.toString());
return federateHandle;
} catch (NameNotFound | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getFederateHandle exception=" + e.getMessage());
throw e;
}
}
// 10.5
public String getFederateName(final FederateHandle theHandle) throws InvalidFederateHandle, FederateHandleNotKnown, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getFederateName " + theHandle.toString());
try {
final String str = this._rtiAmbassador.getFederateName(theHandle);
this.logger.info("getFederateName return " + str);
return str;
} catch (InvalidFederateHandle | FederateHandleNotKnown | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getFederateName exception=" + e.getMessage());
throw e;
}
}
// 10.6
public ObjectClassHandle getObjectClassHandle(final String theName) throws NameNotFound, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getObjectClassHandle " + theName);
try {
final ObjectClassHandle objectClassHandle = this._rtiAmbassador.getObjectClassHandle(theName);
this.logger.info("getObjectClassHandle return " + objectClassHandle.toString());
return objectClassHandle;
} catch (NameNotFound | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getObjectClassHandle exception=" + e.getMessage());
throw e;
}
}
// 10.7
public String getObjectClassName(final ObjectClassHandle theHandle) throws InvalidObjectClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getObjectClassName " + theHandle.toString());
try {
final String str = this._rtiAmbassador.getObjectClassName(theHandle);
this.logger.info("getObjectClassName return " + str);
return str;
} catch (InvalidObjectClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getObjectClassName exception=" + e.getMessage());
throw e;
}
}
// 10.8
public ObjectClassHandle getKnownObjectClassHandle(final ObjectInstanceHandle theObject) throws ObjectInstanceNotKnown, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getKnownObjectClassHandle " + theObject.toString());
try {
final ObjectClassHandle objectClassHandle = this._rtiAmbassador.getKnownObjectClassHandle(theObject);
this.logger.info("getKnownObjectClassHandle return " + objectClassHandle.toString());
return objectClassHandle;
} catch (ObjectInstanceNotKnown | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getKnownObjectClassHandle exception=" + e.getMessage());
throw e;
}
}
// 10.9
public ObjectInstanceHandle getObjectInstanceHandle(final String theName) throws ObjectInstanceNotKnown, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getObjectInstanceHandle " + theName);
try {
final ObjectInstanceHandle objectInstanceHandle = this._rtiAmbassador.getObjectInstanceHandle(theName);
this.logger.info("getObjectInstanceHandle return " + objectInstanceHandle.toString());
return objectInstanceHandle;
} catch (ObjectInstanceNotKnown | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getObjectInstanceHandle exception=" + e.getMessage());
throw e;
}
}
// 10.10
public String getObjectInstanceName(final ObjectInstanceHandle theHandle) throws ObjectInstanceNotKnown, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getObjectInstanceName " + theHandle.toString());
try {
final String str = this._rtiAmbassador.getObjectInstanceName(theHandle);
this.logger.info("getObjectInstanceName return " + str);
return str;
} catch (ObjectInstanceNotKnown | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getObjectInstanceName exception=" + e.getMessage());
throw e;
}
}
// 10.11
public AttributeHandle getAttributeHandle(final ObjectClassHandle whichClass, final String theName) throws NameNotFound, InvalidObjectClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAttributeHandle " + whichClass.toString() + " " + theName);
try {
final AttributeHandle attributeHandle = this._rtiAmbassador.getAttributeHandle(whichClass, theName);
this.logger.info("getAttributeHandle return " + attributeHandle.toString());
return attributeHandle;
} catch (NameNotFound | InvalidObjectClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAttributeHandle exception=" + e.getMessage());
throw e;
}
}
// 10.12
public String getAttributeName(final ObjectClassHandle whichClass, final AttributeHandle theHandle) throws AttributeNotDefined, InvalidAttributeHandle, InvalidObjectClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAttributeName " + theHandle.toString());
try {
final String str = this._rtiAmbassador.getAttributeName(whichClass, theHandle);
this.logger.info("getAttributeName return " + str);
return str;
} catch (AttributeNotDefined | InvalidAttributeHandle | InvalidObjectClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAttributeName exception=" + e.getMessage());
throw e;
}
}
// 10.13
public double getUpdateRateValue(final String updateRateDesignator) throws InvalidUpdateRateDesignator, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getUpdateRateValue " + updateRateDesignator);
try {
final double d = this._rtiAmbassador.getUpdateRateValue(updateRateDesignator);
this.logger.info("getUpdateRateValue return " + d);
return d;
} catch (InvalidUpdateRateDesignator | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getUpdateRateValue exception=" + e.getMessage());
throw e;
}
}
// 10.14
public double getUpdateRateValueForAttribute(final ObjectInstanceHandle theObject, final AttributeHandle theAttribute) throws ObjectInstanceNotKnown, AttributeNotDefined, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getUpdateRateValueForAttribute " + theObject.toString() + " " + theAttribute.toString());
try {
final double d = this._rtiAmbassador.getUpdateRateValueForAttribute(theObject, theAttribute);
this.logger.info("getUpdateRateValueForAttribute return " + d);
return d;
} catch (ObjectInstanceNotKnown | AttributeNotDefined | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getUpdateRateValueForAttribute exception=" + e.getMessage());
throw e;
}
}
// 10.15
public InteractionClassHandle getInteractionClassHandle(final String theName) throws NameNotFound, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getInteractionClassHandle " + theName);
try {
final InteractionClassHandle interactionClassHandle = this._rtiAmbassador.getInteractionClassHandle(theName);
this.logger.info("getInteractionClassHandle return " + interactionClassHandle.toString());
return interactionClassHandle;
} catch (NameNotFound | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getInteractionClassHandle exception=" + e.getMessage());
throw e;
}
}
// 10.16
public String getInteractionClassName(final InteractionClassHandle theHandle) throws InvalidInteractionClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getInteractionClassName " + theHandle.toString());
try {
final String str = this._rtiAmbassador.getInteractionClassName(theHandle);
this.logger.info("getInteractionClassName return " + str);
return str;
} catch (InvalidInteractionClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getInteractionClassName exception=" + e.getMessage());
throw e;
}
}
// 10.17
public ParameterHandle getParameterHandle(final InteractionClassHandle whichClass, final String theName) throws NameNotFound, InvalidInteractionClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getParameterHandle " + whichClass.toString() + " " + theName);
try {
final ParameterHandle parameterHandle = this._rtiAmbassador.getParameterHandle(whichClass, theName);
this.logger.info("getParameterHandle return " + parameterHandle.toString());
return parameterHandle;
} catch (NameNotFound | InvalidInteractionClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getParameterHandle exception=" + e.getMessage());
throw e;
}
}
// 10.18
public String getParameterName(final InteractionClassHandle whichClass, final ParameterHandle theHandle) throws InteractionParameterNotDefined, InvalidParameterHandle, InvalidInteractionClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getParameterName " + whichClass.toString() + " " + theHandle.toString());
try {
String str = this._rtiAmbassador.getParameterName(whichClass, theHandle);
this.logger.info("getParameterName return " + str);
return str;
} catch (InteractionParameterNotDefined | InvalidParameterHandle | InvalidInteractionClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getParameterName exception=" + e.getMessage());
throw e;
}
}
// 10.19
public OrderType getOrderType(final String theName) throws InvalidOrderName, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getOrderType " + theName);
try {
OrderType orderType = this._rtiAmbassador.getOrderType(theName);
this.logger.info("getOrderType return " + orderType.toString());
return orderType;
} catch (InvalidOrderName | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getOrderType exception=" + e.getMessage());
throw e;
}
}
// 10.20
public String getOrderName(final OrderType theType) throws InvalidOrderType, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getOrderName " + theType.toString());
try {
String str = this._rtiAmbassador.getOrderName(theType);
this.logger.info("getOrderName return " + str);
return str;
} catch (InvalidOrderType | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getOrderName exception=" + e.getMessage());
throw e;
}
}
// 10.21
public TransportationTypeHandle getTransportationTypeHandle(final String theName) throws InvalidTransportationName, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getTransportationTypeHandle " + theName);
try {
TransportationTypeHandle transportationTypeHandle = this._rtiAmbassador.getTransportationTypeHandle(theName);
this.logger.info("getTransportationTypeHandle return " + transportationTypeHandle.toString());
return transportationTypeHandle;
} catch (InvalidTransportationName | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getTransportationTypeHandle exception=" + e.getMessage());
throw e;
}
}
// 10.22
public String getTransportationTypeName(final TransportationTypeHandle theHandle) throws InvalidTransportationType, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getTransportationTypeName " + theHandle.toString());
try {
String str = this._rtiAmbassador.getTransportationTypeName(theHandle);
this.logger.info("getTransportationTypeName return " + str);
return str;
} catch (InvalidTransportationType | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getTransportationTypeName exception=" + e.getMessage());
throw e;
}
}
// 10.23
public DimensionHandleSet getAvailableDimensionsForClassAttribute(final ObjectClassHandle whichClass, final AttributeHandle theHandle) throws AttributeNotDefined, InvalidAttributeHandle, InvalidObjectClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAvailableDimensionsForClassAttribute " + whichClass.toString() + " " + theHandle.toString());
try {
DimensionHandleSet dimensionHandleSet = this._rtiAmbassador.getAvailableDimensionsForClassAttribute(whichClass, theHandle);
this.logger.info("getAvailableDimensionsForClassAttribute return " + dimensionHandleSet.toString());
return dimensionHandleSet;
} catch (AttributeNotDefined | InvalidAttributeHandle | InvalidObjectClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAvailableDimensionsForClassAttribute exception=" + e.getMessage());
throw e;
}
}
// 10.24
public DimensionHandleSet getAvailableDimensionsForInteractionClass(final InteractionClassHandle theHandle) throws InvalidInteractionClassHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getAvailableDimensionsForInteractionClass " + theHandle.toString());
try {
DimensionHandleSet dimensionHandleSet = this._rtiAmbassador.getAvailableDimensionsForInteractionClass(theHandle);
this.logger.info("getAvailableDimensionsForInteractionClass return " + dimensionHandleSet.toString());
return dimensionHandleSet;
} catch (InvalidInteractionClassHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getAvailableDimensionsForInteractionClass exception=" + e.getMessage());
throw e;
}
}
// 10.25
public DimensionHandle getDimensionHandle(final String theName) throws NameNotFound, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getDimensionHandle " + theName);
try {
DimensionHandle dimensionHandle = this._rtiAmbassador.getDimensionHandle(theName);
this.logger.info("getDimensionHandle return " + dimensionHandle.toString());
return dimensionHandle;
} catch (NameNotFound | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getDimensionHandle exception=" + e.getMessage());
throw e;
}
}
// 10.26
public String getDimensionName(final DimensionHandle theHandle) throws InvalidDimensionHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getDimensionName " + theHandle.toString());
try {
String str = this._rtiAmbassador.getDimensionName(theHandle);
this.logger.info("getDimensionName return " + str);
return str;
} catch (InvalidDimensionHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getDimensionName exception=" + e.getMessage());
throw e;
}
}
// 10.27
public long getDimensionUpperBound(final DimensionHandle theHandle) throws InvalidDimensionHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getDimensionUpperBound " + theHandle.toString());
try {
long upperBound = this._rtiAmbassador.getDimensionUpperBound(theHandle);
this.logger.info("getDimensionUpperBound return " + upperBound);
return upperBound;
} catch (InvalidDimensionHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getDimensionUpperBound exception=" + e.getMessage());
throw e;
}
}
// 10.28
public DimensionHandleSet getDimensionHandleSet(final RegionHandle region) throws InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getDimensionHandleSet " + region.toString());
try {
DimensionHandleSet dimensionHandleSet = this._rtiAmbassador.getDimensionHandleSet(region);
this.logger.info("getDimensionHandleSet return " + dimensionHandleSet.toString());
return dimensionHandleSet;
} catch (InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getDimensionHandleSet exception=" + e.getMessage());
throw e;
}
}
// 10.29
public RangeBounds getRangeBounds(final RegionHandle region, final DimensionHandle dimension) throws RegionDoesNotContainSpecifiedDimension, InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("getRangeBounds " + region.toString() + " " + dimension.toString());
try {
RangeBounds rangeBounds = this._rtiAmbassador.getRangeBounds(region, dimension);
this.logger.info("getRangeBounds return " + rangeBounds.toString());
return rangeBounds;
} catch (RegionDoesNotContainSpecifiedDimension | InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("getRangeBounds exception=" + e.getMessage());
throw e;
}
}
// 10.30
public void setRangeBounds(final RegionHandle region, final DimensionHandle dimension, final RangeBounds bounds) throws InvalidRangeBound, RegionDoesNotContainSpecifiedDimension, RegionNotCreatedByThisFederate, InvalidRegion, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("setRangeBounds " + region.toString() + " " + dimension.toString() + " " + bounds.toString());
try {
this._rtiAmbassador.setRangeBounds(region, dimension, bounds);
} catch (InvalidRangeBound | RegionDoesNotContainSpecifiedDimension | RegionNotCreatedByThisFederate | InvalidRegion | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("setRangeBounds exception=" + e.getMessage());
throw e;
}
}
// 10.31
public long normalizeFederateHandle(final FederateHandle federateHandle) throws InvalidFederateHandle, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("normalizeFederateHandle " + federateHandle.toString());
try {
long normalizedFederateHandle = this._rtiAmbassador.normalizeFederateHandle(federateHandle);
this.logger.info("normalizeFederateHandle return " + normalizedFederateHandle);
return normalizedFederateHandle;
} catch (InvalidFederateHandle | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("normalizeFederateHandle exception=" + e.getMessage());
throw e;
}
}
// 10.32
public long normalizeServiceGroup(final ServiceGroup group) throws InvalidServiceGroup, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("normalizeServiceGroup " + group.toString());
try {
long normalizedServiceGroup = this._rtiAmbassador.normalizeServiceGroup(group);
this.logger.info("normalizeServiceGroup return " + normalizedServiceGroup);
return normalizedServiceGroup;
} catch (InvalidServiceGroup | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("normalizeServiceGroup exception=" + e.getMessage());
throw e;
}
}
// 10.33
public void enableObjectClassRelevanceAdvisorySwitch() throws ObjectClassRelevanceAdvisorySwitchIsOn, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableObjectClassRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.enableObjectClassRelevanceAdvisorySwitch();
} catch (ObjectClassRelevanceAdvisorySwitchIsOn | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableObjectClassRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.34
public void disableObjectClassRelevanceAdvisorySwitch() throws ObjectClassRelevanceAdvisorySwitchIsOff, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableObjectClassRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.disableObjectClassRelevanceAdvisorySwitch();
} catch (ObjectClassRelevanceAdvisorySwitchIsOff | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableObjectClassRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.35
public void enableAttributeRelevanceAdvisorySwitch() throws AttributeRelevanceAdvisorySwitchIsOn, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableAttributeRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.enableAttributeRelevanceAdvisorySwitch();
} catch (AttributeRelevanceAdvisorySwitchIsOn | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableAttributeRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.36
public void disableAttributeRelevanceAdvisorySwitch() throws AttributeRelevanceAdvisorySwitchIsOff, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableAttributeRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.disableAttributeRelevanceAdvisorySwitch();
} catch (AttributeRelevanceAdvisorySwitchIsOff | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableAttributeRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.37
public void enableAttributeScopeAdvisorySwitch() throws AttributeScopeAdvisorySwitchIsOn, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableAttributeScopeAdvisorySwitch");
try {
this._rtiAmbassador.enableAttributeScopeAdvisorySwitch();
} catch (AttributeScopeAdvisorySwitchIsOn | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableAttributeScopeAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.38
public void disableAttributeScopeAdvisorySwitch() throws AttributeScopeAdvisorySwitchIsOff, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableAttributeScopeAdvisorySwitch");
try {
this._rtiAmbassador.disableAttributeScopeAdvisorySwitch();
} catch (AttributeScopeAdvisorySwitchIsOff | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableAttributeScopeAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.39
public void enableInteractionRelevanceAdvisorySwitch() throws InteractionRelevanceAdvisorySwitchIsOn, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("enableInteractionRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.enableInteractionRelevanceAdvisorySwitch();
} catch (InteractionRelevanceAdvisorySwitchIsOn | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("enableInteractionRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.40
public void disableInteractionRelevanceAdvisorySwitch() throws InteractionRelevanceAdvisorySwitchIsOff, SaveInProgress, RestoreInProgress, FederateNotExecutionMember, NotConnected, RTIinternalError {
this.logger.info("disableInteractionRelevanceAdvisorySwitch");
try {
this._rtiAmbassador.disableInteractionRelevanceAdvisorySwitch();
} catch (InteractionRelevanceAdvisorySwitchIsOff | SaveInProgress | RestoreInProgress | FederateNotExecutionMember | NotConnected | RTIinternalError e) {
this.logger.error("disableInteractionRelevanceAdvisorySwitch exception=" + e.getMessage());
throw e;
}
}
// 10.41
public boolean evokeCallback(final double approximateMinimumTimeInSeconds) throws CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("evokeCallback " + approximateMinimumTimeInSeconds);
try {
boolean b = this._rtiAmbassador.evokeCallback(approximateMinimumTimeInSeconds);
this.logger.info("evokeCallback return " + b);
return b;
} catch (CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("evokeCallback exception=" + e.getMessage());
throw e;
}
}
// 10.42
public boolean evokeMultipleCallbacks(final double approximateMinimumTimeInSeconds, final double approximateMaximumTimeInSeconds) throws CallNotAllowedFromWithinCallback, RTIinternalError {
this.logger.info("evokeMultipleCallbacks " + approximateMinimumTimeInSeconds + " " + approximateMaximumTimeInSeconds);
try {
boolean b = this._rtiAmbassador.evokeMultipleCallbacks(approximateMinimumTimeInSeconds, approximateMaximumTimeInSeconds);
this.logger.info("evokeMultipleCallbacks return " + b);
return b;
} catch (CallNotAllowedFromWithinCallback | RTIinternalError e) {
this.logger.error("evokeMultipleCallbacks exception=" + e.getMessage());
throw e;
}
}
// 10.43
public void enableCallbacks() throws SaveInProgress, RestoreInProgress, RTIinternalError {
this.logger.info("enableCallbacks");
try {
this._rtiAmbassador.enableCallbacks();
} catch (SaveInProgress | RestoreInProgress | RTIinternalError e) {
this.logger.error("enableCallbacks exception=" + e.getMessage());
throw e;
}
}
// 10.44
public void disableCallbacks() throws SaveInProgress, RestoreInProgress, RTIinternalError {
this.logger.info("disableCallbacks");
try {
this._rtiAmbassador.disableCallbacks();
} catch (SaveInProgress | RestoreInProgress | RTIinternalError e) {
this.logger.error("disableCallbacks exception=" + e.getMessage());
throw e;
}
}
//API-specific services
public AttributeHandleFactory getAttributeHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getAttributeHandleFactory");
try {
return this._rtiAmbassador.getAttributeHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getAttributeHandleFactory exception=" + e.getMessage());
throw e;
}
}
public AttributeHandleSetFactory getAttributeHandleSetFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getAttributeHandleSetFactory");
try {
return this._rtiAmbassador.getAttributeHandleSetFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getAttributeHandleSetFactory exception=" + e.getMessage());
throw e;
}
}
public AttributeHandleValueMapFactory getAttributeHandleValueMapFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getAttributeHandleValueMapFactory");
try {
return this._rtiAmbassador.getAttributeHandleValueMapFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getAttributeHandleValueMapFactory exception=" + e.getMessage());
throw e;
}
}
public AttributeSetRegionSetPairListFactory getAttributeSetRegionSetPairListFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getAttributeSetRegionSetPairListFactory");
try {
return this._rtiAmbassador.getAttributeSetRegionSetPairListFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getAttributeSetRegionSetPairListFactory exception=" + e.getMessage());
throw e;
}
}
public DimensionHandleFactory getDimensionHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getDimensionHandleFactory");
try {
return this._rtiAmbassador.getDimensionHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getDimensionHandleFactory exception=" + e.getMessage());
throw e;
}
}
public DimensionHandleSetFactory getDimensionHandleSetFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getDimensionHandleSetFactory");
try {
return this._rtiAmbassador.getDimensionHandleSetFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getDimensionHandleSetFactory exception=" + e.getMessage());
throw e;
}
}
public FederateHandleFactory getFederateHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getFederateHandleFactory");
try {
return this._rtiAmbassador.getFederateHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getFederateHandleFactory exception=" + e.getMessage());
throw e;
}
}
public FederateHandleSetFactory getFederateHandleSetFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getFederateHandleSetFactory");
try {
return this._rtiAmbassador.getFederateHandleSetFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getFederateHandleSetFactory exception=" + e.getMessage());
throw e;
}
}
public InteractionClassHandleFactory getInteractionClassHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getInteractionClassHandleFactory");
try {
return this._rtiAmbassador.getInteractionClassHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getInteractionClassHandleFactory exception=" + e.getMessage());
throw e;
}
}
public ObjectClassHandleFactory getObjectClassHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getObjectClassHandleFactory");
try {
return this._rtiAmbassador.getObjectClassHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getObjectClassHandleFactory exception=" + e.getMessage());
throw e;
}
}
public ObjectInstanceHandleFactory getObjectInstanceHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getObjectInstanceHandleFactory");
try {
return this._rtiAmbassador.getObjectInstanceHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getObjectInstanceHandleFactory exception=" + e.getMessage());
throw e;
}
}
public ParameterHandleFactory getParameterHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getParameterHandleFactory");
try {
return this._rtiAmbassador.getParameterHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getParameterHandleFactory exception=" + e.getMessage());
throw e;
}
}
public ParameterHandleValueMapFactory getParameterHandleValueMapFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getParameterHandleValueMapFactory");
try {
return this._rtiAmbassador.getParameterHandleValueMapFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getParameterHandleValueMapFactory exception=" + e.getMessage());
throw e;
}
}
public RegionHandleSetFactory getRegionHandleSetFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getRegionHandleSetFactory");
try {
return this._rtiAmbassador.getRegionHandleSetFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getRegionHandleSetFactory exception=" + e.getMessage());
throw e;
}
}
public TransportationTypeHandleFactory getTransportationTypeHandleFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getTransportationTypeHandleFactory");
try {
return this._rtiAmbassador.getTransportationTypeHandleFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getTransportationTypeHandleFactory exception=" + e.getMessage());
throw e;
}
}
public String getHLAversion() {
this.logger.info("getHLAversion");
String str = this._rtiAmbassador.getHLAversion();
this.logger.error("getHLAversion return " + str);
return str;
}
public LogicalTimeFactory getTimeFactory() throws FederateNotExecutionMember, NotConnected {
this.logger.info("getTimeFactory");
try {
return this._rtiAmbassador.getTimeFactory();
} catch (FederateNotExecutionMember | NotConnected e) {
this.logger.error("getTimeFactory exception=" + e.getMessage());
throw e;
}
}
}
| Add parameter name to logging strings
Use the HLA parameter names when logging the parameters.
| TC/src/main/java/de/fraunhofer/iosb/tc_lib/IVCT_RTIambassador.java | Add parameter name to logging strings |
|
Java | apache-2.0 | ff97d1db292327a361d44c1a6a2f9b8dc9bd00f9 | 0 | JungMinu/netty,hgl888/netty,liyang1025/netty,lugt/netty,qingsong-xu/netty,liuciuse/netty,nkhuyu/netty,bryce-anderson/netty,brennangaunce/netty,tempbottle/netty,wuxiaowei907/netty,mcobrien/netty,duqiao/netty,lukw00/netty,xiexingguang/netty,fantayeneh/netty,bryce-anderson/netty,moyiguket/netty,sunbeansoft/netty,dongjiaqiang/netty,hepin1989/netty,normanmaurer/netty,mikkokar/netty,xingguang2013/netty,louxiu/netty,djchen/netty,sverkera/netty,joansmith/netty,codevelop/netty,drowning/netty,tbrooks8/netty,fenik17/netty,zer0se7en/netty,kiril-me/netty,alkemist/netty,sameira/netty,WangJunTYTL/netty,eonezhang/netty,castomer/netty,balaprasanna/netty,serioussam/netty,s-gheldd/netty,sunbeansoft/netty,bryce-anderson/netty,luyiisme/netty,idelpivnitskiy/netty,liyang1025/netty,skyao/netty,jchambers/netty,shism/netty,olupotd/netty,pengzj/netty,jongyeol/netty,xingguang2013/netty,liuciuse/netty,hgl888/netty,carl-mastrangelo/netty,sameira/netty,bob329/netty,rovarga/netty,purplefox/netty-4.0.2.8-hacked,junjiemars/netty,bigheary/netty,KatsuraKKKK/netty,yrcourage/netty,sunbeansoft/netty,maliqq/netty,djchen/netty,louiscryan/netty,mikkokar/netty,seetharamireddy540/netty,ijuma/netty,AnselQiao/netty,fengjiachun/netty,sja/netty,MediumOne/netty,JungMinu/netty,SinaTadayon/netty,fenik17/netty,exinguu/netty,huanyi0723/netty,NiteshKant/netty,mcobrien/netty,shism/netty,WangJunTYTL/netty,dongjiaqiang/netty,Spikhalskiy/netty,exinguu/netty,gigold/netty,s-gheldd/netty,serioussam/netty,KatsuraKKKK/netty,carl-mastrangelo/netty,phlizik/netty,eonezhang/netty,x1957/netty,huanyi0723/netty,bob329/netty,doom369/netty,orika/netty,jenskordowski/netty,Squarespace/netty,AchinthaReemal/netty,jdivy/netty,fengjiachun/netty,lznhust/netty,yonglehou/netty-1,dongjiaqiang/netty,mcobrien/netty,MediumOne/netty,LuminateWireless/netty,IBYoung/netty,mx657649013/netty,tbrooks8/netty,yonglehou/netty-1,zzcclp/netty,f7753/netty,windie/netty,ninja-/netty,yawkat/netty,WangJunTYTL/netty,KatsuraKKKK/netty,joansmith/netty,hyangtack/netty,cnoldtree/netty,yawkat/netty,zer0se7en/netty,youprofit/netty,pengzj/netty,zzcclp/netty,shelsonjava/netty,MediumOne/netty,firebase/netty,ichaki5748/netty,zer0se7en/netty,netty/netty,nmittler/netty,DavidAlphaFox/netty,tempbottle/netty,DavidAlphaFox/netty,golovnin/netty,louiscryan/netty,LuminateWireless/netty,mway08/netty,xiexingguang/netty,lukehutch/netty,nmittler/netty,caoyanwei/netty,xiexingguang/netty,moyiguket/netty,afds/netty,moyiguket/netty,mcobrien/netty,blademainer/netty,luyiisme/netty,hgl888/netty,IBYoung/netty,carl-mastrangelo/netty,bigheary/netty,andsel/netty,Kalvar/netty,gerdriesselmann/netty,DolphinZhao/netty,idelpivnitskiy/netty,mcobrien/netty,zxhfirefox/netty,lukehutch/netty,sammychen105/netty,duqiao/netty,chrisprobst/netty,xiexingguang/netty,kiril-me/netty,lukw00/netty,johnou/netty,nkhuyu/netty,ioanbsu/netty,shenguoquan/netty,danbev/netty,shuangqiuan/netty,nayato/netty,gigold/netty,kjniemi/netty,AnselQiao/netty,purplefox/netty-4.0.2.8-hacked,wangyikai/netty,lukw00/netty,smayoorans/netty,qingsong-xu/netty,blucas/netty,CodingFabian/netty,satishsaley/netty,normanmaurer/netty,unei66/netty,nat2013/netty,yrcourage/netty,ngocdaothanh/netty,skyao/netty,Apache9/netty,KatsuraKKKK/netty,normanmaurer/netty,xiongzheng/netty,maliqq/netty,mosoft521/netty,jdivy/netty,sverkera/netty,shism/netty,mway08/netty,balaprasanna/netty,yipen9/netty,lightsocks/netty,kjniemi/netty,mosoft521/netty,castomer/netty,kvr000/netty,nkhuyu/netty,blucas/netty,yrcourage/netty,luyiisme/netty,x1957/netty,xiongzheng/netty,mx657649013/netty,idelpivnitskiy/netty,altihou/netty,mosoft521/netty,rovarga/netty,Mounika-Chirukuri/netty,nat2013/netty,Apache9/netty,ichaki5748/netty,ijuma/netty,LuminateWireless/netty,jchambers/netty,timboudreau/netty,caoyanwei/netty,tempbottle/netty,seetharamireddy540/netty,orika/netty,eincs/netty,firebase/netty,djchen/netty,SinaTadayon/netty,andsel/netty,kvr000/netty,louiscryan/netty,kvr000/netty,imangry/netty-zh,Alwayswithme/netty,serioussam/netty,ajaysarda/netty,lukehutch/netty,golovnin/netty,shenguoquan/netty,clebertsuconic/netty,kjniemi/netty,nmittler/netty,huuthang1993/netty,carl-mastrangelo/netty,chinayin/netty,artgon/netty,exinguu/netty,ioanbsu/netty,yonglehou/netty-1,sunbeansoft/netty,altihou/netty,ngocdaothanh/netty,shuangqiuan/netty,yipen9/netty,netty/netty,chinayin/netty,Techcable/netty,louxiu/netty,mx657649013/netty,smayoorans/netty,ajaysarda/netty,BrunoColin/netty,NiteshKant/netty,jdivy/netty,silvaran/netty,x1957/netty,huanyi0723/netty,CodingFabian/netty,s-gheldd/netty,clebertsuconic/netty,silvaran/netty,mikkokar/netty,zer0se7en/netty,jovezhougang/netty,chrisprobst/netty,lugt/netty,xiexingguang/netty,ejona86/netty,wuxiaowei907/netty,jongyeol/netty,yawkat/netty,yrcourage/netty,huanyi0723/netty,shelsonjava/netty,Scottmitch/netty,NiteshKant/netty,Kingson4Wu/netty,brennangaunce/netty,nkhuyu/netty,DolphinZhao/netty,sja/netty,buchgr/netty,yipen9/netty,liuciuse/netty,xiongzheng/netty,s-gheldd/netty,zhujingling/netty,danny200309/netty,altihou/netty,castomer/netty,timboudreau/netty,netty/netty,zxhfirefox/netty,gerdriesselmann/netty,alkemist/netty,smayoorans/netty,jenskordowski/netty,AnselQiao/netty,shism/netty,ajaysarda/netty,sverkera/netty,bigheary/netty,sammychen105/netty,carlbai/netty,xingguang2013/netty,ifesdjeen/netty,jovezhougang/netty,BrunoColin/netty,windie/netty,louxiu/netty,blademainer/netty,afds/netty,f7753/netty,lightsocks/netty,x1957/netty,Kalvar/netty,Scottmitch/netty,f7753/netty,CodingFabian/netty,jenskordowski/netty,mcanthony/netty,daschl/netty,unei66/netty,mway08/netty,gigold/netty,sja/netty,chanakaudaya/netty,seetharamireddy540/netty,Scottmitch/netty,ijuma/netty,silvaran/netty,SinaTadayon/netty,cnoldtree/netty,fenik17/netty,IBYoung/netty,nadeeshaan/netty,fengjiachun/netty,chanakaudaya/netty,danbev/netty,hgl888/netty,unei66/netty,develar/netty,kvr000/netty,Kingson4Wu/netty,mosoft521/netty,jchambers/netty,jdivy/netty,f7753/netty,blucas/netty,MediumOne/netty,smayoorans/netty,junjiemars/netty,phlizik/netty,Kalvar/netty,seetharamireddy540/netty,yawkat/netty,olupotd/netty,fengjiachun/netty,JungMinu/netty,mx657649013/netty,olupotd/netty,zhoffice/netty,blucas/netty,qingsong-xu/netty,SinaTadayon/netty,nayato/netty,windie/netty,youprofit/netty,s-gheldd/netty,sameira/netty,zzcclp/netty,buchgr/netty,liyang1025/netty,zer0se7en/netty,ejona86/netty,NiteshKant/netty,ifesdjeen/netty,louxiu/netty,lugt/netty,danny200309/netty,lznhust/netty,lznhust/netty,ejona86/netty,gerdriesselmann/netty,doom369/netty,wuxiaowei907/netty,jdivy/netty,chanakaudaya/netty,jovezhougang/netty,daschl/netty,hyangtack/netty,liuciuse/netty,ngocdaothanh/netty,slandelle/netty,zhoffice/netty,tempbottle/netty,lznhust/netty,rovarga/netty,eincs/netty,mcanthony/netty,niuxinghua/netty,normanmaurer/netty,doom369/netty,Squarespace/netty,wuyinxian124/netty,zhujingling/netty,sja/netty,qingsong-xu/netty,lznhust/netty,Mounika-Chirukuri/netty,mubarak/netty,exinguu/netty,silvaran/netty,danny200309/netty,sunbeansoft/netty,kiril-me/netty,woshilaiceshide/netty,artgon/netty,alkemist/netty,DolphinZhao/netty,Scottmitch/netty,olupotd/netty,golovnin/netty,Alwayswithme/netty,luyiisme/netty,satishsaley/netty,cnoldtree/netty,eonezhang/netty,Kingson4Wu/netty,firebase/netty,doom369/netty,danny200309/netty,Alwayswithme/netty,DavidAlphaFox/netty,orika/netty,jchambers/netty,tbrooks8/netty,niuxinghua/netty,BrunoColin/netty,imangry/netty-zh,caoyanwei/netty,jenskordowski/netty,windie/netty,mikkokar/netty,eonezhang/netty,wuyinxian124/netty,lukehutch/netty,AchinthaReemal/netty,purplefox/netty-4.0.2.8-hacked,kyle-liu/netty4study,Mounika-Chirukuri/netty,djchen/netty,AnselQiao/netty,wangyikai/netty,liyang1025/netty,carlbai/netty,codevelop/netty,seetharamireddy540/netty,idelpivnitskiy/netty,JungMinu/netty,nkhuyu/netty,woshilaiceshide/netty,phlizik/netty,wuyinxian124/netty,zhoffice/netty,shenguoquan/netty,clebertsuconic/netty,qingsong-xu/netty,Kingson4Wu/netty,orika/netty,chinayin/netty,afds/netty,zhujingling/netty,artgon/netty,bob329/netty,drowning/netty,youprofit/netty,eonezhang/netty,balaprasanna/netty,DolphinZhao/netty,duqiao/netty,jongyeol/netty,blademainer/netty,maliqq/netty,bob329/netty,afds/netty,jovezhougang/netty,skyao/netty,sameira/netty,yawkat/netty,niuxinghua/netty,brennangaunce/netty,huuthang1993/netty,Kalvar/netty,ichaki5748/netty,louiscryan/netty,WangJunTYTL/netty,lightsocks/netty,mway08/netty,buchgr/netty,duqiao/netty,hepin1989/netty,gerdriesselmann/netty,alkemist/netty,LuminateWireless/netty,shuangqiuan/netty,buchgr/netty,duqiao/netty,wangyikai/netty,artgon/netty,tbrooks8/netty,fantayeneh/netty,Alwayswithme/netty,Spikhalskiy/netty,Squarespace/netty,chinayin/netty,carlbai/netty,zxhfirefox/netty,tbrooks8/netty,slandelle/netty,huuthang1993/netty,junjiemars/netty,altihou/netty,danbev/netty,drowning/netty,drowning/netty,balaprasanna/netty,artgon/netty,mcanthony/netty,codevelop/netty,shuangqiuan/netty,ngocdaothanh/netty,shelsonjava/netty,carl-mastrangelo/netty,mubarak/netty,afds/netty,nayato/netty,ioanbsu/netty,mubarak/netty,imangry/netty-zh,chrisprobst/netty,wuyinxian124/netty,AnselQiao/netty,mcanthony/netty,unei66/netty,kvr000/netty,wuxiaowei907/netty,MediumOne/netty,timboudreau/netty,pengzj/netty,clebertsuconic/netty,afredlyj/learn-netty,lugt/netty,jchambers/netty,zxhfirefox/netty,xiongzheng/netty,nat2013/netty,niuxinghua/netty,brennangaunce/netty,fenik17/netty,fantayeneh/netty,blademainer/netty,slandelle/netty,chrisprobst/netty,kjniemi/netty,CodingFabian/netty,AchinthaReemal/netty,hepin1989/netty,lukw00/netty,xingguang2013/netty,shenguoquan/netty,Techcable/netty,sverkera/netty,smayoorans/netty,ajaysarda/netty,Mounika-Chirukuri/netty,chinayin/netty,ninja-/netty,jongyeol/netty,netty/netty,danbev/netty,ioanbsu/netty,golovnin/netty,ijuma/netty,johnou/netty,ejona86/netty,sverkera/netty,Squarespace/netty,hyangtack/netty,bryce-anderson/netty,netty/netty,hepin1989/netty,fengjiachun/netty,danny200309/netty,kiril-me/netty,satishsaley/netty,ichaki5748/netty,bigheary/netty,fengshao0907/netty,serioussam/netty,Spikhalskiy/netty,Alwayswithme/netty,caoyanwei/netty,jovezhougang/netty,Kalvar/netty,Mounika-Chirukuri/netty,blucas/netty,serioussam/netty,ichaki5748/netty,andsel/netty,idelpivnitskiy/netty,lightsocks/netty,f7753/netty,pengzj/netty,ioanbsu/netty,AchinthaReemal/netty,mubarak/netty,louxiu/netty,fantayeneh/netty,altihou/netty,caoyanwei/netty,IBYoung/netty,lukw00/netty,cnoldtree/netty,bigheary/netty,silvaran/netty,louiscryan/netty,shuangqiuan/netty,xingguang2013/netty,develar/netty,eincs/netty,johnou/netty,Squarespace/netty,Spikhalskiy/netty,windie/netty,afredlyj/learn-netty,kjniemi/netty,imangry/netty-zh,satishsaley/netty,Spikhalskiy/netty,maliqq/netty,maliqq/netty,carlbai/netty,balaprasanna/netty,zhoffice/netty,slandelle/netty,huuthang1993/netty,zxhfirefox/netty,dongjiaqiang/netty,KatsuraKKKK/netty,joansmith/netty,chrisprobst/netty,nadeeshaan/netty,blademainer/netty,timboudreau/netty,exinguu/netty,yrcourage/netty,ninja-/netty,kiril-me/netty,nadeeshaan/netty,bob329/netty,sameira/netty,castomer/netty,afredlyj/learn-netty,alkemist/netty,lugt/netty,golovnin/netty,fenik17/netty,djchen/netty,olupotd/netty,Techcable/netty,wangyikai/netty,moyiguket/netty,orika/netty,fengshao0907/netty,andsel/netty,Apache9/netty,BrunoColin/netty,sammychen105/netty,jongyeol/netty,xiongzheng/netty,castomer/netty,codevelop/netty,daschl/netty,luyiisme/netty,yonglehou/netty-1,imangry/netty-zh,dongjiaqiang/netty,lightsocks/netty,Techcable/netty,hyangtack/netty,phlizik/netty,shenguoquan/netty,shism/netty,huuthang1993/netty,NiteshKant/netty,lukehutch/netty,firebase/netty,skyao/netty,timboudreau/netty,purplefox/netty-4.0.2.8-hacked,tempbottle/netty,moyiguket/netty,LuminateWireless/netty,fantayeneh/netty,niuxinghua/netty,joansmith/netty,BrunoColin/netty,woshilaiceshide/netty,Scottmitch/netty,yipen9/netty,eincs/netty,gigold/netty,liuciuse/netty,IBYoung/netty,nayato/netty,bryce-anderson/netty,ninja-/netty,shelsonjava/netty,satishsaley/netty,Apache9/netty,ijuma/netty,SinaTadayon/netty,huanyi0723/netty,chanakaudaya/netty,ajaysarda/netty,johnou/netty,DolphinZhao/netty,ninja-/netty,chanakaudaya/netty,youprofit/netty,WangJunTYTL/netty,mikkokar/netty,ngocdaothanh/netty,brennangaunce/netty,gerdriesselmann/netty,joansmith/netty,zhujingling/netty,junjiemars/netty,wuxiaowei907/netty,mcanthony/netty,zhujingling/netty,woshilaiceshide/netty,normanmaurer/netty,shelsonjava/netty,woshilaiceshide/netty,andsel/netty,CodingFabian/netty,x1957/netty,DavidAlphaFox/netty,jenskordowski/netty,clebertsuconic/netty,doom369/netty,mubarak/netty,yonglehou/netty-1,zzcclp/netty,gigold/netty,kyle-liu/netty4study,Techcable/netty,johnou/netty,hgl888/netty,skyao/netty,rovarga/netty,mx657649013/netty,zhoffice/netty,zzcclp/netty,nadeeshaan/netty,AchinthaReemal/netty,carlbai/netty,fengshao0907/netty,wangyikai/netty,ejona86/netty,unei66/netty,liyang1025/netty,junjiemars/netty,youprofit/netty,mway08/netty,eincs/netty,mosoft521/netty,cnoldtree/netty,sja/netty,Apache9/netty,danbev/netty,Kingson4Wu/netty,nayato/netty,nadeeshaan/netty | /*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package io.netty.util.internal;
import java.util.Random;
/**
* A random number generator isolated to the current thread. Like the
* global {@link java.util.Random} generator used by the {@link
* java.lang.Math} class, a {@code ThreadLocalRandom} is initialized
* with an internally generated seed that may not otherwise be
* modified. When applicable, use of {@code ThreadLocalRandom} rather
* than shared {@code Random} objects in concurrent programs will
* typically encounter much less overhead and contention. Use of
* {@code ThreadLocalRandom} is particularly appropriate when multiple
* tasks (for example, each a {@link io.netty.util.internal.chmv8.ForkJoinTask}) use random numbers
* in parallel in thread pools.
*
* <p>Usages of this class should typically be of the form:
* {@code ThreadLocalRandom.current().nextX(...)} (where
* {@code X} is {@code Int}, {@code Long}, etc).
* When all usages are of this form, it is never possible to
* accidently share a {@code ThreadLocalRandom} across multiple threads.
*
* <p>This class also provides additional commonly used bounded random
* generation methods.
*
* //@since 1.7
* //@author Doug Lea
*/
@SuppressWarnings("all")
public class ThreadLocalRandom extends Random {
// same constants as Random, but must be redeclared because private
private static final long multiplier = 0x5DEECE66DL;
private static final long addend = 0xBL;
private static final long mask = (1L << 48) - 1;
/**
* The random seed. We can't use super.seed.
*/
private long rnd;
/**
* Initialization flag to permit calls to setSeed to succeed only
* while executing the Random constructor. We can't allow others
* since it would cause setting seed in one part of a program to
* unintentionally impact other usages by the thread.
*/
boolean initialized;
// Padding to help avoid memory contention among seed updates in
// different TLRs in the common case that they are located near
// each other.
private long pad0, pad1, pad2, pad3, pad4, pad5, pad6, pad7;
/**
* The actual ThreadLocal
*/
private static final ThreadLocal<ThreadLocalRandom> localRandom =
new ThreadLocal<ThreadLocalRandom>() {
protected ThreadLocalRandom initialValue() {
return new ThreadLocalRandom();
}
};
/**
* Constructor called only by localRandom.initialValue.
*/
ThreadLocalRandom() {
super();
initialized = true;
}
/**
* Returns the current thread's {@code ThreadLocalRandom}.
*
* @return the current thread's {@code ThreadLocalRandom}
*/
public static ThreadLocalRandom current() {
return localRandom.get();
}
/**
* Throws {@code UnsupportedOperationException}. Setting seeds in
* this generator is not supported.
*
* @throws UnsupportedOperationException always
*/
public void setSeed(long seed) {
if (initialized) {
throw new UnsupportedOperationException();
}
rnd = (seed ^ multiplier) & mask;
}
protected int next(int bits) {
rnd = (rnd * multiplier + addend) & mask;
return (int) (rnd >>> (48 - bits));
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
*
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @throws IllegalArgumentException if least greater than or equal
* to bound
* @return the next value
*/
public int nextInt(int least, int bound) {
if (least >= bound) {
throw new IllegalArgumentException();
}
return nextInt(bound - least) + least;
}
/**
* Returns a pseudorandom, uniformly distributed value
* between 0 (inclusive) and the specified value (exclusive).
*
* @param n the bound on the random number to be returned. Must be
* positive.
* @return the next value
* @throws IllegalArgumentException if n is not positive
*/
public long nextLong(long n) {
if (n <= 0) {
throw new IllegalArgumentException("n must be positive");
}
// Divide n by two until small enough for nextInt. On each
// iteration (at most 31 of them but usually much less),
// randomly choose both whether to include high bit in result
// (offset) and whether to continue with the lower vs upper
// half (which makes a difference only if odd).
long offset = 0;
while (n >= Integer.MAX_VALUE) {
int bits = next(2);
long half = n >>> 1;
long nextn = ((bits & 2) == 0) ? half : n - half;
if ((bits & 1) == 0) {
offset += n - nextn;
}
n = nextn;
}
return offset + nextInt((int) n);
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
*
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @return the next value
* @throws IllegalArgumentException if least greater than or equal
* to bound
*/
public long nextLong(long least, long bound) {
if (least >= bound) {
throw new IllegalArgumentException();
}
return nextLong(bound - least) + least;
}
/**
* Returns a pseudorandom, uniformly distributed {@code double} value
* between 0 (inclusive) and the specified value (exclusive).
*
* @param n the bound on the random number to be returned. Must be
* positive.
* @return the next value
* @throws IllegalArgumentException if n is not positive
*/
public double nextDouble(double n) {
if (n <= 0) {
throw new IllegalArgumentException("n must be positive");
}
return nextDouble() * n;
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
*
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @return the next value
* @throws IllegalArgumentException if least greater than or equal
* to bound
*/
public double nextDouble(double least, double bound) {
if (least >= bound) {
throw new IllegalArgumentException();
}
return nextDouble() * (bound - least) + least;
}
private static final long serialVersionUID = -5851777807851030925L;
}
| common/src/main/java/io/netty/util/internal/ThreadLocalRandom.java | /*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package io.netty.util.internal;
import java.util.Random;
/**
* A random number generator isolated to the current thread. Like the
* global {@link java.util.Random} generator used by the {@link
* java.lang.Math} class, a {@code ThreadLocalRandom} is initialized
* with an internally generated seed that may not otherwise be
* modified. When applicable, use of {@code ThreadLocalRandom} rather
* than shared {@code Random} objects in concurrent programs will
* typically encounter much less overhead and contention. Use of
* {@code ThreadLocalRandom} is particularly appropriate when multiple
* tasks (for example, each a {@link io.netty.util.internal.chmv8.ForkJoinTask}) use random numbers
* in parallel in thread pools.
*
* <p>Usages of this class should typically be of the form:
* {@code ThreadLocalRandom.current().nextX(...)} (where
* {@code X} is {@code Int}, {@code Long}, etc).
* When all usages are of this form, it is never possible to
* accidently share a {@code ThreadLocalRandom} across multiple threads.
*
* <p>This class also provides additional commonly used bounded random
* generation methods.
*
* @since 1.7
* @author Doug Lea
*/
@SuppressWarnings("all")
public class ThreadLocalRandom extends Random {
// same constants as Random, but must be redeclared because private
private static final long multiplier = 0x5DEECE66DL;
private static final long addend = 0xBL;
private static final long mask = (1L << 48) - 1;
/**
* The random seed. We can't use super.seed.
*/
private long rnd;
/**
* Initialization flag to permit calls to setSeed to succeed only
* while executing the Random constructor. We can't allow others
* since it would cause setting seed in one part of a program to
* unintentionally impact other usages by the thread.
*/
boolean initialized;
// Padding to help avoid memory contention among seed updates in
// different TLRs in the common case that they are located near
// each other.
private long pad0, pad1, pad2, pad3, pad4, pad5, pad6, pad7;
/**
* The actual ThreadLocal
*/
private static final ThreadLocal<ThreadLocalRandom> localRandom =
new ThreadLocal<ThreadLocalRandom>() {
protected ThreadLocalRandom initialValue() {
return new ThreadLocalRandom();
}
};
/**
* Constructor called only by localRandom.initialValue.
*/
ThreadLocalRandom() {
super();
initialized = true;
}
/**
* Returns the current thread's {@code ThreadLocalRandom}.
*
* @return the current thread's {@code ThreadLocalRandom}
*/
public static ThreadLocalRandom current() {
return localRandom.get();
}
/**
* Throws {@code UnsupportedOperationException}. Setting seeds in
* this generator is not supported.
*
* @throws UnsupportedOperationException always
*/
public void setSeed(long seed) {
if (initialized)
throw new UnsupportedOperationException();
rnd = (seed ^ multiplier) & mask;
}
protected int next(int bits) {
rnd = (rnd * multiplier + addend) & mask;
return (int) (rnd >>> (48-bits));
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
*
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @throws IllegalArgumentException if least greater than or equal
* to bound
* @return the next value
*/
public int nextInt(int least, int bound) {
if (least >= bound)
throw new IllegalArgumentException();
return nextInt(bound - least) + least;
}
/**
* Returns a pseudorandom, uniformly distributed value
* between 0 (inclusive) and the specified value (exclusive).
*
* @param n the bound on the random number to be returned. Must be
* positive.
* @return the next value
* @throws IllegalArgumentException if n is not positive
*/
public long nextLong(long n) {
if (n <= 0)
throw new IllegalArgumentException("n must be positive");
// Divide n by two until small enough for nextInt. On each
// iteration (at most 31 of them but usually much less),
// randomly choose both whether to include high bit in result
// (offset) and whether to continue with the lower vs upper
// half (which makes a difference only if odd).
long offset = 0;
while (n >= Integer.MAX_VALUE) {
int bits = next(2);
long half = n >>> 1;
long nextn = ((bits & 2) == 0) ? half : n - half;
if ((bits & 1) == 0)
offset += n - nextn;
n = nextn;
}
return offset + nextInt((int) n);
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
*
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @return the next value
* @throws IllegalArgumentException if least greater than or equal
* to bound
*/
public long nextLong(long least, long bound) {
if (least >= bound)
throw new IllegalArgumentException();
return nextLong(bound - least) + least;
}
/**
* Returns a pseudorandom, uniformly distributed {@code double} value
* between 0 (inclusive) and the specified value (exclusive).
*
* @param n the bound on the random number to be returned. Must be
* positive.
* @return the next value
* @throws IllegalArgumentException if n is not positive
*/
public double nextDouble(double n) {
if (n <= 0)
throw new IllegalArgumentException("n must be positive");
return nextDouble() * n;
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
*
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @return the next value
* @throws IllegalArgumentException if least greater than or equal
* to bound
*/
public double nextDouble(double least, double bound) {
if (least >= bound)
throw new IllegalArgumentException();
return nextDouble() * (bound - least) + least;
}
private static final long serialVersionUID = -5851777807851030925L;
}
| Fix checkstyle
| common/src/main/java/io/netty/util/internal/ThreadLocalRandom.java | Fix checkstyle |
|
Java | apache-2.0 | 8f5386b99a7bca02fe5c260bc86bd3e5f92bd2b4 | 0 | alinvasile/httpcore,bimargulies/httpcore,ok2c/httpcore,apache/httpcore,alinvasile/httpcore,bimargulies/httpcore,apache/httpcomponents-core,apache/httpcore | /*
* $HeadURL$
* $Revision$
* $Date$
*
* ====================================================================
*
* Copyright 1999-2006 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.contrib.benchmark;
import java.io.IOException;
import java.io.InputStream;
import org.apache.http.ConnectionReuseStrategy;
import org.apache.http.Header;
import org.apache.http.HttpClientConnection;
import org.apache.http.HttpEntity;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.impl.DefaultConnectionReuseStrategy;
import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpRequestExecutor;
import org.apache.http.util.EntityUtils;
/**
* <p>
* </p>
* @author <a href="mailto:oleg at ural.ru">Oleg Kalnichevski</a>
*
* @version $Revision$
*
* @since 4.0
*/
public class BenchmarkWorker {
private byte[] buffer = new byte[4096];
private final int verbosity;
private final HttpRequestExecutor httpexecutor;
private final ConnectionReuseStrategy connstrategy;
public BenchmarkWorker(final HttpRequestExecutor httpexecutor, int verbosity) {
super();
this.httpexecutor = httpexecutor;
this.connstrategy = new DefaultConnectionReuseStrategy();
this.verbosity = verbosity;
}
public Stats execute(
final HttpRequest request,
final HttpClientConnection conn,
int count,
boolean keepalive) throws HttpException {
HttpResponse response = null;
Stats stats = new Stats();
stats.start();
for (int i = 0; i < count; i++) {
try {
response = this.httpexecutor.execute(request, conn);
if (this.verbosity >= 4) {
System.out.println(">> " + request.getRequestLine().toString());
Header[] headers = request.getAllHeaders();
for (int h = 0; h < headers.length; h++) {
System.out.println(">> " + headers[h].toString());
}
System.out.println();
}
if (this.verbosity >= 3) {
System.out.println(response.getStatusLine().getStatusCode());
}
if (this.verbosity >= 4) {
System.out.println("<< " + response.getStatusLine().toString());
Header[] headers = response.getAllHeaders();
for (int h = 0; h < headers.length; h++) {
System.out.println("<< " + headers[h].toString());
}
System.out.println();
}
HttpEntity entity = response.getEntity();
String charset = EntityUtils.getContentCharSet(entity);
if (charset == null) {
charset = HTTP.DEFAULT_CONTENT_CHARSET;
}
long contentlen = 0;
if (entity != null) {
InputStream instream = entity.getContent();
int l = 0;
while ((l = instream.read(this.buffer)) != -1) {
stats.incTotal(l);
contentlen += l;
if (this.verbosity >= 4) {
String s = new String(this.buffer, 0, l, charset);
System.out.print(s);
}
}
}
if (this.verbosity >= 4) {
System.out.println();
System.out.println();
}
if (!keepalive || !this.connstrategy.keepAlive(response)) {
conn.close();
}
stats.setContentLength(contentlen);
stats.incSuccessCount();
} catch (IOException ex) {
stats.incFailureCount();
if (this.verbosity >= 2) {
System.err.println("I/O error: " + ex.getMessage());
}
}
}
stats.finish();
if (response != null) {
Header header = response.getFirstHeader("Server");
if (header != null) {
stats.setServerName(header.getValue());
}
}
return stats;
}
}
| src/contrib/org/apache/http/contrib/benchmark/BenchmarkWorker.java | /*
* $HeadURL$
* $Revision$
* $Date$
*
* ====================================================================
*
* Copyright 1999-2006 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.contrib.benchmark;
import java.io.IOException;
import java.io.InputStream;
import org.apache.http.ConnectionReuseStrategy;
import org.apache.http.Header;
import org.apache.http.HttpClientConnection;
import org.apache.http.HttpEntity;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.impl.DefaultConnectionReuseStrategy;
import org.apache.http.protocol.HttpRequestExecutor;
/**
* <p>
* </p>
* @author <a href="mailto:oleg at ural.ru">Oleg Kalnichevski</a>
*
* @version $Revision$
*
* @since 4.0
*/
public class BenchmarkWorker {
private byte[] buffer = new byte[4096];
private final int verbosity;
private final HttpRequestExecutor httpexecutor;
private final ConnectionReuseStrategy connstrategy;
public BenchmarkWorker(final HttpRequestExecutor httpexecutor, int verbosity) {
super();
this.httpexecutor = httpexecutor;
this.connstrategy = new DefaultConnectionReuseStrategy();
this.verbosity = verbosity;
}
public Stats execute(
final HttpRequest request,
final HttpClientConnection conn,
int count,
boolean keepalive) throws HttpException {
HttpResponse response = null;
Stats stats = new Stats();
stats.start();
for (int i = 0; i < count; i++) {
try {
response = this.httpexecutor.execute(request, conn);
if (this.verbosity >= 4) {
System.out.println(">> " + request.getRequestLine().toString());
Header[] headers = request.getAllHeaders();
for (int h = 0; h < headers.length; h++) {
System.out.println(">> " + headers[h].toString());
}
System.out.println();
}
if (this.verbosity >= 3) {
System.out.println(response.getStatusLine().getStatusCode());
}
if (this.verbosity >= 4) {
System.out.println("<< " + response.getStatusLine().toString());
Header[] headers = response.getAllHeaders();
for (int h = 0; h < headers.length; h++) {
System.out.println("<< " + headers[h].toString());
}
System.out.println();
}
HttpEntity entity = response.getEntity();
long contentlen = 0;
if (entity != null) {
InputStream instream = entity.getContent();
int l = 0;
while ((l = instream.read(this.buffer)) != -1) {
stats.incTotal(l);
contentlen += l;
}
}
if (!keepalive || !this.connstrategy.keepAlive(response)) {
conn.close();
}
stats.setContentLength(contentlen);
stats.incSuccessCount();
} catch (IOException ex) {
stats.incFailureCount();
if (this.verbosity >= 2) {
System.err.println("I/O error: " + ex.getMessage());
}
}
}
stats.finish();
if (response != null) {
Header header = response.getFirstHeader("Server");
if (header != null) {
stats.setServerName(header.getValue());
}
}
return stats;
}
}
| Print out response body if the debug level is 4
git-svn-id: 9689f759f60f1118327eab294c99d759009aaa48@408156 13f79535-47bb-0310-9956-ffa450edef68
| src/contrib/org/apache/http/contrib/benchmark/BenchmarkWorker.java | Print out response body if the debug level is 4 |
|
Java | apache-2.0 | 4394028d6fccc3751f87bda076088a96916f3aee | 0 | tomakehurst/wiremock,dlaha21/wiremock,Mahoney/wiremock,tricker/wiremock,dlaha21/wiremock,Mahoney/wiremock,tomakehurst/wiremock,tricker/wiremock,dlaha21/wiremock,tricker/wiremock,tomakehurst/wiremock,dlaha21/wiremock,tomakehurst/wiremock,tomakehurst/wiremock,Mahoney/wiremock,tricker/wiremock,Mahoney/wiremock,Mahoney/wiremock,tricker/wiremock,dlaha21/wiremock | /*
* Copyright (C) 2011 Thomas Akehurst
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.tomakehurst.wiremock.common;
import java.util.Base64;
public class Encoding {
public static byte[] decodeBase64(String base64) {
return base64 != null ?
Base64.getDecoder().decode(base64) :
null;
}
public static String encodeBase64(byte[] content) {
return content != null ?
Base64.getEncoder().encodeToString(content) :
null;
}
}
| src/main/java/com/github/tomakehurst/wiremock/common/Encoding.java | /*
* Copyright (C) 2011 Thomas Akehurst
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.tomakehurst.wiremock.common;
import com.google.common.io.BaseEncoding;
public class Encoding {
public static byte[] decodeBase64(String base64) {
return base64 != null ?
BaseEncoding.base64().decode(base64) :
null;
}
public static String encodeBase64(byte[] content) {
return content != null ?
BaseEncoding.base64().encode(content) :
null;
}
}
| Switched base64 encoding library to better performing native Java 8 (http://java-performance.info/base64-encoding-and-decoding-performance/)
| src/main/java/com/github/tomakehurst/wiremock/common/Encoding.java | Switched base64 encoding library to better performing native Java 8 (http://java-performance.info/base64-encoding-and-decoding-performance/) |
|
Java | apache-2.0 | 9f238bd79ebf94a26c55b8fd2148a72ac66f6a03 | 0 | Apache9/hbase,mahak/hbase,mahak/hbase,ndimiduk/hbase,mahak/hbase,mahak/hbase,ndimiduk/hbase,Apache9/hbase,ndimiduk/hbase,ndimiduk/hbase,Apache9/hbase,Apache9/hbase,ndimiduk/hbase,Apache9/hbase,apurtell/hbase,Apache9/hbase,ndimiduk/hbase,mahak/hbase,Apache9/hbase,apurtell/hbase,apurtell/hbase,ndimiduk/hbase,apurtell/hbase,ndimiduk/hbase,mahak/hbase,Apache9/hbase,apurtell/hbase,Apache9/hbase,apurtell/hbase,mahak/hbase,mahak/hbase,mahak/hbase,Apache9/hbase,apurtell/hbase,ndimiduk/hbase,apurtell/hbase,apurtell/hbase,mahak/hbase,apurtell/hbase,ndimiduk/hbase | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.PrintStream;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.http.conf.ConfServlet;
import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
import org.apache.hadoop.hbase.http.log.LogLevel;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.util.Shell;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.org.eclipse.jetty.http.HttpVersion;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Handler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.HttpConfiguration;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.HttpConnectionFactory;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.RequestLog;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.SecureRequestCustomizer;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Server;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.ServerConnector;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.SslConnectionFactory;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.HandlerCollection;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.RequestLogHandler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.DefaultServlet;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterHolder;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterMapping;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.ServletContextHandler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.ServletHolder;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.MultiException;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.ssl.SslContextFactory;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.apache.hbase.thirdparty.org.eclipse.jetty.webapp.WebAppContext;
import org.apache.hbase.thirdparty.org.glassfish.jersey.server.ResourceConfig;
import org.apache.hbase.thirdparty.org.glassfish.jersey.servlet.ServletContainer;
/**
* Create a Jetty embedded server to answer http requests. The primary goal
* is to serve up status information for the server.
* There are three contexts:
* "/logs/" -> points to the log directory
* "/static/" -> points to common static files (src/webapps/static)
* "/" -> the jsp server code from (src/webapps/<name>)
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class HttpServer implements FilterContainer {
private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class);
private static final String EMPTY_STRING = "";
private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
static final String FILTER_INITIALIZERS_PROPERTY
= "hbase.http.filter.initializers";
static final String HTTP_MAX_THREADS = "hbase.http.max.threads";
public static final String HTTP_UI_AUTHENTICATION = "hbase.security.authentication.ui";
static final String HTTP_AUTHENTICATION_PREFIX = "hbase.security.authentication.";
static final String HTTP_SPNEGO_AUTHENTICATION_PREFIX = HTTP_AUTHENTICATION_PREFIX
+ "spnego.";
static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX = "kerberos.principal";
public static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX;
static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX = "kerberos.keytab";
public static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX;
static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX = "kerberos.name.rules";
public static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX;
static final String HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_SUFFIX = "kerberos.proxyuser.enable";
public static final String HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_SUFFIX;
public static final boolean HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_DEFAULT = false;
static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX =
"signature.secret.file";
public static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY =
HTTP_AUTHENTICATION_PREFIX + HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX;
public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.users";
public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.groups";
public static final String HTTP_PRIVILEGED_CONF_KEY =
"hbase.security.authentication.ui.config.protected";
public static final boolean HTTP_PRIVILEGED_CONF_DEFAULT = false;
// The ServletContext attribute where the daemon Configuration
// gets stored.
public static final String CONF_CONTEXT_ATTRIBUTE = "hbase.conf";
public static final String ADMINS_ACL = "admins.acl";
public static final String BIND_ADDRESS = "bind.address";
public static final String SPNEGO_FILTER = "SpnegoFilter";
public static final String SPNEGO_PROXYUSER_FILTER = "SpnegoProxyUserFilter";
public static final String NO_CACHE_FILTER = "NoCacheFilter";
public static final String APP_DIR = "webapps";
private final AccessControlList adminsAcl;
protected final Server webServer;
protected String appDir;
protected String logDir;
private static final class ListenerInfo {
/**
* Boolean flag to determine whether the HTTP server should clean up the
* listener in stop().
*/
private final boolean isManaged;
private final ServerConnector listener;
private ListenerInfo(boolean isManaged, ServerConnector listener) {
this.isManaged = isManaged;
this.listener = listener;
}
}
private final List<ListenerInfo> listeners = Lists.newArrayList();
@VisibleForTesting
public List<ServerConnector> getServerConnectors() {
return listeners.stream().map(info -> info.listener).collect(Collectors.toList());
}
protected final WebAppContext webAppContext;
protected final boolean findPort;
protected final Map<ServletContextHandler, Boolean> defaultContexts = new HashMap<>();
protected final List<String> filterNames = new ArrayList<>();
protected final boolean authenticationEnabled;
static final String STATE_DESCRIPTION_ALIVE = " - alive";
static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
/**
* Class to construct instances of HTTP server with specific options.
*/
public static class Builder {
private ArrayList<URI> endpoints = Lists.newArrayList();
private Configuration conf;
private String[] pathSpecs;
private AccessControlList adminsAcl;
private boolean securityEnabled = false;
private String usernameConfKey;
private String keytabConfKey;
private boolean needsClientAuth;
private String hostName;
private String appDir = APP_DIR;
private String logDir;
private boolean findPort;
private String trustStore;
private String trustStorePassword;
private String trustStoreType;
private String keyStore;
private String keyStorePassword;
private String keyStoreType;
// The -keypass option in keytool
private String keyPassword;
private String kerberosNameRulesKey;
private String signatureSecretFileKey;
/**
* @see #setAppDir(String)
* @deprecated Since 0.99.0. Use builder pattern via {@link #setAppDir(String)} instead.
*/
@Deprecated
private String name;
/**
* @see #addEndpoint(URI)
* @deprecated Since 0.99.0. Use builder pattern via {@link #addEndpoint(URI)} instead.
*/
@Deprecated
private String bindAddress;
/**
* @see #addEndpoint(URI)
* @deprecated Since 0.99.0. Use builder pattern via {@link #addEndpoint(URI)} instead.
*/
@Deprecated
private int port = -1;
/**
* Add an endpoint that the HTTP server should listen to.
*
* @param endpoint
* the endpoint of that the HTTP server should listen to. The
* scheme specifies the protocol (i.e. HTTP / HTTPS), the host
* specifies the binding address, and the port specifies the
* listening port. Unspecified or zero port means that the server
* can listen to any port.
*/
public Builder addEndpoint(URI endpoint) {
endpoints.add(endpoint);
return this;
}
/**
* Set the hostname of the http server. The host name is used to resolve the
* _HOST field in Kerberos principals. The hostname of the first listener
* will be used if the name is unspecified.
*/
public Builder hostName(String hostName) {
this.hostName = hostName;
return this;
}
public Builder trustStore(String location, String password, String type) {
this.trustStore = location;
this.trustStorePassword = password;
this.trustStoreType = type;
return this;
}
public Builder keyStore(String location, String password, String type) {
this.keyStore = location;
this.keyStorePassword = password;
this.keyStoreType = type;
return this;
}
public Builder keyPassword(String password) {
this.keyPassword = password;
return this;
}
/**
* Specify whether the server should authorize the client in SSL
* connections.
*/
public Builder needsClientAuth(boolean value) {
this.needsClientAuth = value;
return this;
}
/**
* @see #setAppDir(String)
* @deprecated Since 0.99.0. Use {@link #setAppDir(String)} instead.
*/
@Deprecated
public Builder setName(String name){
this.name = name;
return this;
}
/**
* @see #addEndpoint(URI)
* @deprecated Since 0.99.0. Use {@link #addEndpoint(URI)} instead.
*/
@Deprecated
public Builder setBindAddress(String bindAddress){
this.bindAddress = bindAddress;
return this;
}
/**
* @see #addEndpoint(URI)
* @deprecated Since 0.99.0. Use {@link #addEndpoint(URI)} instead.
*/
@Deprecated
public Builder setPort(int port) {
this.port = port;
return this;
}
public Builder setFindPort(boolean findPort) {
this.findPort = findPort;
return this;
}
public Builder setConf(Configuration conf) {
this.conf = conf;
return this;
}
public Builder setPathSpec(String[] pathSpec) {
this.pathSpecs = pathSpec;
return this;
}
public Builder setACL(AccessControlList acl) {
this.adminsAcl = acl;
return this;
}
public Builder setSecurityEnabled(boolean securityEnabled) {
this.securityEnabled = securityEnabled;
return this;
}
public Builder setUsernameConfKey(String usernameConfKey) {
this.usernameConfKey = usernameConfKey;
return this;
}
public Builder setKeytabConfKey(String keytabConfKey) {
this.keytabConfKey = keytabConfKey;
return this;
}
public Builder setKerberosNameRulesKey(String kerberosNameRulesKey) {
this.kerberosNameRulesKey = kerberosNameRulesKey;
return this;
}
public Builder setSignatureSecretFileKey(String signatureSecretFileKey) {
this.signatureSecretFileKey = signatureSecretFileKey;
return this;
}
public Builder setAppDir(String appDir) {
this.appDir = appDir;
return this;
}
public Builder setLogDir(String logDir) {
this.logDir = logDir;
return this;
}
public HttpServer build() throws IOException {
// Do we still need to assert this non null name if it is deprecated?
if (this.name == null) {
throw new HadoopIllegalArgumentException("name is not set");
}
// Make the behavior compatible with deprecated interfaces
if (bindAddress != null && port != -1) {
try {
endpoints.add(0, new URI("http", "", bindAddress, port, "", "", ""));
} catch (URISyntaxException e) {
throw new HadoopIllegalArgumentException("Invalid endpoint: "+ e);
}
}
if (endpoints.isEmpty()) {
throw new HadoopIllegalArgumentException("No endpoints specified");
}
if (hostName == null) {
hostName = endpoints.get(0).getHost();
}
if (this.conf == null) {
conf = new Configuration();
}
HttpServer server = new HttpServer(this);
for (URI ep : endpoints) {
ServerConnector listener = null;
String scheme = ep.getScheme();
HttpConfiguration httpConfig = new HttpConfiguration();
httpConfig.setSecureScheme("https");
httpConfig.setHeaderCacheSize(DEFAULT_MAX_HEADER_SIZE);
httpConfig.setResponseHeaderSize(DEFAULT_MAX_HEADER_SIZE);
httpConfig.setRequestHeaderSize(DEFAULT_MAX_HEADER_SIZE);
httpConfig.setSendServerVersion(false);
if ("http".equals(scheme)) {
listener = new ServerConnector(server.webServer, new HttpConnectionFactory(httpConfig));
} else if ("https".equals(scheme)) {
HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig);
httpsConfig.addCustomizer(new SecureRequestCustomizer());
SslContextFactory sslCtxFactory = new SslContextFactory();
sslCtxFactory.setNeedClientAuth(needsClientAuth);
sslCtxFactory.setKeyManagerPassword(keyPassword);
if (keyStore != null) {
sslCtxFactory.setKeyStorePath(keyStore);
sslCtxFactory.setKeyStoreType(keyStoreType);
sslCtxFactory.setKeyStorePassword(keyStorePassword);
}
if (trustStore != null) {
sslCtxFactory.setTrustStorePath(trustStore);
sslCtxFactory.setTrustStoreType(trustStoreType);
sslCtxFactory.setTrustStorePassword(trustStorePassword);
}
listener = new ServerConnector(server.webServer, new SslConnectionFactory(sslCtxFactory,
HttpVersion.HTTP_1_1.toString()), new HttpConnectionFactory(httpsConfig));
} else {
throw new HadoopIllegalArgumentException(
"unknown scheme for endpoint:" + ep);
}
// default settings for connector
listener.setAcceptQueueSize(128);
if (Shell.WINDOWS) {
// result of setting the SO_REUSEADDR flag is different on Windows
// http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
// without this 2 NN's can start on the same machine and listen on
// the same port with indeterminate routing of incoming requests to them
listener.setReuseAddress(false);
}
listener.setHost(ep.getHost());
listener.setPort(ep.getPort() == -1 ? 0 : ep.getPort());
server.addManagedListener(listener);
}
server.loadListeners();
return server;
}
}
/**
* @see #HttpServer(String, String, int, boolean, Configuration)
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port, boolean findPort)
throws IOException {
this(name, bindAddress, port, findPort, new Configuration());
}
/**
* Create a status server on the given port. Allows you to specify the
* path specifications that this server will be serving so that they will be
* added to the filters properly.
*
* @param name The name of the server
* @param bindAddress The address for this server
* @param port The port to use on the server
* @param findPort whether the server should start at the given port and
* increment by 1 until it finds a free port.
* @param conf Configuration
* @param pathSpecs Path specifications that this httpserver will be serving.
* These will be added to any filters.
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
this(name, bindAddress, port, findPort, conf, null, pathSpecs);
}
/**
* Create a status server on the given port.
* The jsp scripts are taken from src/webapps/<name>.
* @param name The name of the server
* @param port The port to use on the server
* @param findPort whether the server should start at the given port and
* increment by 1 until it finds a free port.
* @param conf Configuration
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf) throws IOException {
this(name, bindAddress, port, findPort, conf, null, null);
}
/**
* Creates a status server on the given port. The JSP scripts are taken
* from src/webapp<name>.
*
* @param name the name of the server
* @param bindAddress the address for this server
* @param port the port to use on the server
* @param findPort whether the server should start at the given port and increment by 1 until it
* finds a free port
* @param conf the configuration to use
* @param adminsAcl {@link AccessControlList} of the admins
* @throws IOException when creating the server fails
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl)
throws IOException {
this(name, bindAddress, port, findPort, conf, adminsAcl, null);
}
/**
* Create a status server on the given port.
* The jsp scripts are taken from src/webapps/<name>.
* @param name The name of the server
* @param bindAddress The address for this server
* @param port The port to use on the server
* @param findPort whether the server should start at the given port and
* increment by 1 until it finds a free port.
* @param conf Configuration
* @param adminsAcl {@link AccessControlList} of the admins
* @param pathSpecs Path specifications that this httpserver will be serving.
* These will be added to any filters.
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl,
String[] pathSpecs) throws IOException {
this(new Builder().setName(name)
.addEndpoint(URI.create("http://" + bindAddress + ":" + port))
.setFindPort(findPort).setConf(conf).setACL(adminsAcl)
.setPathSpec(pathSpecs));
}
private HttpServer(final Builder b) throws IOException {
this.appDir = b.appDir;
this.logDir = b.logDir;
final String appDir = getWebAppsPath(b.name);
int maxThreads = b.conf.getInt(HTTP_MAX_THREADS, 16);
// If HTTP_MAX_THREADS is less than or equal to 0, QueueThreadPool() will use the
// default value (currently 200).
QueuedThreadPool threadPool = maxThreads <= 0 ? new QueuedThreadPool()
: new QueuedThreadPool(maxThreads);
threadPool.setDaemon(true);
this.webServer = new Server(threadPool);
this.adminsAcl = b.adminsAcl;
this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir);
this.findPort = b.findPort;
this.authenticationEnabled = b.securityEnabled;
initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs, b);
}
private void initializeWebServer(String name, String hostName,
Configuration conf, String[] pathSpecs, HttpServer.Builder b)
throws FileNotFoundException, IOException {
Preconditions.checkNotNull(webAppContext);
HandlerCollection handlerCollection = new HandlerCollection();
ContextHandlerCollection contexts = new ContextHandlerCollection();
RequestLog requestLog = HttpRequestLog.getRequestLog(name);
if (requestLog != null) {
RequestLogHandler requestLogHandler = new RequestLogHandler();
requestLogHandler.setRequestLog(requestLog);
handlerCollection.addHandler(requestLogHandler);
}
final String appDir = getWebAppsPath(name);
handlerCollection.addHandler(contexts);
handlerCollection.addHandler(webAppContext);
webServer.setHandler(handlerCollection);
webAppContext.setAttribute(ADMINS_ACL, adminsAcl);
// Default apps need to be set first, so that all filters are applied to them.
// Because they're added to defaultContexts, we need them there before we start
// adding filters
addDefaultApps(contexts, appDir, conf);
addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
addGlobalFilter("clickjackingprevention",
ClickjackingPreventionFilter.class.getName(),
ClickjackingPreventionFilter.getDefaultParameters(conf));
addGlobalFilter("securityheaders",
SecurityHeadersFilter.class.getName(),
SecurityHeadersFilter.getDefaultParameters(conf));
// But security needs to be enabled prior to adding the other servlets
if (authenticationEnabled) {
initSpnego(conf, hostName, b.usernameConfKey, b.keytabConfKey, b.kerberosNameRulesKey,
b.signatureSecretFileKey);
}
final FilterInitializer[] initializers = getFilterInitializers(conf);
if (initializers != null) {
conf = new Configuration(conf);
conf.set(BIND_ADDRESS, hostName);
for (FilterInitializer c : initializers) {
c.initFilter(this, conf);
}
}
addDefaultServlets(contexts, conf);
if (pathSpecs != null) {
for (String path : pathSpecs) {
LOG.info("adding path spec: " + path);
addFilterPathMapping(path, webAppContext);
}
}
}
private void addManagedListener(ServerConnector connector) {
listeners.add(new ListenerInfo(true, connector));
}
private static WebAppContext createWebAppContext(String name,
Configuration conf, AccessControlList adminsAcl, final String appDir) {
WebAppContext ctx = new WebAppContext();
ctx.setDisplayName(name);
ctx.setContextPath("/");
ctx.setWar(appDir + "/" + name);
ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
// for org.apache.hadoop.metrics.MetricsServlet
ctx.getServletContext().setAttribute(
org.apache.hadoop.http.HttpServer2.CONF_CONTEXT_ATTRIBUTE, conf);
ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
addNoCacheFilter(ctx);
return ctx;
}
private static void addNoCacheFilter(WebAppContext ctxt) {
defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
Collections.<String, String> emptyMap(), new String[] { "/*" });
}
/** Get an array of FilterConfiguration specified in the conf */
private static FilterInitializer[] getFilterInitializers(Configuration conf) {
if (conf == null) {
return null;
}
Class<?>[] classes = conf.getClasses(FILTER_INITIALIZERS_PROPERTY);
if (classes == null) {
return null;
}
FilterInitializer[] initializers = new FilterInitializer[classes.length];
for(int i = 0; i < classes.length; i++) {
initializers[i] = (FilterInitializer)ReflectionUtils.newInstance(classes[i]);
}
return initializers;
}
/**
* Add default apps.
* @param appDir The application directory
*/
protected void addDefaultApps(ContextHandlerCollection parent,
final String appDir, Configuration conf) {
// set up the context for "/logs/" if "hadoop.log.dir" property is defined.
String logDir = this.logDir;
if (logDir == null) {
logDir = System.getProperty("hadoop.log.dir");
}
if (logDir != null) {
ServletContextHandler logContext = new ServletContextHandler(parent, "/logs");
logContext.addServlet(AdminAuthorizedServlet.class, "/*");
logContext.setResourceBase(logDir);
if (conf.getBoolean(
ServerConfigurationKeys.HBASE_JETTY_LOGS_SERVE_ALIASES,
ServerConfigurationKeys.DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES)) {
Map<String, String> params = logContext.getInitParams();
params.put(
"org.mortbay.jetty.servlet.Default.aliases", "true");
}
logContext.setDisplayName("logs");
setContextAttributes(logContext, conf);
defaultContexts.put(logContext, true);
}
// set up the context for "/static/*"
ServletContextHandler staticContext = new ServletContextHandler(parent, "/static");
staticContext.setResourceBase(appDir + "/static");
staticContext.addServlet(DefaultServlet.class, "/*");
staticContext.setDisplayName("static");
setContextAttributes(staticContext, conf);
defaultContexts.put(staticContext, true);
}
private void setContextAttributes(ServletContextHandler context, Configuration conf) {
context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
}
/**
* Add default servlets.
*/
protected void addDefaultServlets(
ContextHandlerCollection contexts, Configuration conf) throws IOException {
// set up default servlets
addPrivilegedServlet("stacks", "/stacks", StackServlet.class);
addPrivilegedServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
// Hadoop3 has moved completely to metrics2, and dropped support for Metrics v1's
// MetricsServlet (see HADOOP-12504). We'll using reflection to load if against hadoop2.
// Remove when we drop support for hbase on hadoop2.x.
try {
Class<?> clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet");
addPrivilegedServlet("metrics", "/metrics", clz.asSubclass(HttpServlet.class));
} catch (Exception e) {
// do nothing
}
addPrivilegedServlet("jmx", "/jmx", JMXJsonServlet.class);
// While we don't expect users to have sensitive information in their configuration, they
// might. Give them an option to not expose the service configuration to all users.
if (conf.getBoolean(HTTP_PRIVILEGED_CONF_KEY, HTTP_PRIVILEGED_CONF_DEFAULT)) {
addPrivilegedServlet("conf", "/conf", ConfServlet.class);
} else {
addUnprivilegedServlet("conf", "/conf", ConfServlet.class);
}
final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome();
if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) {
addPrivilegedServlet("prof", "/prof", ProfileServlet.class);
Path tmpDir = Paths.get(ProfileServlet.OUTPUT_DIR);
if (Files.notExists(tmpDir)) {
Files.createDirectories(tmpDir);
}
ServletContextHandler genCtx = new ServletContextHandler(contexts, "/prof-output");
genCtx.addServlet(ProfileOutputServlet.class, "/*");
genCtx.setResourceBase(tmpDir.toAbsolutePath().toString());
genCtx.setDisplayName("prof-output");
} else {
addUnprivilegedServlet("prof", "/prof", ProfileServlet.DisabledServlet.class);
LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property " +
"not specified. Disabling /prof endpoint.");
}
}
/**
* Set a value in the webapp context. These values are available to the jsp
* pages as "application.getAttribute(name)".
* @param name The name of the attribute
* @param value The value of the attribute
*/
public void setAttribute(String name, Object value) {
webAppContext.setAttribute(name, value);
}
/**
* Add a Jersey resource package.
* @param packageName The Java package name containing the Jersey resource.
* @param pathSpec The path spec for the servlet
*/
public void addJerseyResourcePackage(final String packageName,
final String pathSpec) {
LOG.info("addJerseyResourcePackage: packageName=" + packageName
+ ", pathSpec=" + pathSpec);
ResourceConfig application = new ResourceConfig().packages(packageName);
final ServletHolder sh = new ServletHolder(new ServletContainer(application));
webAppContext.addServlet(sh, pathSpec);
}
/**
* Adds a servlet in the server that any user can access. This method differs from
* {@link #addPrivilegedServlet(String, String, Class)} in that any authenticated user
* can interact with the servlet added by this method.
* @param name The name of the servlet (can be passed as null)
* @param pathSpec The path spec for the servlet
* @param clazz The servlet class
*/
public void addUnprivilegedServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz) {
addServletWithAuth(name, pathSpec, clazz, false);
}
/**
* Adds a servlet in the server that only administrators can access. This method differs from
* {@link #addUnprivilegedServlet(String, String, Class)} in that only those authenticated user
* who are identified as administrators can interact with the servlet added by this method.
*/
public void addPrivilegedServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz) {
addServletWithAuth(name, pathSpec, clazz, true);
}
/**
* Internal method to add a servlet to the HTTP server. Developers should not call this method
* directly, but invoke it via {@link #addUnprivilegedServlet(String, String, Class)} or
* {@link #addPrivilegedServlet(String, String, Class)}.
*/
void addServletWithAuth(String name, String pathSpec,
Class<? extends HttpServlet> clazz, boolean requireAuthz) {
addInternalServlet(name, pathSpec, clazz, requireAuthz);
addFilterPathMapping(pathSpec, webAppContext);
}
/**
* Add an internal servlet in the server, specifying whether or not to
* protect with Kerberos authentication.
* Note: This method is to be used for adding servlets that facilitate
* internal communication and not for user facing functionality. For
* servlets added using this method, filters (except internal Kerberos
* filters) are not enabled.
*
* @param name The name of the servlet (can be passed as null)
* @param pathSpec The path spec for the servlet
* @param clazz The servlet class
* @param requireAuth Require Kerberos authenticate to access servlet
*/
void addInternalServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz, boolean requireAuthz) {
ServletHolder holder = new ServletHolder(clazz);
if (name != null) {
holder.setName(name);
}
if (authenticationEnabled && requireAuthz) {
FilterHolder filter = new FilterHolder(AdminAuthorizedFilter.class);
filter.setName(AdminAuthorizedFilter.class.getSimpleName());
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
fmap.setDispatches(FilterMapping.ALL);
fmap.setFilterName(AdminAuthorizedFilter.class.getSimpleName());
webAppContext.getServletHandler().addFilter(filter, fmap);
}
webAppContext.getSessionHandler().getSessionCookieConfig().setHttpOnly(true);
webAppContext.getSessionHandler().getSessionCookieConfig().setSecure(true);
webAppContext.addServlet(holder, pathSpec);
}
@Override
public void addFilter(String name, String classname, Map<String, String> parameters) {
final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS);
LOG.info("Added filter " + name + " (class=" + classname
+ ") to context " + webAppContext.getDisplayName());
final String[] ALL_URLS = { "/*" };
for (Map.Entry<ServletContextHandler, Boolean> e : defaultContexts.entrySet()) {
if (e.getValue()) {
ServletContextHandler handler = e.getKey();
defineFilter(handler, name, classname, parameters, ALL_URLS);
LOG.info("Added filter " + name + " (class=" + classname
+ ") to context " + handler.getDisplayName());
}
}
filterNames.add(name);
}
@Override
public void addGlobalFilter(String name, String classname, Map<String, String> parameters) {
final String[] ALL_URLS = { "/*" };
defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
for (ServletContextHandler ctx : defaultContexts.keySet()) {
defineFilter(ctx, name, classname, parameters, ALL_URLS);
}
LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
}
/**
* Define a filter for a context and set up default url mappings.
*/
public static void defineFilter(ServletContextHandler handler, String name,
String classname, Map<String,String> parameters, String[] urls) {
FilterHolder holder = new FilterHolder();
holder.setName(name);
holder.setClassName(classname);
if (parameters != null) {
holder.setInitParameters(parameters);
}
FilterMapping fmap = new FilterMapping();
fmap.setPathSpecs(urls);
fmap.setDispatches(FilterMapping.ALL);
fmap.setFilterName(name);
handler.getServletHandler().addFilter(holder, fmap);
}
/**
* Add the path spec to the filter path mapping.
* @param pathSpec The path spec
* @param webAppCtx The WebApplicationContext to add to
*/
protected void addFilterPathMapping(String pathSpec,
WebAppContext webAppCtx) {
for(String name : filterNames) {
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
fmap.setFilterName(name);
fmap.setDispatches(FilterMapping.ALL);
webAppCtx.getServletHandler().addFilterMapping(fmap);
}
}
/**
* Get the value in the webapp context.
* @param name The name of the attribute
* @return The value of the attribute
*/
public Object getAttribute(String name) {
return webAppContext.getAttribute(name);
}
public WebAppContext getWebAppContext(){
return this.webAppContext;
}
public String getWebAppsPath(String appName) throws FileNotFoundException {
return getWebAppsPath(this.appDir, appName);
}
/**
* Get the pathname to the webapps files.
* @param appName eg "secondary" or "datanode"
* @return the pathname as a URL
* @throws FileNotFoundException if 'webapps' directory cannot be found on CLASSPATH.
*/
protected String getWebAppsPath(String webapps, String appName) throws FileNotFoundException {
URL url = getClass().getClassLoader().getResource(webapps + "/" + appName);
if (url == null) {
throw new FileNotFoundException(webapps + "/" + appName
+ " not found in CLASSPATH");
}
String urlString = url.toString();
return urlString.substring(0, urlString.lastIndexOf('/'));
}
/**
* Get the port that the server is on
* @return the port
* @deprecated Since 0.99.0
*/
@Deprecated
public int getPort() {
return ((ServerConnector)webServer.getConnectors()[0]).getLocalPort();
}
/**
* Get the address that corresponds to a particular connector.
*
* @return the corresponding address for the connector, or null if there's no
* such connector or the connector is not bounded.
*/
public InetSocketAddress getConnectorAddress(int index) {
Preconditions.checkArgument(index >= 0);
if (index > webServer.getConnectors().length) {
return null;
}
ServerConnector c = (ServerConnector)webServer.getConnectors()[index];
if (c.getLocalPort() == -1 || c.getLocalPort() == -2) {
// -1 if the connector has not been opened
// -2 if it has been closed
return null;
}
return new InetSocketAddress(c.getHost(), c.getLocalPort());
}
/**
* Set the min, max number of worker threads (simultaneous connections).
*/
public void setThreads(int min, int max) {
QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
pool.setMinThreads(min);
pool.setMaxThreads(max);
}
private void initSpnego(Configuration conf, String hostName,
String usernameConfKey, String keytabConfKey, String kerberosNameRuleKey,
String signatureSecretKeyFileKey) throws IOException {
Map<String, String> params = new HashMap<>();
String principalInConf = getOrEmptyString(conf, usernameConfKey);
if (!principalInConf.isEmpty()) {
params.put(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX, SecurityUtil.getServerPrincipal(
principalInConf, hostName));
}
String httpKeytab = getOrEmptyString(conf, keytabConfKey);
if (!httpKeytab.isEmpty()) {
params.put(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX, httpKeytab);
}
String kerberosNameRule = getOrEmptyString(conf, kerberosNameRuleKey);
if (!kerberosNameRule.isEmpty()) {
params.put(HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX, kerberosNameRule);
}
String signatureSecretKeyFile = getOrEmptyString(conf, signatureSecretKeyFileKey);
if (!signatureSecretKeyFile.isEmpty()) {
params.put(HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX,
signatureSecretKeyFile);
}
params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
// Verify that the required options were provided
if (isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX)) ||
isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX))) {
throw new IllegalArgumentException(usernameConfKey + " and "
+ keytabConfKey + " are both required in the configuration "
+ "to enable SPNEGO/Kerberos authentication for the Web UI");
}
if (conf.getBoolean(HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_KEY,
HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_DEFAULT)) {
//Copy/rename standard hadoop proxyuser settings to filter
for(Map.Entry<String, String> proxyEntry :
conf.getPropsWithPrefix(ProxyUsers.CONF_HADOOP_PROXYUSER).entrySet()) {
params.put(ProxyUserAuthenticationFilter.PROXYUSER_PREFIX + proxyEntry.getKey(),
proxyEntry.getValue());
}
addGlobalFilter(SPNEGO_PROXYUSER_FILTER, ProxyUserAuthenticationFilter.class.getName(), params);
} else {
addGlobalFilter(SPNEGO_FILTER, AuthenticationFilter.class.getName(), params);
}
}
/**
* Returns true if the argument is non-null and not whitespace
*/
private boolean isMissing(String value) {
if (null == value) {
return true;
}
return value.trim().isEmpty();
}
/**
* Extracts the value for the given key from the configuration of returns a string of
* zero length.
*/
private String getOrEmptyString(Configuration conf, String key) {
if (null == key) {
return EMPTY_STRING;
}
final String value = conf.get(key.trim());
return null == value ? EMPTY_STRING : value;
}
/**
* Start the server. Does not wait for the server to start.
*/
public void start() throws IOException {
try {
try {
openListeners();
webServer.start();
} catch (IOException ex) {
LOG.info("HttpServer.start() threw a non Bind IOException", ex);
throw ex;
} catch (MultiException ex) {
LOG.info("HttpServer.start() threw a MultiException", ex);
throw ex;
}
// Make sure there is no handler failures.
Handler[] handlers = webServer.getHandlers();
for (int i = 0; i < handlers.length; i++) {
if (handlers[i].isFailed()) {
throw new IOException(
"Problem in starting http server. Server handlers failed");
}
}
// Make sure there are no errors initializing the context.
Throwable unavailableException = webAppContext.getUnavailableException();
if (unavailableException != null) {
// Have to stop the webserver, or else its non-daemon threads
// will hang forever.
webServer.stop();
throw new IOException("Unable to initialize WebAppContext",
unavailableException);
}
} catch (IOException e) {
throw e;
} catch (InterruptedException e) {
throw (IOException) new InterruptedIOException(
"Interrupted while starting HTTP server").initCause(e);
} catch (Exception e) {
throw new IOException("Problem starting http server", e);
}
}
private void loadListeners() {
for (ListenerInfo li : listeners) {
webServer.addConnector(li.listener);
}
}
/**
* Open the main listener for the server
* @throws Exception if the listener cannot be opened or the appropriate port is already in use
*/
@VisibleForTesting
void openListeners() throws Exception {
for (ListenerInfo li : listeners) {
ServerConnector listener = li.listener;
if (!li.isManaged || (li.listener.getLocalPort() != -1 && li.listener.getLocalPort() != -2)) {
// This listener is either started externally, or has not been opened, or has been closed
continue;
}
int port = listener.getPort();
while (true) {
// jetty has a bug where you can't reopen a listener that previously
// failed to open w/o issuing a close first, even if the port is changed
try {
listener.close();
listener.open();
LOG.info("Jetty bound to port " + listener.getLocalPort());
break;
} catch (IOException ex) {
if(!(ex instanceof BindException) && !(ex.getCause() instanceof BindException)) {
throw ex;
}
if (port == 0 || !findPort) {
BindException be = new BindException("Port in use: "
+ listener.getHost() + ":" + listener.getPort());
be.initCause(ex);
throw be;
}
}
// try the next port number
listener.setPort(++port);
Thread.sleep(100);
}
}
}
/**
* stop the server
*/
public void stop() throws Exception {
MultiException exception = null;
for (ListenerInfo li : listeners) {
if (!li.isManaged) {
continue;
}
try {
li.listener.close();
} catch (Exception e) {
LOG.error(
"Error while stopping listener for webapp"
+ webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
}
try {
// clear & stop webAppContext attributes to avoid memory leaks.
webAppContext.clearAttributes();
webAppContext.stop();
} catch (Exception e) {
LOG.error("Error while stopping web app context for webapp "
+ webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
try {
webServer.stop();
} catch (Exception e) {
LOG.error("Error while stopping web server for webapp "
+ webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
if (exception != null) {
exception.ifExceptionThrow();
}
}
private MultiException addMultiException(MultiException exception, Exception e) {
if(exception == null){
exception = new MultiException();
}
exception.add(e);
return exception;
}
public void join() throws InterruptedException {
webServer.join();
}
/**
* Test for the availability of the web server
* @return true if the web server is started, false otherwise
*/
public boolean isAlive() {
return webServer != null && webServer.isStarted();
}
/**
* Return the host and port of the HttpServer, if live
* @return the classname and any HTTP URL
*/
@Override
public String toString() {
if (listeners.isEmpty()) {
return "Inactive HttpServer";
} else {
StringBuilder sb = new StringBuilder("HttpServer (")
.append(isAlive() ? STATE_DESCRIPTION_ALIVE :
STATE_DESCRIPTION_NOT_LIVE).append("), listening at:");
for (ListenerInfo li : listeners) {
ServerConnector l = li.listener;
sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
}
return sb.toString();
}
}
/**
* Checks the user has privileges to access to instrumentation servlets.
* <p>
* If <code>hadoop.security.instrumentation.requires.admin</code> is set to FALSE
* (default value) it always returns TRUE.
* </p><p>
* If <code>hadoop.security.instrumentation.requires.admin</code> is set to TRUE
* it will check that if the current user is in the admin ACLS. If the user is
* in the admin ACLs it returns TRUE, otherwise it returns FALSE.
* </p>
*
* @param servletContext the servlet context.
* @param request the servlet request.
* @param response the servlet response.
* @return TRUE/FALSE based on the logic decribed above.
*/
public static boolean isInstrumentationAccessAllowed(
ServletContext servletContext, HttpServletRequest request,
HttpServletResponse response) throws IOException {
Configuration conf =
(Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
boolean access = true;
boolean adminAccess = conf.getBoolean(
CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
false);
if (adminAccess) {
access = hasAdministratorAccess(servletContext, request, response);
}
return access;
}
/**
* Does the user sending the HttpServletRequest has the administrator ACLs? If
* it isn't the case, response will be modified to send an error to the user.
*
* @param servletContext the {@link ServletContext} to use
* @param request the {@link HttpServletRequest} to check
* @param response used to send the error response if user does not have admin access.
* @return true if admin-authorized, false otherwise
* @throws IOException if an unauthenticated or unauthorized user tries to access the page
*/
public static boolean hasAdministratorAccess(
ServletContext servletContext, HttpServletRequest request,
HttpServletResponse response) throws IOException {
Configuration conf =
(Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
AccessControlList acl = (AccessControlList) servletContext.getAttribute(ADMINS_ACL);
return hasAdministratorAccess(conf, acl, request, response);
}
public static boolean hasAdministratorAccess(Configuration conf, AccessControlList acl,
HttpServletRequest request, HttpServletResponse response) throws IOException {
// If there is no authorization, anybody has administrator access.
if (!conf.getBoolean(
CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
return true;
}
String remoteUser = request.getRemoteUser();
if (remoteUser == null) {
response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
"Unauthenticated users are not " +
"authorized to access this page.");
return false;
}
if (acl != null && !userHasAdministratorAccess(acl, remoteUser)) {
response.sendError(HttpServletResponse.SC_FORBIDDEN, "User "
+ remoteUser + " is unauthorized to access this page.");
return false;
}
return true;
}
/**
* Get the admin ACLs from the given ServletContext and check if the given
* user is in the ACL.
*
* @param servletContext the context containing the admin ACL.
* @param remoteUser the remote user to check for.
* @return true if the user is present in the ACL, false if no ACL is set or
* the user is not present
*/
public static boolean userHasAdministratorAccess(ServletContext servletContext,
String remoteUser) {
AccessControlList adminsAcl = (AccessControlList) servletContext
.getAttribute(ADMINS_ACL);
return userHasAdministratorAccess(adminsAcl, remoteUser);
}
public static boolean userHasAdministratorAccess(AccessControlList acl, String remoteUser) {
UserGroupInformation remoteUserUGI =
UserGroupInformation.createRemoteUser(remoteUser);
return acl != null && acl.isUserAllowed(remoteUserUGI);
}
/**
* A very simple servlet to serve up a text representation of the current
* stack traces. It both returns the stacks to the caller and logs them.
* Currently the stack traces are done sequentially rather than exactly the
* same data.
*/
public static class StackServlet extends HttpServlet {
private static final long serialVersionUID = -6284183679759467039L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
request, response)) {
return;
}
response.setContentType("text/plain; charset=UTF-8");
try (PrintStream out = new PrintStream(
response.getOutputStream(), false, "UTF-8")) {
Threads.printThreadInfo(out, "");
out.flush();
}
ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
}
}
/**
* A Servlet input filter that quotes all HTML active characters in the
* parameter names and values. The goal is to quote the characters to make
* all of the servlets resistant to cross-site scripting attacks.
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public static class QuotingInputFilter implements Filter {
private FilterConfig config;
public static class RequestQuoter extends HttpServletRequestWrapper {
private final HttpServletRequest rawRequest;
public RequestQuoter(HttpServletRequest rawRequest) {
super(rawRequest);
this.rawRequest = rawRequest;
}
/**
* Return the set of parameter names, quoting each name.
*/
@Override
public Enumeration<String> getParameterNames() {
return new Enumeration<String>() {
private Enumeration<String> rawIterator =
rawRequest.getParameterNames();
@Override
public boolean hasMoreElements() {
return rawIterator.hasMoreElements();
}
@Override
public String nextElement() {
return HtmlQuoting.quoteHtmlChars(rawIterator.nextElement());
}
};
}
/**
* Unquote the name and quote the value.
*/
@Override
public String getParameter(String name) {
return HtmlQuoting.quoteHtmlChars(rawRequest.getParameter(
HtmlQuoting.unquoteHtmlChars(name)));
}
@Override
public String[] getParameterValues(String name) {
String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
String[] unquoteValue = rawRequest.getParameterValues(unquoteName);
if (unquoteValue == null) {
return null;
}
String[] result = new String[unquoteValue.length];
for(int i=0; i < result.length; ++i) {
result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]);
}
return result;
}
@Override
public Map<String, String[]> getParameterMap() {
Map<String, String[]> result = new HashMap<>();
Map<String, String[]> raw = rawRequest.getParameterMap();
for (Map.Entry<String,String[]> item: raw.entrySet()) {
String[] rawValue = item.getValue();
String[] cookedValue = new String[rawValue.length];
for(int i=0; i< rawValue.length; ++i) {
cookedValue[i] = HtmlQuoting.quoteHtmlChars(rawValue[i]);
}
result.put(HtmlQuoting.quoteHtmlChars(item.getKey()), cookedValue);
}
return result;
}
/**
* Quote the url so that users specifying the HOST HTTP header
* can't inject attacks.
*/
@Override
public StringBuffer getRequestURL(){
String url = rawRequest.getRequestURL().toString();
return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
}
/**
* Quote the server name so that users specifying the HOST HTTP header
* can't inject attacks.
*/
@Override
public String getServerName() {
return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName());
}
}
@Override
public void init(FilterConfig config) throws ServletException {
this.config = config;
}
@Override
public void destroy() {
}
@Override
public void doFilter(ServletRequest request,
ServletResponse response,
FilterChain chain
) throws IOException, ServletException {
HttpServletRequestWrapper quoted =
new RequestQuoter((HttpServletRequest) request);
HttpServletResponse httpResponse = (HttpServletResponse) response;
String mime = inferMimeType(request);
if (mime == null) {
httpResponse.setContentType("text/plain; charset=utf-8");
} else if (mime.startsWith("text/html")) {
// HTML with unspecified encoding, we want to
// force HTML with utf-8 encoding
// This is to avoid the following security issue:
// http://openmya.hacker.jp/hasegawa/security/utf7cs.html
httpResponse.setContentType("text/html; charset=utf-8");
} else if (mime.startsWith("application/xml")) {
httpResponse.setContentType("text/xml; charset=utf-8");
}
chain.doFilter(quoted, httpResponse);
}
/**
* Infer the mime type for the response based on the extension of the request
* URI. Returns null if unknown.
*/
private String inferMimeType(ServletRequest request) {
String path = ((HttpServletRequest)request).getRequestURI();
ServletContext context = config.getServletContext();
return context.getMimeType(path);
}
}
}
| hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.PrintStream;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.http.conf.ConfServlet;
import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
import org.apache.hadoop.hbase.http.log.LogLevel;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.util.Shell;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.org.eclipse.jetty.http.HttpVersion;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Handler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.HttpConfiguration;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.HttpConnectionFactory;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.RequestLog;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.SecureRequestCustomizer;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Server;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.ServerConnector;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.SslConnectionFactory;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.HandlerCollection;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.RequestLogHandler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.DefaultServlet;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterHolder;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterMapping;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.ServletContextHandler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.ServletHolder;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.MultiException;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.ssl.SslContextFactory;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.apache.hbase.thirdparty.org.eclipse.jetty.webapp.WebAppContext;
import org.apache.hbase.thirdparty.org.glassfish.jersey.server.ResourceConfig;
import org.apache.hbase.thirdparty.org.glassfish.jersey.servlet.ServletContainer;
/**
* Create a Jetty embedded server to answer http requests. The primary goal
* is to serve up status information for the server.
* There are three contexts:
* "/logs/" -> points to the log directory
* "/static/" -> points to common static files (src/webapps/static)
* "/" -> the jsp server code from (src/webapps/<name>)
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class HttpServer implements FilterContainer {
private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class);
private static final String EMPTY_STRING = "";
private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
static final String FILTER_INITIALIZERS_PROPERTY
= "hbase.http.filter.initializers";
static final String HTTP_MAX_THREADS = "hbase.http.max.threads";
public static final String HTTP_UI_AUTHENTICATION = "hbase.security.authentication.ui";
static final String HTTP_AUTHENTICATION_PREFIX = "hbase.security.authentication.";
static final String HTTP_SPNEGO_AUTHENTICATION_PREFIX = HTTP_AUTHENTICATION_PREFIX
+ "spnego.";
static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX = "kerberos.principal";
public static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX;
static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX = "kerberos.keytab";
public static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX;
static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX = "kerberos.name.rules";
public static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX;
static final String HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_SUFFIX = "kerberos.proxyuser.enable";
public static final String HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_SUFFIX;
public static final boolean HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_DEFAULT = false;
static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX =
"signature.secret.file";
public static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY =
HTTP_AUTHENTICATION_PREFIX + HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX;
public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.users";
public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY =
HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.groups";
public static final String HTTP_PRIVILEGED_CONF_KEY =
"hbase.security.authentication.ui.config.protected";
public static final boolean HTTP_PRIVILEGED_CONF_DEFAULT = false;
// The ServletContext attribute where the daemon Configuration
// gets stored.
public static final String CONF_CONTEXT_ATTRIBUTE = "hbase.conf";
public static final String ADMINS_ACL = "admins.acl";
public static final String BIND_ADDRESS = "bind.address";
public static final String SPNEGO_FILTER = "SpnegoFilter";
public static final String SPNEGO_PROXYUSER_FILTER = "SpnegoProxyUserFilter";
public static final String NO_CACHE_FILTER = "NoCacheFilter";
public static final String APP_DIR = "webapps";
private final AccessControlList adminsAcl;
protected final Server webServer;
protected String appDir;
protected String logDir;
private static final class ListenerInfo {
/**
* Boolean flag to determine whether the HTTP server should clean up the
* listener in stop().
*/
private final boolean isManaged;
private final ServerConnector listener;
private ListenerInfo(boolean isManaged, ServerConnector listener) {
this.isManaged = isManaged;
this.listener = listener;
}
}
private final List<ListenerInfo> listeners = Lists.newArrayList();
@VisibleForTesting
public List<ServerConnector> getServerConnectors() {
return listeners.stream().map(info -> info.listener).collect(Collectors.toList());
}
protected final WebAppContext webAppContext;
protected final boolean findPort;
protected final Map<ServletContextHandler, Boolean> defaultContexts = new HashMap<>();
protected final List<String> filterNames = new ArrayList<>();
protected final boolean authenticationEnabled;
static final String STATE_DESCRIPTION_ALIVE = " - alive";
static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
/**
* Class to construct instances of HTTP server with specific options.
*/
public static class Builder {
private ArrayList<URI> endpoints = Lists.newArrayList();
private Configuration conf;
private String[] pathSpecs;
private AccessControlList adminsAcl;
private boolean securityEnabled = false;
private String usernameConfKey;
private String keytabConfKey;
private boolean needsClientAuth;
private String hostName;
private String appDir = APP_DIR;
private String logDir;
private boolean findPort;
private String trustStore;
private String trustStorePassword;
private String trustStoreType;
private String keyStore;
private String keyStorePassword;
private String keyStoreType;
// The -keypass option in keytool
private String keyPassword;
private String kerberosNameRulesKey;
private String signatureSecretFileKey;
/**
* @see #setAppDir(String)
* @deprecated Since 0.99.0. Use builder pattern via {@link #setAppDir(String)} instead.
*/
@Deprecated
private String name;
/**
* @see #addEndpoint(URI)
* @deprecated Since 0.99.0. Use builder pattern via {@link #addEndpoint(URI)} instead.
*/
@Deprecated
private String bindAddress;
/**
* @see #addEndpoint(URI)
* @deprecated Since 0.99.0. Use builder pattern via {@link #addEndpoint(URI)} instead.
*/
@Deprecated
private int port = -1;
/**
* Add an endpoint that the HTTP server should listen to.
*
* @param endpoint
* the endpoint of that the HTTP server should listen to. The
* scheme specifies the protocol (i.e. HTTP / HTTPS), the host
* specifies the binding address, and the port specifies the
* listening port. Unspecified or zero port means that the server
* can listen to any port.
*/
public Builder addEndpoint(URI endpoint) {
endpoints.add(endpoint);
return this;
}
/**
* Set the hostname of the http server. The host name is used to resolve the
* _HOST field in Kerberos principals. The hostname of the first listener
* will be used if the name is unspecified.
*/
public Builder hostName(String hostName) {
this.hostName = hostName;
return this;
}
public Builder trustStore(String location, String password, String type) {
this.trustStore = location;
this.trustStorePassword = password;
this.trustStoreType = type;
return this;
}
public Builder keyStore(String location, String password, String type) {
this.keyStore = location;
this.keyStorePassword = password;
this.keyStoreType = type;
return this;
}
public Builder keyPassword(String password) {
this.keyPassword = password;
return this;
}
/**
* Specify whether the server should authorize the client in SSL
* connections.
*/
public Builder needsClientAuth(boolean value) {
this.needsClientAuth = value;
return this;
}
/**
* @see #setAppDir(String)
* @deprecated Since 0.99.0. Use {@link #setAppDir(String)} instead.
*/
@Deprecated
public Builder setName(String name){
this.name = name;
return this;
}
/**
* @see #addEndpoint(URI)
* @deprecated Since 0.99.0. Use {@link #addEndpoint(URI)} instead.
*/
@Deprecated
public Builder setBindAddress(String bindAddress){
this.bindAddress = bindAddress;
return this;
}
/**
* @see #addEndpoint(URI)
* @deprecated Since 0.99.0. Use {@link #addEndpoint(URI)} instead.
*/
@Deprecated
public Builder setPort(int port) {
this.port = port;
return this;
}
public Builder setFindPort(boolean findPort) {
this.findPort = findPort;
return this;
}
public Builder setConf(Configuration conf) {
this.conf = conf;
return this;
}
public Builder setPathSpec(String[] pathSpec) {
this.pathSpecs = pathSpec;
return this;
}
public Builder setACL(AccessControlList acl) {
this.adminsAcl = acl;
return this;
}
public Builder setSecurityEnabled(boolean securityEnabled) {
this.securityEnabled = securityEnabled;
return this;
}
public Builder setUsernameConfKey(String usernameConfKey) {
this.usernameConfKey = usernameConfKey;
return this;
}
public Builder setKeytabConfKey(String keytabConfKey) {
this.keytabConfKey = keytabConfKey;
return this;
}
public Builder setKerberosNameRulesKey(String kerberosNameRulesKey) {
this.kerberosNameRulesKey = kerberosNameRulesKey;
return this;
}
public Builder setSignatureSecretFileKey(String signatureSecretFileKey) {
this.signatureSecretFileKey = signatureSecretFileKey;
return this;
}
public Builder setAppDir(String appDir) {
this.appDir = appDir;
return this;
}
public Builder setLogDir(String logDir) {
this.logDir = logDir;
return this;
}
public HttpServer build() throws IOException {
// Do we still need to assert this non null name if it is deprecated?
if (this.name == null) {
throw new HadoopIllegalArgumentException("name is not set");
}
// Make the behavior compatible with deprecated interfaces
if (bindAddress != null && port != -1) {
try {
endpoints.add(0, new URI("http", "", bindAddress, port, "", "", ""));
} catch (URISyntaxException e) {
throw new HadoopIllegalArgumentException("Invalid endpoint: "+ e);
}
}
if (endpoints.isEmpty()) {
throw new HadoopIllegalArgumentException("No endpoints specified");
}
if (hostName == null) {
hostName = endpoints.get(0).getHost();
}
if (this.conf == null) {
conf = new Configuration();
}
HttpServer server = new HttpServer(this);
for (URI ep : endpoints) {
ServerConnector listener = null;
String scheme = ep.getScheme();
HttpConfiguration httpConfig = new HttpConfiguration();
httpConfig.setSecureScheme("https");
httpConfig.setHeaderCacheSize(DEFAULT_MAX_HEADER_SIZE);
httpConfig.setResponseHeaderSize(DEFAULT_MAX_HEADER_SIZE);
httpConfig.setRequestHeaderSize(DEFAULT_MAX_HEADER_SIZE);
if ("http".equals(scheme)) {
listener = new ServerConnector(server.webServer, new HttpConnectionFactory(httpConfig));
} else if ("https".equals(scheme)) {
HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig);
httpsConfig.addCustomizer(new SecureRequestCustomizer());
SslContextFactory sslCtxFactory = new SslContextFactory();
sslCtxFactory.setNeedClientAuth(needsClientAuth);
sslCtxFactory.setKeyManagerPassword(keyPassword);
if (keyStore != null) {
sslCtxFactory.setKeyStorePath(keyStore);
sslCtxFactory.setKeyStoreType(keyStoreType);
sslCtxFactory.setKeyStorePassword(keyStorePassword);
}
if (trustStore != null) {
sslCtxFactory.setTrustStorePath(trustStore);
sslCtxFactory.setTrustStoreType(trustStoreType);
sslCtxFactory.setTrustStorePassword(trustStorePassword);
}
listener = new ServerConnector(server.webServer, new SslConnectionFactory(sslCtxFactory,
HttpVersion.HTTP_1_1.toString()), new HttpConnectionFactory(httpsConfig));
} else {
throw new HadoopIllegalArgumentException(
"unknown scheme for endpoint:" + ep);
}
// default settings for connector
listener.setAcceptQueueSize(128);
if (Shell.WINDOWS) {
// result of setting the SO_REUSEADDR flag is different on Windows
// http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
// without this 2 NN's can start on the same machine and listen on
// the same port with indeterminate routing of incoming requests to them
listener.setReuseAddress(false);
}
listener.setHost(ep.getHost());
listener.setPort(ep.getPort() == -1 ? 0 : ep.getPort());
server.addManagedListener(listener);
}
server.loadListeners();
return server;
}
}
/**
* @see #HttpServer(String, String, int, boolean, Configuration)
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port, boolean findPort)
throws IOException {
this(name, bindAddress, port, findPort, new Configuration());
}
/**
* Create a status server on the given port. Allows you to specify the
* path specifications that this server will be serving so that they will be
* added to the filters properly.
*
* @param name The name of the server
* @param bindAddress The address for this server
* @param port The port to use on the server
* @param findPort whether the server should start at the given port and
* increment by 1 until it finds a free port.
* @param conf Configuration
* @param pathSpecs Path specifications that this httpserver will be serving.
* These will be added to any filters.
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
this(name, bindAddress, port, findPort, conf, null, pathSpecs);
}
/**
* Create a status server on the given port.
* The jsp scripts are taken from src/webapps/<name>.
* @param name The name of the server
* @param port The port to use on the server
* @param findPort whether the server should start at the given port and
* increment by 1 until it finds a free port.
* @param conf Configuration
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf) throws IOException {
this(name, bindAddress, port, findPort, conf, null, null);
}
/**
* Creates a status server on the given port. The JSP scripts are taken
* from src/webapp<name>.
*
* @param name the name of the server
* @param bindAddress the address for this server
* @param port the port to use on the server
* @param findPort whether the server should start at the given port and increment by 1 until it
* finds a free port
* @param conf the configuration to use
* @param adminsAcl {@link AccessControlList} of the admins
* @throws IOException when creating the server fails
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl)
throws IOException {
this(name, bindAddress, port, findPort, conf, adminsAcl, null);
}
/**
* Create a status server on the given port.
* The jsp scripts are taken from src/webapps/<name>.
* @param name The name of the server
* @param bindAddress The address for this server
* @param port The port to use on the server
* @param findPort whether the server should start at the given port and
* increment by 1 until it finds a free port.
* @param conf Configuration
* @param adminsAcl {@link AccessControlList} of the admins
* @param pathSpecs Path specifications that this httpserver will be serving.
* These will be added to any filters.
* @deprecated Since 0.99.0
*/
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl,
String[] pathSpecs) throws IOException {
this(new Builder().setName(name)
.addEndpoint(URI.create("http://" + bindAddress + ":" + port))
.setFindPort(findPort).setConf(conf).setACL(adminsAcl)
.setPathSpec(pathSpecs));
}
private HttpServer(final Builder b) throws IOException {
this.appDir = b.appDir;
this.logDir = b.logDir;
final String appDir = getWebAppsPath(b.name);
int maxThreads = b.conf.getInt(HTTP_MAX_THREADS, 16);
// If HTTP_MAX_THREADS is less than or equal to 0, QueueThreadPool() will use the
// default value (currently 200).
QueuedThreadPool threadPool = maxThreads <= 0 ? new QueuedThreadPool()
: new QueuedThreadPool(maxThreads);
threadPool.setDaemon(true);
this.webServer = new Server(threadPool);
this.adminsAcl = b.adminsAcl;
this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir);
this.findPort = b.findPort;
this.authenticationEnabled = b.securityEnabled;
initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs, b);
}
private void initializeWebServer(String name, String hostName,
Configuration conf, String[] pathSpecs, HttpServer.Builder b)
throws FileNotFoundException, IOException {
Preconditions.checkNotNull(webAppContext);
HandlerCollection handlerCollection = new HandlerCollection();
ContextHandlerCollection contexts = new ContextHandlerCollection();
RequestLog requestLog = HttpRequestLog.getRequestLog(name);
if (requestLog != null) {
RequestLogHandler requestLogHandler = new RequestLogHandler();
requestLogHandler.setRequestLog(requestLog);
handlerCollection.addHandler(requestLogHandler);
}
final String appDir = getWebAppsPath(name);
handlerCollection.addHandler(contexts);
handlerCollection.addHandler(webAppContext);
webServer.setHandler(handlerCollection);
webAppContext.setAttribute(ADMINS_ACL, adminsAcl);
// Default apps need to be set first, so that all filters are applied to them.
// Because they're added to defaultContexts, we need them there before we start
// adding filters
addDefaultApps(contexts, appDir, conf);
addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
addGlobalFilter("clickjackingprevention",
ClickjackingPreventionFilter.class.getName(),
ClickjackingPreventionFilter.getDefaultParameters(conf));
addGlobalFilter("securityheaders",
SecurityHeadersFilter.class.getName(),
SecurityHeadersFilter.getDefaultParameters(conf));
// But security needs to be enabled prior to adding the other servlets
if (authenticationEnabled) {
initSpnego(conf, hostName, b.usernameConfKey, b.keytabConfKey, b.kerberosNameRulesKey,
b.signatureSecretFileKey);
}
final FilterInitializer[] initializers = getFilterInitializers(conf);
if (initializers != null) {
conf = new Configuration(conf);
conf.set(BIND_ADDRESS, hostName);
for (FilterInitializer c : initializers) {
c.initFilter(this, conf);
}
}
addDefaultServlets(contexts, conf);
if (pathSpecs != null) {
for (String path : pathSpecs) {
LOG.info("adding path spec: " + path);
addFilterPathMapping(path, webAppContext);
}
}
}
private void addManagedListener(ServerConnector connector) {
listeners.add(new ListenerInfo(true, connector));
}
private static WebAppContext createWebAppContext(String name,
Configuration conf, AccessControlList adminsAcl, final String appDir) {
WebAppContext ctx = new WebAppContext();
ctx.setDisplayName(name);
ctx.setContextPath("/");
ctx.setWar(appDir + "/" + name);
ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
// for org.apache.hadoop.metrics.MetricsServlet
ctx.getServletContext().setAttribute(
org.apache.hadoop.http.HttpServer2.CONF_CONTEXT_ATTRIBUTE, conf);
ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
addNoCacheFilter(ctx);
return ctx;
}
private static void addNoCacheFilter(WebAppContext ctxt) {
defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
Collections.<String, String> emptyMap(), new String[] { "/*" });
}
/** Get an array of FilterConfiguration specified in the conf */
private static FilterInitializer[] getFilterInitializers(Configuration conf) {
if (conf == null) {
return null;
}
Class<?>[] classes = conf.getClasses(FILTER_INITIALIZERS_PROPERTY);
if (classes == null) {
return null;
}
FilterInitializer[] initializers = new FilterInitializer[classes.length];
for(int i = 0; i < classes.length; i++) {
initializers[i] = (FilterInitializer)ReflectionUtils.newInstance(classes[i]);
}
return initializers;
}
/**
* Add default apps.
* @param appDir The application directory
*/
protected void addDefaultApps(ContextHandlerCollection parent,
final String appDir, Configuration conf) {
// set up the context for "/logs/" if "hadoop.log.dir" property is defined.
String logDir = this.logDir;
if (logDir == null) {
logDir = System.getProperty("hadoop.log.dir");
}
if (logDir != null) {
ServletContextHandler logContext = new ServletContextHandler(parent, "/logs");
logContext.addServlet(AdminAuthorizedServlet.class, "/*");
logContext.setResourceBase(logDir);
if (conf.getBoolean(
ServerConfigurationKeys.HBASE_JETTY_LOGS_SERVE_ALIASES,
ServerConfigurationKeys.DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES)) {
Map<String, String> params = logContext.getInitParams();
params.put(
"org.mortbay.jetty.servlet.Default.aliases", "true");
}
logContext.setDisplayName("logs");
setContextAttributes(logContext, conf);
defaultContexts.put(logContext, true);
}
// set up the context for "/static/*"
ServletContextHandler staticContext = new ServletContextHandler(parent, "/static");
staticContext.setResourceBase(appDir + "/static");
staticContext.addServlet(DefaultServlet.class, "/*");
staticContext.setDisplayName("static");
setContextAttributes(staticContext, conf);
defaultContexts.put(staticContext, true);
}
private void setContextAttributes(ServletContextHandler context, Configuration conf) {
context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
}
/**
* Add default servlets.
*/
protected void addDefaultServlets(
ContextHandlerCollection contexts, Configuration conf) throws IOException {
// set up default servlets
addPrivilegedServlet("stacks", "/stacks", StackServlet.class);
addPrivilegedServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
// Hadoop3 has moved completely to metrics2, and dropped support for Metrics v1's
// MetricsServlet (see HADOOP-12504). We'll using reflection to load if against hadoop2.
// Remove when we drop support for hbase on hadoop2.x.
try {
Class<?> clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet");
addPrivilegedServlet("metrics", "/metrics", clz.asSubclass(HttpServlet.class));
} catch (Exception e) {
// do nothing
}
addPrivilegedServlet("jmx", "/jmx", JMXJsonServlet.class);
// While we don't expect users to have sensitive information in their configuration, they
// might. Give them an option to not expose the service configuration to all users.
if (conf.getBoolean(HTTP_PRIVILEGED_CONF_KEY, HTTP_PRIVILEGED_CONF_DEFAULT)) {
addPrivilegedServlet("conf", "/conf", ConfServlet.class);
} else {
addUnprivilegedServlet("conf", "/conf", ConfServlet.class);
}
final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome();
if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) {
addPrivilegedServlet("prof", "/prof", ProfileServlet.class);
Path tmpDir = Paths.get(ProfileServlet.OUTPUT_DIR);
if (Files.notExists(tmpDir)) {
Files.createDirectories(tmpDir);
}
ServletContextHandler genCtx = new ServletContextHandler(contexts, "/prof-output");
genCtx.addServlet(ProfileOutputServlet.class, "/*");
genCtx.setResourceBase(tmpDir.toAbsolutePath().toString());
genCtx.setDisplayName("prof-output");
} else {
addUnprivilegedServlet("prof", "/prof", ProfileServlet.DisabledServlet.class);
LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property " +
"not specified. Disabling /prof endpoint.");
}
}
/**
* Set a value in the webapp context. These values are available to the jsp
* pages as "application.getAttribute(name)".
* @param name The name of the attribute
* @param value The value of the attribute
*/
public void setAttribute(String name, Object value) {
webAppContext.setAttribute(name, value);
}
/**
* Add a Jersey resource package.
* @param packageName The Java package name containing the Jersey resource.
* @param pathSpec The path spec for the servlet
*/
public void addJerseyResourcePackage(final String packageName,
final String pathSpec) {
LOG.info("addJerseyResourcePackage: packageName=" + packageName
+ ", pathSpec=" + pathSpec);
ResourceConfig application = new ResourceConfig().packages(packageName);
final ServletHolder sh = new ServletHolder(new ServletContainer(application));
webAppContext.addServlet(sh, pathSpec);
}
/**
* Adds a servlet in the server that any user can access. This method differs from
* {@link #addPrivilegedServlet(String, String, Class)} in that any authenticated user
* can interact with the servlet added by this method.
* @param name The name of the servlet (can be passed as null)
* @param pathSpec The path spec for the servlet
* @param clazz The servlet class
*/
public void addUnprivilegedServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz) {
addServletWithAuth(name, pathSpec, clazz, false);
}
/**
* Adds a servlet in the server that only administrators can access. This method differs from
* {@link #addUnprivilegedServlet(String, String, Class)} in that only those authenticated user
* who are identified as administrators can interact with the servlet added by this method.
*/
public void addPrivilegedServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz) {
addServletWithAuth(name, pathSpec, clazz, true);
}
/**
* Internal method to add a servlet to the HTTP server. Developers should not call this method
* directly, but invoke it via {@link #addUnprivilegedServlet(String, String, Class)} or
* {@link #addPrivilegedServlet(String, String, Class)}.
*/
void addServletWithAuth(String name, String pathSpec,
Class<? extends HttpServlet> clazz, boolean requireAuthz) {
addInternalServlet(name, pathSpec, clazz, requireAuthz);
addFilterPathMapping(pathSpec, webAppContext);
}
/**
* Add an internal servlet in the server, specifying whether or not to
* protect with Kerberos authentication.
* Note: This method is to be used for adding servlets that facilitate
* internal communication and not for user facing functionality. For
* servlets added using this method, filters (except internal Kerberos
* filters) are not enabled.
*
* @param name The name of the servlet (can be passed as null)
* @param pathSpec The path spec for the servlet
* @param clazz The servlet class
* @param requireAuth Require Kerberos authenticate to access servlet
*/
void addInternalServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz, boolean requireAuthz) {
ServletHolder holder = new ServletHolder(clazz);
if (name != null) {
holder.setName(name);
}
if (authenticationEnabled && requireAuthz) {
FilterHolder filter = new FilterHolder(AdminAuthorizedFilter.class);
filter.setName(AdminAuthorizedFilter.class.getSimpleName());
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
fmap.setDispatches(FilterMapping.ALL);
fmap.setFilterName(AdminAuthorizedFilter.class.getSimpleName());
webAppContext.getServletHandler().addFilter(filter, fmap);
}
webAppContext.getSessionHandler().getSessionCookieConfig().setHttpOnly(true);
webAppContext.getSessionHandler().getSessionCookieConfig().setSecure(true);
webAppContext.addServlet(holder, pathSpec);
}
@Override
public void addFilter(String name, String classname, Map<String, String> parameters) {
final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS);
LOG.info("Added filter " + name + " (class=" + classname
+ ") to context " + webAppContext.getDisplayName());
final String[] ALL_URLS = { "/*" };
for (Map.Entry<ServletContextHandler, Boolean> e : defaultContexts.entrySet()) {
if (e.getValue()) {
ServletContextHandler handler = e.getKey();
defineFilter(handler, name, classname, parameters, ALL_URLS);
LOG.info("Added filter " + name + " (class=" + classname
+ ") to context " + handler.getDisplayName());
}
}
filterNames.add(name);
}
@Override
public void addGlobalFilter(String name, String classname, Map<String, String> parameters) {
final String[] ALL_URLS = { "/*" };
defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
for (ServletContextHandler ctx : defaultContexts.keySet()) {
defineFilter(ctx, name, classname, parameters, ALL_URLS);
}
LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
}
/**
* Define a filter for a context and set up default url mappings.
*/
public static void defineFilter(ServletContextHandler handler, String name,
String classname, Map<String,String> parameters, String[] urls) {
FilterHolder holder = new FilterHolder();
holder.setName(name);
holder.setClassName(classname);
if (parameters != null) {
holder.setInitParameters(parameters);
}
FilterMapping fmap = new FilterMapping();
fmap.setPathSpecs(urls);
fmap.setDispatches(FilterMapping.ALL);
fmap.setFilterName(name);
handler.getServletHandler().addFilter(holder, fmap);
}
/**
* Add the path spec to the filter path mapping.
* @param pathSpec The path spec
* @param webAppCtx The WebApplicationContext to add to
*/
protected void addFilterPathMapping(String pathSpec,
WebAppContext webAppCtx) {
for(String name : filterNames) {
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
fmap.setFilterName(name);
fmap.setDispatches(FilterMapping.ALL);
webAppCtx.getServletHandler().addFilterMapping(fmap);
}
}
/**
* Get the value in the webapp context.
* @param name The name of the attribute
* @return The value of the attribute
*/
public Object getAttribute(String name) {
return webAppContext.getAttribute(name);
}
public WebAppContext getWebAppContext(){
return this.webAppContext;
}
public String getWebAppsPath(String appName) throws FileNotFoundException {
return getWebAppsPath(this.appDir, appName);
}
/**
* Get the pathname to the webapps files.
* @param appName eg "secondary" or "datanode"
* @return the pathname as a URL
* @throws FileNotFoundException if 'webapps' directory cannot be found on CLASSPATH.
*/
protected String getWebAppsPath(String webapps, String appName) throws FileNotFoundException {
URL url = getClass().getClassLoader().getResource(webapps + "/" + appName);
if (url == null) {
throw new FileNotFoundException(webapps + "/" + appName
+ " not found in CLASSPATH");
}
String urlString = url.toString();
return urlString.substring(0, urlString.lastIndexOf('/'));
}
/**
* Get the port that the server is on
* @return the port
* @deprecated Since 0.99.0
*/
@Deprecated
public int getPort() {
return ((ServerConnector)webServer.getConnectors()[0]).getLocalPort();
}
/**
* Get the address that corresponds to a particular connector.
*
* @return the corresponding address for the connector, or null if there's no
* such connector or the connector is not bounded.
*/
public InetSocketAddress getConnectorAddress(int index) {
Preconditions.checkArgument(index >= 0);
if (index > webServer.getConnectors().length) {
return null;
}
ServerConnector c = (ServerConnector)webServer.getConnectors()[index];
if (c.getLocalPort() == -1 || c.getLocalPort() == -2) {
// -1 if the connector has not been opened
// -2 if it has been closed
return null;
}
return new InetSocketAddress(c.getHost(), c.getLocalPort());
}
/**
* Set the min, max number of worker threads (simultaneous connections).
*/
public void setThreads(int min, int max) {
QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
pool.setMinThreads(min);
pool.setMaxThreads(max);
}
private void initSpnego(Configuration conf, String hostName,
String usernameConfKey, String keytabConfKey, String kerberosNameRuleKey,
String signatureSecretKeyFileKey) throws IOException {
Map<String, String> params = new HashMap<>();
String principalInConf = getOrEmptyString(conf, usernameConfKey);
if (!principalInConf.isEmpty()) {
params.put(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX, SecurityUtil.getServerPrincipal(
principalInConf, hostName));
}
String httpKeytab = getOrEmptyString(conf, keytabConfKey);
if (!httpKeytab.isEmpty()) {
params.put(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX, httpKeytab);
}
String kerberosNameRule = getOrEmptyString(conf, kerberosNameRuleKey);
if (!kerberosNameRule.isEmpty()) {
params.put(HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX, kerberosNameRule);
}
String signatureSecretKeyFile = getOrEmptyString(conf, signatureSecretKeyFileKey);
if (!signatureSecretKeyFile.isEmpty()) {
params.put(HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX,
signatureSecretKeyFile);
}
params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
// Verify that the required options were provided
if (isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX)) ||
isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX))) {
throw new IllegalArgumentException(usernameConfKey + " and "
+ keytabConfKey + " are both required in the configuration "
+ "to enable SPNEGO/Kerberos authentication for the Web UI");
}
if (conf.getBoolean(HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_KEY,
HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_DEFAULT)) {
//Copy/rename standard hadoop proxyuser settings to filter
for(Map.Entry<String, String> proxyEntry :
conf.getPropsWithPrefix(ProxyUsers.CONF_HADOOP_PROXYUSER).entrySet()) {
params.put(ProxyUserAuthenticationFilter.PROXYUSER_PREFIX + proxyEntry.getKey(),
proxyEntry.getValue());
}
addGlobalFilter(SPNEGO_PROXYUSER_FILTER, ProxyUserAuthenticationFilter.class.getName(), params);
} else {
addGlobalFilter(SPNEGO_FILTER, AuthenticationFilter.class.getName(), params);
}
}
/**
* Returns true if the argument is non-null and not whitespace
*/
private boolean isMissing(String value) {
if (null == value) {
return true;
}
return value.trim().isEmpty();
}
/**
* Extracts the value for the given key from the configuration of returns a string of
* zero length.
*/
private String getOrEmptyString(Configuration conf, String key) {
if (null == key) {
return EMPTY_STRING;
}
final String value = conf.get(key.trim());
return null == value ? EMPTY_STRING : value;
}
/**
* Start the server. Does not wait for the server to start.
*/
public void start() throws IOException {
try {
try {
openListeners();
webServer.start();
} catch (IOException ex) {
LOG.info("HttpServer.start() threw a non Bind IOException", ex);
throw ex;
} catch (MultiException ex) {
LOG.info("HttpServer.start() threw a MultiException", ex);
throw ex;
}
// Make sure there is no handler failures.
Handler[] handlers = webServer.getHandlers();
for (int i = 0; i < handlers.length; i++) {
if (handlers[i].isFailed()) {
throw new IOException(
"Problem in starting http server. Server handlers failed");
}
}
// Make sure there are no errors initializing the context.
Throwable unavailableException = webAppContext.getUnavailableException();
if (unavailableException != null) {
// Have to stop the webserver, or else its non-daemon threads
// will hang forever.
webServer.stop();
throw new IOException("Unable to initialize WebAppContext",
unavailableException);
}
} catch (IOException e) {
throw e;
} catch (InterruptedException e) {
throw (IOException) new InterruptedIOException(
"Interrupted while starting HTTP server").initCause(e);
} catch (Exception e) {
throw new IOException("Problem starting http server", e);
}
}
private void loadListeners() {
for (ListenerInfo li : listeners) {
webServer.addConnector(li.listener);
}
}
/**
* Open the main listener for the server
* @throws Exception if the listener cannot be opened or the appropriate port is already in use
*/
@VisibleForTesting
void openListeners() throws Exception {
for (ListenerInfo li : listeners) {
ServerConnector listener = li.listener;
if (!li.isManaged || (li.listener.getLocalPort() != -1 && li.listener.getLocalPort() != -2)) {
// This listener is either started externally, or has not been opened, or has been closed
continue;
}
int port = listener.getPort();
while (true) {
// jetty has a bug where you can't reopen a listener that previously
// failed to open w/o issuing a close first, even if the port is changed
try {
listener.close();
listener.open();
LOG.info("Jetty bound to port " + listener.getLocalPort());
break;
} catch (IOException ex) {
if(!(ex instanceof BindException) && !(ex.getCause() instanceof BindException)) {
throw ex;
}
if (port == 0 || !findPort) {
BindException be = new BindException("Port in use: "
+ listener.getHost() + ":" + listener.getPort());
be.initCause(ex);
throw be;
}
}
// try the next port number
listener.setPort(++port);
Thread.sleep(100);
}
}
}
/**
* stop the server
*/
public void stop() throws Exception {
MultiException exception = null;
for (ListenerInfo li : listeners) {
if (!li.isManaged) {
continue;
}
try {
li.listener.close();
} catch (Exception e) {
LOG.error(
"Error while stopping listener for webapp"
+ webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
}
try {
// clear & stop webAppContext attributes to avoid memory leaks.
webAppContext.clearAttributes();
webAppContext.stop();
} catch (Exception e) {
LOG.error("Error while stopping web app context for webapp "
+ webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
try {
webServer.stop();
} catch (Exception e) {
LOG.error("Error while stopping web server for webapp "
+ webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
if (exception != null) {
exception.ifExceptionThrow();
}
}
private MultiException addMultiException(MultiException exception, Exception e) {
if(exception == null){
exception = new MultiException();
}
exception.add(e);
return exception;
}
public void join() throws InterruptedException {
webServer.join();
}
/**
* Test for the availability of the web server
* @return true if the web server is started, false otherwise
*/
public boolean isAlive() {
return webServer != null && webServer.isStarted();
}
/**
* Return the host and port of the HttpServer, if live
* @return the classname and any HTTP URL
*/
@Override
public String toString() {
if (listeners.isEmpty()) {
return "Inactive HttpServer";
} else {
StringBuilder sb = new StringBuilder("HttpServer (")
.append(isAlive() ? STATE_DESCRIPTION_ALIVE :
STATE_DESCRIPTION_NOT_LIVE).append("), listening at:");
for (ListenerInfo li : listeners) {
ServerConnector l = li.listener;
sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
}
return sb.toString();
}
}
/**
* Checks the user has privileges to access to instrumentation servlets.
* <p>
* If <code>hadoop.security.instrumentation.requires.admin</code> is set to FALSE
* (default value) it always returns TRUE.
* </p><p>
* If <code>hadoop.security.instrumentation.requires.admin</code> is set to TRUE
* it will check that if the current user is in the admin ACLS. If the user is
* in the admin ACLs it returns TRUE, otherwise it returns FALSE.
* </p>
*
* @param servletContext the servlet context.
* @param request the servlet request.
* @param response the servlet response.
* @return TRUE/FALSE based on the logic decribed above.
*/
public static boolean isInstrumentationAccessAllowed(
ServletContext servletContext, HttpServletRequest request,
HttpServletResponse response) throws IOException {
Configuration conf =
(Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
boolean access = true;
boolean adminAccess = conf.getBoolean(
CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
false);
if (adminAccess) {
access = hasAdministratorAccess(servletContext, request, response);
}
return access;
}
/**
* Does the user sending the HttpServletRequest has the administrator ACLs? If
* it isn't the case, response will be modified to send an error to the user.
*
* @param servletContext the {@link ServletContext} to use
* @param request the {@link HttpServletRequest} to check
* @param response used to send the error response if user does not have admin access.
* @return true if admin-authorized, false otherwise
* @throws IOException if an unauthenticated or unauthorized user tries to access the page
*/
public static boolean hasAdministratorAccess(
ServletContext servletContext, HttpServletRequest request,
HttpServletResponse response) throws IOException {
Configuration conf =
(Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
AccessControlList acl = (AccessControlList) servletContext.getAttribute(ADMINS_ACL);
return hasAdministratorAccess(conf, acl, request, response);
}
public static boolean hasAdministratorAccess(Configuration conf, AccessControlList acl,
HttpServletRequest request, HttpServletResponse response) throws IOException {
// If there is no authorization, anybody has administrator access.
if (!conf.getBoolean(
CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
return true;
}
String remoteUser = request.getRemoteUser();
if (remoteUser == null) {
response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
"Unauthenticated users are not " +
"authorized to access this page.");
return false;
}
if (acl != null && !userHasAdministratorAccess(acl, remoteUser)) {
response.sendError(HttpServletResponse.SC_FORBIDDEN, "User "
+ remoteUser + " is unauthorized to access this page.");
return false;
}
return true;
}
/**
* Get the admin ACLs from the given ServletContext and check if the given
* user is in the ACL.
*
* @param servletContext the context containing the admin ACL.
* @param remoteUser the remote user to check for.
* @return true if the user is present in the ACL, false if no ACL is set or
* the user is not present
*/
public static boolean userHasAdministratorAccess(ServletContext servletContext,
String remoteUser) {
AccessControlList adminsAcl = (AccessControlList) servletContext
.getAttribute(ADMINS_ACL);
return userHasAdministratorAccess(adminsAcl, remoteUser);
}
public static boolean userHasAdministratorAccess(AccessControlList acl, String remoteUser) {
UserGroupInformation remoteUserUGI =
UserGroupInformation.createRemoteUser(remoteUser);
return acl != null && acl.isUserAllowed(remoteUserUGI);
}
/**
* A very simple servlet to serve up a text representation of the current
* stack traces. It both returns the stacks to the caller and logs them.
* Currently the stack traces are done sequentially rather than exactly the
* same data.
*/
public static class StackServlet extends HttpServlet {
private static final long serialVersionUID = -6284183679759467039L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
request, response)) {
return;
}
response.setContentType("text/plain; charset=UTF-8");
try (PrintStream out = new PrintStream(
response.getOutputStream(), false, "UTF-8")) {
Threads.printThreadInfo(out, "");
out.flush();
}
ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
}
}
/**
* A Servlet input filter that quotes all HTML active characters in the
* parameter names and values. The goal is to quote the characters to make
* all of the servlets resistant to cross-site scripting attacks.
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public static class QuotingInputFilter implements Filter {
private FilterConfig config;
public static class RequestQuoter extends HttpServletRequestWrapper {
private final HttpServletRequest rawRequest;
public RequestQuoter(HttpServletRequest rawRequest) {
super(rawRequest);
this.rawRequest = rawRequest;
}
/**
* Return the set of parameter names, quoting each name.
*/
@Override
public Enumeration<String> getParameterNames() {
return new Enumeration<String>() {
private Enumeration<String> rawIterator =
rawRequest.getParameterNames();
@Override
public boolean hasMoreElements() {
return rawIterator.hasMoreElements();
}
@Override
public String nextElement() {
return HtmlQuoting.quoteHtmlChars(rawIterator.nextElement());
}
};
}
/**
* Unquote the name and quote the value.
*/
@Override
public String getParameter(String name) {
return HtmlQuoting.quoteHtmlChars(rawRequest.getParameter(
HtmlQuoting.unquoteHtmlChars(name)));
}
@Override
public String[] getParameterValues(String name) {
String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
String[] unquoteValue = rawRequest.getParameterValues(unquoteName);
if (unquoteValue == null) {
return null;
}
String[] result = new String[unquoteValue.length];
for(int i=0; i < result.length; ++i) {
result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]);
}
return result;
}
@Override
public Map<String, String[]> getParameterMap() {
Map<String, String[]> result = new HashMap<>();
Map<String, String[]> raw = rawRequest.getParameterMap();
for (Map.Entry<String,String[]> item: raw.entrySet()) {
String[] rawValue = item.getValue();
String[] cookedValue = new String[rawValue.length];
for(int i=0; i< rawValue.length; ++i) {
cookedValue[i] = HtmlQuoting.quoteHtmlChars(rawValue[i]);
}
result.put(HtmlQuoting.quoteHtmlChars(item.getKey()), cookedValue);
}
return result;
}
/**
* Quote the url so that users specifying the HOST HTTP header
* can't inject attacks.
*/
@Override
public StringBuffer getRequestURL(){
String url = rawRequest.getRequestURL().toString();
return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
}
/**
* Quote the server name so that users specifying the HOST HTTP header
* can't inject attacks.
*/
@Override
public String getServerName() {
return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName());
}
}
@Override
public void init(FilterConfig config) throws ServletException {
this.config = config;
}
@Override
public void destroy() {
}
@Override
public void doFilter(ServletRequest request,
ServletResponse response,
FilterChain chain
) throws IOException, ServletException {
HttpServletRequestWrapper quoted =
new RequestQuoter((HttpServletRequest) request);
HttpServletResponse httpResponse = (HttpServletResponse) response;
String mime = inferMimeType(request);
if (mime == null) {
httpResponse.setContentType("text/plain; charset=utf-8");
} else if (mime.startsWith("text/html")) {
// HTML with unspecified encoding, we want to
// force HTML with utf-8 encoding
// This is to avoid the following security issue:
// http://openmya.hacker.jp/hasegawa/security/utf7cs.html
httpResponse.setContentType("text/html; charset=utf-8");
} else if (mime.startsWith("application/xml")) {
httpResponse.setContentType("text/xml; charset=utf-8");
}
chain.doFilter(quoted, httpResponse);
}
/**
* Infer the mime type for the response based on the extension of the request
* URI. Returns null if unknown.
*/
private String inferMimeType(ServletRequest request) {
String path = ((HttpServletRequest)request).getRequestURI();
ServletContext context = config.getServletContext();
return context.getMimeType(path);
}
}
}
| HBASE-24054 To be safe, jetty's version number should be blocked.
| hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java | HBASE-24054 To be safe, jetty's version number should be blocked. |
|
Java | apache-2.0 | 661f5e4aa94aaf3c89c290817f590d8ec9386e20 | 0 | apache/solr,apache/solr,apache/solr,apache/solr,apache/solr | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.servlet;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.io.IOUtils;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.MultiMapSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class SolrRequestParserTest extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
initCore("solrconfig.xml", "schema.xml");
parser = new SolrRequestParsers( h.getCore().getSolrConfig() );
}
static SolrRequestParsers parser;
@AfterClass
public static void afterClass() {
parser = null;
}
@Test
public void testStreamBody() throws Exception
{
String body1 = "AMANAPLANPANAMA";
String body2 = "qwertasdfgzxcvb";
String body3 = "1234567890";
SolrCore core = h.getCore();
Map<String,String[]> args = new HashMap<String, String[]>();
args.put( CommonParams.STREAM_BODY, new String[] {body1} );
// Make sure it got a single stream in and out ok
List<ContentStream> streams = new ArrayList<ContentStream>();
SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
assertEquals( 1, streams.size() );
assertEquals( body1, IOUtils.toString( streams.get(0).getReader() ) );
req.close();
// Now add three and make sure they come out ok
streams = new ArrayList<ContentStream>();
args.put( CommonParams.STREAM_BODY, new String[] {body1,body2,body3} );
req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
assertEquals( 3, streams.size() );
ArrayList<String> input = new ArrayList<String>();
ArrayList<String> output = new ArrayList<String>();
input.add( body1 );
input.add( body2 );
input.add( body3 );
output.add( IOUtils.toString( streams.get(0).getReader() ) );
output.add( IOUtils.toString( streams.get(1).getReader() ) );
output.add( IOUtils.toString( streams.get(2).getReader() ) );
// sort them so the output is consistent
Collections.sort( input );
Collections.sort( output );
assertEquals( input.toString(), output.toString() );
req.close();
// set the contentType and make sure tat gets set
String ctype = "text/xxx";
streams = new ArrayList<ContentStream>();
args.put( CommonParams.STREAM_CONTENTTYPE, new String[] {ctype} );
req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
for( ContentStream s : streams ) {
assertEquals( ctype, s.getContentType() );
}
req.close();
}
@Test
public void testStreamURL() throws Exception
{
boolean ok = false;
String url = "http://www.apache.org/dist/lucene/solr/";
byte[] bytes = null;
try {
URL u = new URL(url);
HttpURLConnection connection = (HttpURLConnection)u.openConnection();
connection.setConnectTimeout(5000);
connection.setReadTimeout(5000);
connection.connect();
int code = connection.getResponseCode();
assumeTrue("wrong response code from server: " + code, 200 == code);
bytes = IOUtils.toByteArray( connection.getInputStream());
}
catch( Exception ex ) {
assumeNoException("Unable to connect to " + url + " to run the test.", ex);
return;
}
SolrCore core = h.getCore();
Map<String,String[]> args = new HashMap<String, String[]>();
args.put( CommonParams.STREAM_URL, new String[] {url} );
// Make sure it got a single stream in and out ok
List<ContentStream> streams = new ArrayList<ContentStream>();
SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
assertEquals( 1, streams.size() );
try {
assertArrayEquals( bytes, IOUtils.toByteArray( streams.get(0).getStream() ) );
} catch (SocketTimeoutException ex) {
assumeNoException("Problems retrieving from " + url + " to run the test.", ex);
} finally {
req.close();
}
}
@Test
public void testUrlParamParsing()
{
String[][] teststr = new String[][] {
{ "this is simple", "this%20is%20simple" },
{ "this is simple", "this+is+simple" },
{ "\u00FC", "%C3%BC" }, // lower-case "u" with diaeresis/umlaut
{ "\u0026", "%26" }, // &
{ "\u20AC", "%E2%82%AC" } // euro
};
for( String[] tst : teststr ) {
MultiMapSolrParams params = SolrRequestParsers.parseQueryString( "val="+tst[1] );
assertEquals( tst[0], params.get( "val" ) );
}
}
@Test
public void testStandardParseParamsAndFillStreams() throws Exception
{
ArrayList<ContentStream> streams = new ArrayList<ContentStream>();
Map<String,String[]> params = new HashMap<String, String[]>();
params.put( "q", new String[] { "hello" } );
// Set up the expected behavior
String[] ct = new String[] {
"application/x-www-form-urlencoded",
"Application/x-www-form-urlencoded",
"application/x-www-form-urlencoded; charset=utf-8",
"application/x-www-form-urlencoded;"
};
for( String contentType : ct ) {
HttpServletRequest request = createMock(HttpServletRequest.class);
expect(request.getMethod()).andReturn("POST").anyTimes();
expect(request.getContentType()).andReturn( contentType ).anyTimes();
expect(request.getParameterMap()).andReturn(params).anyTimes();
replay(request);
MultipartRequestParser multipart = new MultipartRequestParser( 1000000 );
RawRequestParser raw = new RawRequestParser();
StandardRequestParser standard = new StandardRequestParser( multipart, raw );
SolrParams p = standard.parseParamsAndFillStreams( request, streams );
assertEquals( "contentType: "+contentType, "hello", p.get("q") );
}
}
}
| solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.servlet;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.io.IOUtils;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.MultiMapSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class SolrRequestParserTest extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
initCore("solrconfig.xml", "schema.xml");
parser = new SolrRequestParsers( h.getCore().getSolrConfig() );
}
static SolrRequestParsers parser;
@AfterClass
public static void afterClass() {
parser = null;
}
@Test
public void testStreamBody() throws Exception
{
String body1 = "AMANAPLANPANAMA";
String body2 = "qwertasdfgzxcvb";
String body3 = "1234567890";
SolrCore core = h.getCore();
Map<String,String[]> args = new HashMap<String, String[]>();
args.put( CommonParams.STREAM_BODY, new String[] {body1} );
// Make sure it got a single stream in and out ok
List<ContentStream> streams = new ArrayList<ContentStream>();
SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
assertEquals( 1, streams.size() );
assertEquals( body1, IOUtils.toString( streams.get(0).getReader() ) );
req.close();
// Now add three and make sure they come out ok
streams = new ArrayList<ContentStream>();
args.put( CommonParams.STREAM_BODY, new String[] {body1,body2,body3} );
req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
assertEquals( 3, streams.size() );
ArrayList<String> input = new ArrayList<String>();
ArrayList<String> output = new ArrayList<String>();
input.add( body1 );
input.add( body2 );
input.add( body3 );
output.add( IOUtils.toString( streams.get(0).getReader() ) );
output.add( IOUtils.toString( streams.get(1).getReader() ) );
output.add( IOUtils.toString( streams.get(2).getReader() ) );
// sort them so the output is consistent
Collections.sort( input );
Collections.sort( output );
assertEquals( input.toString(), output.toString() );
req.close();
// set the contentType and make sure tat gets set
String ctype = "text/xxx";
streams = new ArrayList<ContentStream>();
args.put( CommonParams.STREAM_CONTENTTYPE, new String[] {ctype} );
req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
for( ContentStream s : streams ) {
assertEquals( ctype, s.getContentType() );
}
req.close();
}
@Test
public void testStreamURL() throws Exception
{
boolean ok = false;
String url = "http://www.apache.org/dist/lucene/solr/";
byte[] bytes = null;
try {
URLConnection connection = new URL(url).openConnection();
connection.setConnectTimeout(5000);
connection.setReadTimeout(5000);
connection.connect();
bytes = IOUtils.toByteArray( connection.getInputStream());
}
catch( Exception ex ) {
assumeNoException("Unable to connect to " + url + " to run the test.", ex);
return;
}
SolrCore core = h.getCore();
Map<String,String[]> args = new HashMap<String, String[]>();
args.put( CommonParams.STREAM_URL, new String[] {url} );
// Make sure it got a single stream in and out ok
List<ContentStream> streams = new ArrayList<ContentStream>();
SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
assertEquals( 1, streams.size() );
assertArrayEquals( bytes, IOUtils.toByteArray( streams.get(0).getStream() ) );
req.close();
}
@Test
public void testUrlParamParsing()
{
String[][] teststr = new String[][] {
{ "this is simple", "this%20is%20simple" },
{ "this is simple", "this+is+simple" },
{ "\u00FC", "%C3%BC" }, // lower-case "u" with diaeresis/umlaut
{ "\u0026", "%26" }, // &
{ "\u20AC", "%E2%82%AC" } // euro
};
for( String[] tst : teststr ) {
MultiMapSolrParams params = SolrRequestParsers.parseQueryString( "val="+tst[1] );
assertEquals( tst[0], params.get( "val" ) );
}
}
@Test
public void testStandardParseParamsAndFillStreams() throws Exception
{
ArrayList<ContentStream> streams = new ArrayList<ContentStream>();
Map<String,String[]> params = new HashMap<String, String[]>();
params.put( "q", new String[] { "hello" } );
// Set up the expected behavior
String[] ct = new String[] {
"application/x-www-form-urlencoded",
"Application/x-www-form-urlencoded",
"application/x-www-form-urlencoded; charset=utf-8",
"application/x-www-form-urlencoded;"
};
for( String contentType : ct ) {
HttpServletRequest request = createMock(HttpServletRequest.class);
expect(request.getMethod()).andReturn("POST").anyTimes();
expect(request.getContentType()).andReturn( contentType ).anyTimes();
expect(request.getParameterMap()).andReturn(params).anyTimes();
replay(request);
MultipartRequestParser multipart = new MultipartRequestParser( 1000000 );
RawRequestParser raw = new RawRequestParser();
StandardRequestParser standard = new StandardRequestParser( multipart, raw );
SolrParams p = standard.parseParamsAndFillStreams( request, streams );
assertEquals( "contentType: "+contentType, "hello", p.get("q") );
}
}
}
| harden test against internet problems
git-svn-id: 308d55f399f3bd9aa0560a10e81a003040006c48@1370605 13f79535-47bb-0310-9956-ffa450edef68
| solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java | harden test against internet problems |
|
Java | apache-2.0 | f125eb16a7f7d62f1da961f947a99b933dd05c3a | 0 | RichJackson/cogstack,CogStack/cogstack,RichJackson/turbo-laser,RichJackson/turbo-laser,RichJackson/cogstack,RichJackson/cogstack,CogStack/cogstack,CogStack/cogstack | /*
* Copyright 2016 King's College London, Richard Jackson <[email protected]>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.kcl.itemProcessors;
import org.apache.tika.config.TikaConfig;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.sax.BodyContentHandler;
import org.apache.tika.sax.ToXMLContentHandler;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import uk.ac.kcl.model.Document;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import javax.annotation.PostConstruct;
/**
*
* @author rich
*/
@Profile("tika")
@Service("tikaDocumentItemProcessor")
public class TikaDocumentItemProcessor extends TLItemProcessor implements ItemProcessor<Document, Document> {
private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TikaDocumentItemProcessor.class);
private boolean keepTags;
private String binaryFieldName;
private AutoDetectParser parser;
private TikaConfig config;
public boolean isKeepTags() {
return keepTags;
}
public void setKeepTags(boolean keepTags) {
this.keepTags = keepTags;
}
@Autowired
Environment env;
@PostConstruct
public void init() throws IOException, SAXException, TikaException{
this.keepTags = env.getProperty("keepTags").equalsIgnoreCase("true");
setFieldName(env.getProperty("tikaFieldName"));
config = new TikaConfig(this.getClass().getClassLoader()
.getResourceAsStream("tika-config.xml"));
parser = new AutoDetectParser(config);
}
@Override
public Document process(final Document doc) throws Exception {
LOG.debug("starting " + this.getClass().getSimpleName() +" on doc " +doc.getDocName());
ContentHandler handler;
if (keepTags) {
handler = new ToXMLContentHandler();
} else {
handler = new BodyContentHandler();
}
Metadata metadata = new Metadata();
try (InputStream stream = new ByteArrayInputStream(doc.getBinaryContent())) {
ParseContext context = new ParseContext();
context.set(TikaConfig.class, config);
parser.parse(stream, handler, metadata, context);
Set<String> metaKeys = new HashSet<String>(Arrays.asList(
metadata.names()));
extractOCRMetadata(doc, metaKeys, metadata);
extractContentTypeMetadata(doc, metaKeys, metadata);
extractPageCountMetadata(doc, metaKeys, metadata);
addField(doc, handler.toString());
} catch (Exception ex) {
addField(doc, ex.getMessage());
}
LOG.debug("finished " + this.getClass().getSimpleName() +" on doc " +doc.getDocName());
return doc;
}
private void extractOCRMetadata(Document doc, Set<String> metaKeys,
Metadata metadata) {
if (metaKeys.contains("X-PDFPREPROC-OCR-APPLIED")) {
doc.getAssociativeArray().put("X-PDFPREPROC-OCR-APPLIED",
metadata.get("X-PDFPREPROC-OCR-APPLIED"));
}
if (metaKeys.contains("X-PDFPREPROC-ORIGINAL")) {
doc.getAssociativeArray().put("X-PDFPREPROC-ORIGINAL",
metadata.get("X-PDFPREPROC-ORIGINAL"));
}
}
private void extractContentTypeMetadata(Document doc, Set<String> metaKeys,
Metadata metadata) {
if (metaKeys.contains("Content-Type")) {
doc.getAssociativeArray().put("X-TL-CONTENT-TYPE",
metadata.get("Content-Type"));
} else {
doc.getAssociativeArray().put("X-TL-CONTENT-TYPE",
"TL_CONTENT_TYPE_UNKNOWN");
}
}
private void extractPageCountMetadata(Document doc, Set<String> metaKeys,
Metadata metadata) {
if (metaKeys.contains("xmpTPg:NPages")) {
doc.getAssociativeArray().put("X-TL-PAGE-COUNT",
metadata.get("xmpTPg:NPages"));
} else if (metaKeys.contains("Page-Count")) {
doc.getAssociativeArray().put("X-TL-PAGE-COUNT",
metadata.get("Page-Count"));
} else if (metaKeys.contains("meta:page-count")) {
doc.getAssociativeArray().put("X-TL-PAGE-COUNT",
metadata.get("meta:page-count"));
} else {
doc.getAssociativeArray().put("X-TL-PAGE-COUNT",
"TL_PAGE_COUNT_UNKNOWN");
}
}
}
| src/main/java/uk/ac/kcl/itemProcessors/TikaDocumentItemProcessor.java | /*
* Copyright 2016 King's College London, Richard Jackson <[email protected]>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.kcl.itemProcessors;
import org.apache.tika.config.TikaConfig;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.sax.BodyContentHandler;
import org.apache.tika.sax.ToXMLContentHandler;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import uk.ac.kcl.model.Document;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import javax.annotation.PostConstruct;
/**
*
* @author rich
*/
@Profile("tika")
@Service("tikaDocumentItemProcessor")
public class TikaDocumentItemProcessor extends TLItemProcessor implements ItemProcessor<Document, Document> {
private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TikaDocumentItemProcessor.class);
private boolean keepTags;
private String binaryFieldName;
private AutoDetectParser parser;
private TikaConfig config;
public boolean isKeepTags() {
return keepTags;
}
public void setKeepTags(boolean keepTags) {
this.keepTags = keepTags;
}
@Autowired
Environment env;
@PostConstruct
public void init() throws IOException, SAXException, TikaException{
this.keepTags = env.getProperty("keepTags").equalsIgnoreCase("true");
setFieldName(env.getProperty("tikaFieldName"));
config = new TikaConfig(this.getClass().getClassLoader()
.getResourceAsStream("tika-config.xml"));
parser = new AutoDetectParser(config);
}
@Override
public Document process(final Document doc) throws Exception {
LOG.debug("starting " + this.getClass().getSimpleName() +" on doc " +doc.getDocName());
ContentHandler handler;
if (keepTags) {
handler = new ToXMLContentHandler();
} else {
handler = new BodyContentHandler();
}
Metadata metadata = new Metadata();
try (InputStream stream = new ByteArrayInputStream(doc.getBinaryContent())) {
ParseContext context = new ParseContext();
context.set(TikaConfig.class, config);
parser.parse(stream, handler, metadata, context);
Set<String> metaKeys = new HashSet<String>(Arrays.asList(
metadata.names()));
extractOCRMetadata(doc, metaKeys, metadata);
extractContentTypeMetadata(doc, metaKeys, metadata);
addField(doc, handler.toString());
} catch (Exception ex) {
addField(doc, ex.getMessage());
}
LOG.debug("finished " + this.getClass().getSimpleName() +" on doc " +doc.getDocName());
return doc;
}
private void extractOCRMetadata(Document doc, Set<String> metaKeys,
Metadata metadata) {
if (metaKeys.contains("X-PDFPREPROC-OCR-APPLIED")) {
doc.getAssociativeArray().put("X-PDFPREPROC-OCR-APPLIED",
metadata.get("X-PDFPREPROC-OCR-APPLIED"));
}
if (metaKeys.contains("X-PDFPREPROC-ORIGINAL")) {
doc.getAssociativeArray().put("X-PDFPREPROC-ORIGINAL",
metadata.get("X-PDFPREPROC-ORIGINAL"));
}
}
private void extractContentTypeMetadata(Document doc, Set<String> metaKeys,
Metadata metadata) {
if (metaKeys.contains("Content-Type")) {
doc.getAssociativeArray().put("X-TL-CONTENT-TYPE",
metadata.get("Content-Type"));
} else {
doc.getAssociativeArray().put("X-TL-CONTENT-TYPE",
"TL_CONTENT_TYPE_UNKNOWN");
}
}
}
| #6 Extract page count from Tika metadata
| src/main/java/uk/ac/kcl/itemProcessors/TikaDocumentItemProcessor.java | #6 Extract page count from Tika metadata |
|
Java | apache-2.0 | bfa94b4ee9d014bdcd62e656dffec8f094e1c9b1 | 0 | reportportal/commons-model | /*
* Copyright 2017 EPAM Systems
*
*
* This file is part of EPAM Report Portal.
* https://github.com/reportportal/commons-model
*
* Report Portal is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Report Portal is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Report Portal. If not, see <http://www.gnu.org/licenses/>.
*/
package com.epam.ta.reportportal.ws.model;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Test item parameters representation
*
* @author Pavel_Bortnik
*/
public class ParameterResource {
@JsonProperty(value = "key")
private String key;
@JsonProperty(value = "value")
private String value;
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ParameterResource that = (ParameterResource) o;
if (key != null ? !key.equals(that.key) : that.key != null) return false;
return value != null ? value.equals(that.value) : that.value == null;
}
@Override
public int hashCode() {
int result = key != null ? key.hashCode() : 0;
result = 31 * result + (value != null ? value.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(key != null ? key + "=" : "");
sb.append(value);
return sb.toString();
}
}
| src/main/java/com/epam/ta/reportportal/ws/model/ParameterResource.java | /*
* Copyright 2017 EPAM Systems
*
*
* This file is part of EPAM Report Portal.
* https://github.com/reportportal/commons-model
*
* Report Portal is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Report Portal is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Report Portal. If not, see <http://www.gnu.org/licenses/>.
*/
package com.epam.ta.reportportal.ws.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import javax.validation.constraints.Size;
/**
* Test item parameters representation
*
* @author Pavel_Bortnik
*/
public class ParameterResource {
@JsonProperty(value = "key")
@Size(max = ValidationConstraints.MAX_PARAMETERS_LENGTH)
private String key;
@JsonProperty(value = "value")
private String value;
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ParameterResource that = (ParameterResource) o;
if (key != null ? !key.equals(that.key) : that.key != null) return false;
return value != null ? value.equals(that.value) : that.value == null;
}
@Override
public int hashCode() {
int result = key != null ? key.hashCode() : 0;
result = 31 * result + (value != null ? value.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(key != null ? key + "=" : "");
sb.append(value);
return sb.toString();
}
}
| remove restriction on parameter key
| src/main/java/com/epam/ta/reportportal/ws/model/ParameterResource.java | remove restriction on parameter key |
|
Java | apache-2.0 | 2d314dc721b7dfc22fbb1a06fa106d290c5deb0d | 0 | krujos/willitconnect,TomG713/willitconnect,gambtho/willitconnect,gambtho/willitconnect,TomG713/willitconnect,gambtho/willitconnect,krujos/willitconnect,krujos/willitconnect,TomG713/willitconnect | package willitconnect.controller;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import willitconnect.model.CheckedEntry;
import willitconnect.service.VcapServicesChecker;
import willitconnect.service.util.Connection;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.verify;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
public class WillItConnectControllerTest {
private MockMvc mockMvc;
@Mock
VcapServicesChecker checker;
@PrepareForTest(Connection.class)
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
mockMvc = MockMvcBuilders.standaloneSetup(
new WillItConnectController(checker)).build();
}
@Test
public void resultsShouldReturnEmptyJsonWithNoServices() throws Exception {
when(checker.getConnectionResults()).thenReturn(new ArrayList<>());
mockMvc.perform(get("/serviceresults").accept(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$").isArray())
.andExpect(jsonPath("$", hasSize(0)));
}
@Test
public void resultsShouldContainOneServiceWithVcapServices() throws Exception{
List<CheckedEntry> entryList = new ArrayList<>();
entryList.add(new CheckedEntry("foo"));
entryList.add(new CheckedEntry("bar"));
when(checker.getConnectionResults()).thenReturn(entryList);
mockMvc.perform(get("/serviceresults").accept(MediaType
.APPLICATION_JSON))
.andExpect(jsonPath("$").isArray())
.andExpect(jsonPath("$", hasSize(0)));
// It's false because we default everything to false before
// parsing
//TODO: fix test too
//.andExpect(jsonPath("$[0].canConnect", is(false)));
}
@Test
public void itSavesAProxy() throws Exception {
mockMvc.perform(put("/proxy")
.param("proxy", "proxy.example.com")
.param("proxyPort", "80")
.param("proxyType", "http")
).andExpect(status().isOk());
verify(checker).setProxy("proxy.example.com", 80, "http");
}
} | src/test/java/willitconnect/controller/WillItConnectControllerTest.java | package willitconnect.controller;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import willitconnect.model.CheckedEntry;
import willitconnect.service.VcapServicesChecker;
import willitconnect.service.util.Connection;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.verify;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
public class WillItConnectControllerTest {
private MockMvc mockMvc;
@Mock
VcapServicesChecker checker;
@PrepareForTest(Connection.class)
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
mockMvc = MockMvcBuilders.standaloneSetup(
new WillItConnectController(checker)).build();
}
@Test
public void resultsShouldReturnEmptyJsonWithNoServices() throws Exception {
when(checker.getConnectionResults()).thenReturn(new ArrayList<>());
mockMvc.perform(get("/serviceresults").accept(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$").isArray())
.andExpect(jsonPath("$", hasSize(0)));
}
@Test
public void resultsShouldContainOneServiceWithVcapServices() throws Exception{
List<CheckedEntry> entryList = new ArrayList<>();
entryList.add(new CheckedEntry("foo"));
entryList.add(new CheckedEntry("bar"));
when(checker.getConnectionResults()).thenReturn(entryList);
mockMvc.perform(get("/serviceresults").accept(MediaType
.APPLICATION_JSON))
.andExpect(jsonPath("$").isArray())
.andExpect(jsonPath("$", hasSize(2)))
// It's false because we default everything to false before
// parsing
.andExpect(jsonPath("$[0].canConnect", is(false)));
}
@Test
public void itSavesAProxy() throws Exception {
mockMvc.perform(put("/proxy")
.param("proxy", "proxy.example.com")
.param("proxyPort", "80")
.param("proxyType", "http")
).andExpect(status().isOk());
verify(checker).setProxy("proxy.example.com", 80, "http");
}
} | updated test to reflect temporary fix
| src/test/java/willitconnect/controller/WillItConnectControllerTest.java | updated test to reflect temporary fix |
|
Java | apache-2.0 | d02d25f9d96e10929290329c6db56c1fcca49bf4 | 0 | signed/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,ibinti/intellij-community,apixandru/intellij-community,semonte/intellij-community,fitermay/intellij-community,da1z/intellij-community,FHannes/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,signed/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,da1z/intellij-community,allotria/intellij-community,signed/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,da1z/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,asedunov/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,vvv1559/intellij-community,da1z/intellij-community,allotria/intellij-community,signed/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,allotria/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,signed/intellij-community,asedunov/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,hurricup/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,fitermay/intellij-community,signed/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,ibinti/intellij-community,allotria/intellij-community,signed/intellij-community,da1z/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,da1z/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,fitermay/intellij-community,allotria/intellij-community,FHannes/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,ibinti/intellij-community,semonte/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,hurricup/intellij-community,asedunov/intellij-community,signed/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,ibinti/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,semonte/intellij-community,xfournet/intellij-community,FHannes/intellij-community,xfournet/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,apixandru/intellij-community,FHannes/intellij-community,semonte/intellij-community,hurricup/intellij-community,da1z/intellij-community,fitermay/intellij-community,FHannes/intellij-community,hurricup/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,xfournet/intellij-community,fitermay/intellij-community,FHannes/intellij-community,FHannes/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,semonte/intellij-community,hurricup/intellij-community,semonte/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,youdonghai/intellij-community,semonte/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,semonte/intellij-community,ibinti/intellij-community,apixandru/intellij-community,allotria/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,da1z/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,signed/intellij-community,asedunov/intellij-community,signed/intellij-community,youdonghai/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,semonte/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,apixandru/intellij-community,apixandru/intellij-community,fitermay/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,signed/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.ui;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.InputValidatorEx;
import com.intellij.openapi.ui.Messages;
import com.intellij.ui.classFilter.ClassFilter;
import com.intellij.ui.classFilter.ClassFilterEditor;
import com.intellij.util.IconUtil;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
* User: lex
* Date: Aug 29, 2003
* Time: 2:38:30 PM
*/
public class InstanceFilterEditor extends ClassFilterEditor {
public InstanceFilterEditor(Project project) {
super(project);
}
protected void addClassFilter() {
String idString = Messages.showInputDialog(myProject,
DebuggerBundle.message("add.instance.filter.dialog.prompt"),
DebuggerBundle.message("add.instance.filter.dialog.title"),
Messages.getQuestionIcon(),
null,
new InputValidatorEx() {
@Nullable
@Override
public String getErrorText(String inputString) {
try {
//noinspection ResultOfMethodCallIgnored
Long.parseLong(inputString);
return null;
} catch (NumberFormatException e) {
return DebuggerBundle.message("add.instance.filter.dialog.error.numeric.value.expected");
}
}
@Override
public boolean checkInput(String inputString) {
return getErrorText(inputString) == null;
}
@Override
public boolean canClose(String inputString) {
return getErrorText(inputString) == null;
}
});
if (idString != null) {
ClassFilter filter = createFilter(idString);
myTableModel.addRow(filter);
int row = myTableModel.getRowCount() - 1;
myTable.getSelectionModel().setSelectionInterval(row, row);
myTable.scrollRectToVisible(myTable.getCellRect(row, 0, true));
myTable.requestFocus();
}
}
protected String getAddButtonText() {
return DebuggerBundle.message("button.add");
}
@Override
protected Icon getAddButtonIcon() {
return IconUtil.getAddIcon();
}
@Override
protected boolean addPatternButtonVisible() {
return false;
}
}
| java/debugger/impl/src/com/intellij/debugger/ui/InstanceFilterEditor.java | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.ui;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.ui.classFilter.ClassFilter;
import com.intellij.ui.classFilter.ClassFilterEditor;
import com.intellij.util.IconUtil;
import javax.swing.*;
/**
* User: lex
* Date: Aug 29, 2003
* Time: 2:38:30 PM
*/
public class InstanceFilterEditor extends ClassFilterEditor {
public InstanceFilterEditor(Project project) {
super(project);
}
protected void addClassFilter() {
String idString = Messages.showInputDialog(myProject, DebuggerBundle.message("add.instance.filter.dialog.prompt"), DebuggerBundle.message("add.instance.filter.dialog.title"), Messages.getQuestionIcon());
if (idString != null) {
ClassFilter filter = createFilter(idString);
myTableModel.addRow(filter);
int row = myTableModel.getRowCount() - 1;
myTable.getSelectionModel().setSelectionInterval(row, row);
myTable.scrollRectToVisible(myTable.getCellRect(row, 0, true));
myTable.requestFocus();
}
}
protected String getAddButtonText() {
return DebuggerBundle.message("button.add");
}
@Override
protected Icon getAddButtonIcon() {
return IconUtil.getAddIcon();
}
@Override
protected boolean addPatternButtonVisible() {
return false;
}
protected ClassFilter createFilter(String pattern) {
try {
Long.parseLong(pattern);
return super.createFilter(pattern);
} catch (NumberFormatException e) {
Messages.showMessageDialog(this, DebuggerBundle.message("add.instance.filter.dialog.error.numeric.value.expected"), DebuggerBundle.message("add.instance.filter.dialog.title"), Messages.getErrorIcon());
return null;
}
}
}
| EA-84907 - NPE: ClassFilterEditor$FilterTableModel.getValueAt
| java/debugger/impl/src/com/intellij/debugger/ui/InstanceFilterEditor.java | EA-84907 - NPE: ClassFilterEditor$FilterTableModel.getValueAt |
|
Java | apache-2.0 | ba238a74b8aa5708dd46a1616cf0a8e8fa4cecaa | 0 | nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/wfswarm-example-arjuna-old,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch | import java.util.*;
public class RepairDroid
{
public static final String INITIAL_INPUT = Integer.toString(DroidMovement.NORTH);
public RepairDroid (Vector<String> instructions, boolean debug)
{
_debug = debug;
_theComputer = new Intcode(instructions, INITIAL_INPUT, _debug);
_currentLocation = new Coordinate(0, 0); // starting location
_theMap = new Maze();
_theMap.addContent(_currentLocation, TileId.TRAVERSE);
}
public final int moveToOxygenStation ()
{
int numberOfSteps = 0;
// create a map first!
explore();
return numberOfSteps;
}
public void printGrid ()
{
System.out.println(_theMap.printWithDroid(_currentLocation));
}
/*
* If we run into a wall then try a different direction.
* If we can't move other than backwards then do that.
* Don't move into areas we've already been.
*/
private boolean explore ()
{
while (!_theComputer.hasHalted())
{
boolean needToBackup = false;
Coordinate[] moves = DroidMovement.getNextPositions(_currentLocation); // get all possible moves (Coordinates)
Coordinate loc = _currentLocation;
int backupDirection = DroidMovement.backupDirection(DroidMovement.NORTH);
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
/*
* We search N, E, S and then W.
*/
if (!tryToMove(String.valueOf(DroidMovement.NORTH), moves[0]))
{
System.out.println("**Failed to move NORTH");
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
backupDirection = DroidMovement.backupDirection(DroidMovement.EAST);
if (!tryToMove(String.valueOf(DroidMovement.EAST), moves[1]))
{
System.out.println("**Failed to move EAST");
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
backupDirection = DroidMovement.backupDirection(DroidMovement.SOUTH);
if (!tryToMove(String.valueOf(DroidMovement.SOUTH), moves[2]))
{
System.out.println("**Failed to move SOUTH");
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
backupDirection = DroidMovement.backupDirection(DroidMovement.WEST);
if (!tryToMove(String.valueOf(DroidMovement.WEST), moves[3]))
{
System.out.println("**Failed to move WEST");
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
System.out.println("**NEED TO BACKUP");
needToBackup = true;
}
}
}
}
if (needToBackup)
backtrack(DroidMovement.toString(backupDirection), loc);
}
return _theMap.isOxygenStation(_currentLocation);
}
private boolean tryToMove (String direction, Coordinate to)
{
System.out.println("**Trying to move from: "+_currentLocation+" to "+to+" with direction "+DroidMovement.toString(direction));
// if we've already been there then don't move!
if (_theMap.isExplored(to))
{
System.out.println("**Been there already.");
return false;
}
_theComputer.setInput(direction);
_theComputer.executeUntilInput();
if (_theComputer.hasOutput())
{
int response = Integer.parseInt(_theComputer.getOutput());
System.out.println("**Response is "+DroidStatus.toString(response));
switch (response)
{
case DroidStatus.ARRIVED: // arrived at the station!!
{
_theMap.addContent(to, TileId.OXYGEN_STATION);
_currentLocation = to;
return true;
}
case DroidStatus.COLLISION:
{
_theMap.addContent(to, TileId.WALL); // didn't move as we hit a wall
return false;
}
case DroidStatus.MOVED:
{
/*
* Droid moved so let's try to move again.
*/
_theMap.addContent(to, TileId.TRAVERSE);
_currentLocation = to;
return explore();
}
default:
System.out.println("Unknown response: "+response);
}
}
else
System.out.println("Error - no output after move instruction!");
return false;
}
private boolean backtrack (String direction, Coordinate to)
{
boolean moved = false;
System.out.println("**Trying to backup from: "+_currentLocation+" to "+to+" with direction "+DroidMovement.toString(direction));
_theComputer.setInput(direction);
_theComputer.executeUntilInput();
if (_theComputer.hasOutput())
{
int response = Integer.parseInt(_theComputer.getOutput());
if (response == DroidStatus.MOVED)
{
_currentLocation = to;
moved = true;
}
else
System.out.println("**Unexpected backup response: "+response);
}
else
System.out.println("Error - no output after move instruction!");
return moved;
}
private boolean _debug;
private Intcode _theComputer;
private Coordinate _currentLocation;
private Maze _theMap;
} | AdventOfCode/2019/day15/RepairDroid.java | import java.util.*;
public class RepairDroid
{
public static final String INITIAL_INPUT = Integer.toString(DroidMovement.NORTH);
public RepairDroid (Vector<String> instructions, boolean debug)
{
_debug = debug;
_theComputer = new Intcode(instructions, INITIAL_INPUT, _debug);
_currentLocation = new Coordinate(0, 0); // starting location
_theMap = new Maze();
_theMap.addContent(_currentLocation, TileId.TRAVERSE);
}
public final int moveToOxygenStation ()
{
int numberOfSteps = 0;
// create a map first!
explore();
return numberOfSteps;
}
public void printGrid ()
{
System.out.println(_theMap.printWithDroid(_currentLocation));
}
/*
* If we run into a wall then try a different direction.
* If we can't move other than backwards then do that.
* Don't move into areas we've already been.
*/
private boolean explore ()
{
while (!_theComputer.hasHalted())
{
boolean needToBackup = false;
Coordinate[] moves = DroidMovement.getNextPositions(_currentLocation); // get all possible moves (Coordinates)
Coordinate loc = _currentLocation;
int backupDirection = DroidMovement.backupDirection(DroidMovement.NORTH);
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
/*
* We search N, E, S and then W.
*/
if (!tryToMove(String.valueOf(DroidMovement.NORTH), moves[0]))
{
System.out.println("**Failed to move NORTH");
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
backupDirection = DroidMovement.backupDirection(DroidMovement.EAST);
if (!tryToMove(String.valueOf(DroidMovement.EAST), moves[1]))
{
System.out.println("**Failed to move EAST");
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
backupDirection = DroidMovement.backupDirection(DroidMovement.SOUTH);
if (!tryToMove(String.valueOf(DroidMovement.SOUTH), moves[2]))
{
System.out.println("**Failed to move SOUTH");
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
backupDirection = DroidMovement.backupDirection(DroidMovement.WEST);
if (!tryToMove(String.valueOf(DroidMovement.WEST), moves[3]))
{
System.out.println("**Failed to move WEST");
System.out.println("\n"+_theMap.printWithDroid(_currentLocation));
System.out.println("**NEED TO BACKUP");
needToBackup = true;
}
}
}
}
if (needToBackup)
backupDirection(backupDirection, loc);
}
return _theMap.isOxygenStation(_currentLocation);
}
private boolean tryToMove (String direction, Coordinate to)
{
System.out.println("**Trying to move from: "+_currentLocation+" to "+to+" with direction "+DroidMovement.toString(direction));
// if we've already been there then don't move!
if (_theMap.isExplored(to))
{
System.out.println("**Been there already.");
return false;
}
_theComputer.setInput(direction);
_theComputer.executeUntilInput();
if (_theComputer.hasOutput())
{
int response = Integer.parseInt(_theComputer.getOutput());
System.out.println("**Response is "+DroidStatus.toString(response));
switch (response)
{
case DroidStatus.ARRIVED: // arrived at the station!!
{
_theMap.addContent(to, TileId.OXYGEN_STATION);
_currentLocation = to;
return true;
}
case DroidStatus.COLLISION:
{
_theMap.addContent(to, TileId.WALL); // didn't move as we hit a wall
return false;
}
case DroidStatus.MOVED:
{
/*
* Droid moved so let's try to move again.
*/
_theMap.addContent(to, TileId.TRAVERSE);
_currentLocation = to;
return explore();
}
default:
System.out.println("Unknown response: "+response);
}
}
else
System.out.println("Error - no output after move instruction!");
return false;
}
private boolean backup (String direction, Coordinate to)
{
boolean moved = false;
System.out.println("**Trying to backup from: "+_currentLocation+" to "+to+" with direction "+DroidMovement.toString(direction));
_theComputer.setInput(direction);
_theComputer.executeUntilInput();
if (_theComputer.hasOutput())
{
int response = Integer.parseInt(_theComputer.getOutput());
if (response == DroidStatus.MOVED)
{
_currentLocation = to;
moved = true;
}
else
System.out.println("**Unexpected backup response: "+resonse);
}
else
System.out.println("Error - no output after move instruction!");
return moved;
}
private boolean _debug;
private Intcode _theComputer;
private Coordinate _currentLocation;
private Maze _theMap;
} | Update RepairDroid.java
| AdventOfCode/2019/day15/RepairDroid.java | Update RepairDroid.java |
|
Java | bsd-2-clause | ead39f9b68292f2830ae4c783628bcad341393fb | 0 | tfennelly/stapler,aldaris/stapler,tempbottle/stapler,tfennelly/stapler,vjuranek/stapler,stapler/stapler,tfennelly/stapler,tfennelly/stapler,vjuranek/stapler,christ66/stapler,christ66/stapler,aldaris/stapler,aldaris/stapler,stapler/stapler,ohtake/stapler,vjuranek/stapler,ohtake/stapler,christ66/stapler,christ66/stapler,ohtake/stapler,aldaris/stapler,tempbottle/stapler,ohtake/stapler,tfennelly/stapler,vjuranek/stapler,stapler/stapler,christ66/stapler,stapler/stapler,tempbottle/stapler,vjuranek/stapler,aldaris/stapler,stapler/stapler,ohtake/stapler,tempbottle/stapler,tempbottle/stapler | /*
* Copyright (c) 2004-2010, Kohsuke Kawaguchi
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.kohsuke.stapler;
import net.sf.json.JSON;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.export.Flavor;
import javax.servlet.ServletException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
/**
* Pluggable interface that takes the return value from request handling
* methods and convert that to HTTP responses.
*
* @author Kohsuke Kawaguchi
*/
public abstract class HttpResponseRenderer {
/**
*
* @param node
* Object that handled the request.
* @param response
* The return value or the exception from the method.
* @return
* true if the response object was understood and rendered by this method.
* false otherwise, in which case the next {@link HttpResponseRenderer}
* will be consulted.
*/
public abstract boolean generateResponse(StaplerRequest req, StaplerResponse rsp, Object node, Object response) throws IOException, ServletException;
/**
* Default {@link HttpResponseRenderer}.
*/
public static class Default extends HttpResponseRenderer {
@Override
public boolean generateResponse(StaplerRequest req, StaplerResponse rsp, Object node, Object response) throws IOException, ServletException {
return handleHttpResponse(req, rsp, node, response)
|| handleJSON(rsp, response)
|| handleJavaScriptProxyMethodCall(req,rsp,response)
|| handlePrimitive(rsp, response);
}
protected boolean handleJavaScriptProxyMethodCall(StaplerRequest req, StaplerResponse rsp, Object response) throws IOException {
if (req.isJavaScriptProxyCall()) {
rsp.setContentType(Flavor.JSON.contentType);
PrintWriter w = rsp.getWriter();
// handle other primitive types as JSON response
if (response instanceof String) {
w.print('"'+response.toString().replace("\"","\\\"").replace("\n","\\\n")+'"');
} else
if (response instanceof Number || response instanceof Boolean) {
w.print(response);
} else
if (response instanceof Collection || (response!=null && response.getClass().isArray())) {
JSONArray.fromObject(response).write(w);
} else {
// last fall back
JSONObject.fromObject(response).write(w);
}
return true;
}
return false;
}
protected boolean handlePrimitive(StaplerResponse rsp, Object response) throws IOException {
if (response instanceof String || response instanceof Integer) {
rsp.setContentType("text/plain;charset=UTF-8");
rsp.getWriter().print(response);
return true;
}
return false;
}
protected boolean handleHttpResponse(StaplerRequest req, StaplerResponse rsp, Object node, Object response) throws IOException, ServletException {
if (response instanceof HttpResponse) {
// let the result render the response
HttpResponse r = (HttpResponse) response;
r.generateResponse(req,rsp,node);
return true;
}
return false;
}
protected boolean handleJSON(StaplerResponse rsp, Object response) throws IOException {
if (response instanceof JSON) {
rsp.setContentType(Flavor.JSON.contentType);
((JSON)response).write(rsp.getWriter());
return true;
}
return false;
}
}
}
| core/src/main/java/org/kohsuke/stapler/HttpResponseRenderer.java | /*
* Copyright (c) 2004-2010, Kohsuke Kawaguchi
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.kohsuke.stapler;
import net.sf.json.JSON;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.export.Flavor;
import javax.servlet.ServletException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
/**
* Pluggable interface that takes the return value from request handling
* methods and convert that to HTTP responses.
*
* @author Kohsuke Kawaguchi
*/
public abstract class HttpResponseRenderer {
/**
*
* @param node
* Object that handled the request.
* @param response
* The return value or the exception from the method.
* @return
* true if the response object was understood and rendered by this method.
* false otherwise, in which case the next {@link HttpResponseRenderer}
* will be consulted.
*/
public abstract boolean generateResponse(StaplerRequest req, StaplerResponse rsp, Object node, Object response) throws IOException, ServletException;
/**
* Default {@link HttpResponseRenderer}.
*/
public static class Default extends HttpResponseRenderer {
@Override
public boolean generateResponse(StaplerRequest req, StaplerResponse rsp, Object node, Object response) throws IOException, ServletException {
return handleHttpResponse(req, rsp, node, response)
|| handleJSON(rsp, response)
|| handleJavaScriptProxyMethodCall(req,rsp,response)
|| handlePrimitive(rsp, response);
}
protected boolean handleJavaScriptProxyMethodCall(StaplerRequest req, StaplerResponse rsp, Object response) throws IOException {
if (req.isJavaScriptProxyCall()) {
rsp.setContentType(Flavor.JSON.contentType);
PrintWriter w = rsp.getWriter();
// handle other primitive types as JSON response
if (response instanceof String) {
w.print('"'+response.toString().replace("\"","\\\"")+'"');
} else
if (response instanceof Number || response instanceof Boolean) {
w.print(response);
} else
if (response instanceof Collection || (response!=null && response.getClass().isArray())) {
JSONArray.fromObject(response).write(w);
} else {
// last fall back
JSONObject.fromObject(response).write(w);
}
return true;
}
return false;
}
protected boolean handlePrimitive(StaplerResponse rsp, Object response) throws IOException {
if (response instanceof String || response instanceof Integer) {
rsp.setContentType("text/plain;charset=UTF-8");
rsp.getWriter().print(response);
return true;
}
return false;
}
protected boolean handleHttpResponse(StaplerRequest req, StaplerResponse rsp, Object node, Object response) throws IOException, ServletException {
if (response instanceof HttpResponse) {
// let the result render the response
HttpResponse r = (HttpResponse) response;
r.generateResponse(req,rsp,node);
return true;
}
return false;
}
protected boolean handleJSON(StaplerResponse rsp, Object response) throws IOException {
if (response instanceof JSON) {
rsp.setContentType(Flavor.JSON.contentType);
((JSON)response).write(rsp.getWriter());
return true;
}
return false;
}
}
}
| \n needs to be escaped as well
| core/src/main/java/org/kohsuke/stapler/HttpResponseRenderer.java | \n needs to be escaped as well |
|
Java | bsd-2-clause | 938e3d9f006a7d051daad2f2cd554c151b795d60 | 0 | chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio | /*
* Copyright (c) 2009-2010 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.bullet.collision;
import com.jme3.math.Vector3f;
import com.jme3.scene.Spatial;
import java.util.EventObject;
/**
* A CollisionEvent stores all information about a collision in the PhysicsWorld.
* Do not store this Object, as it will be reused after the collision() method has been called.
* Get/reference all data you need in the collide method.
* @author normenhansen
*/
public class PhysicsCollisionEvent extends EventObject {
public static final int TYPE_ADDED = 0;
public static final int TYPE_PROCESSED = 1;
public static final int TYPE_DESTROYED = 2;
private int type;
private PhysicsCollisionObject nodeA;
private PhysicsCollisionObject nodeB;
private long manifoldPointObjectId = 0;
public PhysicsCollisionEvent(int type, PhysicsCollisionObject nodeA, PhysicsCollisionObject nodeB, long manifoldPointObjectId) {
super(nodeA);
this.manifoldPointObjectId = manifoldPointObjectId;
}
/**
* used by event factory, called when event is destroyed
*/
public void clean() {
source = null;
this.type = 0;
this.nodeA = null;
this.nodeB = null;
this.manifoldPointObjectId = 0;
}
/**
* used by event factory, called when event reused
*/
public void refactor(int type, PhysicsCollisionObject source, PhysicsCollisionObject nodeB, long manifoldPointObjectId) {
this.source = source;
this.type = type;
this.nodeA = source;
this.nodeB = nodeB;
this.manifoldPointObjectId = manifoldPointObjectId;
}
public int getType() {
return type;
}
/**
* @return A Spatial if the UserObject of the PhysicsCollisionObject is a Spatial
*/
public Spatial getNodeA() {
if (nodeA.getUserObject() instanceof Spatial) {
return (Spatial) nodeA.getUserObject();
}
return null;
}
/**
* @return A Spatial if the UserObject of the PhysicsCollisionObject is a Spatial
*/
public Spatial getNodeB() {
if (nodeB.getUserObject() instanceof Spatial) {
return (Spatial) nodeB.getUserObject();
}
return null;
}
public PhysicsCollisionObject getObjectA() {
return nodeA;
}
public PhysicsCollisionObject getObjectB() {
return nodeB;
}
public float getAppliedImpulse() {
return getAppliedImpulse(manifoldPointObjectId);
}
private native float getAppliedImpulse(long manifoldPointObjectId);
public float getAppliedImpulseLateral1() {
return getAppliedImpulseLateral1(manifoldPointObjectId);
}
private native float getAppliedImpulseLateral1(long manifoldPointObjectId);
public float getAppliedImpulseLateral2() {
return getAppliedImpulseLateral2(manifoldPointObjectId);
}
private native float getAppliedImpulseLateral2(long manifoldPointObjectId);
public float getCombinedFriction() {
return getCombinedFriction(manifoldPointObjectId);
}
private native float getCombinedFriction(long manifoldPointObjectId);
public float getCombinedRestitution() {
return getCombinedRestitution(manifoldPointObjectId);
}
private native float getCombinedRestitution(long manifoldPointObjectId);
public float getDistance1() {
return getDistance1(manifoldPointObjectId);
}
private native float getDistance1(long manifoldPointObjectId);
public int getIndex0() {
return getIndex0(manifoldPointObjectId);
}
private native int getIndex0(long manifoldPointObjectId);
public int getIndex1() {
return getIndex1(manifoldPointObjectId);
}
private native int getIndex1(long manifoldPointObjectId);
public Vector3f getLateralFrictionDir1() {
return getLateralFrictionDir1(new Vector3f());
}
public Vector3f getLateralFrictionDir1(Vector3f lateralFrictionDir1) {
getLateralFrictionDir1(manifoldPointObjectId, lateralFrictionDir1);
return lateralFrictionDir1;
}
private native void getLateralFrictionDir1(long manifoldPointObjectId, Vector3f lateralFrictionDir1);
public Vector3f getLateralFrictionDir2() {
return getLateralFrictionDir2(new Vector3f());
}
public Vector3f getLateralFrictionDir2(Vector3f lateralFrictionDir2) {
getLateralFrictionDir2(manifoldPointObjectId, lateralFrictionDir2);
return lateralFrictionDir2;
}
private native void getLateralFrictionDir2(long manifoldPointObjectId, Vector3f lateralFrictionDir2);
public boolean isLateralFrictionInitialized() {
return isLateralFrictionInitialized(manifoldPointObjectId);
}
private native boolean isLateralFrictionInitialized(long manifoldPointObjectId);
public int getLifeTime() {
return getLifeTime(manifoldPointObjectId);
}
private native int getLifeTime(long manifoldPointObjectId);
public Vector3f getLocalPointA() {
return getLocalPointA(new Vector3f());
}
public Vector3f getLocalPointA(Vector3f localPointA) {
getLocalPointA(manifoldPointObjectId, localPointA);
return localPointA;
}
private native void getLocalPointA(long manifoldPointObjectId, Vector3f localPointA);
public Vector3f getLocalPointB() {
return getLocalPointB(new Vector3f());
}
public Vector3f getLocalPointB(Vector3f localPointB) {
getLocalPointB(manifoldPointObjectId, localPointB);
return localPointB;
}
private native void getLocalPointB(long manifoldPointObjectId, Vector3f localPointB);
public Vector3f getNormalWorldOnB() {
return getNormalWorldOnB(new Vector3f());
}
public Vector3f getNormalWorldOnB(Vector3f normalWorldOnB) {
getNormalWorldOnB(manifoldPointObjectId, normalWorldOnB);
return normalWorldOnB;
}
private native void getNormalWorldOnB(long manifoldPointObjectId, Vector3f normalWorldOnB);
public int getPartId0() {
return getPartId0(manifoldPointObjectId);
}
private native int getPartId0(long manifoldPointObjectId);
public int getPartId1() {
return getPartId1(manifoldPointObjectId);
}
private native int getPartId1(long manifoldPointObjectId);
public Vector3f getPositionWorldOnA() {
return getPositionWorldOnA(new Vector3f());
}
public Vector3f getPositionWorldOnA(Vector3f positionWorldOnA) {
getPositionWorldOnA(positionWorldOnA);
return positionWorldOnA;
}
private native void getPositionWorldOnA(long manifoldPointObjectId, Vector3f positionWorldOnA);
public Vector3f getPositionWorldOnB() {
return getPositionWorldOnB(new Vector3f());
}
public Vector3f getPositionWorldOnB(Vector3f positionWorldOnB) {
getPositionWorldOnB(manifoldPointObjectId, positionWorldOnB);
return positionWorldOnB;
}
private native void getPositionWorldOnB(long manifoldPointObjectId, Vector3f positionWorldOnB);
// public Object getUserPersistentData() {
// return userPersistentData;
// }
}
| engine/src/bullet/com/jme3/bullet/collision/PhysicsCollisionEvent.java | /*
* Copyright (c) 2009-2010 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.bullet.collision;
import com.jme3.math.Vector3f;
import com.jme3.scene.Spatial;
import java.util.EventObject;
/**
* A CollisionEvent stores all information about a collision in the PhysicsWorld.
* Do not store this Object, as it will be reused after the collision() method has been called.
* Get/reference all data you need in the collide method.
* @author normenhansen
*/
public class PhysicsCollisionEvent extends EventObject {
public static final int TYPE_ADDED = 0;
public static final int TYPE_PROCESSED = 1;
public static final int TYPE_DESTROYED = 2;
private int type;
private PhysicsCollisionObject nodeA;
private PhysicsCollisionObject nodeB;
private long manifoldPointObjectId = 0;
public PhysicsCollisionEvent(int type, PhysicsCollisionObject nodeA, PhysicsCollisionObject nodeB, long manifoldPointObjectId) {
super(nodeA);
this.manifoldPointObjectId = manifoldPointObjectId;
}
/**
* used by event factory, called when event is destroyed
*/
public void clean() {
source = null;
this.type = 0;
this.nodeA = null;
this.nodeB = null;
this.manifoldPointObjectId = 0;
}
/**
* used by event factory, called when event reused
*/
public void refactor(int type, PhysicsCollisionObject source, PhysicsCollisionObject nodeB, long manifoldPointObjectId) {
this.source = source;
this.type = type;
this.nodeA = source;
this.nodeB = nodeB;
this.manifoldPointObjectId = manifoldPointObjectId;
}
public int getType() {
return type;
}
/**
* @return A Spatial if the UserObject of the PhysicsCollisionObject is a Spatial
*/
public Spatial getNodeA() {
if (nodeA.getUserObject() instanceof Spatial) {
return (Spatial) nodeA.getUserObject();
}
return null;
}
/**
* @return A Spatial if the UserObject of the PhysicsCollisionObject is a Spatial
*/
public Spatial getNodeB() {
if (nodeB.getUserObject() instanceof Spatial) {
return (Spatial) nodeB.getUserObject();
}
return null;
}
public PhysicsCollisionObject getObjectA() {
return nodeA;
}
public PhysicsCollisionObject getObjectB() {
return nodeB;
}
public float getAppliedImpulse() {
return getAppliedImpulse(manifoldPointObjectId);
}
private native float getAppliedImpulse(long manifoldPointObjectId);
public float getAppliedImpulseLateral1() {
return getAppliedImpulseLateral1(manifoldPointObjectId);
}
private native float getAppliedImpulseLateral1(long manifoldPointObjectId);
public float getAppliedImpulseLateral2() {
return getAppliedImpulseLateral2(manifoldPointObjectId);
}
private native float getAppliedImpulseLateral2(long manifoldPointObjectId);
public float getCombinedFriction() {
return getCombinedFriction(manifoldPointObjectId);
}
private native float getCombinedFriction(long manifoldPointObjectId);
public float getCombinedRestitution() {
return getCombinedRestitution(manifoldPointObjectId);
}
private native float getCombinedRestitution(long manifoldPointObjectId);
public float getDistance1() {
return getDistance1(manifoldPointObjectId);
}
private native float getDistance1(long manifoldPointObjectId);
public int getIndex0() {
return getIndex0(manifoldPointObjectId);
}
private native int getIndex0(long manifoldPointObjectId);
public int getIndex1() {
return getIndex1(manifoldPointObjectId);
}
private native int getIndex1(long manifoldPointObjectId);
public Vector3f getLateralFrictionDir1(Vector3f lateralFrictionDir1) {
getLateralFrictionDir1(manifoldPointObjectId, lateralFrictionDir1);
return lateralFrictionDir1;
}
private native void getLateralFrictionDir1(long manifoldPointObjectId, Vector3f lateralFrictionDir1);
public Vector3f getLateralFrictionDir2(Vector3f lateralFrictionDir2) {
getLateralFrictionDir2(manifoldPointObjectId, lateralFrictionDir2);
return lateralFrictionDir2;
}
private native void getLateralFrictionDir2(long manifoldPointObjectId, Vector3f lateralFrictionDir2);
public boolean isLateralFrictionInitialized() {
return isLateralFrictionInitialized(manifoldPointObjectId);
}
private native boolean isLateralFrictionInitialized(long manifoldPointObjectId);
public int getLifeTime() {
return getLifeTime(manifoldPointObjectId);
}
private native int getLifeTime(long manifoldPointObjectId);
public Vector3f getLocalPointA(Vector3f localPointA) {
getLocalPointA(manifoldPointObjectId, localPointA);
return localPointA;
}
private native void getLocalPointA(long manifoldPointObjectId, Vector3f localPointA);
public Vector3f getLocalPointB(Vector3f localPointB) {
getLocalPointB(manifoldPointObjectId, localPointB);
return localPointB;
}
private native void getLocalPointB(long manifoldPointObjectId, Vector3f localPointB);
public Vector3f getNormalWorldOnB(Vector3f normalWorldOnB) {
getNormalWorldOnB(manifoldPointObjectId, normalWorldOnB);
return normalWorldOnB;
}
private native void getNormalWorldOnB(long manifoldPointObjectId, Vector3f normalWorldOnB);
public int getPartId0() {
return getPartId0(manifoldPointObjectId);
}
private native int getPartId0(long manifoldPointObjectId);
public int getPartId1() {
return getPartId1(manifoldPointObjectId);
}
private native int getPartId1(long manifoldPointObjectId);
public Vector3f getPositionWorldOnA(Vector3f positionWorldOnA) {
getPositionWorldOnA(positionWorldOnA);
return positionWorldOnA;
}
private native void getPositionWorldOnA(long manifoldPointObjectId, Vector3f positionWorldOnA);
public Vector3f getPositionWorldOnB(Vector3f positionWorldOnB) {
getPositionWorldOnB(manifoldPointObjectId, positionWorldOnB);
return positionWorldOnB;
}
private native void getPositionWorldOnB(long manifoldPointObjectId, Vector3f positionWorldOnB);
// public Object getUserPersistentData() {
// return userPersistentData;
// }
}
| - bullet native: fix changed signature for PhysicsCollisionEvent methods
git-svn-id: 5afc437a751a4ff2ced778146f5faadda0b504ab@8403 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
| engine/src/bullet/com/jme3/bullet/collision/PhysicsCollisionEvent.java | - bullet native: fix changed signature for PhysicsCollisionEvent methods |
|
Java | bsd-3-clause | 63f2f8ae1b6a770eaf83c004b8593611b46ac8f8 | 0 | CBIIT/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers | package gov.nih.nci.cabig.caaers.domain;
import gov.nih.nci.cabig.ctms.domain.AbstractImmutableDomainObject;
import javax.persistence.Entity;
import javax.persistence.ManyToOne;
import javax.persistence.Column;
import javax.persistence.Transient;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import java.util.List;
import java.util.Arrays;
import java.util.Collections;
/**
* This class represents the CtcTerm domain object associated with the Adverse event report.
*
* @author Rhett Sutphin
*/
@Entity
public class CtcTerm extends AbstractImmutableDomainObject {
private String term;
private String select;
private String ctepTerm;
private String ctepCode;
private CtcCategory category;
private boolean otherRequired;
private List<CtcGrade> contextualGrades;
// //// LOGIC
@Transient
public String getFullName() {
return getTerm();
}
@Transient
public String getFullNameWithMedDRA() {
if (select == null) {
return getTerm() + " - " + getCtepCode();
} else {
return getTerm() + " - " + getSelect() + " - " + getCtepCode();
}
}
@Transient
public List<CodedGrade> getGrades() {
if (getContextualGrades() == null || getContextualGrades().size() == 0) {
return Arrays.<CodedGrade> asList(Grade.values());
} else {
// this rigamarole is just to change the generic type without creating a new list
return Collections.<CodedGrade> unmodifiableList(getContextualGrades());
}
}
// //// BEAN PROPERTIES
public String getTerm() {
return term;
}
public void setTerm(String term) {
this.term = term;
}
@Column(name = "select_ae")
public String getSelect() {
return select;
}
public void setSelect(String select) {
this.select = select;
}
public String getCtepTerm() {
return ctepTerm;
}
public void setCtepTerm(String ctepTerm) {
this.ctepTerm = ctepTerm;
}
public String getCtepCode() {
return ctepCode;
}
public void setCtepCode(String ctepCode) {
this.ctepCode = ctepCode;
}
@ManyToOne
public CtcCategory getCategory() {
return category;
}
public void setCategory(CtcCategory category) {
this.category = category;
}
public boolean isOtherRequired() {
return otherRequired;
}
public void setOtherRequired(boolean otherRequired) {
this.otherRequired = otherRequired;
}
@OneToMany(mappedBy = "term")
@Cascade(value={CascadeType.ALL})
@OrderBy("grade")
public List<CtcGrade> getContextualGrades() {
return contextualGrades;
}
public void setContextualGrades(List<CtcGrade> contextualGrades) {
this.contextualGrades = contextualGrades;
}
}
| projects/core/src/main/java/gov/nih/nci/cabig/caaers/domain/CtcTerm.java | package gov.nih.nci.cabig.caaers.domain;
import gov.nih.nci.cabig.ctms.domain.AbstractImmutableDomainObject;
import javax.persistence.Entity;
import javax.persistence.ManyToOne;
import javax.persistence.Column;
import javax.persistence.Transient;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import java.util.List;
import java.util.Arrays;
import java.util.Collections;
/**
* This class represents the CtcTerm domain object associated with the Adverse event report.
*
* @author Rhett Sutphin
*/
@Entity
public class CtcTerm extends AbstractImmutableDomainObject {
private String term;
private String select;
private String ctepTerm;
private String ctepCode;
private CtcCategory category;
private boolean otherRequired;
private List<CtcGrade> contextualGrades;
// //// LOGIC
@Transient
public String getFullName() {
if (select == null) {
return getTerm();
} else {
return getTerm() + " - " + getSelect();
}
}
@Transient
public String getFullNameWithMedDRA() {
if (select == null) {
return getTerm() + " - " + getCtepCode();
} else {
return getTerm() + " - " + getSelect() + " - " + getCtepCode();
}
}
@Transient
public List<CodedGrade> getGrades() {
if (getContextualGrades() == null || getContextualGrades().size() == 0) {
return Arrays.<CodedGrade> asList(Grade.values());
} else {
// this rigamarole is just to change the generic type without creating a new list
return Collections.<CodedGrade> unmodifiableList(getContextualGrades());
}
}
// //// BEAN PROPERTIES
public String getTerm() {
return term;
}
public void setTerm(String term) {
this.term = term;
}
@Column(name = "select_ae")
public String getSelect() {
return select;
}
public void setSelect(String select) {
this.select = select;
}
public String getCtepTerm() {
return ctepTerm;
}
public void setCtepTerm(String ctepTerm) {
this.ctepTerm = ctepTerm;
}
public String getCtepCode() {
return ctepCode;
}
public void setCtepCode(String ctepCode) {
this.ctepCode = ctepCode;
}
@ManyToOne
public CtcCategory getCategory() {
return category;
}
public void setCategory(CtcCategory category) {
this.category = category;
}
public boolean isOtherRequired() {
return otherRequired;
}
public void setOtherRequired(boolean otherRequired) {
this.otherRequired = otherRequired;
}
@OneToMany(mappedBy = "term")
@Cascade(value={CascadeType.ALL})
@OrderBy("grade")
public List<CtcGrade> getContextualGrades() {
return contextualGrades;
}
public void setContextualGrades(List<CtcGrade> contextualGrades) {
this.contextualGrades = contextualGrades;
}
}
| CAAERS-846
SVN-Revision: 6675
| projects/core/src/main/java/gov/nih/nci/cabig/caaers/domain/CtcTerm.java | CAAERS-846 |
|
Java | mit | a498eeec9971a562c6eb5664dcbbc5fcc5a2e1da | 0 | CS2103JAN2017-W14-B3/main,CS2103JAN2017-W14-B3/main | src/main/java/seedu/doit/ui/BrowserPanel.java | package seedu.doit.ui;
import javafx.event.Event;
import javafx.fxml.FXML;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Region;
import javafx.scene.web.WebView;
import seedu.doit.commons.util.FxViewUtil;
import seedu.doit.model.task.ReadOnlyTask;
/**
* The Browser Panel of the App.
*/
public class BrowserPanel extends UiPart<Region> {
private static final String FXML = "BrowserPanel.fxml";
@FXML
private WebView browser;
/**
* @param placeholder The AnchorPane where the BrowserPanel must be inserted
*/
public BrowserPanel(AnchorPane placeholder) {
super(FXML);
placeholder.setOnKeyPressed(Event::consume); // To prevent triggering events for typing inside the
// loaded Web page.
FxViewUtil.applyAnchorBoundaryParameters(browser, 0.0, 0.0, 0.0, 0.0);
placeholder.getChildren().add(browser);
}
public void loadTaskPage(ReadOnlyTask task) {
loadPage("https://www.google.com.sg/#safe=off&q=" + task.getName().fullName.replaceAll(" ", "+"));
}
public void loadPage(String url) {
browser.getEngine().load(url);
}
/**
* Frees resources allocated to the browser.
*/
public void freeResources() {
browser = null;
}
}
| Delete browser panel
| src/main/java/seedu/doit/ui/BrowserPanel.java | Delete browser panel |
||
Java | mit | 3735b22a4087b0955e6d3773bb29f82fa0cdc934 | 0 | dwslab/RoCA | package de.dwslab.risk.gui.jgraphx.actions;
import static de.dwslab.ai.util.Utils.createTempFile;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static org.apache.logging.log4j.Level.INFO;
import java.awt.event.ActionEvent;
import java.io.File;
import java.nio.file.Files;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javax.swing.AbstractAction;
import javax.swing.JOptionPane;
import javax.swing.ProgressMonitor;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.googlecode.rockit.app.RockItAPI;
import com.googlecode.rockit.app.result.RockItResult;
import de.dwslab.ai.riskmanagement.existential.ExistentialApi;
import de.dwslab.risk.gui.RoCA;
import de.dwslab.risk.gui.exception.RoCAException;
import de.dwslab.risk.gui.model.Entity;
import de.dwslab.risk.gui.model.Grounding;
import de.dwslab.risk.gui.model.Predicate;
public class RootCauseAnalysisAction extends AbstractAction {
private static final long serialVersionUID = -8773333133469858955L;
private static final Logger logger = LogManager.getLogger();
private RoCA roca;
public RootCauseAnalysisAction(RoCA roca) {
this.roca = roca;
}
@Override
public void actionPerformed(ActionEvent e) {
RootCauseAnalysisWorker worker = new RootCauseAnalysisWorker(roca);
worker.execute();
}
private static class RootCauseAnalysisWorker extends SwingWorker<List<Grounding>, Integer> {
private final RoCA roca;
private final ProgressMonitor monitor;
private Exception e;
public RootCauseAnalysisWorker(RoCA roca) {
this.roca = roca;
monitor = new ProgressMonitor(SwingUtilities.getWindowAncestor(roca),
"Running Root Cause Analysis...", "Please wait...", 0, 100);
monitor.setMillisToPopup(0);
monitor.setMillisToDecideToPopup(0);
}
@Override
protected List<Grounding> doInBackground() throws Exception {
try {
// Export the background knowledge to temporary files
logger.log(INFO, "exporting to temporary file");
monitor.setProgress(1);
monitor.setNote("Generating temporary MLN files...");
File mlnFile = createTempFile("mln-", ".mln");
File evidenceFile = createTempFile("evidence-", ".db");
roca.getBackgroundKnowledge().exportAsMln(mlnFile.toPath(), evidenceFile.toPath());
monitor.setProgress(10);
// Extend the MLN for abductive reasoning
logger.log(INFO, "extending MLN");
monitor.setNote("Extending MLN for root cause analysis...");
ExistentialApi api = new ExistentialApi();
File mlnExtFile = createTempFile("existential-mln-", ".mln");
api.existentialApi(mlnFile.getAbsolutePath(), evidenceFile.getAbsolutePath(),
mlnExtFile.getAbsolutePath(),
createTempFile("existential-evidence-", ".db").getAbsolutePath());
monitor.setProgress(20);
// Run RockIt
logger.log(INFO, "running map inference");
monitor.setNote("Executing MAP inference...");
RockItAPI rockit = new RockItAPI();
List<RockItResult> mapState = rockit.doMapState(mlnExtFile.getAbsolutePath(),
evidenceFile.getAbsolutePath());
monitor.setProgress(90);
Set<String> provided = Files.lines(evidenceFile.toPath())
.map(String::trim)
.collect(toSet());
// Process the result
logger.log(INFO, "processing result");
monitor.setNote("Processing MAP result...");
List<Grounding> rootCause = mapState
.stream()
.filter(m -> !provided.contains(m.getStatement()))
.map(m -> {
Predicate p = new Predicate(m.getPredicate());
List<Entity> e = m
.getObjects()
.stream()
.map(s -> Entity.get(
Integer.valueOf(s.substring(s.lastIndexOf('_') + 1))))
.collect(toList());
return new Grounding(p, e);
}).collect(toList());
monitor.setProgress(100);
Collections.sort(rootCause, (o1, o2) -> o1.toString().compareTo(o2.toString()));
return rootCause;
} catch (Exception e) {
this.e = e;
throw e;
}
}
@Override
protected void done() {
monitor.close();
if (e != null) {
throw new RoCAException("Exception during root cause analysis", e);
} else {
try {
List<Grounding> rootCause = get();
StringBuilder message = new StringBuilder();
message.append("Proposed root cause:\n");
rootCause.forEach(r -> {
message.append('\t');
message.append(r);
message.append('\n');
});
JOptionPane.showMessageDialog(roca, message);
} catch (Exception e) {
throw new RoCAException("Should Not Happen™", e);
}
}
}
}
}
| src/main/java/de/dwslab/risk/gui/jgraphx/actions/RootCauseAnalysisAction.java | package de.dwslab.risk.gui.jgraphx.actions;
import static de.dwslab.ai.util.Utils.createTempFile;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static org.apache.logging.log4j.Level.INFO;
import java.awt.event.ActionEvent;
import java.io.File;
import java.util.List;
import java.util.Set;
import javax.swing.AbstractAction;
import javax.swing.JOptionPane;
import javax.swing.ProgressMonitor;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.googlecode.rockit.app.RockItAPI;
import com.googlecode.rockit.app.result.RockItResult;
import de.dwslab.ai.riskmanagement.existential.ExistentialApi;
import de.dwslab.risk.gui.RoCA;
import de.dwslab.risk.gui.exception.RoCAException;
import de.dwslab.risk.gui.model.Entity;
import de.dwslab.risk.gui.model.Grounding;
import de.dwslab.risk.gui.model.Predicate;
public class RootCauseAnalysisAction extends AbstractAction {
private static final long serialVersionUID = -8773333133469858955L;
private static final Logger logger = LogManager.getLogger();
private RoCA roca;
public RootCauseAnalysisAction(RoCA roca) {
this.roca = roca;
}
@Override
public void actionPerformed(ActionEvent e) {
RootCauseAnalysisWorker worker = new RootCauseAnalysisWorker(roca);
worker.execute();
}
private static class RootCauseAnalysisWorker extends SwingWorker<Set<Grounding>, Integer> {
private final RoCA roca;
private final ProgressMonitor monitor;
private Exception e;
public RootCauseAnalysisWorker(RoCA roca) {
this.roca = roca;
monitor = new ProgressMonitor(
SwingUtilities.getWindowAncestor(roca),
"Running Root Cause Analysis...", "Please wait...", 0, 100);
monitor.setMillisToPopup(0);
monitor.setMillisToDecideToPopup(0);
}
@Override
protected Set<Grounding> doInBackground() throws Exception {
try {
// Export the background knowledge to temporary files
logger.log(INFO, "exporting to temporary file");
monitor.setProgress(1);
monitor.setNote("Generating temporary MLN files...");
File mlnFile = createTempFile("mln-", ".mln");
File evidenceFile = createTempFile("evidence-", ".db");
roca.getBackgroundKnowledge().exportAsMln(mlnFile.toPath(), evidenceFile.toPath());
monitor.setProgress(10);
// Extend the MLN for abductive reasoning
logger.log(INFO, "extending MLN");
monitor.setNote("Extending MLN for root cause analysis...");
ExistentialApi api = new ExistentialApi();
File mlnExtFile = createTempFile("existential-mln-", ".mln");
api.existentialApi(mlnFile.getAbsolutePath(), evidenceFile.getAbsolutePath(),
mlnExtFile.getAbsolutePath(),
createTempFile("existential-evidence-", ".db").getAbsolutePath());
monitor.setProgress(20);
// Run RockIt
logger.log(INFO, "running map inference");
monitor.setNote("Executing MAP inference...");
RockItAPI rockit = new RockItAPI();
List<RockItResult> mapState = rockit.doMapState(mlnExtFile.getAbsolutePath(),
evidenceFile.getAbsolutePath());
monitor.setProgress(90);
// Process the result
logger.log(INFO, "processing result");
monitor.setNote("Processing MAP result...");
Set<Grounding> rootCause = mapState
.stream()
.filter(m -> "hasRisk".equals(m.getPredicate()))
.map(m -> {
Predicate p = new Predicate(m.getPredicate());
List<Entity> e = m
.getObjects()
.stream()
.map(s -> Entity.get(Integer.valueOf(s.substring(s
.lastIndexOf('_') + 1)))
).collect(toList());
return new Grounding(p, e);
}).collect(toSet());
monitor.setProgress(100);
return rootCause;
} catch (Exception e) {
this.e = e;
throw e;
}
}
@Override
protected void done() {
monitor.close();
if (e != null) {
throw new RoCAException("Exception during root cause analysis", e);
} else {
try {
Set<Grounding> rootCause = get();
JOptionPane.showMessageDialog(roca, "Proposed root cause:\n" + rootCause);
} catch (Exception e) {
throw new RoCAException("Should Not Happen™", e);
}
}
}
}
}
| Format the displayed root cause a bit more readable. | src/main/java/de/dwslab/risk/gui/jgraphx/actions/RootCauseAnalysisAction.java | Format the displayed root cause a bit more readable. |
|
Java | mit | 630df80fc195960c93e8256c431d8dd5bbebd1f7 | 0 | ppati000/visualDFA | package gui.visualgraph;
import com.mxgraph.layout.hierarchical.mxHierarchicalLayout;
import com.mxgraph.model.mxCell;
import com.mxgraph.swing.mxGraphComponent;
import com.mxgraph.view.mxGraph;
import dfa.framework.AbstractBlock;
import dfa.framework.BlockState;
import dfa.framework.DFAExecution;
import gui.*;
import dfa.framework.AnalysisState;
import javax.imageio.ImageIO;
import javax.swing.*;
import javax.swing.border.LineBorder;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseWheelEvent;
import java.awt.event.MouseWheelListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.List;
/**
* @author Patrick Petrovic
*
* Panel used to display the visual graph.
*/
public class VisualGraphPanel extends JPanel {
private List<UIBasicBlock> basicBlocks;
private List<UIEdge> edges;
private mxGraphComponent graphComponent;
private JToggleButton jumpToAction;
private JButton graphExport;
private mxGraph graph;
private Frame parentFrame = null;
private Map<AbstractBlock, UIAbstractBlock> blockMap;
private GraphExporter graphExporter;
private UIAbstractBlock selectedBlock;
private boolean hasRendered = false;
private boolean isExportInProgress = false;
private final String outputPath = System.getProperty("user.home") + File.separator + "visualDFA";
private final int fakeProgressBarMaxValue = 42;
/**
* Creates a new {@code VisualGraphPanel}.
*/
public VisualGraphPanel() {
this.basicBlocks = new ArrayList<>();
this.edges = new ArrayList<>();
this.blockMap = new HashMap<>();
setLayout(new BorderLayout());
jumpToAction = new JToggleButton("Jump to Action");
graphExport = new JButton("Export Graph");
decorateGraphButton(jumpToAction, false);
decorateGraphButton(graphExport, true);
jumpToAction.setIcon(IconLoader.loadIcon("icons/map-marker.png", 0.2));
jumpToAction.setPreferredSize(new Dimension(145, 40));
graphExport.setIcon(IconLoader.loadIcon("icons/share-symbol.png", 0.2));
graphExport.setPreferredSize(new Dimension(130, 40));
graphExporter = new GraphExporter();
JPanel buttonGroup = new JPanel();
buttonGroup.setLayout(new FlowLayout(FlowLayout.RIGHT));
buttonGroup.setComponentOrientation(ComponentOrientation.RIGHT_TO_LEFT);
buttonGroup.setSize(getWidth(), 60);
buttonGroup.add(graphExport);
buttonGroup.add(jumpToAction);
buttonGroup.setBackground(Colors.WHITE_BACKGROUND.getColor());
add(buttonGroup, BorderLayout.NORTH);
initialGraphState();
}
/**
* Inserts a given {@code UIBasicBlock} which will be rendered when {@code renderGraph()} is called.
*
* @param block
* the block to be added
*/
public void insertBasicBlock(UIBasicBlock block) {
basicBlocks.add(block);
}
protected void setBlockMap(Map<AbstractBlock, UIAbstractBlock> map) {
this.blockMap = map;
}
/**
* Inserts a given {@code UIEdge} which will be rendered when {@code renderGraph()} is called.
*
* @param edge
* the edge to be added
*/
public void insertEdge(UIEdge edge) {
edges.add(edge);
}
/**
* Renders all previously inserted blocks and edges, and invokes the auto-layouter if first render.
*
* @param dfa
* the {@code DFAExecution} that should be used to render this graph
*/
public void renderGraph(final DFAExecution dfa) {
AnalysisState analysisState = dfa.getCurrentAnalysisState();
graph.getModel().beginUpdate();
for (UIBasicBlock block : basicBlocks) {
block.render(analysisState);
}
for (UIEdge edge : edges) {
edge.render(analysisState);
}
// Apply layout before rendering child blocks, so that the layouter doesn't mess with them.
if (!hasRendered) {
autoLayoutAndShowGraph();
hasRendered = true;
graphExport.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
GraphExportBox exportBox = new GraphExportBox(parentFrame);
if (exportBox.getOption() == Option.YES_OPTION) {
graphExport.setEnabled(false);
float scale = exportBox.getQuality().ordinal() + 1;
final long timestamp = new Date().getTime();
if (exportBox.isBatchExport()) {
new GraphBatchExportThread(graphExporter, dfa, scale, exportBox.includeLineStates(), new GraphExportProgressView(outputPath) {
private int index = 0;
@Override
public void onImageExported(BufferedImage image) {
try {
saveImage(image, timestamp, index);
index++;
} catch (IOException ex) {
showExportErrorBox();
}
}
@Override
public void done() {
graphExport.setEnabled(true);
super.done();
}
}).start();
} else {
BlockState state = selectedBlock == null ? null : dfa.getCurrentAnalysisState().getBlockState(selectedBlock.getDFABlock());
final GraphExportProgressView view = new GraphExportProgressView(outputPath);
// Fake a progress bar to the user if no batch export, so it is not too fast.
new Thread() {
@Override
public void run() {
view.setMaxStep(fakeProgressBarMaxValue);
try {
Thread.sleep(100);
for (int i = 0; i < fakeProgressBarMaxValue; i++) {
Thread.sleep(15);
view.setExportStep(i);
}
graphExport.setEnabled(true);
} catch (InterruptedException ex) {
// Ignored.
}
view.done();
}
}.start();
try {
saveImage(graphExporter.exportCurrentGraph(graph, scale, selectedBlock, state), timestamp, 0);
} catch (IOException ex) {
showExportErrorBox();
}
}
}
}
});
}
for (UIBasicBlock block : basicBlocks) {
block.renderChildren(analysisState);
}
graph.getModel().endUpdate();
if (jumpToAction.isSelected() && blockMap != null) {
graphComponent.getGraph().clearSelection();
AnalysisState currentState = dfa.getCurrentAnalysisState();
AbstractBlock currentBlock;
if (currentState.getCurrentElementaryBlockIndex() == -1) {
currentBlock = currentState.getCurrentBasicBlock();
} else {
currentBlock = currentState.getCurrentElementaryBlock();
}
if (currentBlock != null) {
mxCell currentMxCell = blockMap.get(currentBlock).getMxCell();
graph.getSelectionModel().addCell(currentMxCell);
graphComponent.scrollCellToVisible(currentMxCell);
} else {
graph.getSelectionModel().clear();
}
}
}
/**
* Makes the currently shown graph invisible and deletes it.
*/
public void deleteGraph() {
initialGraphState();
repaint();
}
/**
* Increases the graph's zoom level.
*/
public void zoomIn() {
graphComponent.zoomIn();
}
/**
* Decreases the graph's zoom level.
*/
public void zoomOut() {
graphComponent.zoomOut();
}
/**
* Sets the {@code VisualGraphPanel}'s activation state
*
* @param activated
* Iff {@code true}, user interaction is allowed.
*/
public void setActivated(boolean activated) {
jumpToAction.setEnabled(activated);
graphExport.setEnabled(activated);
}
/**
* Returns a list of {@code BasicBlock}s in the graph.
*
* @return list of {@code BasicBlock}s
*/
public List<UIBasicBlock> getBasicBlocks() {
return basicBlocks;
}
/**
* Returns a list of {@code UIEdge}s in the graph.
*
* @return list of {@code UIEdge}s
*/
public List<UIEdge> getEdges() {
return edges;
}
/**
* Returns this panel's {@code mxGraph}.
*
* @return {@code mxGraph}
*/
public mxGraph getMxGraph() {
return graph;
}
/**
* Returns this panel's {@code mxGraphComponent}.
*
* @return {@code mxGraphComponent}
*/
public mxGraphComponent getGraphComponent() {
return graphComponent;
}
/**
* Sets the parent frame (used for modals)
*
* @param frame
* the parent frame
*/
public void setParentFrame(Frame frame) {
this.parentFrame = frame;
}
/**
* Enables or disables Jump to Action
*
* @param enabled
* if it should be enabled or disabled
*/
public void setJumpToAction(boolean enabled) {
jumpToAction.setSelected(enabled);
}
/**
* Tells whether Jump to Action is enabled.
*
* @return true iff Jump to Action enabled
*/
public boolean isJumpToActionEnabled() {
return jumpToAction.isSelected();
}
/**
* Used by {@code GraphUIController} to set the panel's selected {@code UIAbstractBlock} for later use.
*
* @param block
* the block that was determined as the current one by the {@code GraphUIController}.
*/
public void setSelectedBlock(UIAbstractBlock block) {
this.selectedBlock = block;
}
/**
* Used by {@code GraphUIController} to get the selected block when updating the {@code StatePanelOpen}.
*
* @return the currently selected block
*/
public UIAbstractBlock getSelectedBlock() {
return this.selectedBlock;
}
private void saveImage(BufferedImage image, long timestamp, int index) throws IOException {
File outputDir = new File(outputPath);
if (!new File(outputPath).exists()) {
boolean result = outputDir.mkdir();
if (!result) {
throw new IOException();
}
}
File outputFile = new File(outputPath + File.separator + "export_" + timestamp + "_" + index + ".png");
ImageIO.write(image, "PNG", outputFile);
}
private void showExportErrorBox() {
new MessageBox(parentFrame, "Graph Export Failed", "An error occured while saving your image(s). \n" +
"Please ensure " + outputPath + " is a writable directory.");
}
private void autoLayoutAndShowGraph() {
new mxHierarchicalLayout(graph).execute(graph.getDefaultParent());
graphComponent.setVisible(true);
graphComponent.doLayout();
add(graphComponent, BorderLayout.CENTER);
revalidate();
}
private void initialGraphState() {
basicBlocks = new ArrayList<>();
edges = new ArrayList<>();
graph = new RestrictedMxGraph();
hasRendered = false;
if (graphComponent != null) {
remove(graphComponent);
revalidate();
}
graphComponent = new mxGraphComponent(graph);
graphComponent.setBorder(new LineBorder(new Color(188, 230, 254)));
graphComponent.getViewport().setBackground(new Color(251, 253, 255));
graphComponent.addMouseWheelListener(new MouseWheelListener() {
// On macOS, scrolling is inverted by default, so we invert zooming too.
private final boolean isMac = true;
@Override
public void mouseWheelMoved(MouseWheelEvent e) {
if (e.getModifiers() == MouseWheelEvent.ALT_MASK) {
if (e.getWheelRotation() > 0 ^ !isMac) { // XOR
zoomIn();
} else if (e.getWheelRotation() < 0 ^ !isMac) { // XOR
zoomOut();
}
}
}
});
}
private void decorateGraphButton(final AbstractButton button, final boolean highlightClickedOnly) {
button.setOpaque(true);
button.setBackground(Colors.LIGHT_TEXT.getColor());
button.setForeground(Colors.DARK_TEXT.getColor());
button.setBorder(new LineBorder(Colors.LIGHT_BACKGROUND.getColor(), 2, true));
// Remove default "Space Bar = click" behavior, so it doesn't interfere with keyboard shortcuts.
button.getInputMap().put(KeyStroke.getKeyStroke("SPACE"), "none");
final ButtonModel startModel = button.getModel();
startModel.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
if (highlightClickedOnly && button.getModel().isPressed() || button.getModel().isSelected()) {
button.setBackground(Colors.LIGHT_BACKGROUND.getColor());
} else {
button.setBackground(Colors.WHITE_BACKGROUND.getColor());
}
}
});
}
}
| Implementierung/visual-dfa/src/main/java/gui/visualgraph/VisualGraphPanel.java | package gui.visualgraph;
import com.mxgraph.layout.hierarchical.mxHierarchicalLayout;
import com.mxgraph.model.mxCell;
import com.mxgraph.swing.mxGraphComponent;
import com.mxgraph.view.mxGraph;
import dfa.framework.AbstractBlock;
import dfa.framework.BlockState;
import dfa.framework.DFAExecution;
import gui.*;
import dfa.framework.AnalysisState;
import javax.imageio.ImageIO;
import javax.swing.*;
import javax.swing.border.LineBorder;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseWheelEvent;
import java.awt.event.MouseWheelListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.List;
/**
* @author Patrick Petrovic
*
* Panel used to display the visual graph.
*/
public class VisualGraphPanel extends JPanel {
private List<UIBasicBlock> basicBlocks;
private List<UIEdge> edges;
private mxGraphComponent graphComponent;
private JToggleButton jumpToAction;
private JButton graphExport;
private mxGraph graph;
private Frame parentFrame = null;
private Map<AbstractBlock, UIAbstractBlock> blockMap;
private GraphExporter graphExporter;
private UIAbstractBlock selectedBlock;
private boolean hasRendered = false;
private boolean isExportInProgress = false;
private final String outputPath = System.getProperty("user.home") + File.separator + "visualDFA";
private final int fakeProgressBarMaxValue = 42;
/**
* Creates a new {@code VisualGraphPanel}.
*/
public VisualGraphPanel() {
this.basicBlocks = new ArrayList<>();
this.edges = new ArrayList<>();
this.blockMap = new HashMap<>();
setLayout(new BorderLayout());
jumpToAction = new JToggleButton("Jump to Action");
graphExport = new JButton("Export Graph");
decorateGraphButton(jumpToAction, false);
decorateGraphButton(graphExport, true);
jumpToAction.setIcon(IconLoader.loadIcon("icons/map-marker.png", 0.2));
jumpToAction.setPreferredSize(new Dimension(145, 40));
graphExport.setIcon(IconLoader.loadIcon("icons/share-symbol.png", 0.2));
graphExport.setPreferredSize(new Dimension(130, 40));
graphExporter = new GraphExporter();
JPanel buttonGroup = new JPanel();
buttonGroup.setLayout(new FlowLayout(FlowLayout.RIGHT));
buttonGroup.setComponentOrientation(ComponentOrientation.RIGHT_TO_LEFT);
buttonGroup.setSize(getWidth(), 60);
buttonGroup.add(graphExport);
buttonGroup.add(jumpToAction);
buttonGroup.setBackground(Colors.WHITE_BACKGROUND.getColor());
add(buttonGroup, BorderLayout.NORTH);
initialGraphState();
}
/**
* Inserts a given {@code UIBasicBlock} which will be rendered when {@code renderGraph()} is called.
*
* @param block
* the block to be added
*/
public void insertBasicBlock(UIBasicBlock block) {
basicBlocks.add(block);
}
protected void setBlockMap(Map<AbstractBlock, UIAbstractBlock> map) {
this.blockMap = map;
}
/**
* Inserts a given {@code UIEdge} which will be rendered when {@code renderGraph()} is called.
*
* @param edge
* the edge to be added
*/
public void insertEdge(UIEdge edge) {
edges.add(edge);
}
/**
* Renders all previously inserted blocks and edges, and invokes the auto-layouter if first render.
*
* @param dfa
* the {@code DFAExecution} that should be used to render this graph
*/
public void renderGraph(final DFAExecution dfa) {
AnalysisState analysisState = dfa.getCurrentAnalysisState();
graph.getModel().beginUpdate();
for (UIBasicBlock block : basicBlocks) {
block.render(analysisState);
}
for (UIEdge edge : edges) {
edge.render(analysisState);
}
// Apply layout before rendering child blocks, so that the layouter doesn't mess with them.
if (!hasRendered) {
autoLayoutAndShowGraph();
hasRendered = true;
graphExport.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
GraphExportBox exportBox = new GraphExportBox(parentFrame);
if (exportBox.getOption() == Option.YES_OPTION) {
graphExport.setEnabled(false);
float scale = exportBox.getQuality().ordinal() + 1;
final long timestamp = new Date().getTime();
if (exportBox.isBatchExport()) {
new GraphBatchExportThread(graphExporter, dfa, scale, exportBox.includeLineStates(), new GraphExportProgressView(outputPath) {
private int index = 0;
@Override
public void onImageExported(BufferedImage image) {
try {
saveImage(image, timestamp, index);
index++;
} catch (IOException ex) {
showExportErrorBox();
}
}
@Override
public void done() {
graphExport.setEnabled(true);
super.done();
}
}).start();
} else {
BlockState state = selectedBlock == null ? null : dfa.getCurrentAnalysisState().getBlockState(selectedBlock.getDFABlock());
final GraphExportProgressView view = new GraphExportProgressView(outputPath);
// Fake a progress bar to the user if no batch export, so it is not too fast.
new Thread() {
@Override
public void run() {
view.setMaxStep(fakeProgressBarMaxValue);
try {
Thread.sleep(100);
for (int i = 0; i < fakeProgressBarMaxValue; i++) {
Thread.sleep(15);
view.setExportStep(i);
}
graphExport.setEnabled(true);
} catch (InterruptedException ex) {
// Ignored.
}
view.done();
}
}.start();
try {
saveImage(graphExporter.exportCurrentGraph(graph, scale, selectedBlock, state), timestamp, 0);
} catch (IOException ex) {
showExportErrorBox();
}
}
}
}
});
}
for (UIBasicBlock block : basicBlocks) {
block.renderChildren(analysisState);
}
graph.getModel().endUpdate();
if (jumpToAction.isSelected() && blockMap != null) {
graphComponent.getGraph().clearSelection();
AnalysisState currentState = dfa.getCurrentAnalysisState();
AbstractBlock currentBlock;
if (currentState.getCurrentElementaryBlockIndex() == -1) {
currentBlock = currentState.getCurrentBasicBlock();
} else {
currentBlock = currentState.getCurrentElementaryBlock();
}
if (currentBlock != null) {
mxCell currentMxCell = blockMap.get(currentBlock).getMxCell();
graph.getSelectionModel().addCell(currentMxCell);
graphComponent.scrollCellToVisible(currentMxCell);
} else {
graph.getSelectionModel().clear();
}
}
}
/**
* Makes the currently shown graph invisible and deletes it.
*/
public void deleteGraph() {
initialGraphState();
repaint();
}
/**
* Increases the graph's zoom level.
*/
public void zoomIn() {
graphComponent.zoomIn();
}
/**
* Decreases the graph's zoom level.
*/
public void zoomOut() {
graphComponent.zoomOut();
}
/**
* Sets the {@code VisualGraphPanel}'s activation state
*
* @param activated
* Iff {@code true}, user interaction is allowed.
*/
public void setActivated(boolean activated) {
jumpToAction.setEnabled(activated);
graphExport.setEnabled(activated);
}
/**
* Returns a list of {@code BasicBlock}s in the graph.
*
* @return list of {@code BasicBlock}s
*/
public List<UIBasicBlock> getBasicBlocks() {
return basicBlocks;
}
/**
* Returns a list of {@code UIEdge}s in the graph.
*
* @return list of {@code UIEdge}s
*/
public List<UIEdge> getEdges() {
return edges;
}
/**
* Returns this panel's {@code mxGraph}.
*
* @return {@code mxGraph}
*/
public mxGraph getMxGraph() {
return graph;
}
/**
* Returns this panel's {@code mxGraphComponent}.
*
* @return {@code mxGraphComponent}
*/
public mxGraphComponent getGraphComponent() {
return graphComponent;
}
/**
* Sets the parent frame (used for modals)
*
* @param frame
* the parent frame
*/
public void setParentFrame(Frame frame) {
this.parentFrame = frame;
}
/**
* Enables or disables Jump to Action
*
* @param enabled
* if it should be enabled or disabled
*/
public void setJumpToAction(boolean enabled) {
jumpToAction.setSelected(enabled);
}
/**
* Tells whether Jump to Action is enabled.
*
* @return true iff Jump to Action enabled
*/
public boolean isJumpToActionEnabled() {
return jumpToAction.isSelected();
}
/**
* Used by {@code GraphUIController} to set the panel's selected {@code UIAbstractBlock} for later use.
*
* @param block
* the block that was determined as the current one by the {@code GraphUIController}.
*/
public void setSelectedBlock(UIAbstractBlock block) {
this.selectedBlock = block;
}
/**
* Used by {@code GraphUIController} to get the selected block when updating the {@code StatePanelOpen}.
*
* @return the currently selected block
*/
public UIAbstractBlock getSelectedBlock() {
return this.selectedBlock;
}
private void saveImage(BufferedImage image, long timestamp, int index) throws IOException {
File outputDir = new File(outputPath);
if (!new File(outputPath).exists()) {
boolean result = outputDir.mkdir();
if (!result) {
throw new IOException();
}
}
File outputFile = new File(outputPath + File.separator + "export_" + timestamp + "_" + index + ".png");
ImageIO.write(image, "PNG", outputFile);
}
private void showExportErrorBox() {
new MessageBox(parentFrame, "Graph Export Failed", "An error occured while saving your image(s). \n" +
"Please ensure " + outputPath + " is a writable directory.");
}
private void autoLayoutAndShowGraph() {
new mxHierarchicalLayout(graph).execute(graph.getDefaultParent());
graphComponent.setVisible(true);
graphComponent.doLayout();
add(graphComponent, BorderLayout.CENTER);
revalidate();
}
private void initialGraphState() {
basicBlocks = new ArrayList<>();
edges = new ArrayList<>();
graph = new RestrictedMxGraph();
hasRendered = false;
if (graphComponent != null) {
remove(graphComponent);
revalidate();
}
graphComponent = new mxGraphComponent(graph);
graphComponent.setBorder(new LineBorder(new Color(188, 230, 254)));
graphComponent.getViewport().setBackground(new Color(251, 253, 255));
graphComponent.addMouseWheelListener(new MouseWheelListener() {
@Override
public void mouseWheelMoved(MouseWheelEvent e) {
if (e.getWheelRotation() > 0) {
zoomIn();
} else if (e.getWheelRotation() < 0) {
zoomOut();
}
}
});
}
private void decorateGraphButton(final AbstractButton button, final boolean highlightClickedOnly) {
button.setOpaque(true);
button.setBackground(Colors.LIGHT_TEXT.getColor());
button.setForeground(Colors.DARK_TEXT.getColor());
button.setBorder(new LineBorder(Colors.LIGHT_BACKGROUND.getColor(), 2, true));
// Remove default "Space Bar = click" behavior, so it doesn't interfere with keyboard shortcuts.
button.getInputMap().put(KeyStroke.getKeyStroke("SPACE"), "none");
final ButtonModel startModel = button.getModel();
startModel.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
if (highlightClickedOnly && button.getModel().isPressed() || button.getModel().isSelected()) {
button.setBackground(Colors.LIGHT_BACKGROUND.getColor());
} else {
button.setBackground(Colors.WHITE_BACKGROUND.getColor());
}
}
});
}
}
| Scroll by default, horizontal scroll using SHIFT, zoom using ALT
| Implementierung/visual-dfa/src/main/java/gui/visualgraph/VisualGraphPanel.java | Scroll by default, horizontal scroll using SHIFT, zoom using ALT |
|
Java | mit | 7c97ba7b426f22e4f948ba2998bdd7157c95e0a6 | 0 | DemigodsRPG/Demigods3 | package com.censoredsoftware.Demigods.Engine.Command;
import org.bukkit.*;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.command.ConsoleCommandSender;
import org.bukkit.entity.Horse;
import org.bukkit.entity.Player;
import org.bukkit.scheduler.BukkitRunnable;
import com.censoredsoftware.Demigods.Engine.Demigods;
import com.censoredsoftware.Demigods.Engine.Object.Battle.Battle;
import com.censoredsoftware.Demigods.Engine.Object.Player.PlayerCharacter;
import com.censoredsoftware.Demigods.Engine.Object.Player.PlayerWrapper;
import com.censoredsoftware.Demigods.Engine.Utility.MiscUtility;
import com.censoredsoftware.Demigods.Engine.Utility.SpigotUtility;
public class DevelopmentCommands implements CommandExecutor
{
private static BukkitRunnable circle;
@Override
public boolean onCommand(CommandSender sender, Command command, String labels, String[] args)
{
if(command.getName().equalsIgnoreCase("removechar")) return removeChar(sender, args);
else if(command.getName().equalsIgnoreCase("test1")) return test1(sender, args);
else if(command.getName().equalsIgnoreCase("test2")) return test2(sender, args);
else if(command.getName().equalsIgnoreCase("hspawn")) return hspawn(sender);
else if(command.getName().equalsIgnoreCase("soundtest")) return soundTest(sender, args);
return false;
}
private static boolean test1(CommandSender sender, final String[] args)
{
Player player = (Player) sender;
player.sendMessage("Removing data...");
for(Battle battle : Battle.getAll())
{
battle.delete();
}
player.sendMessage("Data removed!");
return true;
}
private static boolean test2(CommandSender sender, final String[] args)
{
Player player = (Player) sender;
if(!SpigotUtility.runningSpigot()) return true;
if(args.length != 1)
{
player.sendMessage(ChatColor.RED + "I need more info.");
return false;
}
try
{
Effect effect = Effect.getByName(args[0].toUpperCase());
effect.getType();
}
catch(Exception notImportant)
{
player.sendMessage(ChatColor.RED + "Not a valid effect name.");
return false;
}
final Location center = player.getLocation();
if(circle == null)
{
circle = new BukkitRunnable()
{
@Override
public void run()
{
SpigotUtility.drawCircle(center, Effect.getByName(args[0].toUpperCase()), 16, 60);
}
};
Bukkit.getScheduler().scheduleSyncRepeatingTask(Demigods.plugin, circle, 20, 20);
}
else
{
circle.cancel();
circle = null;
}
return true;
}
private static boolean hspawn(CommandSender sender)
{
Player player = (Player) sender;
// This SHOULD happen automatically, but bukkit doesn't do this, so we need to.
if(player.isInsideVehicle() && player.getVehicle() instanceof Horse)
{
Horse horse = (Horse) player.getVehicle();
horse.eject();
horse.teleport(player.getLocation().getWorld().getSpawnLocation());
horse.setPassenger(player);
player.sendMessage(ChatColor.YELLOW + "Teleported to spawn...");
}
return true;
}
private static boolean soundTest(CommandSender sender, final String[] args)
{
if(sender instanceof ConsoleCommandSender) return false;
Player player = (Player) sender;
try
{
Sound sound = Sound.valueOf(args[0].toUpperCase());
if(!MiscUtility.isFloat(args[1].toUpperCase()))
{
player.sendMessage(ChatColor.RED + "Set a pitch, ie: 1F");
return false;
}
else
{
player.playSound(player.getLocation(), sound, 1F, Float.parseFloat(args[1].toUpperCase()));
player.sendMessage(ChatColor.YELLOW + "Sound played.");
return true;
}
}
catch(Exception ignored)
{}
player.sendMessage(ChatColor.RED + "Wrong arguments, please try again.");
return false;
}
private static boolean removeChar(CommandSender sender, String[] args)
{
if(args.length != 1) return false;
// Define args
Player player = Bukkit.getOfflinePlayer(sender.getName()).getPlayer();
String charName = args[0];
if(PlayerWrapper.hasCharName(player, charName))
{
PlayerCharacter character = PlayerCharacter.getCharacterByName(charName);
character.remove();
sender.sendMessage(ChatColor.RED + "Character removed!");
}
else sender.sendMessage(ChatColor.RED + "There was an error while removing your character.");
return true;
}
}
| src/main/java/com/censoredsoftware/Demigods/Engine/Command/DevelopmentCommands.java | package com.censoredsoftware.Demigods.Engine.Command;
import org.bukkit.*;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.command.ConsoleCommandSender;
import org.bukkit.entity.Horse;
import org.bukkit.entity.Player;
import org.bukkit.scheduler.BukkitRunnable;
import com.censoredsoftware.Demigods.Engine.Demigods;
import com.censoredsoftware.Demigods.Engine.Object.Battle.Battle;
import com.censoredsoftware.Demigods.Engine.Object.Player.PlayerCharacter;
import com.censoredsoftware.Demigods.Engine.Object.Player.PlayerWrapper;
import com.censoredsoftware.Demigods.Engine.Utility.MiscUtility;
import com.censoredsoftware.Demigods.Engine.Utility.SpigotUtility;
public class DevelopmentCommands implements CommandExecutor
{
private static BukkitRunnable circle;
@Override
public boolean onCommand(CommandSender sender, Command command, String labels, String[] args)
{
if(command.getName().equalsIgnoreCase("removechar")) return removeChar(sender, args);
else if(command.getName().equalsIgnoreCase("test1")) return test1(sender, args);
else if(command.getName().equalsIgnoreCase("test2")) return test2(sender, args);
else if(command.getName().equalsIgnoreCase("hspawn")) return hspawn(sender);
else if(command.getName().equalsIgnoreCase("soundtest")) return soundTest(sender, args);
return false;
}
private static boolean test1(CommandSender sender, final String[] args)
{
Player player = (Player) sender;
player.sendMessage("Removing data...");
for(Battle battle : Battle.getAll())
{
battle.delete();
}
player.sendMessage("Data removed!");
return true;
}
private static boolean test2(CommandSender sender, final String[] args)
{
Player player = (Player) sender;
if(!SpigotUtility.runningSpigot()) return true;
if(args.length != 1)
{
player.sendMessage(ChatColor.RED + "I need more info.");
return false;
}
try
{
Effect effect = Effect.getByName(args[0].toUpperCase());
effect.getType();
}
catch(Exception notImportant)
{
player.sendMessage(ChatColor.RED + "Not a valid effect name.");
return false;
}
final Location center = player.getLocation();
if(circle == null)
{
circle = new BukkitRunnable()
{
@Override
public void run()
{
SpigotUtility.drawCircle(center, Effect.getByName(args[0].toUpperCase()), 16, 60);
}
};
Bukkit.getScheduler().scheduleSyncRepeatingTask(Demigods.plugin, circle, 20, 20);
}
else
{
Bukkit.getScheduler().cancelTask(circle.getTaskId());
circle = null;
}
return true;
}
private static boolean hspawn(CommandSender sender)
{
Player player = (Player) sender;
// This SHOULD happen automatically, but bukkit doesn't do this, so we need to.
if(player.isInsideVehicle() && player.getVehicle() instanceof Horse)
{
Horse horse = (Horse) player.getVehicle();
horse.eject();
horse.teleport(player.getLocation().getWorld().getSpawnLocation());
horse.setPassenger(player);
player.sendMessage(ChatColor.YELLOW + "Teleported to spawn...");
}
return true;
}
private static boolean soundTest(CommandSender sender, final String[] args)
{
if(sender instanceof ConsoleCommandSender) return false;
Player player = (Player) sender;
try
{
Sound sound = Sound.valueOf(args[0].toUpperCase());
if(!MiscUtility.isFloat(args[1].toUpperCase()))
{
player.sendMessage(ChatColor.RED + "Set a pitch, ie: 1F");
return false;
}
else
{
player.playSound(player.getLocation(), sound, 1F, Float.parseFloat(args[1].toUpperCase()));
player.sendMessage(ChatColor.YELLOW + "Sound played.");
return true;
}
}
catch(Exception ignored)
{}
player.sendMessage(ChatColor.RED + "Wrong arguments, please try again.");
return false;
}
private static boolean removeChar(CommandSender sender, String[] args)
{
if(args.length != 1) return false;
// Define args
Player player = Bukkit.getOfflinePlayer(sender.getName()).getPlayer();
String charName = args[0];
if(PlayerWrapper.hasCharName(player, charName))
{
PlayerCharacter character = PlayerCharacter.getCharacterByName(charName);
character.remove();
sender.sendMessage(ChatColor.RED + "Character removed!");
}
else sender.sendMessage(ChatColor.RED + "There was an error while removing your character.");
return true;
}
}
| Make it actually cancel.
| src/main/java/com/censoredsoftware/Demigods/Engine/Command/DevelopmentCommands.java | Make it actually cancel. |
|
Java | mit | 56d969dc26da1524b422ebb104cd57ed4021c213 | 0 | davejohnclark/test-context | package com.dc.test.context.junit;
import static java.util.Arrays.asList;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import com.dc.test.context.AfterContext;
import com.dc.test.context.BeforeContext;
import com.dc.test.context.Context;
/**
* Junit Rule that invokes any methods annotated as "Before" the context of the test being run.
*
* @author dave
*
*/
public class ContextRule implements TestRule {
private Object testInstance;
private Map<String, List<Method>> beforeContexts = new HashMap<String, List<Method>>();
private Map<String, List<Method>> afterContexts = new HashMap<String, List<Method>>();
public ContextRule(Object testInstance) {
this.testInstance = testInstance;
for (Method method : testInstance.getClass().getMethods()) {
storeBeforeContext(method);
storeAfterContext(method);
}
}
private void storeBeforeContext(Method method) {
BeforeContext beforeContext = method.getAnnotation(BeforeContext.class);
if (beforeContext != null) {
addToStore(beforeContext.value(), method, beforeContexts);
}
}
private void storeAfterContext(Method method) {
AfterContext afterContext = method.getAnnotation(AfterContext.class);
if (afterContext != null) {
addToStore(afterContext.value(), method, afterContexts);
}
}
private void addToStore(String contextName, Method method, Map<String, List<Method>> contextStore) {
List<Method> methods = contextStore.get(contextName);
if (methods == null) {
methods = new ArrayList<Method>();
contextStore.put(contextName, methods);
}
methods.add(method);
}
@Override
public Statement apply(final Statement base, final Description description) {
return new Statement() {
@Override
public void evaluate() throws Throwable {
List<String> contextNames = getContextNames(description);
executeBeforeContexts(contextNames);
try {
base.evaluate();
} finally {
executeAfterContexts(contextNames);
}
}
};
}
private List<String> getContextNames(Description description) {
Context context = description.getAnnotation(Context.class);
if (context.whenEachOf().length >= 1) {
return asList(context.whenEachOf());
}
return asList(context.when());
}
private void executeBeforeContexts(List<String> contextNames) {
executeContexts(contextNames, beforeContexts);
}
private void executeAfterContexts(List<String> contextNames) {
executeContexts(contextNames, afterContexts);
}
private void executeContexts(List<String> contextNames, Map<String, List<Method>> contextStore) {
for (String name : contextNames) {
List<Method> methods = contextStore.get(name);
if (methods == null) {
continue;
}
invokeEachMethod(methods);
}
}
private void invokeEachMethod(List<Method> methods) {
for (Method method : methods) {
try {
invoke(method);
} catch (Exception e) {
throw new ContextRuleException(e);
}
}
}
private void invoke(Method method) throws IllegalAccessException, InvocationTargetException {
method.invoke(testInstance, (Object[]) null);
}
/**
* Indicates an exception was thrown during the execution of the ContextRule.
*
* @author dave
*
*/
private class ContextRuleException extends RuntimeException {
private static final long serialVersionUID = 5690097822353534540L;
public ContextRuleException(Exception e) {
super(e);
}
}
}
| test-context/src/main/java/com/dc/test/context/junit/ContextRule.java | package com.dc.test.context.junit;
import static java.util.Arrays.asList;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import com.dc.test.context.AfterContext;
import com.dc.test.context.BeforeContext;
import com.dc.test.context.Context;
/**
* Junit Rule that invokes any methods annotated as "Before" the context of the test being run.
*
* @author dave
*
*/
public class ContextRule implements TestRule {
private Object testInstance;
private Map<String, List<Method>> beforeContexts = new HashMap<String, List<Method>>();
private Map<String, List<Method>> afterContexts = new HashMap<String, List<Method>>();
public ContextRule(Object testInstance) {
this.testInstance = testInstance;
for (Method method : testInstance.getClass().getMethods()) {
storeBeforeContext(method);
storeAfterContext(method);
}
}
private void storeBeforeContext(Method method) {
BeforeContext beforeContext = method.getAnnotation(BeforeContext.class);
if (beforeContext != null) {
addToStore(beforeContext.value(), method, beforeContexts);
}
}
private void storeAfterContext(Method method) {
AfterContext afterContext = method.getAnnotation(AfterContext.class);
if (afterContext != null) {
addToStore(afterContext.value(), method, afterContexts);
}
}
private void addToStore(String contextName, Method method, Map<String, List<Method>> contextStore) {
List<Method> methods = contextStore.get(contextName);
if (methods == null) {
methods = new ArrayList<Method>();
contextStore.put(contextName, methods);
}
methods.add(method);
}
@Override
public Statement apply(final Statement base, final Description description) {
return new Statement() {
@Override
public void evaluate() throws Throwable {
List<String> contextNames = getContextNames(description);
executeBeforeContexts(contextNames);
base.evaluate();
executeAfterContexts(contextNames);
}
};
}
private List<String> getContextNames(Description description) {
Context context = description.getAnnotation(Context.class);
if (context.whenEachOf().length >= 1) {
return asList(context.whenEachOf());
}
return asList(context.when());
}
private void executeBeforeContexts(List<String> contextNames) {
executeContexts(contextNames, beforeContexts);
}
private void executeAfterContexts(List<String> contextNames) {
executeContexts(contextNames, afterContexts);
}
private void executeContexts(List<String> contextNames, Map<String, List<Method>> contextStore) {
for (String name : contextNames) {
List<Method> methods = contextStore.get(name);
if (methods == null) {
continue;
}
invokeEachMethod(methods);
}
}
private void invokeEachMethod(List<Method> methods) {
for (Method method : methods) {
try {
invoke(method);
} catch (Exception e) {
throw new ContextRuleException(e);
}
}
}
private void invoke(Method method) throws IllegalAccessException, InvocationTargetException {
method.invoke(testInstance, (Object[]) null);
}
/**
* Indicates an exception was thrown during the execution of the ContextRule.
*
* @author dave
*
*/
private class ContextRuleException extends RuntimeException {
private static final long serialVersionUID = 5690097822353534540L;
public ContextRuleException(Exception e) {
super(e);
}
}
}
| Add finally block around statement evaluation to ensure after runs | test-context/src/main/java/com/dc/test/context/junit/ContextRule.java | Add finally block around statement evaluation to ensure after runs |
|
Java | mit | f7723e341ebcf2e44cd3aac6abf325bd7bfcd5fb | 0 | kaupunki-apina/Peak | package fi.salminen.tomy.peak.util;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.DisplayMetrics;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.TextView;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import java.util.List;
import fi.salminen.tomy.peak.R;
import fi.salminen.tomy.peak.persistence.models.BusModel;
import io.reactivex.Observable;
import io.reactivex.ObservableEmitter;
import io.reactivex.ObservableOnSubscribe;
public class IconFactory {
private Context context;
private TextView journeyPatternRefLabel;
private View backgroundStationary;
private View backgroundMoving;
private int sideLength;
private int halfLength;
private Rect rect;
private int widthSpec;
private int heightSpec;
public IconFactory(Context context, int resIdLabel, int resIdStationary, int resIdMoving) {
this.context = context;
this.journeyPatternRefLabel = (TextView) inflate(resIdLabel);
this.backgroundStationary = inflate(resIdStationary);
this.backgroundMoving = inflate(resIdMoving);
this.sideLength = context.getResources().getDimensionPixelSize(R.dimen.bus_icon_side);
this.halfLength = sideLength / 2;
if (android.os.Build.VERSION.SDK_INT >= 11) {
// Disable hardware accelerated drawing
backgroundMoving.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
backgroundStationary.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
}
rect = new Rect();
rect.set(0, 0, sideLength, sideLength);
widthSpec = View.MeasureSpec.makeMeasureSpec(rect.width(), View.MeasureSpec.EXACTLY);
heightSpec = View.MeasureSpec.makeMeasureSpec(rect.height(), View.MeasureSpec.EXACTLY);
}
/**
* Inflates a view from layout resource file.
*
* @param resId Resource Id
* @return Inflated view
*/
private View inflate(int resId) {
DisplayMetrics displayMetrics = new DisplayMetrics();
View view = ((LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE)).inflate(resId, null);
((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay().getMetrics(displayMetrics);
view.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
return view;
}
public Observable<Void> getBusIcon(List<BusModel> models) {
return Observable.create(new ObservableOnSubscribe<Void>() {
@Override
public void subscribe(ObservableEmitter<Void> e) throws Exception {
for (BusModel model : models) {
// Icon for moving has a direction indicator.
View background = model.speed < 3 ? backgroundStationary : backgroundMoving;
Bitmap bm = Bitmap.createBitmap(sideLength, sideLength, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bm);
// Draw background.
background.layout(0, 0, sideLength, sideLength);
background.buildDrawingCache();
background.draw(canvas);
background.destroyDrawingCache();
// Rotate label so that it's orientated correctly.
canvas.save();
canvas.rotate((float) -model.bearing, halfLength, halfLength);
// Draw text on top.
journeyPatternRefLabel.setText(model.journeyPatternRef);
journeyPatternRefLabel.measure(widthSpec, heightSpec);
journeyPatternRefLabel.layout(0, 0, rect.width(), rect.height());
journeyPatternRefLabel.buildDrawingCache();
journeyPatternRefLabel.draw(canvas);
journeyPatternRefLabel.destroyDrawingCache();
canvas.restore();
model.icon = BitmapDescriptorFactory.fromBitmap(bm);
}
e.onComplete();
}
});
}
}
| app/src/main/java/fi/salminen/tomy/peak/util/IconFactory.java | package fi.salminen.tomy.peak.util;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.DisplayMetrics;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.TextView;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import java.util.List;
import fi.salminen.tomy.peak.R;
import fi.salminen.tomy.peak.persistence.models.BusModel;
import io.reactivex.Observable;
import io.reactivex.ObservableEmitter;
import io.reactivex.ObservableOnSubscribe;
public class IconFactory {
private Context context;
private TextView journeyPatternRefLabel;
private View backgroundStationary;
private View backgroundMoving;
private int sideLength;
private int halfLength;
private Rect rect;
private int widthSpec;
private int heightSpec;
public IconFactory(Context context, int resIdLabel, int resIdStationary, int resIdMoving) {
this.context = context;
this.journeyPatternRefLabel = (TextView) inflate(resIdLabel);
this.backgroundStationary = inflate(resIdStationary);
this.backgroundMoving = inflate(resIdMoving);
this.sideLength = context.getResources().getDimensionPixelSize(R.dimen.bus_icon_side);
this.halfLength = sideLength / 2;
if (android.os.Build.VERSION.SDK_INT >= 11) {
// Disable hardware accelerated drawing
backgroundMoving.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
backgroundStationary.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
}
rect = new Rect();
rect.set(0, 0, sideLength, sideLength);
widthSpec = View.MeasureSpec.makeMeasureSpec(rect.width(), View.MeasureSpec.EXACTLY);
heightSpec = View.MeasureSpec.makeMeasureSpec(rect.height(), View.MeasureSpec.EXACTLY);
}
/**
* Inflates a view from layout resource file.
*
* @param resId Resource Id
* @return Inflated view
*/
private View inflate(int resId) {
DisplayMetrics displayMetrics = new DisplayMetrics();
View view = ((LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE)).inflate(resId, null);
((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay().getMetrics(displayMetrics);
view.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
return view;
}
public Observable<Void> getBusIcon(List<BusModel> models) {
return Observable.create(new ObservableOnSubscribe<Void>() {
@Override
public void subscribe(ObservableEmitter<Void> e) throws Exception {
for (BusModel model : models) {
// Icon for moving has a direction indicator.
View background = model.speed == 0 ? backgroundStationary : backgroundMoving;
Bitmap bm = Bitmap.createBitmap(sideLength, sideLength, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bm);
// Draw background.
background.layout(0, 0, sideLength, sideLength);
background.buildDrawingCache();
background.draw(canvas);
background.destroyDrawingCache();
canvas.save();
canvas.rotate((float) -model.bearing, halfLength, halfLength);
// Draw text on top.
journeyPatternRefLabel.setText(model.journeyPatternRef);
journeyPatternRefLabel.measure(widthSpec, heightSpec);
journeyPatternRefLabel.layout(0, 0, rect.width(), rect.height());
journeyPatternRefLabel.buildDrawingCache();
journeyPatternRefLabel.draw(canvas);
journeyPatternRefLabel.destroyDrawingCache();
canvas.restore();
model.icon = BitmapDescriptorFactory.fromBitmap(bm);
}
e.onComplete();
}
});
}
}
| Increased threshold for "moving" icon
| app/src/main/java/fi/salminen/tomy/peak/util/IconFactory.java | Increased threshold for "moving" icon |
|
Java | epl-1.0 | b060d30dc5c3cc3ac725644cf84d662f1c1eaccc | 0 | SiriusLab/ModelDebugging,SiriusLab/SiriusAnimator,SiriusLab/ModelDebugging,SiriusLab/SiriusAnimator,SiriusLab/SiriusAnimator,SiriusLab/ModelDebugging | package org.gemoc.gemoc_language_workbench.extensions.k3.dsa.impl;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import org.eclipse.emf.ecore.EObject;
import org.gemoc.execution.engine.trace.gemoc_execution_trace.MSEOccurrence;
import org.gemoc.gemoc_language_workbench.api.dsa.CodeExecutionException;
import org.gemoc.gemoc_language_workbench.api.dsa.ICodeExecutor;
import org.gemoc.gemoc_language_workbench.extensions.k3.Activator;
import org.gemoc.gemoc_language_workbench.extensions.k3.dsa.api.IK3DSAExecutorClassLoader;
/**
* Executor that is able to find the helper class associated with a given object
* It also works for aspect on EMF: - In case of EObject, (target or parameter)
* it is also able to find the appropriate interface when looking for the method
*
* @author dvojtise
*
*/
public class Kermeta3AspectsCodeExecutor implements ICodeExecutor {
// protected ClassLoader classLoader;
protected IK3DSAExecutorClassLoader k3DSAExecutorClassLoader;
protected String bundleSymbolicName;
public Kermeta3AspectsCodeExecutor(IK3DSAExecutorClassLoader k3DSAExecutorClassLoader, String bundleSymbolicName) {
// this.classLoader = classLoader;
this.k3DSAExecutorClassLoader = k3DSAExecutorClassLoader;
this.bundleSymbolicName = bundleSymbolicName;
}
@Override
public Object execute(MSEOccurrence mseOccurrence) throws CodeExecutionException {
Object caller = mseOccurrence.getMse().getCaller();
String methodName = mseOccurrence.getMse().getAction().getName();
return internal_execute(caller, methodName, mseOccurrence.getParameters(), mseOccurrence);
}
@Override
public Object execute(Object caller, String methodName, List<Object> parameters) throws CodeExecutionException {
return internal_execute(caller, methodName, parameters, null);
}
private Object internal_execute(Object caller, String methodName, Collection<Object> parameters,
MSEOccurrence mseOccurrence) throws CodeExecutionException {
ArrayList<Object> staticParameters = new ArrayList<Object>();
staticParameters.add(caller);
if (parameters != null) {
staticParameters.addAll(parameters);
}
Method bestApplicableMethod = getBestApplicableMethod(caller, methodName, staticParameters);
if (bestApplicableMethod == null)
throw new CodeExecutionException("static class not found or method not found when calling " + methodName
+ " on " + caller + ". MSEOccurence=" + mseOccurrence, mseOccurrence, false);
Object[] args = new Object[0];
if (staticParameters != null) {
args = staticParameters.toArray();
}
Object result = null;
try {
result = bestApplicableMethod.invoke(null, args);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
e.printStackTrace();
throw new CodeExecutionException("Exception caught during execution of a call, see inner exception.", e,
mseOccurrence, true);
}
return result;
}
private Method getBestApplicableMethod(Object caller, String methodName, List<Object> parameters) {
Set<Class<?>> staticHelperClasses = getStaticHelperClasses(caller);
if (staticHelperClasses == null || staticHelperClasses.isEmpty()) {
return null;
}
for (Class<?> c : staticHelperClasses) {
Method m = getFirstApplicableMethod(c, methodName, parameters);
if (m != null)
return m;
}
return null;
}
/**
* return the first compatible method, goes up the inheritance hierarchy
*
* @param staticHelperClass
* @param methodName
* @param parameters
* @return
*/
protected Method getFirstApplicableMethod(Class<?> staticHelperClass, String methodName, List<Object> parameters) {
Method[] methods = staticHelperClass.getDeclaredMethods();
for (Method method : methods) {
Class<?>[] evaluatedMethodParamTypes = method.getParameterTypes();
if (method.getName().equals(methodName) && evaluatedMethodParamTypes.length == parameters.size()) {
boolean isAllParamCompatible = true;
for (int i = 0; i < evaluatedMethodParamTypes.length; i++) {
Object p = parameters.get(i);
if (evaluatedMethodParamTypes[i].isPrimitive()) {
if (evaluatedMethodParamTypes[i].equals(Integer.TYPE) && !Integer.class.isInstance(p)) {
isAllParamCompatible = false;
break;
} else if (evaluatedMethodParamTypes[i].equals(Boolean.TYPE) && !Boolean.class.isInstance(p)) {
isAllParamCompatible = false;
break;
}
} else if (!evaluatedMethodParamTypes[i].isInstance(p)) {
isAllParamCompatible = false;
break;
}
}
if (isAllParamCompatible) {
return method;
}
}
}
// tries going in the inheritance hierarchy
Class<?> superClass = staticHelperClass.getSuperclass();
if (superClass != null)
return getFirstApplicableMethod(superClass, methodName, parameters);
else
return null;
}
/**
* search static class by name (future version should use a map of available
* aspects, and deals with it as a list of applicable static classes)
*
*/
protected Set<Class<?>> getStaticHelperClasses(Object target) {
List<Class<?>> allPossibleInterfaces = getInterfacesClassOfEObjectOrClass(target);
String searchedPropertyFileName = "/META-INF/xtend-gen/" + bundleSymbolicName + ".k3_aspect_mapping.properties";
Properties properties = new Properties();
InputStream inputStream = k3DSAExecutorClassLoader.getResourceAsStream(searchedPropertyFileName);
if (inputStream == null) {
try {
inputStream = org.eclipse.core.runtime.Platform.getBundle(bundleSymbolicName)
.getEntry(searchedPropertyFileName).openStream();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return null;
}
}
String possibleStaticClassesNames = null;
try {
if (inputStream != null) {
properties.load(inputStream);
for (int i = 0; i < allPossibleInterfaces.size(); i++) {
possibleStaticClassesNames = properties
.getProperty(allPossibleInterfaces.get(i).getCanonicalName());
// Break so that the aspect is applied on the most precise
// type
if (possibleStaticClassesNames != null)
break;
}
}
} catch (IOException e) {
// TODO report for debug that no mapping was found
return null;
}
if (possibleStaticClassesNames == null) {
return null;
}
Set<Class<?>> classes = new HashSet<Class<?>>();
ClassNotFoundException possibleException = null;
for (String possibleName : possibleStaticClassesNames.replaceAll(" ", "").split(",")) {
try {
classes.add(k3DSAExecutorClassLoader.getClassForName(possibleName));
} catch (ClassNotFoundException e) {
possibleException = e;
}
}
if (classes.isEmpty()) {
Activator.getMessagingSystem().error("ClassNotFoundException, see Error Log View", Activator.PLUGIN_ID,
possibleException);
}
return classes;
}
/**
* returns the class of o or the interface that o implements in the case of
* EObjects
*
* @param o
* @return
*/
protected List<Class<?>> getInterfacesClassOfEObjectOrClass(Object o) {
List<Class<?>> possibleInterfaces = new ArrayList<Class<?>>();
if (o instanceof EObject) {
/*
* String targetClassCanonicalName =
* o.getClass().getCanonicalName(); String
* searchedAspectizedClassCanonicalName = targetClassCanonicalName;
* // apply special rules to retrieve the Ecore interface instead of
* the Impl String searchedAspectizedClasPackageName =
* targetClassCanonicalName.substring(0,
* targetClassCanonicalName.lastIndexOf("."));
* searchedAspectizedClasPackageName =
* searchedAspectizedClasPackageName.substring(0,
* searchedAspectizedClasPackageName.lastIndexOf(".impl"));
* searchedAspectizedClassCanonicalName =
* searchedAspectizedClasPackageName
* +"."+((EObject)o).eClass().getName();
*/
// @author tdegueul
// Since aspects may target both base / extended metamodel classes
// we need to retrieve the complete hierarchy of possible
// applications
List<Class<?>> interfaces = getAllInterfaces(o.getClass());
for (int i = 0; i < interfaces.size(); i++) {
Class<?> interfac = interfaces.get(i);
if (interfac.getSimpleName().equals(((EObject) o).eClass().getName())) {
possibleInterfaces.add(interfac);
}
}
} else {
possibleInterfaces.add(o.getClass());
}
return possibleInterfaces;
}
// The two following methods are copied from
// org.apache.commons.lang.ClassUtils
private static List<Class<?>> getAllInterfaces(Class<?> cls) {
if (cls == null) {
return null;
}
LinkedHashSet<Class<?>> interfacesFound = new LinkedHashSet<Class<?>>();
getAllInterfaces(cls, interfacesFound);
return new ArrayList<Class<?>>(interfacesFound);
}
private static void getAllInterfaces(Class<?> cls, HashSet<Class<?>> interfacesFound) {
while (cls != null) {
Class<?>[] interfaces = cls.getInterfaces();
for (Class<?> i : interfaces) {
if (interfacesFound.add(i)) {
getAllInterfaces(i, interfacesFound);
}
}
cls = cls.getSuperclass();
}
}
@Override
public String getExcutorID() {
return this.getClass().getSimpleName() + "[" + bundleSymbolicName + "]";
}
// @Override
// public boolean canExecute(ActionCall call)
// {
// return getBestApplicableMethod(call.getTriggeringEvent().getCaller(),
// call.getTriggeringEvent().getAction().getName(),
// call.getParameters(),
// call) != null;
// }
//
//
// @Override
// public boolean canExecute(Object caller, String methodName, List<Object>
// parameters)
// {
// return getBestApplicableMethod(caller,
// methodName,
// parameters,
// null) != null;
// }
}
| org/gemoc/gemoc_language_workbench/extensions/org.gemoc.gemoc_language_workbench.extensions.k3/src/main/java/org/gemoc/gemoc_language_workbench/extensions/k3/dsa/impl/Kermeta3AspectsCodeExecutor.java | package org.gemoc.gemoc_language_workbench.extensions.k3.dsa.impl;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Properties;
import org.eclipse.emf.ecore.EObject;
import org.gemoc.execution.engine.trace.gemoc_execution_trace.MSEOccurrence;
import org.gemoc.gemoc_language_workbench.api.dsa.CodeExecutionException;
import org.gemoc.gemoc_language_workbench.api.dsa.ICodeExecutor;
import org.gemoc.gemoc_language_workbench.extensions.k3.Activator;
import org.gemoc.gemoc_language_workbench.extensions.k3.dsa.api.IK3DSAExecutorClassLoader;
/**
* Executor that is able to find the helper class associated with a given object
* It also works for aspect on EMF: - In case of EObject, (target or parameter)
* it is also able to find the appropriate interface when looking for the method
*
* @author dvojtise
*
*/
public class Kermeta3AspectsCodeExecutor implements ICodeExecutor {
// protected ClassLoader classLoader;
protected IK3DSAExecutorClassLoader k3DSAExecutorClassLoader;
protected String bundleSymbolicName;
public Kermeta3AspectsCodeExecutor(
IK3DSAExecutorClassLoader k3DSAExecutorClassLoader,
String bundleSymbolicName) {
// this.classLoader = classLoader;
this.k3DSAExecutorClassLoader = k3DSAExecutorClassLoader;
this.bundleSymbolicName = bundleSymbolicName;
}
@Override
public Object execute(MSEOccurrence mseOccurrence) throws CodeExecutionException
{
Object caller = mseOccurrence.getMse().getCaller();
String methodName = mseOccurrence.getMse().getAction().getName();
return internal_execute(caller, methodName, mseOccurrence.getParameters(), mseOccurrence);
}
@Override
public Object execute(Object caller, String methodName, List<Object> parameters) throws CodeExecutionException
{
return internal_execute(caller, methodName, parameters, null);
}
private Object internal_execute(Object caller, String methodName, Collection<Object> parameters, MSEOccurrence mseOccurrence) throws CodeExecutionException
{
ArrayList<Object> staticParameters = new ArrayList<Object>();
staticParameters.add(caller);
if (parameters != null)
{
staticParameters.addAll(parameters);
}
Method bestApplicableMethod = getBestApplicableMethod(caller, methodName, staticParameters);
if (bestApplicableMethod == null)
throw new CodeExecutionException("static class not found or method not found when calling "+methodName+ " on "+caller+". MSEOccurence=" + mseOccurrence, mseOccurrence, false);
Object[] args = new Object[0];
if (staticParameters != null)
{
args = staticParameters.toArray();
}
Object result = null;
try {
result = bestApplicableMethod.invoke(null, args);
} catch (IllegalAccessException | IllegalArgumentException
| InvocationTargetException e)
{
e.printStackTrace();
throw new CodeExecutionException("Exception caught during execution of a call, see inner exception.", e, mseOccurrence, true);
}
return result;
}
private Method getBestApplicableMethod(Object caller,
String methodName,
List<Object> parameters)
{
Class<?> staticHelperClass = getStaticHelperClass(caller);
if (staticHelperClass == null)
{
return null;
}
return getFirstApplicableMethod(staticHelperClass, methodName, parameters);
}
/**
* return the first compatible method, goes up the inheritance hierarchy
*
* @param staticHelperClass
* @param methodName
* @param parameters
* @return
*/
protected Method getFirstApplicableMethod(Class<?> staticHelperClass,
String methodName, List<Object> parameters) {
Method[] methods = staticHelperClass.getDeclaredMethods();
for (Method method : methods) {
Class<?>[] evaluatedMethodParamTypes = method.getParameterTypes();
if (method.getName().equals(methodName)
&& evaluatedMethodParamTypes.length == parameters.size()) {
boolean isAllParamCompatible = true;
for (int i = 0; i < evaluatedMethodParamTypes.length; i++) {
Object p = parameters.get(i);
if (evaluatedMethodParamTypes[i].isPrimitive()) {
if (evaluatedMethodParamTypes[i].equals(Integer.TYPE)
&& !Integer.class.isInstance(p)) {
isAllParamCompatible = false;
break;
}
else if (evaluatedMethodParamTypes[i].equals(Boolean.TYPE)
&& !Boolean.class.isInstance(p)) {
isAllParamCompatible = false;
break;
}
}
else if (!evaluatedMethodParamTypes[i].isInstance(p)) {
isAllParamCompatible = false;
break;
}
}
if (isAllParamCompatible) {
return method;
}
}
}
// tries going in the inheritance hierarchy
Class<?> superClass = staticHelperClass.getSuperclass();
if (superClass != null)
return getFirstApplicableMethod(superClass, methodName, parameters);
else
return null;
}
/**
* search static class by name (future version should use a map of available
* aspects, and deals with it as a list of applicable static classes)
*
*/
protected Class<?> getStaticHelperClass(Object target) {
List<Class<?>> allPossibleInterfaces = getInterfacesClassOfEObjectOrClass(
target);
String searchedPropertyFileName = "/META-INF/xtend-gen/"
+ bundleSymbolicName + ".k3_aspect_mapping.properties";
Properties properties = new Properties();
InputStream inputStream = k3DSAExecutorClassLoader
.getResourceAsStream(searchedPropertyFileName);
if (inputStream == null) {
try {
inputStream = org.eclipse.core.runtime.Platform
.getBundle(bundleSymbolicName)
.getEntry(searchedPropertyFileName).openStream();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return null;
}
}
String possibleStaticClassName = null;
try {
if (inputStream != null) {
properties.load(inputStream);
for (int i = 0 ; i < allPossibleInterfaces.size() ; i++) {
possibleStaticClassName = properties
.getProperty(allPossibleInterfaces.get(i).getCanonicalName());
// Break so that the aspect is applied on the most precise type
if (possibleStaticClassName != null)
break ;
}
}
} catch (IOException e) {
// TODO report for debug that no mapping was found
return null;
}
if (possibleStaticClassName == null) {
return null;
}
try { // Class.forName(possibleStaticClassName)
// Class.forName(possibleStaticClassName, true, classLoader);
return k3DSAExecutorClassLoader
.getClassForName(possibleStaticClassName);
} catch (ClassNotFoundException e) {
Activator.getMessagingSystem().error(
"ClassNotFoundException, see Error Log View",
Activator.PLUGIN_ID, e);
}
return null;
}
/**
* returns the class of o or the interface that o implements in the case of
* EObjects
*
* @param o
* @return
*/
protected List<Class<?>> getInterfacesClassOfEObjectOrClass(Object o) {
List<Class<?>> possibleInterfaces = new ArrayList<Class<?>>();
if (o instanceof EObject) {
/*
* String targetClassCanonicalName =
* o.getClass().getCanonicalName(); String
* searchedAspectizedClassCanonicalName = targetClassCanonicalName;
* // apply special rules to retrieve the Ecore interface instead of
* the Impl String searchedAspectizedClasPackageName =
* targetClassCanonicalName.substring(0,
* targetClassCanonicalName.lastIndexOf("."));
* searchedAspectizedClasPackageName =
* searchedAspectizedClasPackageName.substring(0,
* searchedAspectizedClasPackageName.lastIndexOf(".impl"));
* searchedAspectizedClassCanonicalName =
* searchedAspectizedClasPackageName
* +"."+((EObject)o).eClass().getName();
*/
// @author tdegueul
// Since aspects may target both base / extended metamodel classes
// we need to retrieve the complete hierarchy of possible applications
List<Class<?>> interfaces = getAllInterfaces(o.getClass());
for (int i = 0; i < interfaces.size(); i++) {
Class<?> interfac = interfaces.get(i);
if (interfac.getSimpleName().equals(
((EObject) o).eClass().getName())) {
possibleInterfaces.add(interfac);
}
}
} else {
possibleInterfaces.add(o.getClass());
}
return possibleInterfaces;
}
// The two following methods are copied from org.apache.commons.lang.ClassUtils
private static List<Class<?>> getAllInterfaces(Class<?> cls) {
if (cls == null) {
return null;
}
LinkedHashSet<Class<?>> interfacesFound = new LinkedHashSet<Class<?>>();
getAllInterfaces(cls, interfacesFound);
return new ArrayList<Class<?>>(interfacesFound);
}
private static void getAllInterfaces(Class<?> cls, HashSet<Class<?>> interfacesFound) {
while (cls != null) {
Class<?>[] interfaces = cls.getInterfaces();
for (Class<?> i : interfaces) {
if (interfacesFound.add(i)) {
getAllInterfaces(i, interfacesFound);
}
}
cls = cls.getSuperclass();
}
}
@Override
public String getExcutorID() {
return this.getClass().getSimpleName()+"["+bundleSymbolicName+"]";
}
// @Override
// public boolean canExecute(ActionCall call)
// {
// return getBestApplicableMethod(call.getTriggeringEvent().getCaller(),
// call.getTriggeringEvent().getAction().getName(),
// call.getParameters(),
// call) != null;
// }
//
//
// @Override
// public boolean canExecute(Object caller, String methodName, List<Object> parameters)
// {
// return getBestApplicableMethod(caller,
// methodName,
// parameters,
// null) != null;
// }
}
| Adding to the Kermeta3AspectsCodeExecution the
capability to consider multiple aspects per
class in the properties file (required for
PlainK3 to work, since it has visitor +
actions in separate aspects.
| org/gemoc/gemoc_language_workbench/extensions/org.gemoc.gemoc_language_workbench.extensions.k3/src/main/java/org/gemoc/gemoc_language_workbench/extensions/k3/dsa/impl/Kermeta3AspectsCodeExecutor.java | Adding to the Kermeta3AspectsCodeExecution the capability to consider multiple aspects per class in the properties file (required for PlainK3 to work, since it has visitor + actions in separate aspects. |
|
Java | mpl-2.0 | 53f7dc68279216b555f666cec64603180b73ee1f | 0 | MozillaCZ/MozStumbler,petercpg/MozStumbler,cascheberg/MozStumbler,MozillaCZ/MozStumbler,priyankvex/MozStumbler,crankycoder/MozStumbler,priyankvex/MozStumbler,crankycoder/MozStumbler,petercpg/MozStumbler,garvankeeley/MozStumbler,garvankeeley/MozStumbler,petercpg/MozStumbler,cascheberg/MozStumbler,priyankvex/MozStumbler,cascheberg/MozStumbler,garvankeeley/MozStumbler,crankycoder/MozStumbler,MozillaCZ/MozStumbler | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.mozstumbler.service.stumblerthread.scanners;
import android.content.Context;
import android.content.Intent;
import android.location.GpsSatellite;
import android.location.GpsStatus;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationProvider;
import android.os.Bundle;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import org.mozilla.mozstumbler.service.AppGlobals;
import org.mozilla.mozstumbler.service.AppGlobals.ActiveOrPassiveStumbling;
import org.mozilla.mozstumbler.service.Prefs;
import java.text.SimpleDateFormat;
import java.util.Date;
public class GPSScanner implements LocationListener {
public static final String ACTION_BASE = AppGlobals.ACTION_NAMESPACE + ".GPSScanner.";
public static final String ACTION_GPS_UPDATED = ACTION_BASE + "GPS_UPDATED";
public static final String ACTION_ARG_TIME = AppGlobals.ACTION_ARG_TIME;
public static final String SUBJECT_NEW_STATUS = "new_status";
public static final String SUBJECT_LOCATION_LOST = "location_lost";
public static final String SUBJECT_NEW_LOCATION = "new_location";
public static final String NEW_STATUS_ARG_FIXES = "fixes";
public static final String NEW_STATUS_ARG_SATS = "sats";
public static final String NEW_LOCATION_ARG_LOCATION = "location";
public static final String ACTION_NMEA_RECEIVED = ACTION_BASE + "NMEA_RECEIVED";
public static final String NMEA_DATA = "nmea_data";
public static final String NMEA_TIMESTAMP = "nmea_ts";
private static final String LOG_TAG = AppGlobals.LOG_PREFIX + GPSScanner.class.getSimpleName();
private static final int MIN_SAT_USED_IN_FIX = 3;
private static final long ACTIVE_MODE_GPS_MIN_UPDATE_TIME_MS = 1000;
private static final float ACTIVE_MODE_GPS_MIN_UPDATE_DISTANCE_M = 10;
private static final long PASSIVE_GPS_MIN_UPDATE_FREQ_MS = 3000;
private static final float PASSIVE_GPS_MOVEMENT_MIN_DELTA_M = 30;
private final LocationBlockList mBlockList = new LocationBlockList();
private final Context mContext;
private GpsStatus.Listener mGPSListener;
private GpsStatus.NmeaListener mNMEAListener;
private int mLocationCount;
private Location mLocation = new Location("internal");
private boolean mAutoGeofencing;
private boolean mIsPassiveMode;
private final ScanManager mScanManager;
public GPSScanner(Context context, ScanManager scanManager) {
mContext = context;
mScanManager = scanManager;
}
public void start(final ActiveOrPassiveStumbling stumblingMode) {
mIsPassiveMode = (stumblingMode == ActiveOrPassiveStumbling.PASSIVE_STUMBLING);
if (mIsPassiveMode ) {
startPassiveMode();
} else {
startActiveMode();
}
}
private void startPassiveMode() {
LocationManager locationManager = (LocationManager) mContext.getSystemService(Context.LOCATION_SERVICE);
locationManager.requestLocationUpdates(LocationManager.PASSIVE_PROVIDER,
0,
0, this);
}
private void startActiveMode() {
LocationManager lm = getLocationManager();
lm.requestLocationUpdates(LocationManager.GPS_PROVIDER,
ACTIVE_MODE_GPS_MIN_UPDATE_TIME_MS,
ACTIVE_MODE_GPS_MIN_UPDATE_DISTANCE_M,
this);
reportLocationLost();
mNMEAListener = new GpsStatus.NmeaListener() {
public void onNmeaReceived(long timestamp, String nmea) {
// Send an intent with a copy of the NMEA data
Intent intent = new Intent(ACTION_NMEA_RECEIVED);
intent.putExtra(NMEA_TIMESTAMP, timestamp);
intent.putExtra(NMEA_DATA, nmea);
LocalBroadcastManager.getInstance(mContext).sendBroadcastSync(intent);
}
};
mGPSListener = new GpsStatus.Listener() {
public void onGpsStatusChanged(int event) {
if (event == GpsStatus.GPS_EVENT_SATELLITE_STATUS) {
GpsStatus status = getLocationManager().getGpsStatus(null);
Iterable<GpsSatellite> sats = status.getSatellites();
int satellites = 0;
int fixes = 0;
for (GpsSatellite sat : sats) {
satellites++;
if (sat.usedInFix()) {
fixes++;
}
}
reportNewGpsStatus(fixes, satellites);
if (fixes < MIN_SAT_USED_IN_FIX) {
reportLocationLost();
}
if (AppGlobals.isDebug) {
Log.v(LOG_TAG, "onGpsStatusChange - satellites: " + satellites + " fixes: " + fixes);
}
} else if (event == GpsStatus.GPS_EVENT_STOPPED) {
reportLocationLost();
}
}
private void reportNewGpsStatus(int fixes, int sats) {
Intent i = new Intent(ACTION_GPS_UPDATED);
i.putExtra(Intent.EXTRA_SUBJECT, SUBJECT_NEW_STATUS);
i.putExtra(NEW_STATUS_ARG_FIXES, fixes);
i.putExtra(NEW_STATUS_ARG_SATS, sats);
i.putExtra(ACTION_ARG_TIME, System.currentTimeMillis());
LocalBroadcastManager.getInstance(mContext).sendBroadcastSync(i);
}
};
lm.addGpsStatusListener(mGPSListener);
}
public void stop() {
LocationManager lm = getLocationManager();
lm.removeUpdates(this);
reportLocationLost();
if (mGPSListener != null) {
lm.removeGpsStatusListener(mGPSListener);
mGPSListener = null;
}
}
public int getLocationCount() {
return mLocationCount;
}
public double getLatitude() {
return mLocation.getLatitude();
}
public double getLongitude() {
return mLocation.getLongitude();
}
public Location getLocation() {
return mLocation;
}
public void checkPrefs() {
if (mBlockList != null) {
mBlockList.updateBlocks();
}
mAutoGeofencing = Prefs.getInstance().getGeofenceHere();
}
public boolean isGeofenced() {
return (mBlockList != null) && mBlockList.isGeofenced();
}
private void sendToLogActivity(String msg) {
AppGlobals.guiLogInfo(msg, "#33ccff", false);
}
@Override
public void onLocationChanged(Location location) {
if (location == null) { // TODO: is this even possible??
reportLocationLost();
return;
}
String logMsg = (mIsPassiveMode)? "[Passive] " : "[Active] ";
String provider = location.getProvider();
if (!provider.toLowerCase().contains("gps")) {
sendToLogActivity(logMsg + "Discard fused/network location.");
// only interested in GPS locations
return;
}
// Seem to get greater likelihood of non-fused location with higher update freq.
// Check dist and time threshold here, not set on the listener.
if (mIsPassiveMode) {
final long timeDelta = location.getTime() - mLocation.getTime();
final boolean hasMoved = location.distanceTo(mLocation) > PASSIVE_GPS_MOVEMENT_MIN_DELTA_M;
if (timeDelta < PASSIVE_GPS_MIN_UPDATE_FREQ_MS || !hasMoved) {
return;
}
}
Date date = new Date(location.getTime());
SimpleDateFormat formatter = new SimpleDateFormat("HH:mm:ss");
String time = formatter.format(date);
logMsg += String.format("%s Coord: %.4f,%.4f, Acc: %.0f, Speed: %.0f, Alt: %.0f, Bearing: %.1f", time, location.getLatitude(),
location.getLongitude(), location.getAccuracy(), location.getSpeed(), location.getAltitude(), location.getBearing());
sendToLogActivity(logMsg);
if (mBlockList.contains(location)) {
Log.w(LOG_TAG, "Blocked location: " + location);
reportLocationLost();
return;
}
if (AppGlobals.isDebug) {
Log.d(LOG_TAG, "New location: " + location);
}
mLocation = location;
if (!mAutoGeofencing) {
reportNewLocationReceived(location);
}
mLocationCount++;
if (mIsPassiveMode) {
mScanManager.newPassiveGpsLocation();
}
}
@Override
public void onProviderDisabled(String provider) {
if (LocationManager.GPS_PROVIDER.equals(provider)) {
reportLocationLost();
}
}
@Override
public void onProviderEnabled(String provider) {
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
if ((status != LocationProvider.AVAILABLE) &&
(LocationManager.GPS_PROVIDER.equals(provider))) {
reportLocationLost();
}
}
private LocationManager getLocationManager() {
return (LocationManager) mContext.getSystemService(Context.LOCATION_SERVICE);
}
private void reportNewLocationReceived(Location location) {
Intent i = new Intent(ACTION_GPS_UPDATED);
i.putExtra(Intent.EXTRA_SUBJECT, SUBJECT_NEW_LOCATION);
i.putExtra(NEW_LOCATION_ARG_LOCATION, location);
i.putExtra(ACTION_ARG_TIME, System.currentTimeMillis());
LocalBroadcastManager.getInstance(mContext).sendBroadcastSync(i);
}
private void reportLocationLost() {
Intent i = new Intent(ACTION_GPS_UPDATED);
i.putExtra(Intent.EXTRA_SUBJECT, SUBJECT_LOCATION_LOST);
i.putExtra(ACTION_ARG_TIME, System.currentTimeMillis());
LocalBroadcastManager.getInstance(mContext).sendBroadcastSync(i);
}
}
| src/org/mozilla/mozstumbler/service/stumblerthread/scanners/GPSScanner.java | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.mozstumbler.service.stumblerthread.scanners;
import android.content.Context;
import android.content.Intent;
import android.location.GpsSatellite;
import android.location.GpsStatus;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationProvider;
import android.os.Bundle;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import org.mozilla.mozstumbler.service.AppGlobals;
import org.mozilla.mozstumbler.service.AppGlobals.ActiveOrPassiveStumbling;
import org.mozilla.mozstumbler.service.Prefs;
import java.text.SimpleDateFormat;
import java.util.Date;
public class GPSScanner implements LocationListener {
public static final String ACTION_BASE = AppGlobals.ACTION_NAMESPACE + ".GPSScanner.";
public static final String ACTION_GPS_UPDATED = ACTION_BASE + "GPS_UPDATED";
public static final String ACTION_ARG_TIME = AppGlobals.ACTION_ARG_TIME;
public static final String SUBJECT_NEW_STATUS = "new_status";
public static final String SUBJECT_LOCATION_LOST = "location_lost";
public static final String SUBJECT_NEW_LOCATION = "new_location";
public static final String NEW_STATUS_ARG_FIXES = "fixes";
public static final String NEW_STATUS_ARG_SATS = "sats";
public static final String NEW_LOCATION_ARG_LOCATION = "location";
private static final String LOG_TAG = AppGlobals.LOG_PREFIX + GPSScanner.class.getSimpleName();
private static final int MIN_SAT_USED_IN_FIX = 3;
private static final long ACTIVE_MODE_GPS_MIN_UPDATE_TIME_MS = 1000;
private static final float ACTIVE_MODE_GPS_MIN_UPDATE_DISTANCE_M = 10;
private static final long PASSIVE_GPS_MIN_UPDATE_FREQ_MS = 3000;
private static final float PASSIVE_GPS_MOVEMENT_MIN_DELTA_M = 30;
private final LocationBlockList mBlockList = new LocationBlockList();
private final Context mContext;
private GpsStatus.Listener mGPSListener;
private int mLocationCount;
private Location mLocation = new Location("internal");
private boolean mAutoGeofencing;
private boolean mIsPassiveMode;
private final ScanManager mScanManager;
public GPSScanner(Context context, ScanManager scanManager) {
mContext = context;
mScanManager = scanManager;
}
public void start(final ActiveOrPassiveStumbling stumblingMode) {
mIsPassiveMode = (stumblingMode == ActiveOrPassiveStumbling.PASSIVE_STUMBLING);
if (mIsPassiveMode ) {
startPassiveMode();
} else {
startActiveMode();
}
}
private void startPassiveMode() {
LocationManager locationManager = (LocationManager) mContext.getSystemService(Context.LOCATION_SERVICE);
locationManager.requestLocationUpdates(LocationManager.PASSIVE_PROVIDER,
0,
0, this);
}
private void startActiveMode() {
LocationManager lm = getLocationManager();
lm.requestLocationUpdates(LocationManager.GPS_PROVIDER,
ACTIVE_MODE_GPS_MIN_UPDATE_TIME_MS,
ACTIVE_MODE_GPS_MIN_UPDATE_DISTANCE_M,
this);
reportLocationLost();
mGPSListener = new GpsStatus.Listener() {
public void onGpsStatusChanged(int event) {
if (event == GpsStatus.GPS_EVENT_SATELLITE_STATUS) {
GpsStatus status = getLocationManager().getGpsStatus(null);
Iterable<GpsSatellite> sats = status.getSatellites();
int satellites = 0;
int fixes = 0;
for (GpsSatellite sat : sats) {
satellites++;
if (sat.usedInFix()) {
fixes++;
}
}
reportNewGpsStatus(fixes, satellites);
if (fixes < MIN_SAT_USED_IN_FIX) {
reportLocationLost();
}
if (AppGlobals.isDebug) {
Log.v(LOG_TAG, "onGpsStatusChange - satellites: " + satellites + " fixes: " + fixes);
}
} else if (event == GpsStatus.GPS_EVENT_STOPPED) {
reportLocationLost();
}
}
};
lm.addGpsStatusListener(mGPSListener);
}
public void stop() {
LocationManager lm = getLocationManager();
lm.removeUpdates(this);
reportLocationLost();
if (mGPSListener != null) {
lm.removeGpsStatusListener(mGPSListener);
mGPSListener = null;
}
}
public int getLocationCount() {
return mLocationCount;
}
public double getLatitude() {
return mLocation.getLatitude();
}
public double getLongitude() {
return mLocation.getLongitude();
}
public Location getLocation() {
return mLocation;
}
public void checkPrefs() {
if (mBlockList != null) {
mBlockList.updateBlocks();
}
mAutoGeofencing = Prefs.getInstance().getGeofenceHere();
}
public boolean isGeofenced() {
return (mBlockList != null) && mBlockList.isGeofenced();
}
private void sendToLogActivity(String msg) {
AppGlobals.guiLogInfo(msg, "#33ccff", false);
}
@Override
public void onLocationChanged(Location location) {
if (location == null) { // TODO: is this even possible??
reportLocationLost();
return;
}
String logMsg = (mIsPassiveMode)? "[Passive] " : "[Active] ";
String provider = location.getProvider();
if (!provider.toLowerCase().contains("gps")) {
sendToLogActivity(logMsg + "Discard fused/network location.");
// only interested in GPS locations
return;
}
// Seem to get greater likelihood of non-fused location with higher update freq.
// Check dist and time threshold here, not set on the listener.
if (mIsPassiveMode) {
final long timeDelta = location.getTime() - mLocation.getTime();
final boolean hasMoved = location.distanceTo(mLocation) > PASSIVE_GPS_MOVEMENT_MIN_DELTA_M;
if (timeDelta < PASSIVE_GPS_MIN_UPDATE_FREQ_MS || !hasMoved) {
return;
}
}
Date date = new Date(location.getTime());
SimpleDateFormat formatter = new SimpleDateFormat("HH:mm:ss");
String time = formatter.format(date);
logMsg += String.format("%s Coord: %.4f,%.4f, Acc: %.0f, Speed: %.0f, Alt: %.0f, Bearing: %.1f", time, location.getLatitude(),
location.getLongitude(), location.getAccuracy(), location.getSpeed(), location.getAltitude(), location.getBearing());
sendToLogActivity(logMsg);
if (mBlockList.contains(location)) {
Log.w(LOG_TAG, "Blocked location: " + location);
reportLocationLost();
return;
}
if (AppGlobals.isDebug) {
Log.d(LOG_TAG, "New location: " + location);
}
mLocation = location;
if (!mAutoGeofencing) {
reportNewLocationReceived(location);
}
mLocationCount++;
if (mIsPassiveMode) {
mScanManager.newPassiveGpsLocation();
}
}
@Override
public void onProviderDisabled(String provider) {
if (LocationManager.GPS_PROVIDER.equals(provider)) {
reportLocationLost();
}
}
@Override
public void onProviderEnabled(String provider) {
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
if ((status != LocationProvider.AVAILABLE) &&
(LocationManager.GPS_PROVIDER.equals(provider))) {
reportLocationLost();
}
}
private LocationManager getLocationManager() {
return (LocationManager) mContext.getSystemService(Context.LOCATION_SERVICE);
}
private void reportNewLocationReceived(Location location) {
Intent i = new Intent(ACTION_GPS_UPDATED);
i.putExtra(Intent.EXTRA_SUBJECT, SUBJECT_NEW_LOCATION);
i.putExtra(NEW_LOCATION_ARG_LOCATION, location);
i.putExtra(ACTION_ARG_TIME, System.currentTimeMillis());
LocalBroadcastManager.getInstance(mContext).sendBroadcastSync(i);
}
private void reportLocationLost() {
Intent i = new Intent(ACTION_GPS_UPDATED);
i.putExtra(Intent.EXTRA_SUBJECT, SUBJECT_LOCATION_LOST);
i.putExtra(ACTION_ARG_TIME, System.currentTimeMillis());
LocalBroadcastManager.getInstance(mContext).sendBroadcastSync(i);
}
private void reportNewGpsStatus(int fixes, int sats) {
Intent i = new Intent(ACTION_GPS_UPDATED);
i.putExtra(Intent.EXTRA_SUBJECT, SUBJECT_NEW_STATUS);
i.putExtra(NEW_STATUS_ARG_FIXES, fixes);
i.putExtra(NEW_STATUS_ARG_SATS, sats);
i.putExtra(ACTION_ARG_TIME, System.currentTimeMillis());
LocalBroadcastManager.getInstance(mContext).sendBroadcastSync(i);
}
}
| Added an ACTION_NMEA_RECEIVED intent that is broadcast locally whenever new NMEA data is read in from GPS satellites. This can be used to help check if a GPS location is provided by a MockLocation provider.
| src/org/mozilla/mozstumbler/service/stumblerthread/scanners/GPSScanner.java | Added an ACTION_NMEA_RECEIVED intent that is broadcast locally whenever new NMEA data is read in from GPS satellites. This can be used to help check if a GPS location is provided by a MockLocation provider. |
|
Java | agpl-3.0 | f189ede9d2650fda617179e67a1dda8d857ccd1a | 0 | schedulix/schedulix,schedulix/schedulix,schedulix/schedulix,schedulix/schedulix | /*
Copyright (c) 2000-2013 "independIT Integrative Technologies GmbH",
Authors: Ronald Jeninga, Dieter Stubler
schedulix Enterprise Job Scheduling System
independIT Integrative Technologies GmbH [http://www.independit.de]
mailto:[email protected]
This file is part of schedulix
schedulix is free software:
you can redistribute it and/or modify it under the terms of the
GNU Affero General Public License as published by the
Free Software Foundation, either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.independit.scheduler.server;
import java.io.*;
import java.util.*;
import java.lang.*;
import java.net.*;
import java.sql.*;
import de.independit.scheduler.server.util.*;
import de.independit.scheduler.server.repository.*;
import de.independit.scheduler.server.exception.*;
import de.independit.scheduler.server.output.*;
public class Server
{
public final static String __version = "@(#) $Id: Server.java,v 2.17.2.6 2013/09/11 11:50:39 ronald Exp $";
private ThreadGroup utg;
private SSLListenThread ssllt;
private OrdinaryListenThread ult;
private OrdinaryListenThread svt;
private SchedulingThread wst;
private GarbageThread gst;
private TimerThread tt;
private TriggerThread trt;
private ThreadGroup wg;
private SyncFifo cmdQueue;
private SyncFifo roCmdQueue;
private WorkerThread[] wt;
private ShutdownThread shutt;
private RenewTicketThread rtt;
private DBCleanupThread dbct;
private String iniFile;
private SystemEnvironment env;
public Server(String inifile, boolean adminMode, boolean protectMode, String programLevel)
{
iniFile = inifile;
Properties props = new Properties();
InputStream ini;
ini = Server.class.getResourceAsStream(inifile);
try {
if(ini == null)
ini = new FileInputStream(inifile);
props.load(ini);
} catch(FileNotFoundException fnf) {
SDMSThread.doTrace(null, "Properties File not found : " + fnf, SDMSThread.SEVERITY_FATAL);
} catch(IOException ioe) {
SDMSThread.doTrace(null, "Error loading Properties file: " + ioe, SDMSThread.SEVERITY_FATAL);
}
for (Enumeration e = props.propertyNames() ; e.hasMoreElements() ;) {
String k = (String) e.nextElement();
if(k.equals(SystemEnvironment.S_DBPASSWD)) continue;
if(k.equals(SystemEnvironment.S_SYSPASSWD)) continue;
if(k.equals(SystemEnvironment.S_KEYSTOREPASSWORD)) continue;
if(k.equals(SystemEnvironment.S_TRUSTSTOREPASSWORD)) continue;
SDMSThread.doTrace(null, k + "=" + props.getProperty(k), SDMSThread.SEVERITY_INFO);
}
env = new SystemEnvironment(props, programLevel);
if(adminMode) env.disableConnect();
if(protectMode) SystemEnvironment.setProtectMode();
SystemEnvironment.server = this;
}
public String getIniFile()
{
return iniFile;
}
private void initShutdownThread()
{
shutt = new ShutdownThread(env, this);
Runtime r = Runtime.getRuntime();
r.addShutdownHook(shutt);
}
private void initRenewTicketThread() throws SDMSException
{
rtt = new RenewTicketThread(this);
SystemEnvironment.ticketThread = rtt;
rtt.initRenewTicketThread(env);
rtt.getTicket(rtt.pSysEnv);
}
private void startRenewTicketThread() throws SDMSException
{
SDMSThread.doTrace(null, "Starting Renew Ticket Thread", SDMSThread.SEVERITY_INFO);
rtt.start();
}
private void initDBCleanupThread() throws SDMSException
{
dbct = new DBCleanupThread(this);
SystemEnvironment.dbCleanupThread = dbct;
dbct.initDBCleanupThread(env);
}
private void startDBCleanupThread() throws SDMSException
{
SDMSThread.doTrace(null, "Starting Database Cleanup Thread", SDMSThread.SEVERITY_INFO);
dbct.start();
}
private void createRepository() throws SDMSException
{
new SDMSRepository(env);
}
private void initWorkers() throws SDMSException
{
wg = new ThreadGroup("WorkerThreads");
cmdQueue = new SyncFifo();
roCmdQueue = new SyncFifo();
SystemEnvironment.wg = wg;
}
private void startWorkers() throws SDMSException
{
int maxWorker;
SyncFifo q;
maxWorker = SystemEnvironment.maxWorker + SystemEnvironment.maxWriter;
SDMSThread.doTrace(null, "Starting " + maxWorker + " Worker Threads", SDMSThread.SEVERITY_INFO);
wt = new WorkerThread[maxWorker];
q = cmdQueue;
for(int i=0; i < maxWorker; ++i) {
if (i >= SystemEnvironment.maxWriter)
q = roCmdQueue;
wt[i] = new WorkerThread(env, wg, q, i);
wt[i].start();
}
}
private void initScheduling() throws SDMSException
{
wst = new SchedulingThread(env, cmdQueue);
wst.spinDelay = 50;
SystemEnvironment.sched = wst;
}
public void startScheduling() throws SDMSException
{
if(SystemEnvironment.getProtectMode()) return;
if(wst.isAlive()) return;
SDMSThread.doTrace(null, "Starting Scheduling Thread", SDMSThread.SEVERITY_INFO);
wst.start();
}
private void initGC() throws SDMSException
{
gst = new GarbageThread(env, cmdQueue);
SystemEnvironment.garb = gst;
}
private void startGC() throws SDMSException
{
SDMSThread.doTrace(null, "Starting Garbage Collection Thread", SDMSThread.SEVERITY_INFO);
gst.start();
}
private void initTT() throws SDMSException
{
trt = new TriggerThread(env, cmdQueue);
SystemEnvironment.tt = trt;
}
private void startTT() throws SDMSException
{
SDMSThread.doTrace(null, "Starting Trigger Thread", SDMSThread.SEVERITY_INFO);
trt.start();
}
private void initTimeScheduling() throws SDMSException
{
tt = new TimerThread(env, cmdQueue);
SystemEnvironment.timer = tt;
}
public void startTimeScheduling() throws SDMSException
{
if(SystemEnvironment.getProtectMode()) return;
if(tt.isAlive()) return;
SDMSThread.doTrace(null, "Starting Time Scheduling Thread", SDMSThread.SEVERITY_INFO);
tt.start();
}
private void initListener()
{
utg = new ThreadGroup(wg, "UserThreads");
SystemEnvironment.utg = utg;
}
private void startListener()
{
SDMSThread.doTrace(null, "Starting Listener Thread(s)", SDMSThread.SEVERITY_INFO);
if (SystemEnvironment.port != 0) {
ult = new OrdinaryListenThread(utg, SystemEnvironment.port, SystemEnvironment.maxConnects, cmdQueue, roCmdQueue, ListenThread.LISTENER);
ult.start();
} else {
SDMSThread.doTrace(null, "Standard communication Listener disabled", SDMSThread.SEVERITY_INFO);
ult = null;
}
if (SystemEnvironment.service_port != 0) {
svt = new OrdinaryListenThread(utg, SystemEnvironment.service_port, 1, cmdQueue, roCmdQueue, ListenThread.SERVICE);
svt.start();
} else {
SDMSThread.doTrace(null, "Service port Listener disabled", SDMSThread.SEVERITY_INFO);
svt = null;
}
if (SystemEnvironment.sslport != 0) {
ssllt = new SSLListenThread(utg, SystemEnvironment.sslport, SystemEnvironment.maxConnects, cmdQueue, roCmdQueue, ListenThread.LISTENER);
ssllt.start();
try {
Thread.sleep(1000);
} catch (java.lang.InterruptedException ie) { }
String[] prots = ssllt.getProtocols();
if (prots != null) {
SDMSThread.doTrace(null, "TLS Listen Thread started. Supported Protocols :", SDMSThread.SEVERITY_INFO);
for (int i = 0; i < prots.length; ++i)
SDMSThread.doTrace(null, "\t" + prots[i], SDMSThread.SEVERITY_INFO);
}
} else {
SDMSThread.doTrace(null, "SSL communication Listener disabled", SDMSThread.SEVERITY_INFO);
ssllt = null;
}
}
public void shutdown()
{
if(gst != null) {
if(gst.isAlive()) {
gst.do_stop();
SDMSThread.doTrace(null, "Stopped " + gst.toString(), SDMSThread.SEVERITY_INFO);
}
}
if(ult != null) {
if(ult.isAlive()) {
ult.do_stop();
}
killAll();
}
if(svt != null) {
if(svt.isAlive()) {
svt.do_stop();
}
killAll();
}
if(ssllt != null) {
if(ssllt.isAlive()) {
ssllt.do_stop();
}
killAll();
}
if(wst != null) {
if(wst.isAlive()) {
wst.do_stop();
SDMSThread.doTrace(null, "Stopped " + wst.toString(), SDMSThread.SEVERITY_INFO);
}
}
if(tt != null) {
if(tt.isAlive()) {
tt.do_stop();
SDMSThread.doTrace(null, "Stopped " + tt.toString(), SDMSThread.SEVERITY_INFO);
}
}
if(rtt != null) {
if(rtt.isAlive()) {
rtt.do_stop();
SDMSThread.doTrace(null, "Stopped " + rtt.toString(), SDMSThread.SEVERITY_INFO);
}
}
if (dbct != null) {
if (dbct.isAlive()) {
dbct.do_stop();
SDMSThread.doTrace(null, "Stopped " + dbct.toString(), SDMSThread.SEVERITY_INFO);
}
}
if(wt != null) {
for(int i = 0; i < wt.length; ++i) {
if(wt[i] != null) {
if(wt[i].isAlive()) {
wt[i].do_stop();
SDMSThread.doTrace(null, "Stopped " + wt[i].toString(), SDMSThread.SEVERITY_INFO);
}
}
}
}
shutt.do_stop();
}
public void killUser(int cid)
{
int numUser = utg.activeCount();
SDMSThread list[];
int i;
list = new SDMSThread[numUser];
utg.enumerate(list);
for(i = 0; i< numUser; i++) {
if(list[i] != null && list[i].id() == cid) {
list[i].do_stop();
}
}
}
public void killAll()
{
int numUser = utg.activeCount();
SDMSThread list[];
int i;
list = new SDMSThread[numUser];
utg.enumerate(list);
for(i = 0; i< numUser; i++) {
if(list[i] != null && list[i].isAlive()) {
list[i].do_stop();
SDMSThread.doTrace(null, "Stopped " + list[i].toString(), SDMSThread.SEVERITY_INFO);
}
}
}
public static synchronized Connection connectToDB(SystemEnvironment env) throws FatalException
{
String jdbcDriver = SystemEnvironment.jdbcDriver;
String dbUrl = SystemEnvironment.dbUrl;
String dbUser = SystemEnvironment.dbUser;
String dbPasswd = SystemEnvironment.dbPasswd;
Connection c;
if(jdbcDriver == null)
throw new FatalException(new SDMSMessage(env,
"03110181509", "No JDBC Driver Specified"));
if(dbUrl == null) throw new FatalException(new SDMSMessage(env,
"03110181510", "No JDBC URL Specified"));
try {
Class.forName(jdbcDriver);
} catch(ClassNotFoundException cnf) {
throw new FatalException(new SDMSMessage(env,
"03110181511", "Class $1 not Found", jdbcDriver));
}
try {
c = DriverManager.getConnection(dbUrl, dbUser, dbPasswd);
} catch(SQLException sqle) {
throw new FatalException(new SDMSMessage(env,
"03110181512", "Unable to connect to $1, $2", dbUrl, sqle.toString()));
}
try {
c.setAutoCommit(false);
} catch(SQLException sqle) {
throw new FatalException(new SDMSMessage(env,
"03202071128", "Cannot set autocommit off ($1)", sqle.toString()));
}
if (SystemEnvironment.SQUOTE == null) {
try {
final String driverName = c.getMetaData().getDriverName();
SDMSThread.doTrace(null, "JDBC Driver used : " + driverName, SDMSThread.SEVERITY_INFO);
if (driverName.startsWith("MySQL") || driverName.startsWith("MariaDB")) {
SystemEnvironment.SQUOTE = "`";
SystemEnvironment.EQUOTE = "`";
} else if (driverName.startsWith("Microsoft")) {
SystemEnvironment.SQUOTE = "[";
SystemEnvironment.EQUOTE = "]";
} else {
if (driverName.startsWith("PostgreSQL"))
SystemEnvironment.isPostgreSQL = true;
SystemEnvironment.SQUOTE = "";
SystemEnvironment.EQUOTE = "";
}
} catch (SQLException sqle) {
SDMSThread.doTrace(null, "Unknown JDBC Driver used; run into an exception while trying to determine the Driver Name : " + sqle.toString(), SDMSThread.SEVERITY_FATAL);
SystemEnvironment.SQUOTE = "";
SystemEnvironment.EQUOTE = "";
}
}
return c;
}
public void serverMain()
{
try {
initShutdownThread();
initRenewTicketThread();
try {
startRenewTicketThread();
} catch(SDMSException fe1) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03302061700",
"Fatal exception while starting TicketThread:\n$1", fe1.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
createRepository();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03202252201",
"Fatal exception while loading Repository:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
SDMSThread.doTrace(null, "Initializing System Threads", SDMSThread.SEVERITY_INFO);
initWorkers();
initListener();
initScheduling();
initTimeScheduling();
initTT();
initGC();
if (env.dbPreserveTime > 0)
initDBCleanupThread();
else
dbct = null;
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03202252202",
"Fatal exception while initializing System Threads:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startWorkers();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03202252203",
"Fatal exception while starting Workerthreads:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startScheduling();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03202252204",
"Fatal exception while starting SchedulingThread:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startTimeScheduling();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03206082124",
"Fatal exception while starting Time Scheduling:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startTT();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03407301455",
"Fatal exception while starting trigger thread:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startGC();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03311120827",
"Fatal exception while starting garbage collector:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
if (dbct != null) startDBCleanupThread();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03311141139",
"Fatal exception while starting dbCleanupThread:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
startListener();
SDMSMessage m = new SDMSMessage(env, "03110212341", "-- $1 -- $2 -- $3 -- ready --",
"SDMS", "Server", "Systems");
SDMSThread.doTrace(null, m.toString(), SDMSThread.SEVERITY_INFO);
for(int i = 0; i < wt.length; ++i) {
try {
wt[i].join();
SDMSThread.doTrace(null, "Worker " + i + " terminated", SDMSThread.SEVERITY_INFO);
} catch(InterruptedException ie) {
--i;
}
if(i == 0) {
shutdown();
}
}
while(true) {
try {
SDMSThread.doTrace(null, "Waiting for Listener", SDMSThread.SEVERITY_INFO);
ult.interrupt();
ult.join();
SDMSThread.doTrace(null, "Listener terminated", SDMSThread.SEVERITY_INFO);
if (svt != null) {
SDMSThread.doTrace(null, "Waiting for ServiceThread", SDMSThread.SEVERITY_INFO);
svt.interrupt();
svt.join();
SDMSThread.doTrace(null, "ServiceThread terminated", SDMSThread.SEVERITY_INFO);
}
if (dbct != null) {
SDMSThread.doTrace(null, "Waiting for DBCleanup", SDMSThread.SEVERITY_INFO);
if (dbct.isAlive()) {
dbct.join();
}
SDMSThread.doTrace(null, "DBCleanup Thread terminated", SDMSThread.SEVERITY_INFO);
}
} catch(InterruptedException ie) {
continue;
}
break;
}
System.exit(0);
}
}
| src/server/Server.java | /*
Copyright (c) 2000-2013 "independIT Integrative Technologies GmbH",
Authors: Ronald Jeninga, Dieter Stubler
schedulix Enterprise Job Scheduling System
independIT Integrative Technologies GmbH [http://www.independit.de]
mailto:[email protected]
This file is part of schedulix
schedulix is free software:
you can redistribute it and/or modify it under the terms of the
GNU Affero General Public License as published by the
Free Software Foundation, either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.independit.scheduler.server;
import java.io.*;
import java.util.*;
import java.lang.*;
import java.net.*;
import java.sql.*;
import de.independit.scheduler.server.util.*;
import de.independit.scheduler.server.repository.*;
import de.independit.scheduler.server.exception.*;
import de.independit.scheduler.server.output.*;
public class Server
{
public final static String __version = "@(#) $Id: Server.java,v 2.17.2.6 2013/09/11 11:50:39 ronald Exp $";
private ThreadGroup utg;
private SSLListenThread ssllt;
private OrdinaryListenThread ult;
private OrdinaryListenThread svt;
private SchedulingThread wst;
private GarbageThread gst;
private TimerThread tt;
private TriggerThread trt;
private ThreadGroup wg;
private SyncFifo cmdQueue;
private SyncFifo roCmdQueue;
private WorkerThread[] wt;
private ShutdownThread shutt;
private RenewTicketThread rtt;
private DBCleanupThread dbct;
private String iniFile;
private SystemEnvironment env;
public Server(String inifile, boolean adminMode, boolean protectMode, String programLevel)
{
iniFile = inifile;
Properties props = new Properties();
InputStream ini;
ini = Server.class.getResourceAsStream(inifile);
try {
if(ini == null)
ini = new FileInputStream(inifile);
props.load(ini);
} catch(FileNotFoundException fnf) {
SDMSThread.doTrace(null, "Properties File not found : " + fnf, SDMSThread.SEVERITY_FATAL);
} catch(IOException ioe) {
SDMSThread.doTrace(null, "Error loading Properties file: " + ioe, SDMSThread.SEVERITY_FATAL);
}
for (Enumeration e = props.propertyNames() ; e.hasMoreElements() ;) {
String k = (String) e.nextElement();
if(k.equals(SystemEnvironment.S_DBPASSWD)) continue;
if(k.equals(SystemEnvironment.S_SYSPASSWD)) continue;
if(k.equals(SystemEnvironment.S_KEYSTOREPASSWORD)) continue;
if(k.equals(SystemEnvironment.S_TRUSTSTOREPASSWORD)) continue;
SDMSThread.doTrace(null, k + "=" + props.getProperty(k), SDMSThread.SEVERITY_INFO);
}
env = new SystemEnvironment(props, programLevel);
if(adminMode) env.disableConnect();
if(protectMode) SystemEnvironment.setProtectMode();
SystemEnvironment.server = this;
}
public String getIniFile()
{
return iniFile;
}
private void initShutdownThread()
{
shutt = new ShutdownThread(env, this);
Runtime r = Runtime.getRuntime();
r.addShutdownHook(shutt);
}
private void initRenewTicketThread() throws SDMSException
{
rtt = new RenewTicketThread(this);
SystemEnvironment.ticketThread = rtt;
rtt.initRenewTicketThread(env);
rtt.getTicket(rtt.pSysEnv);
}
private void startRenewTicketThread() throws SDMSException
{
SDMSThread.doTrace(null, "Starting Renew Ticket Thread", SDMSThread.SEVERITY_INFO);
rtt.start();
}
private void initDBCleanupThread() throws SDMSException
{
dbct = new DBCleanupThread(this);
SystemEnvironment.dbCleanupThread = dbct;
dbct.initDBCleanupThread(env);
}
private void startDBCleanupThread() throws SDMSException
{
SDMSThread.doTrace(null, "Starting Database Cleanup Thread", SDMSThread.SEVERITY_INFO);
dbct.start();
}
private void createRepository() throws SDMSException
{
new SDMSRepository(env);
}
private void initWorkers() throws SDMSException
{
wg = new ThreadGroup("WorkerThreads");
cmdQueue = new SyncFifo();
roCmdQueue = new SyncFifo();
SystemEnvironment.wg = wg;
}
private void startWorkers() throws SDMSException
{
int maxWorker;
SyncFifo q;
maxWorker = SystemEnvironment.maxWorker + SystemEnvironment.maxWriter;
SDMSThread.doTrace(null, "Starting " + maxWorker + " Worker Threads", SDMSThread.SEVERITY_INFO);
wt = new WorkerThread[maxWorker];
q = cmdQueue;
for(int i=0; i < maxWorker; ++i) {
if (i >= SystemEnvironment.maxWriter)
q = roCmdQueue;
wt[i] = new WorkerThread(env, wg, q, i);
wt[i].start();
}
}
private void initScheduling() throws SDMSException
{
wst = new SchedulingThread(env, cmdQueue);
wst.spinDelay = 50;
SystemEnvironment.sched = wst;
}
public void startScheduling() throws SDMSException
{
if(SystemEnvironment.getProtectMode()) return;
if(wst.isAlive()) return;
SDMSThread.doTrace(null, "Starting Scheduling Thread", SDMSThread.SEVERITY_INFO);
wst.start();
}
private void initGC() throws SDMSException
{
gst = new GarbageThread(env, cmdQueue);
SystemEnvironment.garb = gst;
}
private void startGC() throws SDMSException
{
SDMSThread.doTrace(null, "Starting Garbage Collection Thread", SDMSThread.SEVERITY_INFO);
gst.start();
}
private void initTT() throws SDMSException
{
trt = new TriggerThread(env, cmdQueue);
SystemEnvironment.tt = trt;
}
private void startTT() throws SDMSException
{
SDMSThread.doTrace(null, "Starting Trigger Thread", SDMSThread.SEVERITY_INFO);
trt.start();
}
private void initTimeScheduling() throws SDMSException
{
tt = new TimerThread(env, cmdQueue);
SystemEnvironment.timer = tt;
}
public void startTimeScheduling() throws SDMSException
{
if(SystemEnvironment.getProtectMode()) return;
if(tt.isAlive()) return;
SDMSThread.doTrace(null, "Starting Time Scheduling Thread", SDMSThread.SEVERITY_INFO);
tt.start();
}
private void initListener()
{
utg = new ThreadGroup(wg, "UserThreads");
SystemEnvironment.utg = utg;
}
private void startListener()
{
SDMSThread.doTrace(null, "Starting Listener Thread(s)", SDMSThread.SEVERITY_INFO);
if (SystemEnvironment.port != 0) {
ult = new OrdinaryListenThread(utg, SystemEnvironment.port, SystemEnvironment.maxConnects, cmdQueue, roCmdQueue, ListenThread.LISTENER);
ult.start();
} else {
SDMSThread.doTrace(null, "Standard communication Listener disabled", SDMSThread.SEVERITY_INFO);
ult = null;
}
if (SystemEnvironment.service_port != 0) {
svt = new OrdinaryListenThread(utg, SystemEnvironment.service_port, 1, cmdQueue, roCmdQueue, ListenThread.SERVICE);
svt.start();
} else {
SDMSThread.doTrace(null, "Service port Listener disabled", SDMSThread.SEVERITY_INFO);
svt = null;
}
if (SystemEnvironment.sslport != 0) {
ssllt = new SSLListenThread(utg, SystemEnvironment.sslport, SystemEnvironment.maxConnects, cmdQueue, roCmdQueue, ListenThread.LISTENER);
ssllt.start();
try {
Thread.sleep(1000);
} catch (java.lang.InterruptedException ie) { }
String[] prots = ssllt.getProtocols();
if (prots != null) {
SDMSThread.doTrace(null, "TLS Listen Thread started. Supported Protocols :", SDMSThread.SEVERITY_INFO);
for (int i = 0; i < prots.length; ++i)
SDMSThread.doTrace(null, "\t" + prots[i], SDMSThread.SEVERITY_INFO);
}
} else {
SDMSThread.doTrace(null, "SSL communication Listener disabled", SDMSThread.SEVERITY_INFO);
ssllt = null;
}
}
public void shutdown()
{
if(gst != null) {
if(gst.isAlive()) {
gst.do_stop();
SDMSThread.doTrace(null, "Stopped " + gst.toString(), SDMSThread.SEVERITY_INFO);
}
}
if(ult != null) {
if(ult.isAlive()) {
ult.do_stop();
}
killAll();
}
if(svt != null) {
if(svt.isAlive()) {
svt.do_stop();
}
killAll();
}
if(ssllt != null) {
if(ssllt.isAlive()) {
ssllt.do_stop();
}
killAll();
}
if(wst != null) {
if(wst.isAlive()) {
wst.do_stop();
SDMSThread.doTrace(null, "Stopped " + wst.toString(), SDMSThread.SEVERITY_INFO);
}
}
if(tt != null) {
if(tt.isAlive()) {
tt.do_stop();
SDMSThread.doTrace(null, "Stopped " + tt.toString(), SDMSThread.SEVERITY_INFO);
}
}
if(rtt != null) {
if(rtt.isAlive()) {
rtt.do_stop();
SDMSThread.doTrace(null, "Stopped " + rtt.toString(), SDMSThread.SEVERITY_INFO);
}
}
if (dbct != null) {
if (dbct.isAlive()) {
dbct.do_stop();
SDMSThread.doTrace(null, "Stopped " + dbct.toString(), SDMSThread.SEVERITY_INFO);
}
}
if(wt != null) {
for(int i = 0; i < wt.length; ++i) {
if(wt[i] != null) {
if(wt[i].isAlive()) {
wt[i].do_stop();
SDMSThread.doTrace(null, "Stopped " + wt[i].toString(), SDMSThread.SEVERITY_INFO);
}
}
}
}
shutt.do_stop();
}
public void killUser(int cid)
{
int numUser = utg.activeCount();
SDMSThread list[];
int i;
list = new SDMSThread[numUser];
utg.enumerate(list);
for(i = 0; i< numUser; i++) {
if(list[i] != null && list[i].id() == cid) {
list[i].do_stop();
}
}
}
public void killAll()
{
int numUser = utg.activeCount();
SDMSThread list[];
int i;
list = new SDMSThread[numUser];
utg.enumerate(list);
for(i = 0; i< numUser; i++) {
if(list[i] != null && list[i].isAlive()) {
list[i].do_stop();
SDMSThread.doTrace(null, "Stopped " + list[i].toString(), SDMSThread.SEVERITY_INFO);
}
}
}
public static synchronized Connection connectToDB(SystemEnvironment env) throws FatalException
{
String jdbcDriver = SystemEnvironment.jdbcDriver;
String dbUrl = SystemEnvironment.dbUrl;
String dbUser = SystemEnvironment.dbUser;
String dbPasswd = SystemEnvironment.dbPasswd;
Connection c;
if(jdbcDriver == null)
throw new FatalException(new SDMSMessage(env,
"03110181509", "No JDBC Driver Specified"));
if(dbUrl == null) throw new FatalException(new SDMSMessage(env,
"03110181510", "No JDBC URL Specified"));
try {
Class.forName(jdbcDriver);
} catch(ClassNotFoundException cnf) {
throw new FatalException(new SDMSMessage(env,
"03110181511", "Class $1 not Found", jdbcDriver));
}
try {
c = DriverManager.getConnection(dbUrl, dbUser, dbPasswd);
} catch(SQLException sqle) {
throw new FatalException(new SDMSMessage(env,
"03110181512", "Unable to connect to $1, $2", dbUrl, sqle.toString()));
}
try {
c.setAutoCommit(false);
} catch(SQLException sqle) {
throw new FatalException(new SDMSMessage(env,
"03202071128", "Cannot set autocommit off ($1)", sqle.toString()));
}
if (SystemEnvironment.SQUOTE == null) {
try {
final String driverName = c.getMetaData().getDriverName();
SDMSThread.doTrace(null, "JDBC Driver used : " + driverName, SDMSThread.SEVERITY_INFO);
if (driverName.startsWith("MySQL") || driverName.startsWith("MariaDB")) {
SystemEnvironment.SQUOTE = "`";
SystemEnvironment.EQUOTE = "`";
} else if (driverName.startsWith("Microsoft")) {
SystemEnvironment.SQUOTE = "[";
SystemEnvironment.EQUOTE = "]";
} else {
if (driverName.startsWith("PostgreSQL"))
SystemEnvironment.isPostgreSQL = true;
SystemEnvironment.SQUOTE = "";
SystemEnvironment.EQUOTE = "";
}
} catch (SQLException sqle) {
SDMSThread.doTrace(null, "Unknown JDBC Driver used; run into an exception while trying to determine the Driver Name : " + sqle.toString(), SDMSThread.SEVERITY_FATAL);
SystemEnvironment.SQUOTE = "";
SystemEnvironment.EQUOTE = "";
}
}
return c;
}
public void serverMain()
{
try {
initShutdownThread();
initRenewTicketThread();
try {
startRenewTicketThread();
} catch(SDMSException fe1) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03302061700",
"Fatal exception while starting TicketThread:\n$1", fe1.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
createRepository();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03202252201",
"Fatal exception while loading Repository:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
SDMSThread.doTrace(null, "Initializing System Threads", SDMSThread.SEVERITY_INFO);
initWorkers();
initListener();
initScheduling();
initTimeScheduling();
initTT();
initGC();
if (env.dbPreserveTime > 0)
initDBCleanupThread();
else
dbct = null;
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03202252202",
"Fatal exception while initializing System Threads:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startWorkers();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03202252203",
"Fatal exception while starting Workerthreads:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startScheduling();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03202252204",
"Fatal exception while starting SchedulingThread:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startTimeScheduling();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03206082124",
"Fatal exception while starting Time Scheduling:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startTT();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03407301455",
"Fatal exception while starting trigger thread:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
startGC();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03311120827",
"Fatal exception while starting garbage collector:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
try {
if (dbct != null) startDBCleanupThread();
} catch(SDMSException fe) {
SDMSThread.doTrace(null, (new SDMSMessage(env, "03311141139",
"Fatal exception while starting dbCleanupThread:\n$1", fe.toString())).toString(), SDMSThread.SEVERITY_FATAL);
}
startListener();
SDMSMessage m = new SDMSMessage(env, "03110212341", "-- $1 -- $2 -- $3 -- ready --",
"SDMS", "Server", "Systems");
SDMSThread.doTrace(null, m.toString(), SDMSThread.SEVERITY_INFO);
for(int i = 0; i < wt.length; ++i) {
try {
wt[i].join();
SDMSThread.doTrace(null, "Worker " + i + " terminated", SDMSThread.SEVERITY_INFO);
} catch(InterruptedException ie) {
--i;
}
if(i == 0) {
shutdown();
}
}
while(true) {
try {
SDMSThread.doTrace(null, "Waiting for Listener", SDMSThread.SEVERITY_INFO);
ult.interrupt();
ult.join();
SDMSThread.doTrace(null, "Listener terminated", SDMSThread.SEVERITY_INFO);
if (svt != null) {
SDMSThread.doTrace(null, "Waiting for ServiceThread", SDMSThread.SEVERITY_INFO);
svt.interrupt();
svt.join();
SDMSThread.doTrace(null, "ServiceThread terminated", SDMSThread.SEVERITY_INFO);
}
if (dbct != null) {
SDMSThread.doTrace(null, "Waiting for DBCleanup", SDMSThread.SEVERITY_INFO);
if (dbct.isAlive()) {
dbct.join();
}
SDMSThread.doTrace(null, "DBCleanup Thread terminated", SDMSThread.SEVERITY_INFO);
}
} catch(InterruptedException ie) {
continue;
}
break;
}
System.exit(0);
}
}
| corrected indentation
| src/server/Server.java | corrected indentation |
|
Java | agpl-3.0 | d2c0b39e238ac8ab2155e03f7db25833b9e9848b | 0 | arenaoftitans/arena-of-titans,arenaoftitans/arena-of-titans,arenaoftitans/arena-of-titans | package dernierelignegameengine;
import java.util.Set;
import java.util.Objects;
/**
* <b>Represents one of the squares on the board.</b>
*
* <div>
* This class describes a square with the following information :
* <ul>
* <li>its coordinates on the board matrix,</li>
* <li>if it is occupied or not,</li>
* <li>its color.</li>
* </ul>
* </div>
*
* @author "Dernière Ligne" first development team
* @version 1.0
*/
public class Square {
/**
* Used to know if there is a player on this square.
*
* @since 1.0
*/
private boolean occupied;
/**
* The x coordinate of this square in the board matrix it belongs to.<br/>
* Once initialized, it cannot be changed.
*
* @since 1.0
*/
public final int x;
/**
* The y coordinate of this square in the board matrix it belongs to.<br/>
* Once initialized, it cannot be changed.
*
* @since 1.0
*/
public final int y;
/**
* The color of this square.<br/>
* Once initialized, it cannot be changed.
*
* @see Color
* @since 1.0
*/
public final Color color;
/**
* <b>Constructor initializing the square.</b>
* <div>
* The occupied state is false by default.
* </div>
*
* @param x
* The x coordinate of this square in the board matrix.
* @param y
* The y coordinate of this square in the board matrix.
* @param color
* The color of this square.
*
* @see Color
*
* @see Square#color
* @see Square#occupied
* @see Square#x
* @see Square#y
*
* @since 1.0
*/
public Square(int x, int y, Color color) {
this.x = x;
this.y = y;
this.color = color;
this.occupied = false;
}
/**
* <b>This method returns the occupied state of the square.</b>
*
* @return
* The occupied state of the square.
*
* @see Square#occupied
*
* @since 1.0
*/
public boolean isOccupied() {
return occupied;
}
/**
* <b>Changes the occupied state of the square to true.</b>
*
* @see Square#occupied
*
* @since 1.0
*/
public void setAsOccupied() {
this.occupied = true;
}
//TODO possibly merge these methods.
/**
* <b>Changes the occupied state of this square to false.</b>
*
* @see Square#occupied
*
* @since 1.0
*/
public void empty() {
occupied = false;
}
/**
* <b>Determines if a card with the given set of possible colors can aim for this square.</b>
* <div>
* False will be returned if the square is occupied or if its color is not contained in the given set of colors.
* </div>
*
* @param possibleSquaresColor
* The set of colors given.
*
* @return
* True if its possible with the set of given colors to aim for this square.
*
* @see Card#possibleSquaresColor
*
* @see Square#color
* @see Square#isOccupied()
*
* @since 1.0
*/
public boolean canMoveTo(Set<Color> possibleSquaresColor) {
return !isOccupied() && possibleSquaresColor.contains(this.color);
}
/**
* <b>Returns a customized hashcode.</b>
* <div>
* The value of the customized hashcode is : ((7 * 11 + x) * 11 + y) * 11 + hash(color) where hash is the default Objects.hashCode() method.
* </div>
*
* @return
* A customized hashcode.
*
* @see Square#color
* @see Square#x
* @see Square#y
*
* @since 1.0
*/
//TODO see for one line ?
@Override
public int hashCode() {
int hash = 7;
hash = 11 * hash + this.x;
hash = 11 * hash + this.y;
hash = 11 * hash + Objects.hashCode(this.color);
return hash;
}
//TODO simplify if
/**
* <b>Returns true if the given object is the same square.</b>
* <div>
* Returns true if the object is a non-null square with the same x,y and color than this square.<br/>
* Return false otherwise.
* </div>
*
* @param obj
* The object to compare to this square.
*
* @return
* True if the given object is the same square.
*
* @see Square#color
* @see Square#x
* @see Square#y
*
* @since 1.0
*/
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Square other = (Square) obj;
if (this.x != other.x) {
return false;
}
if (this.y != other.y) {
return false;
}
if (this.color != other.color) {
return false;
}
return true;
}
/**
* <b>Return a human readable string for this square.</b>
* <div>
* The string is : Square{occupiedState,x,y,color}
* </div>
*
* @return
* The custom human readable string.
*
* @see Square#color
* @see Square#occupied
* @see Square#x
* @see Square#y
*
* @since 1.0
*/
@Override
public String toString() {
return "Square{" + "occupied=" + occupied + ", x=" + x + ", y=" + y + ", color=" + color + '}';
}
}
| src/dernierelignegameengine/Square.java | package dernierelignegameengine;
import java.util.Set;
import java.util.Objects;
public class Square {
private boolean occupied = false;
public final int x;
public final int y;
public final Color color;
public Square(int x, int y, Color color) {
this.x = x;
this.y = y;
this.color = color;
}
public boolean isOccupied() {
return occupied;
}
public void setAsOccupied() {
this.occupied = true;
}
public boolean canMoveTo(Set<Color> possibleSquaresColor) {
return !isOccupied() && possibleSquaresColor.contains(this.color);
}
public void empty() {
occupied = false;
}
@Override
public int hashCode() {
int hash = 7;
hash = 11 * hash + this.x;
hash = 11 * hash + this.y;
hash = 11 * hash + Objects.hashCode(this.color);
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Square other = (Square) obj;
if (this.x != other.x) {
return false;
}
if (this.y != other.y) {
return false;
}
if (this.color != other.color) {
return false;
}
return true;
}
@Override
public String toString() {
return "Square{" + "occupied=" + occupied + ", x=" + x + ", y=" + y + ", color=" + color + '}';
}
}
| doc(Square.java): Commenting using the Javadoc convention. | src/dernierelignegameengine/Square.java | doc(Square.java): Commenting using the Javadoc convention. |
|
Java | lgpl-2.1 | 63e9ee9e6c83ad735f596c7e37ea10c0cddb76d3 | 0 | julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine | package org.intermine.web.logic.widget;
/*
* Copyright (C) 2002-2008 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.intermine.metadata.Model;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.Results;
import org.intermine.objectstore.query.ResultsRow;
import org.intermine.util.StringUtil;
import org.intermine.util.TypeUtil;
import org.intermine.web.logic.WebUtil;
import org.intermine.web.logic.bag.InterMineBag;
/**
* @author Julie Sullivan
*/
public class EnrichmentWidget extends Widget
{
private String max, filters, filterLabel, errorCorrection;
private String label;
private String externalLink, externalLinkLabel;
private String append;
private ArrayList<Map> resultMaps = new ArrayList<Map>();
private InterMineBag bag;
private int notAnalysed;
private ObjectStore os;
/**
* {@inheritDoc}
*/
public void process(InterMineBag imbag, ObjectStore ost) {
try {
this.bag = imbag;
this.os = ost;
Class<?> clazz = TypeUtil.instantiate(getDataSetLoader());
Constructor<?> constr = clazz.getConstructor(new Class[]
{
InterMineBag.class, ObjectStore.class, String.class
});
EnrichmentWidgetLdr ldr = (EnrichmentWidgetLdr) constr.newInstance(new Object[]
{
bag, os, getSelectedExtraAttribute()
});
resultMaps = WebUtil.statsCalc(os, ldr, bag, new Double(0 + max), errorCorrection);
int analysedTotal = ((Integer) (resultMaps.get(3)).get("widgetTotal")).intValue();
setNotAnalysed(bag.getSize() - analysedTotal);
} catch (ObjectStoreException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NumberFormatException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SecurityException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchMethodException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InstantiationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InvocationTargetException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* @return the label
*/
public String getLabel() {
return label;
}
/**
* @param label the label to set
*/
public void setLabel(String label) {
this.label = label;
}
/**
* @return the filters
*/
public String getFilters() {
return filters;
}
/**
* @param filters list of filters to display on the widget
*/
public void setFilters(String filters) {
this.filters = filters;
}
/**
* @return the label for the filters
*/
public String getFilterLabel() {
return filterLabel;
}
/**
* @param filterLabel the label for the filters
*/
public void setFilterLabel(String filterLabel) {
this.filterLabel = filterLabel;
}
/**
* @return the maximum value this widget will display
*/
public String getMax() {
return max;
}
/**
* @param max maximum value this widget will display
*/
public void setMax(String max) {
this.max = max;
}
/**
* Return an XML String of this Type object
* @return a String version of this WebConfig object
*/
public String toString() {
return "< title=\"" + getTitle() + "\" link=\"" + getLink() + "\" ldr=\""
+ getDataSetLoader() + "\"/>";
}
/**
* {@inheritDoc}
*/
public String getExternalLink() {
return externalLink;
}
/**
* {@inheritDoc}
*/
public void setExternalLink(String externalLink) {
this.externalLink = externalLink;
}
/**
* {@inheritDoc}
*/
public Map<String, Collection> getExtraAttributes(InterMineBag imBag, ObjectStore os) {
Map<String, Collection> returnMap = new HashMap<String, Collection>();
returnMap.put(getFilterLabel(), Arrays.asList(getFilters().split(",")));
return returnMap;
}
/**
*
* @return List of column labels
*/
public List<String> getColumns() {
return Arrays.asList(new String[]
{
"", label, "p-Value"
});
}
/**
* {@inheritDoc}
*/
public List<List<String[]>> getFlattenedResults() {
if (resultMaps != null && !resultMaps.isEmpty()) {
Map<String, BigDecimal> pvalues = resultMaps.get(0);
Map<String, Long> totals = resultMaps.get(1);
Map<String, String> labelToId = resultMaps.get(2);
List<List<String[]>> flattenedResults = new LinkedList<List<String[]>>();
for (String id : pvalues.keySet()) {
List<String[]> row = new LinkedList();
BigDecimal bd = pvalues.get(id);
row.add(new String[]
{
"<input name=\"selected\" value=\"" + id + "\" id=\"selected_" + id
+ "\" type=\"checkbox\">"
});
String label = labelToId.get(id);
if (externalLink != null && !externalLink.equals("")) {
label += " <a href=\"" + externalLink + id
+ "\" target=\"_new\" class=\"extlink\">[";
if (externalLinkLabel != null && !externalLinkLabel.equals("")) {
label += externalLinkLabel;
}
label += id + "]</a>";
}
row.add(new String[] {label});
row.add(new String[] {bd.setScale(7,
BigDecimal.ROUND_HALF_EVEN).toEngineeringString()});
row.add(new String[]
{
totals.get(id).toString(),
"widgetAction.do?key=" + id + "&bagName=" + bag.getName() + "&link="
+ getLink()
});
flattenedResults.add(row);
}
return flattenedResults;
}
return null;
}
/**
* {@inheritDoc}
*/
public List<List<String>> getExportResults(String[]selected) throws Exception {
Map<String, BigDecimal> pvalues = resultMaps.get(0);
Map<String, Long> totals = resultMaps.get(1);
Map<String, String> labelToId = resultMaps.get(2);
List<List<String>> exportResults = new ArrayList<List<String>>();
List<String> selectedIds = Arrays.asList(selected);
Class<?> clazz = TypeUtil.instantiate(getDataSetLoader());
Constructor<?> constr = clazz.getConstructor(new Class[]
{
InterMineBag.class, ObjectStore.class, String.class
});
EnrichmentWidgetLdr ldr = (EnrichmentWidgetLdr) constr.newInstance(new Object[]
{
bag, os, getSelectedExtraAttribute()
});
Model model = os.getModel();
Class<?> bagCls = Class.forName(model.getPackageName() + "." + bag.getType());
QueryClass qc = new QueryClass(bagCls);
Query q = ldr.getExportQuery(selectedIds);
Results res = os.execute(q);
Iterator iter = res.iterator();
HashMap<String, List<String>> termsToIds = new HashMap();
while (iter.hasNext()) {
ResultsRow resRow = (ResultsRow) iter.next();
String term = resRow.get(0).toString();
String id = resRow.get(1).toString();
if (!termsToIds.containsKey(term)) {
termsToIds.put(term, new ArrayList<String>());
}
termsToIds.get(term).add(id);
}
for (String id : selectedIds) {
if (labelToId.get(id) != null) {
List row = new LinkedList();
row.add(id);
BigDecimal bd = pvalues.get(id);
Double d = bd.doubleValue();
row.add(d);
List<String> ids = termsToIds.get(id);
StringBuffer sb = new StringBuffer();
for (String term : ids) {
if (sb.length() > 0) {
sb.append(", ");
}
sb.append(term);
}
row.add(sb.toString());
exportResults.add(row);
}
}
return exportResults;
}
/**
* @return the errorCorrection
*/
public String getErrorCorrection() {
return errorCorrection;
}
/**
* @param errorCorrection the errorCorrection to set
*/
public void setErrorCorrection(String errorCorrection) {
this.errorCorrection = errorCorrection;
}
/**
* {@inheritDoc}
*/
public boolean getHasResults() {
return (resultMaps.get(0) != null && resultMaps.get(0).size() > 0);
}
/**
* just used for tiffin for now
* @return the text to append to the end of the external link
*/
public String getAppend() {
return append;
}
/**
* @param append the text to append
*/
public void setAppend(String append) {
this.append = append;
}
/**
* @return the externalLinkLabel
*/
public String getExternalLinkLabel() {
return externalLinkLabel;
}
/**
* @param externalLinkLabel the externalLinkLabel to set
*/
public void setExternalLinkLabel(String externalLinkLabel) {
this.externalLinkLabel = externalLinkLabel;
}
/**
* {@inheritDoc}
*/
public int getNotAnalysed() {
return notAnalysed;
}
/**
* {@inheritDoc}
*/
public void setNotAnalysed(int notAnalysed) {
this.notAnalysed = notAnalysed;
}
}
| intermine/web/main/src/org/intermine/web/logic/widget/EnrichmentWidget.java | package org.intermine.web.logic.widget;
/*
* Copyright (C) 2002-2008 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.intermine.metadata.Model;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.Results;
import org.intermine.objectstore.query.ResultsRow;
import org.intermine.util.StringUtil;
import org.intermine.util.TypeUtil;
import org.intermine.web.logic.WebUtil;
import org.intermine.web.logic.bag.InterMineBag;
/**
* @author Julie Sullivan
*/
public class EnrichmentWidget extends Widget
{
private String max, filters, filterLabel, errorCorrection;
private String label;
private String externalLink, externalLinkLabel;
private String append;
private ArrayList<Map> resultMaps = new ArrayList<Map>();
private InterMineBag bag;
private int notAnalysed;
private ObjectStore os;
/**
* {@inheritDoc}
*/
public void process(InterMineBag imbag, ObjectStore ost) {
try {
this.bag = imbag;
this.os = ost;
Class<?> clazz = TypeUtil.instantiate(getDataSetLoader());
Constructor<?> constr = clazz.getConstructor(new Class[]
{
InterMineBag.class, ObjectStore.class, String.class
});
EnrichmentWidgetLdr ldr = (EnrichmentWidgetLdr) constr.newInstance(new Object[]
{
bag, os, getSelectedExtraAttribute()
});
resultMaps = WebUtil.statsCalc(os, ldr, bag, new Double(0 + max), errorCorrection);
int analysedTotal = ((Integer) (resultMaps.get(3)).get("widgetTotal")).intValue();
setNotAnalysed(bag.getSize() - analysedTotal);
} catch (ObjectStoreException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NumberFormatException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SecurityException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchMethodException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InstantiationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InvocationTargetException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* @return the label
*/
public String getLabel() {
return label;
}
/**
* @param label the label to set
*/
public void setLabel(String label) {
this.label = label;
}
/**
* @return the filters
*/
public String getFilters() {
return filters;
}
/**
* @param filters list of filters to display on the widget
*/
public void setFilters(String filters) {
this.filters = filters;
}
/**
* @return the label for the filters
*/
public String getFilterLabel() {
return filterLabel;
}
/**
* @param filterLabel the label for the filters
*/
public void setFilterLabel(String filterLabel) {
this.filterLabel = filterLabel;
}
/**
* @return the maximum value this widget will display
*/
public String getMax() {
return max;
}
/**
* @param max maximum value this widget will display
*/
public void setMax(String max) {
this.max = max;
}
/**
* Return an XML String of this Type object
* @return a String version of this WebConfig object
*/
public String toString() {
return "< title=\"" + getTitle() + "\" link=\"" + getLink() + "\" ldr=\""
+ getDataSetLoader() + "\"/>";
}
/**
* {@inheritDoc}
*/
public String getExternalLink() {
return externalLink;
}
/**
* {@inheritDoc}
*/
public void setExternalLink(String externalLink) {
this.externalLink = externalLink;
}
/**
* {@inheritDoc}
*/
public Map<String, Collection> getExtraAttributes(InterMineBag imBag, ObjectStore os) {
Map<String, Collection> returnMap = new HashMap<String, Collection>();
returnMap.put(getFilterLabel(), Arrays.asList(getFilters().split(",")));
return returnMap;
}
/**
*
* @return List of column labels
*/
public List<String> getColumns() {
return Arrays.asList(new String[]
{
"", label, "p-Value"
});
}
/**
* {@inheritDoc}
*/
public List<List<String[]>> getFlattenedResults() {
if (resultMaps != null && !resultMaps.isEmpty()) {
Map<String, BigDecimal> pvalues = resultMaps.get(0);
Map<String, Long> totals = resultMaps.get(1);
Map<String, String> labelToId = resultMaps.get(2);
List<List<String[]>> flattenedResults = new LinkedList<List<String[]>>();
for (String id : pvalues.keySet()) {
List<String[]> row = new LinkedList();
BigDecimal bd = pvalues.get(id);
row.add(new String[]
{
"<input name=\"selected\" value=\"" + id + "\" id=\"selected_" + id
+ "\" type=\"checkbox\">"
});
String label = labelToId.get(id);
if (externalLink != null && !externalLink.equals("")) {
label += " <a href=\"" + externalLink + id
+ "\" target=\"_new\" class=\"extlink\">[";
if (externalLinkLabel != null && !externalLinkLabel.equals("")) {
label += externalLinkLabel;
}
label += id + "]</a>";
}
row.add(new String[] {label});
row.add(new String[] {bd.setScale(7,
BigDecimal.ROUND_HALF_EVEN).toEngineeringString()});
row.add(new String[]
{
totals.get(id).toString(),
"widgetAction.do?key=" + id + "&bagName=" + bag.getName() + "&link="
+ getLink()
});
flattenedResults.add(row);
}
return flattenedResults;
}
return null;
}
/**
* {@inheritDoc}
*/
public List<List<String>> getExportResults(String[]selected) throws Exception {
Map<String, BigDecimal> pvalues = resultMaps.get(0);
Map<String, Long> totals = resultMaps.get(1);
Map<String, String> labelToId = resultMaps.get(2);
List<List<String>> exportResults = new ArrayList<List<String>>();
List<String> selectedIds = Arrays.asList(selected);
Class<?> clazz = TypeUtil.instantiate(getDataSetLoader());
Constructor<?> constr = clazz.getConstructor(new Class[]
{
InterMineBag.class, ObjectStore.class, String.class
});
EnrichmentWidgetLdr ldr = (EnrichmentWidgetLdr) constr.newInstance(new Object[]
{
bag, os, getSelectedExtraAttribute()
});
Model model = os.getModel();
Class<?> bagCls = Class.forName(model.getPackageName() + "." + bag.getType());
QueryClass qc = new QueryClass(bagCls);
Query q = ldr.getExportQuery(selectedIds);
Results res = os.execute(q);
Iterator iter = res.iterator();
HashMap<String, List<String>> termsToIds = new HashMap();
while (iter.hasNext()) {
ResultsRow resRow = (ResultsRow) iter.next();
String term = resRow.get(0).toString();
String id = resRow.get(1).toString();
if (!termsToIds.containsKey(term)) {
termsToIds.put(term, new ArrayList<String>());
}
termsToIds.get(term).add(id);
}
for (String id : selectedIds) {
if (labelToId.get(id) != null) {
List row = new LinkedList();
row.add(labelToId.get(id));
BigDecimal bd = pvalues.get(id);
//bd.setScale(7, BigDecimal.ROUND_HALF_UP).doubleValue();
Double d = bd.doubleValue();
row.add(d);
List<String> ids = termsToIds.get(id);
StringBuffer sb = new StringBuffer();
for (String term : ids) {
if (sb.length() > 0) {
sb.append(", ");
}
sb.append(term);
}
row.add(sb.toString());
exportResults.add(row);
}
}
return exportResults;
}
/**
* @return the errorCorrection
*/
public String getErrorCorrection() {
return errorCorrection;
}
/**
* @param errorCorrection the errorCorrection to set
*/
public void setErrorCorrection(String errorCorrection) {
this.errorCorrection = errorCorrection;
}
/**
* {@inheritDoc}
*/
public boolean getHasResults() {
return (resultMaps.get(0) != null && resultMaps.get(0).size() > 0);
}
/**
* just used for tiffin for now
* @return the text to append to the end of the external link
*/
public String getAppend() {
return append;
}
/**
* @param append the text to append
*/
public void setAppend(String append) {
this.append = append;
}
/**
* @return the externalLinkLabel
*/
public String getExternalLinkLabel() {
return externalLinkLabel;
}
/**
* @param externalLinkLabel the externalLinkLabel to set
*/
public void setExternalLinkLabel(String externalLinkLabel) {
this.externalLinkLabel = externalLinkLabel;
}
/**
* {@inheritDoc}
*/
public int getNotAnalysed() {
return notAnalysed;
}
/**
* {@inheritDoc}
*/
public void setNotAnalysed(int notAnalysed) {
this.notAnalysed = notAnalysed;
}
}
| export the identifier field instead of the name field
Former-commit-id: 735e2964ec1d4e3bfe51646293ed0f6567376f09 | intermine/web/main/src/org/intermine/web/logic/widget/EnrichmentWidget.java | export the identifier field instead of the name field |
|
Java | apache-2.0 | 48ab3962a6a8abda274fff3b447243fcc04538d5 | 0 | mafulafunk/wicket,mafulafunk/wicket,mafulafunk/wicket | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.html.link;
import org.apache.wicket.Page;
import org.apache.wicket.util.io.IClusterable;
/**
* Interface that is used to implement delayed page linking. The getPage() method returns an
* instance of Page when a link is actually clicked (thus avoiding the need to create a destination
* Page object for every link on a given Page in advance). The getPageIdentity() method returns the
* subclass of Page that getPage() will return if and when it is called.
* <p>
* This way of arranging things is useful in determining whether a link links to a given page, which
* is in turn useful for deciding how to display the link (because links in a navigation which link
* to a page itself are not useful and generally should instead indicate where the user is in the
* navigation).
* <p>
* To understand how getPageIdentity() is used in this way, take a look at the Link.linksTo() method
* and its override in PageLink. Also, see the documentation for getPageIdentity() below.
*
* @see Link#linksTo(Page)
* @author Jonathan Locke
* @deprecated Use {@link org.apache.wicket.core.request.handler.PageProvider}
*/
@Deprecated
public interface IPageLink extends IClusterable
{
/**
* Gets the page to go to.
*
* @return The page to go to.
*/
Page getPage();
/**
* Gets the class of the destination page, which serves as a form of identity that can be used
* to determine if a link is on the same Page that it links to. When Pages are parameterized,
* the Link.linksTo() method should be overridden instead.
* <p>
* A page's identity is important because links which are on the same page that they link to
* often need to be displayed in a different way to indicate that they are 'disabled' and don't
* go anywhere. Links can be manually disabled by calling Link.setDisabled(). Links which have
* setAutoEnable(true) will automatically enable or disable themselves depending on whether or
* not Link.linksTo() returns true. The default implementation of PageLink.linksTo() therefore
* looks like this:
*
* <pre>
* private final IPageLink pageLink;
*
* public boolean linksTo(final Page page)
* {
* return page.getClass() == pageLink.getPageIdentity();
* }
* </pre>
*
* @return The class of page linked to, as a form of identity
* @see Link#linksTo(Page)
*/
Class<? extends Page> getPageIdentity();
}
| wicket-core/src/main/java/org/apache/wicket/markup/html/link/IPageLink.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.html.link;
import org.apache.wicket.Page;
import org.apache.wicket.util.io.IClusterable;
/**
* Interface that is used to implement delayed page linking. The getPage() method returns an
* instance of Page when a link is actually clicked (thus avoiding the need to create a destination
* Page object for every link on a given Page in advance). The getPageIdentity() method returns the
* subclass of Page that getPage() will return if and when it is called.
* <p>
* This way of arranging things is useful in determining whether a link links to a given page, which
* is in turn useful for deciding how to display the link (because links in a navigation which link
* to a page itself are not useful and generally should instead indicate where the user is in the
* navigation).
* <p>
* To understand how getPageIdentity() is used in this way, take a look at the Link.linksTo() method
* and its override in PageLink. Also, see the documentation for getPageIdentity() below.
*
* @see Link#linksTo(Page)
* @author Jonathan Locke
*/
public interface IPageLink extends IClusterable
{
/**
* Gets the page to go to.
*
* @return The page to go to.
*/
Page getPage();
/**
* Gets the class of the destination page, which serves as a form of identity that can be used
* to determine if a link is on the same Page that it links to. When Pages are parameterized,
* the Link.linksTo() method should be overridden instead.
* <p>
* A page's identity is important because links which are on the same page that they link to
* often need to be displayed in a different way to indicate that they are 'disabled' and don't
* go anywhere. Links can be manually disabled by calling Link.setDisabled(). Links which have
* setAutoEnable(true) will automatically enable or disable themselves depending on whether or
* not Link.linksTo() returns true. The default implementation of PageLink.linksTo() therefore
* looks like this:
*
* <pre>
* private final IPageLink pageLink;
*
* public boolean linksTo(final Page page)
* {
* return page.getClass() == pageLink.getPageIdentity();
* }
* </pre>
*
* @return The class of page linked to, as a form of identity
* @see Link#linksTo(Page)
*/
Class<? extends Page> getPageIdentity();
} | Mark IPageLink as deprecated
| wicket-core/src/main/java/org/apache/wicket/markup/html/link/IPageLink.java | Mark IPageLink as deprecated |
|
Java | apache-2.0 | 1d71b45d116e1ea85dea03070370f3f9ece16d6a | 0 | linqs/psl,linqs/psl,linqs/psl | /*
* This file is part of the PSL software.
* Copyright 2011-2015 University of Maryland
* Copyright 2013-2019 The Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.linqs.psl.reasoner.sgd.term;
import org.linqs.psl.model.atom.RandomVariableAtom;
import org.linqs.psl.reasoner.term.Hyperplane;
import org.linqs.psl.reasoner.term.ReasonerTerm;
import org.apache.commons.lang.mutable.MutableInt;
import java.nio.ByteBuffer;
import java.util.Map;
/**
* A term in the objective to be optimized by a SGDReasoner.
*/
public class SGDObjectiveTerm implements ReasonerTerm {
private boolean squared;
private boolean hinge;
private float weight;
private float constant;
private float learningRate;
private short size;
private float[] coefficients;
private RandomVariableAtom[] variables;
public SGDObjectiveTerm(boolean squared, boolean hinge,
Hyperplane<RandomVariableAtom> hyperplane,
float weight, float learningRate) {
this.squared = squared;
this.hinge = hinge;
this.weight = weight;
this.learningRate = learningRate;
size = (short)hyperplane.size();
coefficients = hyperplane.getCoefficients();
variables = hyperplane.getVariables();
constant = hyperplane.getConstant();
}
@Override
public int size() {
return size;
}
public float evaluate() {
float dot = dot();
if (squared && hinge) {
// weight * [max(0.0, coeffs^T * x - constant)]^2
return weight * (float)Math.pow(Math.max(0.0f, dot), 2);
} else if (squared && !hinge) {
// weight * [coeffs^T * x - constant]^2
return weight * (float)Math.pow(dot, 2);
} else if (!squared && hinge) {
// weight * max(0.0, coeffs^T * x - constant)
return weight * Math.max(0.0f, dot);
} else {
// weight * (coeffs^T * x - constant)
return weight * dot;
}
}
public void minimize(int iteration) {
for (int i = 0 ; i < size; i++) {
float dot = dot();
float gradient = computeGradient(iteration, i, dot);
gradient *= (learningRate / iteration);
variables[i].setValue(Math.max(0.0f, Math.min(1.0f, variables[i].getValue() - gradient)));
}
}
private float computeGradient(int iteration, int varId, float dot) {
if (hinge && dot <= 0.0f) {
return 0.0f;
}
if (squared && hinge) {
return weight * 2.0f * dot * coefficients[varId];
} else if (squared && !hinge) {
return weight * 2.0f * dot * coefficients[varId];
} else if (!squared && hinge) {
return weight * coefficients[varId];
} else {
return weight * coefficients[varId];
}
}
private float dot() {
float value = 0.0f;
for (int i = 0; i < size; i++) {
value += coefficients[i] * variables[i].getValue();
}
return value - constant;
}
/**
* The number of bytes that writeFixedValues() will need to represent this term.
* This is just all the member datum.
*/
public int fixedByteSize() {
int bitSize =
Byte.SIZE // squared
+ Byte.SIZE // hinge
+ Float.SIZE // weight
+ Float.SIZE // constant
+ Float.SIZE // learningRate
+ Short.SIZE // size
+ size * (Float.SIZE + Integer.SIZE); // coefficients + variables
return bitSize / 8;
}
/**
* Write a binary representation of the fixed values of this term to a buffer.
* Note that the variables are written using their Object hashcode.
*/
public void writeFixedValues(ByteBuffer fixedBuffer) {
fixedBuffer.put((byte)(squared ? 1 : 0));
fixedBuffer.put((byte)(hinge ? 1 : 0));
fixedBuffer.putFloat(weight);
fixedBuffer.putFloat(constant);
fixedBuffer.putFloat(learningRate);
fixedBuffer.putShort(size);
for (int i = 0; i < size; i++) {
fixedBuffer.putFloat(coefficients[i]);
fixedBuffer.putInt(System.identityHashCode(variables[i]));
}
}
/**
* Assume the term that will be next read from the buffers.
*/
public void read(ByteBuffer fixedBuffer, ByteBuffer volatileBuffer,
Map<MutableInt, RandomVariableAtom> rvaMap, MutableInt intBuffer) {
squared = (fixedBuffer.get() == 1);
hinge = (fixedBuffer.get() == 1);
weight = fixedBuffer.getFloat();
constant = fixedBuffer.getFloat();
learningRate = fixedBuffer.getFloat();
size = fixedBuffer.getShort();
// Make sure that there is enough room for all these variables.
if (coefficients.length < size) {
coefficients = new float[size];
variables = new RandomVariableAtom[size];
}
for (int i = 0; i < size; i++) {
coefficients[i] = fixedBuffer.getFloat();
intBuffer.setValue(fixedBuffer.getInt());
variables[i] = rvaMap.get(intBuffer);
}
}
@Override
public String toString() {
// weight * [max(coeffs^T * x - constant, 0.0)]^2
StringBuilder builder = new StringBuilder();
builder.append(weight);
builder.append(" * ");
if (hinge) {
builder.append(" * max(0.0, ");
}
for (int i = 0; i < size; i++) {
builder.append("(");
builder.append(coefficients[i]);
builder.append(" * ");
builder.append(variables[i]);
builder.append(")");
if (i != size - 1) {
builder.append(" + ");
}
}
builder.append(" - ");
builder.append(constant);
if (hinge) {
builder.append(")");
}
if (squared) {
builder.append("^2");
}
return builder.toString();
}
}
| psl-core/src/main/java/org/linqs/psl/reasoner/sgd/term/SGDObjectiveTerm.java | /*
* This file is part of the PSL software.
* Copyright 2011-2015 University of Maryland
* Copyright 2013-2019 The Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.linqs.psl.reasoner.sgd.term;
import org.linqs.psl.model.atom.RandomVariableAtom;
import org.linqs.psl.reasoner.term.Hyperplane;
import org.linqs.psl.reasoner.term.ReasonerTerm;
import org.apache.commons.lang.mutable.MutableInt;
import java.nio.ByteBuffer;
import java.util.Map;
/**
* A term in the objective to be optimized by a SGDReasoner.
*/
public class SGDObjectiveTerm implements ReasonerTerm {
private boolean squared;
private boolean hinge;
private float weight;
private float constant;
private float learningRate;
private short size;
private float[] coefficients;
private RandomVariableAtom[] variables;
public SGDObjectiveTerm(boolean squared, boolean hinge,
Hyperplane<RandomVariableAtom> hyperplane,
float weight, float learningRate) {
this.squared = squared;
this.hinge = hinge;
this.weight = weight;
this.learningRate = learningRate;
size = (short)hyperplane.size();
coefficients = hyperplane.getCoefficients();
variables = hyperplane.getVariables();
constant = hyperplane.getConstant();
}
@Override
public int size() {
return size;
}
public float evaluate() {
float dot = dot();
if (hinge && dot <= 0.0f) {
return 0.0f;
}
if (squared && hinge) {
// weight * [max(0.0, coeffs^T * x - constant)]^2
return weight * (float)Math.pow(Math.max(0.0f, dot), 2);
} else if (squared && !hinge) {
// weight * [coeffs^T * x - constant]^2
return weight * (float)Math.pow(dot, 2);
} else if (!squared && hinge) {
// weight * max(0.0, coeffs^T * x - constant)
return weight * Math.max(0.0f, dot);
} else {
// weight * (coeffs^T * x - constant)
return weight * dot;
}
}
public void minimize(int iteration) {
for (int i = 0 ; i < size; i++) {
float dot = dot();
float gradient = computeGradient(iteration, i, dot);
gradient *= (learningRate / iteration);
variables[i].setValue(Math.max(0.0f, Math.min(1.0f, variables[i].getValue() - gradient)));
}
}
private float computeGradient(int iteration, int varId, float dot) {
if (squared && hinge) {
return weight * 2.0f * dot * coefficients[varId];
} else if (squared && !hinge) {
return weight * 2.0f * dot * coefficients[varId];
} else if (!squared && hinge) {
return weight * coefficients[varId];
} else {
return weight * coefficients[varId];
}
}
private float dot() {
float value = 0.0f;
for (int i = 0; i < size; i++) {
value += coefficients[i] * variables[i].getValue();
}
return value - constant;
}
/**
* The number of bytes that writeFixedValues() will need to represent this term.
* This is just all the member datum.
*/
public int fixedByteSize() {
int bitSize =
Byte.SIZE // squared
+ Byte.SIZE // hinge
+ Float.SIZE // weight
+ Float.SIZE // constant
+ Float.SIZE // learningRate
+ Short.SIZE // size
+ size * (Float.SIZE + Integer.SIZE); // coefficients + variables
return bitSize / 8;
}
/**
* Write a binary representation of the fixed values of this term to a buffer.
* Note that the variables are written using their Object hashcode.
*/
public void writeFixedValues(ByteBuffer fixedBuffer) {
fixedBuffer.put((byte)(squared ? 1 : 0));
fixedBuffer.put((byte)(hinge ? 1 : 0));
fixedBuffer.putFloat(weight);
fixedBuffer.putFloat(constant);
fixedBuffer.putFloat(learningRate);
fixedBuffer.putShort(size);
for (int i = 0; i < size; i++) {
fixedBuffer.putFloat(coefficients[i]);
fixedBuffer.putInt(System.identityHashCode(variables[i]));
}
}
/**
* Assume the term that will be next read from the buffers.
*/
public void read(ByteBuffer fixedBuffer, ByteBuffer volatileBuffer,
Map<MutableInt, RandomVariableAtom> rvaMap, MutableInt intBuffer) {
squared = (fixedBuffer.get() == 1);
hinge = (fixedBuffer.get() == 1);
weight = fixedBuffer.getFloat();
constant = fixedBuffer.getFloat();
learningRate = fixedBuffer.getFloat();
size = fixedBuffer.getShort();
// Make sure that there is enough room for all these variables.
if (coefficients.length < size) {
coefficients = new float[size];
variables = new RandomVariableAtom[size];
}
for (int i = 0; i < size; i++) {
coefficients[i] = fixedBuffer.getFloat();
intBuffer.setValue(fixedBuffer.getInt());
variables[i] = rvaMap.get(intBuffer);
}
}
@Override
public String toString() {
// weight * [max(coeffs^T * x - constant, 0.0)]^2
StringBuilder builder = new StringBuilder();
builder.append(weight);
builder.append(" * ");
if (hinge) {
builder.append(" * max(0.0, ");
}
for (int i = 0; i < size; i++) {
builder.append("(");
builder.append(coefficients[i]);
builder.append(" * ");
builder.append(variables[i]);
builder.append(")");
if (i != size - 1) {
builder.append(" + ");
}
}
builder.append(" - ");
builder.append(constant);
if (hinge) {
builder.append(")");
}
if (squared) {
builder.append("^2");
}
return builder.toString();
}
}
| Fixed an issue in SGD terms where a hinge was applied in the wrong place.
| psl-core/src/main/java/org/linqs/psl/reasoner/sgd/term/SGDObjectiveTerm.java | Fixed an issue in SGD terms where a hinge was applied in the wrong place. |
|
Java | apache-2.0 | af091694f16279694f403412f43255791638e0e2 | 0 | lampepfl/dotty,lampepfl/dotty,lampepfl/dotty,sjrd/dotty,dotty-staging/dotty,sjrd/dotty,dotty-staging/dotty,som-snytt/dotty,sjrd/dotty,som-snytt/dotty,dotty-staging/dotty,som-snytt/dotty,dotty-staging/dotty,sjrd/dotty,lampepfl/dotty,dotty-staging/dotty,som-snytt/dotty,lampepfl/dotty,sjrd/dotty,som-snytt/dotty | /* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt;
import java.util.Optional;
import xsbti.Position;
import xsbti.Severity;
import dotty.tools.*;
import dotty.tools.dotc.*;
import dotty.tools.dotc.interfaces.Diagnostic;
import dotty.tools.dotc.util.SourceFile;
import dotty.tools.dotc.util.SourcePosition;
import dotty.tools.dotc.reporting.*;
import dotty.tools.dotc.reporting.diagnostic.Message;
import dotty.tools.dotc.reporting.diagnostic.MessageContainer;
import dotty.tools.dotc.reporting.diagnostic.messages;
import dotty.tools.dotc.core.Contexts.*;
import static dotty.tools.dotc.reporting.diagnostic.MessageContainer.*;
final public class DelegatingReporter extends AbstractReporter {
private final xsbti.Reporter delegate;
private static final Position noPosition = new Position() {
public Optional<java.io.File> sourceFile() {
return Optional.empty();
}
public Optional<String> sourcePath() {
return Optional.empty();
}
public Optional<Integer> line() {
return Optional.empty();
}
public String lineContent() {
return "";
}
public Optional<Integer> offset() {
return Optional.empty();
}
public Optional<Integer> pointer() {
return Optional.empty();
}
public Optional<String> pointerSpace() {
return Optional.empty();
}
};
public DelegatingReporter(xsbti.Reporter delegate) {
super();
this.delegate = delegate;
}
@Override
public void printSummary(Context ctx) {
delegate.printSummary();
}
public void doReport(MessageContainer cont, Context ctx) {
Severity severity;
switch (cont.level()) {
case Diagnostic.ERROR:
severity = Severity.Error;
break;
case Diagnostic.WARNING:
severity = Severity.Warn;
break;
case Diagnostic.INFO:
severity = Severity.Info;
break;
default:
throw new IllegalArgumentException("Bad diagnostic level: " + cont.level());
}
Position position;
if (cont.pos().exists()) {
SourcePosition pos = cont.pos();
SourceFile src = pos.source();
position = new Position() {
public Optional<java.io.File> sourceFile() {
if (src.exists()) return Optional.empty();
else return Optional.of(src.file().file());
}
public Optional<String> sourcePath() {
if (src.exists()) return Optional.empty();
else return Optional.of(src.file().path());
}
public Optional<Integer> line() {
int line = pos.line();
if (line == -1) return Optional.empty();
else return Optional.of(line);
}
public String lineContent() {
String line = pos.lineContent();
if (line.endsWith("\r\n"))
return line.substring(0, line.length() - 2);
else if (line.endsWith("\n") || line.endsWith("\u000c"))
return line.substring(0, line.length() - 1);
else
return line;
}
public Optional<Integer> offset() {
return Optional.of(pos.point());
}
public Optional<Integer> pointer() {
if (!src.exists()) return Optional.empty();
else return Optional.of(pos.point() - src.startOfLine(pos.point()));
}
public Optional<String> pointerSpace() {
if (!src.exists()) return Optional.empty();
else {
String lineContent = this.lineContent();
int pointer = this.pointer().get();
StringBuilder result = new StringBuilder();
for (int i = 0; i < pointer; i++)
result.append(lineContent.charAt(i) == '\t' ? '\t' : ' ');
return Optional.of(result.toString());
}
}
};
} else {
position = noPosition;
}
Message message = cont.contained();
StringBuilder rendered = new StringBuilder();
rendered.append(messageAndPos(message, cont.pos(), diagnosticLevel(cont), ctx));
boolean shouldExplain = new MessageContainer.MessageContext(ctx).shouldExplain(cont);
if (shouldExplain && !message.explanation().isEmpty()) {
rendered.append(explanation(message, ctx));
}
delegate.log(new Problem(position, message.msg(), severity, rendered.toString()));
}
}
| sbt-bridge/src/xsbt/DelegatingReporter.java | /* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt;
import java.util.Optional;
import xsbti.Position;
import xsbti.Severity;
import dotty.tools.*;
import dotty.tools.dotc.*;
import dotty.tools.dotc.interfaces.Diagnostic;
import dotty.tools.dotc.util.SourceFile;
import dotty.tools.dotc.util.SourcePosition;
import dotty.tools.dotc.reporting.*;
import dotty.tools.dotc.reporting.diagnostic.Message;
import dotty.tools.dotc.reporting.diagnostic.MessageContainer;
import dotty.tools.dotc.reporting.diagnostic.messages;
import dotty.tools.dotc.core.Contexts.*;
import static dotty.tools.dotc.reporting.diagnostic.MessageContainer.*;
final public class DelegatingReporter extends AbstractReporter {
private final xsbti.Reporter delegate;
private static final Position noPosition = new Position() {
public Optional<java.io.File> sourceFile() {
return Optional.empty();
}
public Optional<String> sourcePath() {
return Optional.empty();
}
public Optional<Integer> line() {
return Optional.empty();
}
public String lineContent() {
return "";
}
public Optional<Integer> offset() {
return Optional.empty();
}
public Optional<Integer> pointer() {
return Optional.empty();
}
public Optional<String> pointerSpace() {
return Optional.empty();
}
};
public DelegatingReporter(xsbti.Reporter delegate) {
super();
this.delegate = delegate;
}
@Override
public void printSummary(Context ctx) {
delegate.printSummary();
}
public void doReport(MessageContainer cont, Context ctx) {
Severity severity;
switch (cont.level()) {
case Diagnostic.ERROR:
severity = Severity.Error;
break;
case Diagnostic.WARNING:
severity = Severity.Warn;
break;
case Diagnostic.INFO:
severity = Severity.Info;
break;
default:
throw new IllegalArgumentException("Bad diagnostic level: " + cont.level());
}
Position position;
if (cont.pos().exists()) {
SourcePosition pos = cont.pos();
SourceFile src = pos.source();
position = new Position() {
public Optional<java.io.File> sourceFile() {
return Optional.ofNullable(src.file().file());
}
public Optional<String> sourcePath() {
return Optional.ofNullable(src.file().path());
}
public Optional<Integer> line() {
int line = pos.line();
if (line == -1) return Optional.empty();
else return Optional.of(line);
}
public String lineContent() {
String line = pos.lineContent();
if (line.endsWith("\r\n"))
return line.substring(0, line.length() - 2);
else if (line.endsWith("\n") || line.endsWith("\u000c"))
return line.substring(0, line.length() - 1);
else
return line;
}
public Optional<Integer> offset() {
return Optional.of(pos.point());
}
public Optional<Integer> pointer() {
if (!src.exists()) return Optional.empty();
else return Optional.of(pos.point() - src.startOfLine(pos.point()));
}
public Optional<String> pointerSpace() {
if (!src.exists()) return Optional.empty();
else {
String lineContent = this.lineContent();
int pointer = this.pointer().get();
StringBuilder result = new StringBuilder();
for (int i = 0; i < pointer; i++)
result.append(lineContent.charAt(i) == '\t' ? '\t' : ' ');
return Optional.of(result.toString());
}
}
};
} else {
position = noPosition;
}
Message message = cont.contained();
StringBuilder rendered = new StringBuilder();
rendered.append(messageAndPos(message, cont.pos(), diagnosticLevel(cont), ctx));
boolean shouldExplain = new MessageContainer.MessageContext(ctx).shouldExplain(cont);
if (shouldExplain && !message.explanation().isEmpty()) {
rendered.append(explanation(message, ctx));
}
delegate.log(new Problem(position, message.msg(), severity, rendered.toString()));
}
}
| Explicit check for existence of file
| sbt-bridge/src/xsbt/DelegatingReporter.java | Explicit check for existence of file |
|
Java | apache-2.0 | cf9c1539f2fce03dd9fb4ab4ef63d2b59dd792a2 | 0 | BruceZu/KeepTry,BruceZu/KeepTry,BruceZu/sawdust,BruceZu/sawdust,BruceZu/KeepTry,BruceZu/KeepTry,BruceZu/sawdust,BruceZu/sawdust | // Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WIEHOUE WARRANEIES OR CONDIEIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package charter3;
//TODO: support multi threads access concurrently.
public class DoublyLinkedList2<E> implements MyLinkedList {
private class Node<E> {
private E content;
private Node<E> prev;
private Node<E> next;
private Node(E content, Node<E> prev, Node<E> next) {
this.content = content;
this.prev = prev;
this.next = next;
}
private Node(E content) {
this.content = content;
}
private Node() {
}
}
private final Node<E> endSentinel = new Node<E>();
private final Node<E> headSentinel = new Node<E>();
private int sizeOfList;
private int indexOfEndNode() {
if (isEmpty()) {
throw new IndexOutOfBoundsException("This list is empty");
}
return sizeOfList - 1;
}
private int checkPositionIndex(int positionIndex) {
if (isEmpty()) {
throw new IndexOutOfBoundsException("This list is empty");
}
if (positionIndex < 0 || indexOfEndNode() < positionIndex) {
throw new IndexOutOfBoundsException("Index is wrong ");
}
return positionIndex;
}
private Node<E> getNodeOf(int index) {
checkPositionIndex(index);
Node<E> current;
if (index < size() / 2) {
int i = 0;
current = headSentinel.next;
while (i != index) {
current = current.next;
i++;
}
return current;
}
int i = indexOfEndNode();
current = endSentinel.prev;
while (i != index) {
current = current.prev;
i--;
}
return current;
}
private void addBetween(E newContent, Node p, Node n) {
Node<E> it = new Node<E>(newContent);
p.next = it;
it.next = n;
n.prev = it;
it.prev = p;
sizeOfList++;
}
private void deleteBetween(Node p, Node n) {
p.next = n;
n.prev = p;
sizeOfList--;
}
private E delete(int itsIndex, Node<E> it) {
checkPositionIndex(itsIndex);
deleteBetween(it.prev, it.next);
return it.content;
}
public DoublyLinkedList2() {
headSentinel.next = endSentinel;
endSentinel.prev = headSentinel;
}
public boolean isEmpty() {
return sizeOfList == 0;
}
public int size() {
return sizeOfList;
}
public boolean hasOnlyOneElement() {
return sizeOfList == 1;
}
public void add(Object newContent) {
addBetween((E) newContent, headSentinel, headSentinel.next);
}
public void appendToTheEnd(Object newContent) {
addBetween((E) newContent, endSentinel.prev, endSentinel);
}
public void addBefore(Object newContent, int index) {
checkPositionIndex(index);
Node<E> i = getNodeOf(index);
addBetween((E) newContent, i.prev, i);
}
public void addAfter(Object newContent, int index) {
checkPositionIndex(index);
Node<E> i = getNodeOf(index);
addBetween((E) newContent, i, i.next);
}
public E deleteHead() {
return delete(0, headSentinel.next);
}
public E deleteEnd() {
return delete(indexOfEndNode(), endSentinel.prev);
}
public E delete(int index) {
Node<E> it = getNodeOf(index);
return delete(index, it);
}
public E update(int index, Object newContent) {
Node<E> n = getNodeOf(index);
E re = n.content;
n.content = (E) newContent;
return re;
}
public E updateHead(Object newContent) {
E re = headSentinel.next.content;
headSentinel.next.content = (E) newContent;
return re;
}
public E updateEnd(Object newContent) {
E re = endSentinel.prev.content;
endSentinel.prev.content = (E) newContent;
return re;
}
public E getHead() {
if (isEmpty()) {
return null;
}
return headSentinel.next.content;
}
public E getEnd() {
if (isEmpty()) {
return null;
}
return endSentinel.prev.content;
}
public E get(int index) {
checkPositionIndex(index);
return getNodeOf(index).content;
}
@Override
public void clean() {
if (size() == 0) {
return;
}
sizeOfList = 0;
endSentinel.next = endSentinel;
endSentinel.prev = headSentinel;
}
}
| DataStructuresAndAlgorithmsInJava/src/main/java/charter3/DoublyLinkedList2.java | // Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WIEHOUE WARRANEIES OR CONDIEIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package charter3;
//TODO: support multi threads access concurrently.
public class DoublyLinkedList2<E> implements MyLinkedList {
private class Node<E> {
private E content;
private Node<E> prev;
private Node<E> next;
private Node(E content, Node<E> prev, Node<E> next) {
this.content = content;
this.prev = prev;
this.next = next;
}
private Node(E content) {
this.content = content;
}
private Node() {
}
}
private final Node<E> endSentinel = new Node<E>();
private final Node<E> headSentinel = new Node<E>();
private int sizeOfList;
private int indexOfEndNode() {
if (isEmpty()) {
throw new IndexOutOfBoundsException("This list is empty");
}
return sizeOfList - 1;
}
private int checkPositionIndex(int positionIndex) {
if (isEmpty()) {
throw new IndexOutOfBoundsException("This list is empty");
}
if (positionIndex < 0 || indexOfEndNode() < positionIndex) {
throw new IndexOutOfBoundsException("Index is wrong ");
}
return positionIndex;
}
private Node<E> getNodeOf(int index) {
checkPositionIndex(index);
Node<E> current;
if (index < size() / 2) {
int i = 0;
current = headSentinel.next;
while (i != index) {
current = current.next;
i++;
}
return current;
}
int i = indexOfEndNode();
current = endSentinel.prev;
while (i != index) {
current = current.prev;
i--;
}
return current;
}
private void addBetween(Node it, Node p, Node n) {
p.next = it;
it.next = n;
n.prev = it;
it.prev = p;
sizeOfList++;
}
private void deleteBetween(Node p, Node n) {
p.next = n;
n.prev = p;
sizeOfList--;
}
public DoublyLinkedList2() {
headSentinel.next = endSentinel;
endSentinel.prev = headSentinel;
}
public boolean isEmpty() {
return sizeOfList == 0;
}
public int size() {
return sizeOfList;
}
public boolean hasOnlyOneElement() {
return sizeOfList == 1;
}
public void add(Object newContent) {
Node<E> newNode = new Node<E>((E) newContent);
addBetween(newNode, headSentinel, headSentinel.next);
}
public void appendToTheEnd(Object newContent) {
Node<E> newNode = new Node<E>((E) newContent);
addBetween(newNode, endSentinel.prev, endSentinel);
}
public void addBefore(Object newContent, int index) {
checkPositionIndex(index);
Node<E> i = getNodeOf(index);
Node<E> newNode = new Node<E>((E) newContent);
addBetween(newNode, i.prev, i);
}
public void addAfter(Object newContent, int index) {
checkPositionIndex(index);
Node<E> i = getNodeOf(index);
Node<E> newNode = new Node<E>((E) newContent);
addBetween(newNode, i, i.next);
}
public E deleteHead() {
checkPositionIndex(0);
E re = headSentinel.next.content;
deleteBetween(headSentinel, headSentinel.next.next);
return re;
}
public E deleteEnd() {
checkPositionIndex(indexOfEndNode());
E re = endSentinel.prev.content;
deleteBetween(endSentinel.prev.prev, endSentinel);
return re;
}
public E delete(int index) {
checkPositionIndex(index);
Node<E> it = getNodeOf(index);
deleteBetween(it.prev, it.next);
return it.content;
}
public E update(int index, Object newContent) {
Node<E> n = getNodeOf(index);
E re = n.content;
n.content = (E) newContent;
return re;
}
public E updateHead(Object newContent) {
E re = headSentinel.next.content;
headSentinel.next.content = (E) newContent;
return re;
}
public E updateEnd(Object newContent) {
E re = endSentinel.prev.content;
endSentinel.prev.content = (E) newContent;
return re;
}
public E getHead() {
if (isEmpty()) {
return null;
}
return headSentinel.next.content;
}
public E getEnd() {
if (isEmpty()) {
return null;
}
return endSentinel.prev.content;
}
public E get(int index) {
checkPositionIndex(index);
return getNodeOf(index).content;
}
@Override
public void clean() {
if (size() == 0) {
return;
}
sizeOfList = 0;
endSentinel.next = endSentinel;
endSentinel.prev = headSentinel;
}
}
| Update addBetween() and delete() in DoublyLinkedList2
| DataStructuresAndAlgorithmsInJava/src/main/java/charter3/DoublyLinkedList2.java | Update addBetween() and delete() in DoublyLinkedList2 |
|
Java | apache-2.0 | 1bb4d64da576a115bb0389f9438fa03bffce1809 | 0 | ChrisLMerrill/muse,ChrisLMerrill/muse | package org.museautomation.selenium.values;
import org.museautomation.builtins.value.*;
import org.museautomation.core.*;
import org.museautomation.core.resource.*;
import org.museautomation.core.values.*;
import org.museautomation.core.values.descriptor.*;
import org.museautomation.selenium.*;
import org.openqa.selenium.*;
import org.openqa.selenium.support.ui.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
@MuseTypeId("option-count")
@MuseValueSourceName("Option Count")
@MuseValueSourceTypeGroup("Selenium.Element.Value")
@MuseValueSourceShortDescription("Returns the number of options in a select element")
@MuseValueSourceLongDescription("Resolves the supplied element as a select control and then returns the number of options.")
@MuseStringExpressionSupportImplementation(OptionCountSource.StringExpressionSupport.class)
@MuseSubsourceDescriptor(displayName = "Element", description = "The element to get value from", name = "element", type = SubsourceDescriptor.Type.Single)
@SuppressWarnings("unused") // instantiated via reflection
public class OptionCountSource extends BaseElementValueSource
{
@SuppressWarnings("unused") // used via reflection
public OptionCountSource(ValueSourceConfiguration config, MuseProject project) throws MuseInstantiationException
{
super(config, project);
}
@Override
public Integer resolveValue(MuseExecutionContext context) throws ValueSourceResolutionError
{
WebElement element = resolveElementSource(context, true);
try
{
Select select = new Select(element);
return select.getOptions().size();
}
catch (UnexpectedTagNameException e)
{
throw new ValueSourceResolutionError("The element is not a <select> tag.");
}
}
@Override
public String getDescription()
{
return String.format("elementAttribute(%s,%s)", getElementSource().getDescription(), _attribute_name_source.getDescription());
}
private MuseValueSource _attribute_name_source;
public final static String TYPE_ID = OptionCountSource.class.getAnnotation(MuseTypeId.class).value();
public final static String ELEMENT_PARAM = "element";
public static class StringExpressionSupport extends BaseArgumentedValueSourceStringSupport
{
@Override
public String getName()
{
return "optionCount";
}
@Override
protected int getNumberArguments()
{
return 1;
}
@Override
protected String getTypeId()
{
return OptionCountSource.TYPE_ID;
}
@Override
protected boolean storeSingleArgumentAsSingleSubsource()
{
return true;
}
}
}
| selenium/src/main/java/org/museautomation/selenium/values/OptionCountSource.java | package org.museautomation.selenium.values;
import org.museautomation.builtins.value.*;
import org.museautomation.core.*;
import org.museautomation.core.events.*;
import org.museautomation.core.resource.*;
import org.museautomation.core.step.*;
import org.museautomation.core.values.*;
import org.museautomation.core.values.descriptor.*;
import org.museautomation.selenium.*;
import org.openqa.selenium.*;
import org.openqa.selenium.support.ui.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
@MuseTypeId("option-count")
@MuseValueSourceName("Option Count")
@MuseValueSourceTypeGroup("Selenium.Element.Value")
@MuseValueSourceShortDescription("Returns the number of options in a select element")
@MuseValueSourceLongDescription("Resolves the supplied element as a select control and then returns the number of options.")
@MuseStringExpressionSupportImplementation(OptionCountSource.StringExpressionSupport.class)
@MuseSubsourceDescriptor(displayName = "Element", description = "The element to get value from", name = "element", type = SubsourceDescriptor.Type.Single)
@SuppressWarnings("unused") // instantiated via reflection
public class OptionCountSource extends BaseElementValueSource
{
@SuppressWarnings("unused") // used via reflection
public OptionCountSource(ValueSourceConfiguration config, MuseProject project) throws MuseInstantiationException
{
super(config, project);
}
@Override
public Integer resolveValue(MuseExecutionContext context) throws ValueSourceResolutionError
{
WebElement element = resolveElementSource(context, true);
try
{
Select select = new Select(element);
return select.getOptions().size();
}
catch (UnexpectedTagNameException e)
{
throw new ValueSourceResolutionError("The element is not a <select> tag.");
}
}
@Override
public String getDescription()
{
return String.format("elementAttribute(%s,%s)", getElementSource().getDescription(), _attribute_name_source.getDescription());
}
private MuseValueSource _attribute_name_source;
public final static String TYPE_ID = OptionCountSource.class.getAnnotation(MuseTypeId.class).value();
public final static String ELEMENT_PARAM = "element";
public static class StringExpressionSupport extends BaseArgumentedValueSourceStringSupport
{
@Override
public String getName()
{
return "optionCount";
}
@Override
protected int getNumberArguments()
{
return 1;
}
@Override
protected String getTypeId()
{
return OptionCountSource.TYPE_ID;
}
@Override
protected boolean storeSingleArgumentAsSingleSubsource()
{
return true;
}
}
}
| [selenium] MAINT: code cleanup
| selenium/src/main/java/org/museautomation/selenium/values/OptionCountSource.java | [selenium] MAINT: code cleanup |
|
Java | apache-2.0 | 29c59221584a5dad5ef434387989f06a1a4d5bcf | 0 | zhenlineo/java-driver,neo4j/neo4j-java-driver,boggle/neo4j-java-driver,neo4j/neo4j-java-driver,neo4j/neo4j-java-driver | /**
* Copyright (c) 2002-2015 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.driver.internal.connector.socket;
import java.io.IOException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.SocketTimeoutException;
import java.net.StandardSocketOptions;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.util.List;
import org.neo4j.driver.exceptions.ClientException;
import org.neo4j.driver.internal.messaging.Message;
import org.neo4j.driver.internal.messaging.MessageFormat.Reader;
import org.neo4j.driver.internal.messaging.MessageFormat.Writer;
import static java.lang.Integer.getInteger;
public class SocketClient
{
/**
* Timeout for network read operations. By default, this is disabled (the database may take as long as it likes to
* reply). However, on networks that suffer from frequent net-splits, there is a serious issue where a socket may
* erroneously block for very long periods (up to 10 minutes). If your application suffers from this issue, you
* should enable the network timeout, by setting it to some value significantly higher than your slowest query.
*/
private static int defaultNetworkTimeout = getInteger( "neo4j.networkTimeoutMs", 0 );
private final String host;
private final int port;
private final int networkTimeout;
private SocketProtocol protocol;
private Reader reader;
private Writer writer;
private SocketChannel channel;
public SocketClient( String host, int port, int networkTimeout )
{
this.host = host;
this.port = port;
this.networkTimeout = networkTimeout;
}
public SocketClient( String host, int port )
{
this( host, port, defaultNetworkTimeout );
}
public void start()
{
try
{
channel = SocketChannel.open();
channel.setOption( StandardSocketOptions.SO_REUSEADDR, true );
channel.setOption( StandardSocketOptions.SO_KEEPALIVE, true );
channel.connect( new InetSocketAddress( host, port ) );
protocol = negotiateProtocol();
reader = protocol.reader();
writer = protocol.writer();
}
catch ( ConnectException e )
{
throw new ClientException( String.format( "Unable to connect to '%s' on port %s, " +
"ensure the database is running and that there is a working " +
"network " +
"connection to it.", host, port ) );
}
catch ( SocketTimeoutException e )
{
throw new ClientException( String.format( "Unable to connect to '%s' on port %s, " +
"database took longer than network timeout (%dms) to reply.",
host, port, networkTimeout ) );
}
catch ( IOException e )
{
throw new ClientException( "Unable to process request: " + e.getMessage(), e );
}
}
public void send( List<Message> pendingMessages, SocketResponseHandler handler ) throws IOException
{
for ( Message message : pendingMessages )
{
writer.write( message );
}
writer.flush();
// Wait until all pending requests have been replied to
while ( handler.receivedResponses() < pendingMessages.size() )
{
reader.read( handler );
}
}
public void stop()
{
try
{
channel.close();
}
catch ( IOException e )
{
throw new ClientException( "Unable to close socket connection properly." + e.getMessage(), e );
}
}
private SocketProtocol negotiateProtocol() throws IOException
{
// Propose protocol versions
ByteBuffer buf = ByteBuffer.wrap( new byte[]{
0, 0, 0, 1,
0, 0, 0, 0,
0, 0, 0, 0,
0, 0, 0, 0} );
while ( buf.remaining() > 0 )
{
channel.write( buf );
}
// Read back the servers choice
buf.clear();
buf.limit( 4 );
while ( buf.remaining() > 0 )
{
channel.read( buf );
}
// Choose protocol, or fail
buf.flip();
final int proposal = buf.getInt();
switch ( proposal )
{
case 1: return new SocketProtocolV1( channel );
case 0: throw new ClientException( "The server does not support any of the protocol versions supported by " +
"this driver. Ensure that you are using driver and server versions that " +
"are compatible with one another." );
default: throw new ClientException( "Protocol error, server suggested unexpected protocol version: " +
proposal );
}
}
@Override
public String toString()
{
int version = protocol == null ? -1 : protocol.version();
return "SocketClient[protocolVersion=" + version + "]";
}
}
| driver/src/main/java/org/neo4j/driver/internal/connector/socket/SocketClient.java | /**
* Copyright (c) 2002-2015 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.driver.internal.connector.socket;
import java.io.IOException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.SocketTimeoutException;
import java.net.StandardSocketOptions;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.util.List;
import org.neo4j.driver.exceptions.ClientException;
import org.neo4j.driver.internal.messaging.Message;
import org.neo4j.driver.internal.messaging.MessageFormat.Reader;
import org.neo4j.driver.internal.messaging.MessageFormat.Writer;
import static java.lang.Integer.getInteger;
public class SocketClient
{
/**
* Timeout for network read operations. By default, this is disabled (the database may take as long as it likes to
* reply). However, on networks that suffer from frequent net-splits, there is a serious issue where a socket may
* erroneously block for very long periods (up to 10 minutes). If your application suffers from this issue, you
* should enable the network timeout, by setting it to some value significantly higher than your slowest query.
*/
private static int defaultNetworkTimeout = getInteger( "neo4j.networkTimeoutMs", 0 );
private final String host;
private final int port;
private final int networkTimeout;
private SocketProtocol protocol;
private Reader reader;
private Writer writer;
private SocketChannel channel;
public SocketClient( String host, int port, int networkTimeout )
{
this.host = host;
this.port = port;
this.networkTimeout = networkTimeout;
}
public SocketClient( String host, int port )
{
this( host, port, defaultNetworkTimeout );
}
public void start()
{
try
{
channel = SocketChannel.open();
channel.setOption( StandardSocketOptions.SO_REUSEADDR, true );
channel.setOption( StandardSocketOptions.SO_KEEPALIVE, true );
channel.connect( new InetSocketAddress( host, port ) );
protocol = negotiateProtocol();
reader = protocol.reader();
writer = protocol.writer();
}
catch ( ConnectException e )
{
throw new ClientException( String.format( "Unable to connect to '%s' on port %s, " +
"ensure the database is running and that there is a working " +
"network " +
"connection to it.", host, port ) );
}
catch ( SocketTimeoutException e )
{
throw new ClientException( String.format( "Unable to connect to '%s' on port %s, " +
"database took longer than network timeout (%dms) to reply.",
host, port, networkTimeout ) );
}
catch ( IOException e )
{
throw new ClientException( "Unable to process request: " + e.getMessage(), e );
}
}
public void send( List<Message> pendingMessages, SocketResponseHandler handler ) throws IOException
{
for ( Message message : pendingMessages )
{
writer.write( message );
}
writer.flush();
// Wait until all pending requests have been replied to
while ( handler.receivedResponses() < pendingMessages.size() )
{
reader.read( handler );
}
}
public void stop()
{
try
{
channel.close();
}
catch ( IOException e )
{
throw new ClientException( "Unable to close socket connection properly." + e.getMessage(), e );
}
}
private SocketProtocol negotiateProtocol() throws IOException
{
// Propose protocol versions
ByteBuffer buf = ByteBuffer.wrap( new byte[]{
0, 0, 0, 1,
0, 0, 0, 0,
0, 0, 0, 0,
0, 0, 0, 0} );
while(buf.remaining() > 0 )
{
channel.write( buf );
}
// Read back the servers choice
buf.clear();
buf.limit( 2 );
while(buf.remaining() > 0)
{
channel.read( buf );
}
// Choose protocol, or fail
buf.flip();
final int proposal = buf.getInt();
switch ( proposal )
{
case 1: return new SocketProtocolV1( channel );
case 0: throw new ClientException( "The server does not support any of the protocol versions supported by " +
"this driver. Ensure that you are using driver and server versions that " +
"are compatible with one another." );
default: throw new ClientException( "Protocol error, server suggested unexpected protocol version: " +
proposal );
}
}
@Override
public String toString()
{
int version = protocol == null ? -1 : protocol.version();
return "SocketClient[protocolVersion=" + version + "]";
}
}
| Fix server start failure caused by version negotiation
| driver/src/main/java/org/neo4j/driver/internal/connector/socket/SocketClient.java | Fix server start failure caused by version negotiation |
|
Java | apache-2.0 | 12d5b3b619ae0106d945a97e9476f856540556a9 | 0 | Xylus/pinpoint,naver/pinpoint,dawidmalina/pinpoint,KRDeNaT/pinpoint,lioolli/pinpoint,carpedm20/pinpoint,nstopkimsk/pinpoint,jaehong-kim/pinpoint,KRDeNaT/pinpoint,minwoo-jung/pinpoint,cit-lab/pinpoint,87439247/pinpoint,breadval/pinpoint,coupang/pinpoint,Xylus/pinpoint,eBaoTech/pinpoint,majinkai/pinpoint,cijung/pinpoint,shuvigoss/pinpoint,87439247/pinpoint,nstopkimsk/pinpoint,emeroad/pinpoint,KimTaehee/pinpoint,barneykim/pinpoint,Allive1/pinpoint,philipz/pinpoint,InfomediaLtd/pinpoint,jiaqifeng/pinpoint,InfomediaLtd/pinpoint,tsyma/pinpoint,cit-lab/pinpoint,minwoo-jung/pinpoint,Xylus/pinpoint,lioolli/pinpoint,chenguoxi1985/pinpoint,breadval/pinpoint,krishnakanthpps/pinpoint,sjmittal/pinpoint,87439247/pinpoint,andyspan/pinpoint,minwoo-jung/pinpoint,philipz/pinpoint,jiaqifeng/pinpoint,sjmittal/pinpoint,krishnakanthpps/pinpoint,masonmei/pinpoint,87439247/pinpoint,cijung/pinpoint,suraj-raturi/pinpoint,Skkeem/pinpoint,cit-lab/pinpoint,barneykim/pinpoint,wziyong/pinpoint,jaehong-kim/pinpoint,masonmei/pinpoint,tsyma/pinpoint,dawidmalina/pinpoint,PerfGeeks/pinpoint,naver/pinpoint,philipz/pinpoint,shuvigoss/pinpoint,masonmei/pinpoint,citywander/pinpoint,andyspan/pinpoint,KRDeNaT/pinpoint,eBaoTech/pinpoint,sjmittal/pinpoint,KimTaehee/pinpoint,denzelsN/pinpoint,Allive1/pinpoint,breadval/pinpoint,citywander/pinpoint,krishnakanthpps/pinpoint,naver/pinpoint,lioolli/pinpoint,Skkeem/pinpoint,jiaqifeng/pinpoint,KimTaehee/pinpoint,coupang/pinpoint,barneykim/pinpoint,chenguoxi1985/pinpoint,suraj-raturi/pinpoint,suraj-raturi/pinpoint,krishnakanthpps/pinpoint,sbcoba/pinpoint,philipz/pinpoint,chenguoxi1985/pinpoint,jaehong-kim/pinpoint,hcapitaine/pinpoint,krishnakanthpps/pinpoint,gspandy/pinpoint,dawidmalina/pinpoint,dawidmalina/pinpoint,Xylus/pinpoint,andyspan/pinpoint,wziyong/pinpoint,majinkai/pinpoint,lioolli/pinpoint,nstopkimsk/pinpoint,barneykim/pinpoint,shuvigoss/pinpoint,eBaoTech/pinpoint,denzelsN/pinpoint,tsyma/pinpoint,sbcoba/pinpoint,koo-taejin/pinpoint,barneykim/pinpoint,suraj-raturi/pinpoint,hcapitaine/pinpoint,naver/pinpoint,carpedm20/pinpoint,masonmei/pinpoint,koo-taejin/pinpoint,Allive1/pinpoint,gspandy/pinpoint,InfomediaLtd/pinpoint,KimTaehee/pinpoint,koo-taejin/pinpoint,majinkai/pinpoint,KimTaehee/pinpoint,dawidmalina/pinpoint,cit-lab/pinpoint,wziyong/pinpoint,eBaoTech/pinpoint,majinkai/pinpoint,minwoo-jung/pinpoint,hcapitaine/pinpoint,Xylus/pinpoint,breadval/pinpoint,PerfGeeks/pinpoint,cit-lab/pinpoint,Allive1/pinpoint,denzelsN/pinpoint,cijung/pinpoint,jiaqifeng/pinpoint,chenguoxi1985/pinpoint,koo-taejin/pinpoint,hcapitaine/pinpoint,shuvigoss/pinpoint,Skkeem/pinpoint,eBaoTech/pinpoint,KRDeNaT/pinpoint,gspandy/pinpoint,87439247/pinpoint,emeroad/pinpoint,naver/pinpoint,denzelsN/pinpoint,coupang/pinpoint,Skkeem/pinpoint,carpedm20/pinpoint,PerfGeeks/pinpoint,lioolli/pinpoint,sjmittal/pinpoint,masonmei/pinpoint,wziyong/pinpoint,krishnakanthpps/pinpoint,suraj-raturi/pinpoint,cijung/pinpoint,sjmittal/pinpoint,citywander/pinpoint,jiaqifeng/pinpoint,wziyong/pinpoint,Xylus/pinpoint,coupang/pinpoint,sbcoba/pinpoint,tsyma/pinpoint,KRDeNaT/pinpoint,minwoo-jung/pinpoint,coupang/pinpoint,philipz/pinpoint,hcapitaine/pinpoint,denzelsN/pinpoint,breadval/pinpoint,nstopkimsk/pinpoint,emeroad/pinpoint,carpedm20/pinpoint,denzelsN/pinpoint,PerfGeeks/pinpoint,chenguoxi1985/pinpoint,Skkeem/pinpoint,cijung/pinpoint,barneykim/pinpoint,gspandy/pinpoint,PerfGeeks/pinpoint,tsyma/pinpoint,Allive1/pinpoint,nstopkimsk/pinpoint,nstopkimsk/pinpoint,denzelsN/pinpoint,jiaqifeng/pinpoint,shuvigoss/pinpoint,coupang/pinpoint,gspandy/pinpoint,hcapitaine/pinpoint,tsyma/pinpoint,KimTaehee/pinpoint,jaehong-kim/pinpoint,shuvigoss/pinpoint,emeroad/pinpoint,cijung/pinpoint,jaehong-kim/pinpoint,Skkeem/pinpoint,carpedm20/pinpoint,masonmei/pinpoint,andyspan/pinpoint,jaehong-kim/pinpoint,sbcoba/pinpoint,InfomediaLtd/pinpoint,KRDeNaT/pinpoint,Allive1/pinpoint,minwoo-jung/pinpoint,breadval/pinpoint,majinkai/pinpoint,citywander/pinpoint,PerfGeeks/pinpoint,koo-taejin/pinpoint,eBaoTech/pinpoint,87439247/pinpoint,emeroad/pinpoint,citywander/pinpoint,emeroad/pinpoint,barneykim/pinpoint,andyspan/pinpoint,InfomediaLtd/pinpoint,koo-taejin/pinpoint,sbcoba/pinpoint,gspandy/pinpoint,philipz/pinpoint,sbcoba/pinpoint,wziyong/pinpoint,InfomediaLtd/pinpoint,citywander/pinpoint,dawidmalina/pinpoint,andyspan/pinpoint,lioolli/pinpoint,chenguoxi1985/pinpoint,majinkai/pinpoint,Xylus/pinpoint,cit-lab/pinpoint,sjmittal/pinpoint,suraj-raturi/pinpoint | package com.nhn.pinpoint.collector.dao.hbase;
import static com.nhn.pinpoint.common.hbase.HBaseTables.HOST_APPLICATION_MAP;
import static com.nhn.pinpoint.common.hbase.HBaseTables.HOST_APPLICATION_MAP_CF_MAP;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import com.nhn.pinpoint.collector.dao.HostApplicationMapDao;
import com.nhn.pinpoint.collector.util.AcceptedTimeService;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.nhn.pinpoint.common.hbase.HBaseTables;
import com.nhn.pinpoint.common.hbase.HbaseOperations2;
import com.nhn.pinpoint.common.util.BytesUtils;
import com.nhn.pinpoint.common.util.TimeSlot;
import com.nhn.pinpoint.common.util.TimeUtils;
/**
*
* @author netspider
*/
public class HbaseHostApplicationMapDao implements HostApplicationMapDao {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private HbaseOperations2 hbaseTemplate;
@Autowired
private AcceptedTimeService acceptedTimeService;
private final ConcurrentMap<String, Object> cache = new ConcurrentHashMap<String, Object>(1024);
private long lastUpdated = System.currentTimeMillis();
@Override
public void insert(String host, String applicationName, short serviceType) {
String cacheKey = host + applicationName + serviceType;
// 매 번 넣을 필요 없음.
if (cache.containsKey(cacheKey)) {
logger.debug("Skip insert host-application map. host={}, applicationName={}, serviceType={}", new Object[] { host, applicationName, serviceType });
return;
}
logger.debug("Insert host-application map. host={}, applicationName={}, serviceType={}", new Object[] { host, applicationName, serviceType });
byte[] rowKey = Bytes.toBytes(TimeUtils.reverseCurrentTimeMillis(TimeSlot.getStatisticsRowSlot(acceptedTimeService.getAcceptedTime())));
byte[] columnName = Bytes.toBytes(host);
byte[] applicationNameBytes = Bytes.toBytes(applicationName);
byte[] offsetBytes = new byte[HBaseTables.APPLICATION_NAME_MAX_LEN - applicationNameBytes.length];
byte[] serviceTypeBytes = Bytes.toBytes(serviceType);
byte[] value = BytesUtils.concat(applicationNameBytes, offsetBytes, serviceTypeBytes);
hbaseTemplate.put(HOST_APPLICATION_MAP, rowKey, HOST_APPLICATION_MAP_CF_MAP, columnName, value);
if (System.currentTimeMillis() - lastUpdated > 5000) {
cache.clear();
} else {
cache.put(cacheKey, 1);
}
lastUpdated = System.currentTimeMillis();
}
}
| src/main/java/com/nhn/pinpoint/collector/dao/hbase/HbaseHostApplicationMapDao.java | package com.nhn.pinpoint.collector.dao.hbase;
import static com.nhn.pinpoint.common.hbase.HBaseTables.HOST_APPLICATION_MAP;
import static com.nhn.pinpoint.common.hbase.HBaseTables.HOST_APPLICATION_MAP_CF_MAP;
import com.nhn.pinpoint.collector.dao.HostApplicationMapDao;
import com.nhn.pinpoint.collector.util.AcceptedTimeService;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.nhn.pinpoint.common.hbase.HBaseTables;
import com.nhn.pinpoint.common.hbase.HbaseOperations2;
import com.nhn.pinpoint.common.util.BytesUtils;
import com.nhn.pinpoint.common.util.TimeSlot;
import com.nhn.pinpoint.common.util.TimeUtils;
/**
*
* @author netspider
*/
public class HbaseHostApplicationMapDao implements HostApplicationMapDao {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private HbaseOperations2 hbaseTemplate;
@Autowired
private AcceptedTimeService acceptedTimeService;
@Override
public void insert(String host, String applicationName, short serviceType) {
logger.debug("Insert host-application map. host={}, applicationName={}, serviceType={}", new Object[] { host, applicationName, serviceType });
byte[] rowKey = Bytes.toBytes(TimeUtils.reverseCurrentTimeMillis(TimeSlot.getStatisticsRowSlot(acceptedTimeService.getAcceptedTime())));
byte[] columnName = Bytes.toBytes(host);
byte[] applicationNameBytes = Bytes.toBytes(applicationName);
byte[] offsetBytes = new byte[HBaseTables.APPLICATION_NAME_MAX_LEN - applicationNameBytes.length];
byte[] serviceTypeBytes = Bytes.toBytes(serviceType);
byte[] value = BytesUtils.concat(applicationNameBytes, offsetBytes, serviceTypeBytes);
hbaseTemplate.put(HOST_APPLICATION_MAP, rowKey, HOST_APPLICATION_MAP_CF_MAP, columnName, value);
}
}
| [유치수] [NOBTS] host application map을 매번 저장하지 않도록 함.
git-svn-id: fcdbc22eeb3942bcf1f87a5e22a32039e94b88c4@2116 84d0f5b1-2673-498c-a247-62c4ff18d310
| src/main/java/com/nhn/pinpoint/collector/dao/hbase/HbaseHostApplicationMapDao.java | [유치수] [NOBTS] host application map을 매번 저장하지 않도록 함. |
|
Java | apache-2.0 | 5116933d9418a93941ace8abf4f728a1ca7b00c2 | 0 | vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.model.admin.monitoring;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static com.yahoo.vespa.model.admin.monitoring.DefaultVespaMetrics.defaultVespaMetricSet;
import static java.util.Collections.singleton;
/**
* Encapsulates vespa service metrics.
*
* @author gjoranv
*/
public class VespaMetricSet {
public static final MetricSet vespaMetricSet = new MetricSet("vespa",
getVespaMetrics(),
singleton(defaultVespaMetricSet));
private static Set<Metric> getVespaMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.addAll(getSearchNodeMetrics());
metrics.addAll(getStorageMetrics());
metrics.addAll(getDocprocMetrics());
metrics.addAll(getClusterControllerMetrics());
metrics.addAll(getQrserverMetrics());
metrics.addAll(getContainerMetrics());
metrics.addAll(getConfigServerMetrics());
metrics.addAll(getSentinelMetrics());
metrics.addAll(getOtherMetrics());
return Collections.unmodifiableSet(metrics);
}
private static Set<Metric> getSentinelMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("sentinel.restarts.count"));
metrics.add(new Metric("sentinel.totalRestarts.last"));
metrics.add(new Metric("sentinel.uptime.last"));
metrics.add(new Metric("sentinel.running.count"));
metrics.add(new Metric("sentinel.running.last"));
return metrics;
}
private static Set<Metric> getOtherMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("slobrok.heartbeats.failed.count"));
metrics.add(new Metric("logd.processed.lines.count"));
metrics.add(new Metric("worker.connections.max"));
// Java (JRT) TLS metrics
metrics.add(new Metric("jrt.transport.tls-certificate-verification-failures"));
metrics.add(new Metric("jrt.transport.peer-authorization-failures"));
metrics.add(new Metric("jrt.transport.server.tls-connections-established"));
metrics.add(new Metric("jrt.transport.client.tls-connections-established"));
metrics.add(new Metric("jrt.transport.server.unencrypted-connections-established"));
metrics.add(new Metric("jrt.transport.client.unencrypted-connections-established"));
// C++ TLS metrics
metrics.add(new Metric("vds.server.network.tls-handshakes-failed"));
metrics.add(new Metric("vds.server.network.peer-authorization-failures"));
metrics.add(new Metric("vds.server.network.client.tls-connections-established"));
metrics.add(new Metric("vds.server.network.server.tls-connections-established"));
metrics.add(new Metric("vds.server.network.client.insecure-connections-established"));
metrics.add(new Metric("vds.server.network.server.insecure-connections-established"));
metrics.add(new Metric("vds.server.network.tls-connections-broken"));
metrics.add(new Metric("vds.server.network.failed-tls-config-reloads"));
// C++ Fnet metrics
metrics.add(new Metric("vds.server.fnet.num-connections"));
// Node certificate
metrics.add(new Metric("node-certificate.expiry.seconds"));
return metrics;
}
private static Set<Metric> getConfigServerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("configserver.requests.count"));
metrics.add(new Metric("configserver.failedRequests.count"));
metrics.add(new Metric("configserver.latency.max"));
metrics.add(new Metric("configserver.latency.sum"));
metrics.add(new Metric("configserver.latency.count"));
metrics.add(new Metric("configserver.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("configserver.cacheConfigElems.last"));
metrics.add(new Metric("configserver.cacheChecksumElems.last"));
metrics.add(new Metric("configserver.hosts.last"));
metrics.add(new Metric("configserver.delayedResponses.count"));
metrics.add(new Metric("configserver.sessionChangeErrors.count"));
metrics.add(new Metric("configserver.zkZNodes.last"));
metrics.add(new Metric("configserver.zkAvgLatency.last"));
metrics.add(new Metric("configserver.zkMaxLatency.last"));
metrics.add(new Metric("configserver.zkConnections.last"));
metrics.add(new Metric("configserver.zkOutstandingRequests.last"));
return metrics;
}
private static Set<Metric> getContainerMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
addMetric(metrics, "jdisc.http.requests", List.of("rate", "count"));
metrics.add(new Metric("handled.requests.count"));
metrics.add(new Metric("handled.latency.max"));
metrics.add(new Metric("handled.latency.sum"));
metrics.add(new Metric("handled.latency.count"));
metrics.add(new Metric("handled.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverRejectedRequests.rate"));
metrics.add(new Metric("serverRejectedRequests.count"));
metrics.add(new Metric("serverThreadPoolSize.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.min")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.max"));
metrics.add(new Metric("serverThreadPoolSize.rate")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.last"));
metrics.add(new Metric("serverActiveThreads.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverActiveThreads.min"));
metrics.add(new Metric("serverActiveThreads.max"));
metrics.add(new Metric("serverActiveThreads.rate")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverActiveThreads.sum"));
metrics.add(new Metric("serverActiveThreads.count"));
metrics.add(new Metric("serverActiveThreads.last"));
metrics.add(new Metric("serverNumOpenConnections.average"));
metrics.add(new Metric("serverNumOpenConnections.max"));
metrics.add(new Metric("serverNumOpenConnections.last"));
metrics.add(new Metric("serverNumConnections.average"));
metrics.add(new Metric("serverNumConnections.max"));
metrics.add(new Metric("serverNumConnections.last"));
{
List<String> suffixes = List.of("sum", "count", "last", "min", "max");
addMetric(metrics, "jdisc.thread_pool.unhandled_exceptions", suffixes);
addMetric(metrics, "jdisc.thread_pool.work_queue.capacity", suffixes);
addMetric(metrics, "jdisc.thread_pool.work_queue.size", suffixes);
}
metrics.add(new Metric("httpapi_latency.max"));
metrics.add(new Metric("httpapi_latency.sum"));
metrics.add(new Metric("httpapi_latency.count"));
metrics.add(new Metric("httpapi_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("httpapi_pending.max"));
metrics.add(new Metric("httpapi_pending.sum"));
metrics.add(new Metric("httpapi_pending.count"));
metrics.add(new Metric("httpapi_pending.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("httpapi_num_operations.rate"));
metrics.add(new Metric("httpapi_num_updates.rate"));
metrics.add(new Metric("httpapi_num_removes.rate"));
metrics.add(new Metric("httpapi_num_puts.rate"));
metrics.add(new Metric("httpapi_succeeded.rate"));
metrics.add(new Metric("httpapi_failed.rate"));
metrics.add(new Metric("httpapi_parse_error.rate"));
metrics.add(new Metric("mem.heap.total.average"));
metrics.add(new Metric("mem.heap.free.average"));
metrics.add(new Metric("mem.heap.used.average"));
metrics.add(new Metric("mem.heap.used.max"));
metrics.add(new Metric("jdisc.memory_mappings.max"));
metrics.add(new Metric("jdisc.open_file_descriptors.max"));
metrics.add(new Metric("jdisc.gc.count.average"));
metrics.add(new Metric("jdisc.gc.count.max"));
metrics.add(new Metric("jdisc.gc.count.last"));
metrics.add(new Metric("jdisc.gc.ms.average"));
metrics.add(new Metric("jdisc.gc.ms.max"));
metrics.add(new Metric("jdisc.gc.ms.last"));
metrics.add(new Metric("jdisc.deactivated_containers.total.last"));
metrics.add(new Metric("jdisc.deactivated_containers.with_retained_refs.last"));
metrics.add(new Metric("athenz-tenant-cert.expiry.seconds.last"));
metrics.add(new Metric("container-iam-role.expiry.seconds"));
metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate"));
addMetric(metrics, "jdisc.http.request.requests_per_connection", List.of("sum", "count", "min", "max", "average"));
metrics.add(new Metric("http.status.1xx.rate"));
metrics.add(new Metric("http.status.2xx.rate"));
metrics.add(new Metric("http.status.3xx.rate"));
metrics.add(new Metric("http.status.4xx.rate"));
metrics.add(new Metric("http.status.5xx.rate"));
metrics.add(new Metric("http.status.401.rate"));
metrics.add(new Metric("http.status.403.rate"));
metrics.add(new Metric("jdisc.http.request.uri_length.max"));
metrics.add(new Metric("jdisc.http.request.uri_length.sum"));
metrics.add(new Metric("jdisc.http.request.uri_length.count"));
metrics.add(new Metric("jdisc.http.request.uri_length.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("jdisc.http.request.content_size.max"));
metrics.add(new Metric("jdisc.http.request.content_size.sum"));
metrics.add(new Metric("jdisc.http.request.content_size.count"));
metrics.add(new Metric("jdisc.http.request.content_size.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.missing_client_cert.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.expired_client_cert.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.invalid_client_cert.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.incompatible_protocols.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.incompatible_ciphers.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.unknown.rate"));
metrics.add(new Metric("jdisc.http.handler.unhandled_exceptions.rate"));
addMetric(metrics, "jdisc.http.jetty.threadpool.thread.max", List.of("last"));
addMetric(metrics, "jdisc.http.jetty.threadpool.thread.reserved", List.of("last"));
addMetric(metrics, "jdisc.http.jetty.threadpool.thread.busy", List.of("sum", "count", "min", "max"));
addMetric(metrics, "jdisc.http.jetty.threadpool.thread.total", List.of("sum", "count", "min", "max"));
addMetric(metrics, "jdisc.http.jetty.threadpool.queue.size", List.of("sum", "count", "min", "max"));
addMetric(metrics, "jdisc.http.filtering.request.handled", List.of("rate"));
addMetric(metrics, "jdisc.http.filtering.request.unhandled", List.of("rate"));
addMetric(metrics, "jdisc.http.filtering.response.handled", List.of("rate"));
addMetric(metrics, "jdisc.http.filtering.response.unhandled", List.of("rate"));
addMetric(metrics, "jdisc.application.failed_component_graphs", List.of("rate"));
addMetric(metrics, "jdisc.http.filter.rule.blocked_requests", List.of("rate"));
addMetric(metrics, "jdisc.http.filter.rule.allowed_requests", List.of("rate"));
return metrics;
}
private static Set<Metric> getClusterControllerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("cluster-controller.down.count.last"));
metrics.add(new Metric("cluster-controller.initializing.count.last"));
metrics.add(new Metric("cluster-controller.maintenance.count.last"));
metrics.add(new Metric("cluster-controller.retired.count.last"));
metrics.add(new Metric("cluster-controller.stopping.count.last"));
metrics.add(new Metric("cluster-controller.up.count.last"));
metrics.add(new Metric("cluster-controller.cluster-state-change.count"));
metrics.add(new Metric("cluster-controller.busy-tick-time-ms.last"));
metrics.add(new Metric("cluster-controller.busy-tick-time-ms.max"));
metrics.add(new Metric("cluster-controller.busy-tick-time-ms.sum"));
metrics.add(new Metric("cluster-controller.busy-tick-time-ms.count"));
metrics.add(new Metric("cluster-controller.idle-tick-time-ms.last"));
metrics.add(new Metric("cluster-controller.idle-tick-time-ms.max"));
metrics.add(new Metric("cluster-controller.idle-tick-time-ms.sum"));
metrics.add(new Metric("cluster-controller.idle-tick-time-ms.count"));
metrics.add(new Metric("cluster-controller.work-ms.last"));
metrics.add(new Metric("cluster-controller.work-ms.sum"));
metrics.add(new Metric("cluster-controller.work-ms.count"));
metrics.add(new Metric("cluster-controller.is-master.last"));
metrics.add(new Metric("cluster-controller.remote-task-queue.size.last"));
// TODO(hakonhall): Update this name once persistent "count" metrics has been implemented.
// DO NOT RELY ON THIS METRIC YET.
metrics.add(new Metric("cluster-controller.node-event.count"));
metrics.add(new Metric("cluster-controller.resource_usage.nodes_above_limit.last"));
metrics.add(new Metric("cluster-controller.resource_usage.nodes_above_limit.max"));
metrics.add(new Metric("cluster-controller.resource_usage.max_memory_utilization.last"));
metrics.add(new Metric("cluster-controller.resource_usage.max_memory_utilization.max"));
metrics.add(new Metric("cluster-controller.resource_usage.max_disk_utilization.last"));
metrics.add(new Metric("cluster-controller.resource_usage.max_disk_utilization.max"));
metrics.add(new Metric("cluster-controller.resource_usage.disk_limit.last"));
metrics.add(new Metric("cluster-controller.resource_usage.memory_limit.last"));
metrics.add(new Metric("reindexing.progress.last"));
return metrics;
}
private static Set<Metric> getDocprocMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// per chain
metrics.add(new Metric("documents_processed.rate"));
return metrics;
}
private static Set<Metric> getQrserverMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("peak_qps.max"));
metrics.add(new Metric("search_connections.max"));
metrics.add(new Metric("search_connections.sum"));
metrics.add(new Metric("search_connections.count"));
metrics.add(new Metric("search_connections.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("feed.latency.max"));
metrics.add(new Metric("feed.latency.sum"));
metrics.add(new Metric("feed.latency.count"));
metrics.add(new Metric("feed.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("feed.http-requests.count"));
metrics.add(new Metric("feed.http-requests.rate"));
metrics.add(new Metric("queries.rate"));
metrics.add(new Metric("query_container_latency.max"));
metrics.add(new Metric("query_container_latency.sum"));
metrics.add(new Metric("query_container_latency.count"));
metrics.add(new Metric("query_container_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("query_latency.max"));
metrics.add(new Metric("query_latency.sum"));
metrics.add(new Metric("query_latency.count"));
metrics.add(new Metric("query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("query_latency.95percentile"));
metrics.add(new Metric("query_latency.99percentile"));
metrics.add(new Metric("failed_queries.rate"));
metrics.add(new Metric("degraded_queries.rate"));
metrics.add(new Metric("hits_per_query.max"));
metrics.add(new Metric("hits_per_query.sum"));
metrics.add(new Metric("hits_per_query.count"));
metrics.add(new Metric("hits_per_query.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("hits_per_query.95percentile"));
metrics.add(new Metric("hits_per_query.99percentile"));
metrics.add(new Metric("query_hit_offset.max"));
metrics.add(new Metric("query_hit_offset.sum"));
metrics.add(new Metric("query_hit_offset.count"));
metrics.add(new Metric("documents_covered.count"));
metrics.add(new Metric("documents_total.count"));
metrics.add(new Metric("dispatch_internal.rate"));
metrics.add(new Metric("dispatch_fdispatch.rate"));
addMetric(metrics, "jdisc.render.latency", Set.of("min", "max", "count", "sum", "last", "average"));
metrics.add(new Metric("totalhits_per_query.max"));
metrics.add(new Metric("totalhits_per_query.sum"));
metrics.add(new Metric("totalhits_per_query.count"));
metrics.add(new Metric("totalhits_per_query.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("totalhits_per_query.95percentile"));
metrics.add(new Metric("totalhits_per_query.99percentile"));
metrics.add(new Metric("empty_results.rate"));
metrics.add(new Metric("requestsOverQuota.rate"));
metrics.add(new Metric("requestsOverQuota.count"));
metrics.add(new Metric("relevance.at_1.sum"));
metrics.add(new Metric("relevance.at_1.count"));
metrics.add(new Metric("relevance.at_1.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("relevance.at_3.sum"));
metrics.add(new Metric("relevance.at_3.count"));
metrics.add(new Metric("relevance.at_3.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("relevance.at_10.sum"));
metrics.add(new Metric("relevance.at_10.count"));
metrics.add(new Metric("relevance.at_10.average")); // TODO: Remove in Vespa 8
// Errors from qrserver
metrics.add(new Metric("error.timeout.rate"));
metrics.add(new Metric("error.backends_oos.rate"));
metrics.add(new Metric("error.plugin_failure.rate"));
metrics.add(new Metric("error.backend_communication_error.rate"));
metrics.add(new Metric("error.empty_document_summaries.rate"));
metrics.add(new Metric("error.invalid_query_parameter.rate"));
metrics.add(new Metric("error.internal_server_error.rate"));
metrics.add(new Metric("error.misconfigured_server.rate"));
metrics.add(new Metric("error.invalid_query_transformation.rate"));
metrics.add(new Metric("error.result_with_errors.rate"));
metrics.add(new Metric("error.unspecified.rate"));
metrics.add(new Metric("error.unhandled_exception.rate"));
return metrics;
}
private static void addSearchNodeExecutorMetrics(Set<Metric> metrics, String prefix) {
metrics.add(new Metric(prefix + ".queuesize.max"));
metrics.add(new Metric(prefix + ".queuesize.sum"));
metrics.add(new Metric(prefix + ".queuesize.count"));
metrics.add(new Metric(prefix + ".maxpending.last")); // TODO: Remove in Vespa 8
metrics.add(new Metric(prefix + ".accepted.rate"));
}
private static Set<Metric> getSearchNodeMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("content.proton.documentdb.documents.total.last"));
metrics.add(new Metric("content.proton.documentdb.documents.ready.last"));
metrics.add(new Metric("content.proton.documentdb.documents.active.last"));
metrics.add(new Metric("content.proton.documentdb.documents.removed.last"));
metrics.add(new Metric("content.proton.documentdb.index.docs_in_memory.last"));
metrics.add(new Metric("content.proton.documentdb.disk_usage.last"));
metrics.add(new Metric("content.proton.documentdb.memory_usage.allocated_bytes.max"));
metrics.add(new Metric("content.proton.documentdb.heart_beat_age.last"));
metrics.add(new Metric("content.proton.transport.query.count.rate"));
metrics.add(new Metric("content.proton.docsum.docs.rate"));
metrics.add(new Metric("content.proton.docsum.latency.max"));
metrics.add(new Metric("content.proton.docsum.latency.sum"));
metrics.add(new Metric("content.proton.docsum.latency.count"));
metrics.add(new Metric("content.proton.docsum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.transport.query.latency.max"));
metrics.add(new Metric("content.proton.transport.query.latency.sum"));
metrics.add(new Metric("content.proton.transport.query.latency.count"));
metrics.add(new Metric("content.proton.transport.query.latency.average")); // TODO: Remove in Vespa 8
// Search protocol
metrics.add(new Metric("content.proton.search_protocol.query.latency.max"));
metrics.add(new Metric("content.proton.search_protocol.query.latency.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.latency.count"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.max"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.count"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.max"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.requested_documents.count"));
// Executors shared between all document dbs
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.proton");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.flush");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.match");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.docsum");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.shared");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.warmup");
// jobs
metrics.add(new Metric("content.proton.documentdb.job.total.average"));
metrics.add(new Metric("content.proton.documentdb.job.attribute_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.memory_index_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.disk_index_fusion.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.bucket_move.average"));
metrics.add(new Metric("content.proton.documentdb.job.lid_space_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.removed_documents_prune.average"));
// Threading service (per document db)
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.master");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.summary");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index_field_inverter");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index_field_writer");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.attribute_field_writer");
// lid space
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.highest_used_lid.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.highest_used_lid.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.highest_used_lid.last"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.used_lids.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.used_lids.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.used_lids.last"));
// bucket move
metrics.add(new Metric("content.proton.documentdb.bucket_move.buckets_pending.last"));
// resource usage
metrics.add(new Metric("content.proton.resource_usage.disk.average"));
metrics.add(new Metric("content.proton.resource_usage.disk_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.transient_memory.average"));
metrics.add(new Metric("content.proton.resource_usage.transient_disk.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_mappings.max"));
metrics.add(new Metric("content.proton.resource_usage.open_file_descriptors.max"));
metrics.add(new Metric("content.proton.resource_usage.feeding_blocked.max"));
metrics.add(new Metric("content.proton.resource_usage.malloc_arena.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.address_space.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.last")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.max"));
// transaction log
metrics.add(new Metric("content.proton.transactionlog.entries.average"));
metrics.add(new Metric("content.proton.transactionlog.disk_usage.average"));
metrics.add(new Metric("content.proton.transactionlog.replay_time.last"));
// document store
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.onhold_bytes.average"));
// document store cache
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.memory_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.hit_rate.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.lookups.rate"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.invalidations.rate"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.memory_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.hit_rate.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.lookups.rate"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.invalidations.rate"));
// attribute
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.onhold_bytes.average"));
// index
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.onhold_bytes.average"));
// matching
metrics.add(new Metric("content.proton.documentdb.matching.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.soft_doomed_queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.max"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.count"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.max")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.sum")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.rate")); // TODO: Consider remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.max"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doomed_queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.min"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.max")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.sum")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.rate")); // TODO: Consider remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.limited_queries.rate"));
return metrics;
}
private static Set<Metric> getStorageMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// TODO: For the purpose of this file and likely elsewhere, all but the last aggregate specifier,
// TODO: such as 'average' and 'sum' in the metric names below are just confusing and can be mentally
// TODO: disregarded when considering metric names. Consider cleaning up for Vespa 8.
// TODO Vespa 8 all metrics with .sum in the name should have that removed.
metrics.add(new Metric("vds.datastored.alldisks.docs.average"));
metrics.add(new Metric("vds.datastored.alldisks.bytes.average"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.max"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.sum"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.count"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.max"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.sum"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.count"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put_latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put_latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put_latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_latency.count"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.max"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.sum"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.count"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.created.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.failed.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.test_and_set_failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.request_size.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.request_size.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.request_size.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.test_and_set_failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.request_size.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.request_size.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.request_size.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.request_size.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.request_size.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.request_size.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.test_and_set_failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.request_size.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.request_size.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.request_size.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.splitbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.joinbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.setbucketstates.count.rate"));
//Distributor
metrics.add(new Metric("vds.idealstate.buckets_rechecking.average"));
metrics.add(new Metric("vds.idealstate.idealstate_diff.average"));
metrics.add(new Metric("vds.idealstate.buckets_toofewcopies.average"));
metrics.add(new Metric("vds.idealstate.buckets_toomanycopies.average"));
metrics.add(new Metric("vds.idealstate.buckets.average"));
metrics.add(new Metric("vds.idealstate.buckets_notrusted.average"));
metrics.add(new Metric("vds.idealstate.bucket_replicas_moving_out.average"));
metrics.add(new Metric("vds.idealstate.bucket_replicas_copying_out.average"));
metrics.add(new Metric("vds.idealstate.bucket_replicas_copying_in.average"));
metrics.add(new Metric("vds.idealstate.bucket_replicas_syncing.average"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.delete_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.merge_bucket.blocked.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.throttled.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.source_only_copy_changed.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.source_only_copy_delete_blocked.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.source_only_copy_delete_failed.rate"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.split_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.join_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.garbage_collection.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.garbage_collection.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.garbage_collection.pending.average"));
metrics.add(new Metric("vds.idealstate.garbage_collection.documents_removed.count"));
metrics.add(new Metric("vds.idealstate.garbage_collection.documents_removed.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.max"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.count"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.puts.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.concurrent_mutations.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.max"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.count"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.removes.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.concurrent_mutations.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.max"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.count"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.updates.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.concurrent_mutations.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.diverging_timestamp_updates.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.max"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.count"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.gets.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.max"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.count"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.visitor.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.docsstored.average"));
metrics.add(new Metric("vds.distributor.bytesstored.average"));
metrics.add(new Metric("vds.bouncer.clock_skew_aborts.count"));
metrics.add(new Metric("vds.mergethrottler.averagequeuewaitingtime.max"));
metrics.add(new Metric("vds.mergethrottler.averagequeuewaitingtime.sum"));
metrics.add(new Metric("vds.mergethrottler.averagequeuewaitingtime.count"));
metrics.add(new Metric("vds.mergethrottler.queuesize.max"));
metrics.add(new Metric("vds.mergethrottler.queuesize.sum"));
metrics.add(new Metric("vds.mergethrottler.queuesize.count"));
metrics.add(new Metric("vds.mergethrottler.bounced_due_to_back_pressure.rate"));
metrics.add(new Metric("vds.mergethrottler.locallyexecutedmerges.ok.rate"));
metrics.add(new Metric("vds.mergethrottler.mergechains.ok.rate"));
metrics.add(new Metric("vds.mergethrottler.mergechains.failures.busy.rate"));
metrics.add(new Metric("vds.mergethrottler.mergechains.failures.total.rate"));
return metrics;
}
private static void addMetric(Set<Metric> metrics, String metricName, Iterable<String> aggregateSuffices) {
for (String suffix : aggregateSuffices) {
metrics.add(new Metric(metricName + "." + suffix));
}
}
}
| config-model/src/main/java/com/yahoo/vespa/model/admin/monitoring/VespaMetricSet.java | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.model.admin.monitoring;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static com.yahoo.vespa.model.admin.monitoring.DefaultVespaMetrics.defaultVespaMetricSet;
import static java.util.Collections.singleton;
/**
* Encapsulates vespa service metrics.
*
* @author gjoranv
*/
public class VespaMetricSet {
public static final MetricSet vespaMetricSet = new MetricSet("vespa",
getVespaMetrics(),
singleton(defaultVespaMetricSet));
private static Set<Metric> getVespaMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.addAll(getSearchNodeMetrics());
metrics.addAll(getStorageMetrics());
metrics.addAll(getDocprocMetrics());
metrics.addAll(getClusterControllerMetrics());
metrics.addAll(getQrserverMetrics());
metrics.addAll(getContainerMetrics());
metrics.addAll(getConfigServerMetrics());
metrics.addAll(getSentinelMetrics());
metrics.addAll(getOtherMetrics());
return Collections.unmodifiableSet(metrics);
}
private static Set<Metric> getSentinelMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("sentinel.restarts.count"));
metrics.add(new Metric("sentinel.totalRestarts.last"));
metrics.add(new Metric("sentinel.uptime.last"));
metrics.add(new Metric("sentinel.running.count"));
metrics.add(new Metric("sentinel.running.last"));
return metrics;
}
private static Set<Metric> getOtherMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("slobrok.heartbeats.failed.count"));
metrics.add(new Metric("logd.processed.lines.count"));
metrics.add(new Metric("worker.connections.max"));
// Java (JRT) TLS metrics
metrics.add(new Metric("jrt.transport.tls-certificate-verification-failures"));
metrics.add(new Metric("jrt.transport.peer-authorization-failures"));
metrics.add(new Metric("jrt.transport.server.tls-connections-established"));
metrics.add(new Metric("jrt.transport.client.tls-connections-established"));
metrics.add(new Metric("jrt.transport.server.unencrypted-connections-established"));
metrics.add(new Metric("jrt.transport.client.unencrypted-connections-established"));
// C++ TLS metrics
metrics.add(new Metric("vds.server.network.tls-handshakes-failed"));
metrics.add(new Metric("vds.server.network.peer-authorization-failures"));
metrics.add(new Metric("vds.server.network.client.tls-connections-established"));
metrics.add(new Metric("vds.server.network.server.tls-connections-established"));
metrics.add(new Metric("vds.server.network.client.insecure-connections-established"));
metrics.add(new Metric("vds.server.network.server.insecure-connections-established"));
metrics.add(new Metric("vds.server.network.tls-connections-broken"));
metrics.add(new Metric("vds.server.network.failed-tls-config-reloads"));
// C++ Fnet metrics
metrics.add(new Metric("vds.server.fnet.num-connections"));
// Node certificate
metrics.add(new Metric("node-certificate.expiry.seconds"));
return metrics;
}
private static Set<Metric> getConfigServerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("configserver.requests.count"));
metrics.add(new Metric("configserver.failedRequests.count"));
metrics.add(new Metric("configserver.latency.max"));
metrics.add(new Metric("configserver.latency.sum"));
metrics.add(new Metric("configserver.latency.count"));
metrics.add(new Metric("configserver.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("configserver.cacheConfigElems.last"));
metrics.add(new Metric("configserver.cacheChecksumElems.last"));
metrics.add(new Metric("configserver.hosts.last"));
metrics.add(new Metric("configserver.delayedResponses.count"));
metrics.add(new Metric("configserver.sessionChangeErrors.count"));
metrics.add(new Metric("configserver.zkZNodes.last"));
metrics.add(new Metric("configserver.zkAvgLatency.last"));
metrics.add(new Metric("configserver.zkMaxLatency.last"));
metrics.add(new Metric("configserver.zkConnections.last"));
metrics.add(new Metric("configserver.zkOutstandingRequests.last"));
return metrics;
}
private static Set<Metric> getContainerMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
addMetric(metrics, "jdisc.http.requests", List.of("rate", "count"));
metrics.add(new Metric("handled.requests.count"));
metrics.add(new Metric("handled.latency.max"));
metrics.add(new Metric("handled.latency.sum"));
metrics.add(new Metric("handled.latency.count"));
metrics.add(new Metric("handled.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverRejectedRequests.rate"));
metrics.add(new Metric("serverRejectedRequests.count"));
metrics.add(new Metric("serverThreadPoolSize.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.min")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.max"));
metrics.add(new Metric("serverThreadPoolSize.rate")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.last"));
metrics.add(new Metric("serverActiveThreads.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverActiveThreads.min"));
metrics.add(new Metric("serverActiveThreads.max"));
metrics.add(new Metric("serverActiveThreads.rate")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverActiveThreads.sum"));
metrics.add(new Metric("serverActiveThreads.count"));
metrics.add(new Metric("serverActiveThreads.last"));
metrics.add(new Metric("serverNumOpenConnections.average"));
metrics.add(new Metric("serverNumOpenConnections.max"));
metrics.add(new Metric("serverNumOpenConnections.last"));
metrics.add(new Metric("serverNumConnections.average"));
metrics.add(new Metric("serverNumConnections.max"));
metrics.add(new Metric("serverNumConnections.last"));
{
List<String> suffixes = List.of("sum", "count", "last", "min", "max");
addMetric(metrics, "jdisc.thread_pool.unhandled_exceptions", suffixes);
addMetric(metrics, "jdisc.thread_pool.work_queue.capacity", suffixes);
addMetric(metrics, "jdisc.thread_pool.work_queue.size", suffixes);
}
metrics.add(new Metric("httpapi_latency.max"));
metrics.add(new Metric("httpapi_latency.sum"));
metrics.add(new Metric("httpapi_latency.count"));
metrics.add(new Metric("httpapi_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("httpapi_pending.max"));
metrics.add(new Metric("httpapi_pending.sum"));
metrics.add(new Metric("httpapi_pending.count"));
metrics.add(new Metric("httpapi_pending.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("httpapi_num_operations.rate"));
metrics.add(new Metric("httpapi_num_updates.rate"));
metrics.add(new Metric("httpapi_num_removes.rate"));
metrics.add(new Metric("httpapi_num_puts.rate"));
metrics.add(new Metric("httpapi_succeeded.rate"));
metrics.add(new Metric("httpapi_failed.rate"));
metrics.add(new Metric("httpapi_parse_error.rate"));
metrics.add(new Metric("mem.heap.total.average"));
metrics.add(new Metric("mem.heap.free.average"));
metrics.add(new Metric("mem.heap.used.average"));
metrics.add(new Metric("mem.heap.used.max"));
metrics.add(new Metric("jdisc.memory_mappings.max"));
metrics.add(new Metric("jdisc.open_file_descriptors.max"));
metrics.add(new Metric("jdisc.gc.count.average"));
metrics.add(new Metric("jdisc.gc.count.max"));
metrics.add(new Metric("jdisc.gc.count.last"));
metrics.add(new Metric("jdisc.gc.ms.average"));
metrics.add(new Metric("jdisc.gc.ms.max"));
metrics.add(new Metric("jdisc.gc.ms.last"));
metrics.add(new Metric("jdisc.deactivated_containers.total.last"));
metrics.add(new Metric("jdisc.deactivated_containers.with_retained_refs.last"));
metrics.add(new Metric("athenz-tenant-cert.expiry.seconds.last"));
metrics.add(new Metric("container-iam-role.expiry.seconds"));
metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate"));
addMetric(metrics, "jdisc.http.request.requests_per_connection", List.of("sum", "count", "min", "max", "average"));
metrics.add(new Metric("http.status.1xx.rate"));
metrics.add(new Metric("http.status.2xx.rate"));
metrics.add(new Metric("http.status.3xx.rate"));
metrics.add(new Metric("http.status.4xx.rate"));
metrics.add(new Metric("http.status.5xx.rate"));
metrics.add(new Metric("http.status.401.rate"));
metrics.add(new Metric("http.status.403.rate"));
metrics.add(new Metric("jdisc.http.request.uri_length.max"));
metrics.add(new Metric("jdisc.http.request.uri_length.sum"));
metrics.add(new Metric("jdisc.http.request.uri_length.count"));
metrics.add(new Metric("jdisc.http.request.uri_length.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("jdisc.http.request.content_size.max"));
metrics.add(new Metric("jdisc.http.request.content_size.sum"));
metrics.add(new Metric("jdisc.http.request.content_size.count"));
metrics.add(new Metric("jdisc.http.request.content_size.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.missing_client_cert.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.expired_client_cert.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.invalid_client_cert.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.incompatible_protocols.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.incompatible_ciphers.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.unknown.rate"));
metrics.add(new Metric("jdisc.http.handler.unhandled_exceptions.rate"));
addMetric(metrics, "jdisc.http.jetty.threadpool.thread.max", List.of("last"));
addMetric(metrics, "jdisc.http.jetty.threadpool.thread.reserved", List.of("last"));
addMetric(metrics, "jdisc.http.jetty.threadpool.thread.busy", List.of("sum", "count", "min", "max"));
addMetric(metrics, "jdisc.http.jetty.threadpool.thread.total", List.of("sum", "count", "min", "max"));
addMetric(metrics, "jdisc.http.jetty.threadpool.queue.size", List.of("sum", "count", "min", "max"));
addMetric(metrics, "jdisc.http.filtering.request.handled", List.of("rate"));
addMetric(metrics, "jdisc.http.filtering.request.unhandled", List.of("rate"));
addMetric(metrics, "jdisc.http.filtering.response.handled", List.of("rate"));
addMetric(metrics, "jdisc.http.filtering.response.unhandled", List.of("rate"));
addMetric(metrics, "jdisc.application.failed_component_graphs", List.of("rate"));
addMetric(metrics, "jdisc.http.filter.rule.blocked_requests", List.of("rate"));
addMetric(metrics, "jdisc.http.filter.rule.allowed_requests", List.of("rate"));
return metrics;
}
private static Set<Metric> getClusterControllerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("cluster-controller.down.count.last"));
metrics.add(new Metric("cluster-controller.initializing.count.last"));
metrics.add(new Metric("cluster-controller.maintenance.count.last"));
metrics.add(new Metric("cluster-controller.retired.count.last"));
metrics.add(new Metric("cluster-controller.stopping.count.last"));
metrics.add(new Metric("cluster-controller.up.count.last"));
metrics.add(new Metric("cluster-controller.cluster-state-change.count"));
metrics.add(new Metric("cluster-controller.busy-tick-time-ms.last"));
metrics.add(new Metric("cluster-controller.busy-tick-time-ms.max"));
metrics.add(new Metric("cluster-controller.busy-tick-time-ms.sum"));
metrics.add(new Metric("cluster-controller.busy-tick-time-ms.count"));
metrics.add(new Metric("cluster-controller.idle-tick-time-ms.last"));
metrics.add(new Metric("cluster-controller.idle-tick-time-ms.max"));
metrics.add(new Metric("cluster-controller.idle-tick-time-ms.sum"));
metrics.add(new Metric("cluster-controller.idle-tick-time-ms.count"));
metrics.add(new Metric("cluster-controller.work-ms.last"));
metrics.add(new Metric("cluster-controller.work-ms.sum"));
metrics.add(new Metric("cluster-controller.work-ms.count"));
metrics.add(new Metric("cluster-controller.is-master.last"));
metrics.add(new Metric("cluster-controller.remote-task-queue.size.last"));
// TODO(hakonhall): Update this name once persistent "count" metrics has been implemented.
// DO NOT RELY ON THIS METRIC YET.
metrics.add(new Metric("cluster-controller.node-event.count"));
metrics.add(new Metric("cluster-controller.resource_usage.nodes_above_limit.last"));
metrics.add(new Metric("cluster-controller.resource_usage.nodes_above_limit.max"));
metrics.add(new Metric("cluster-controller.resource_usage.max_memory_utilization.last"));
metrics.add(new Metric("cluster-controller.resource_usage.max_memory_utilization.max"));
metrics.add(new Metric("cluster-controller.resource_usage.max_disk_utilization.last"));
metrics.add(new Metric("cluster-controller.resource_usage.max_disk_utilization.max"));
metrics.add(new Metric("cluster-controller.resource_usage.disk_limit.last"));
metrics.add(new Metric("cluster-controller.resource_usage.memory_limit.last"));
metrics.add(new Metric("reindexing.progress.last"));
return metrics;
}
private static Set<Metric> getDocprocMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// per chain
metrics.add(new Metric("documents_processed.rate"));
return metrics;
}
private static Set<Metric> getQrserverMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("peak_qps.max"));
metrics.add(new Metric("search_connections.max"));
metrics.add(new Metric("search_connections.sum"));
metrics.add(new Metric("search_connections.count"));
metrics.add(new Metric("search_connections.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("feed.latency.max"));
metrics.add(new Metric("feed.latency.sum"));
metrics.add(new Metric("feed.latency.count"));
metrics.add(new Metric("feed.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("feed.http-requests.count"));
metrics.add(new Metric("feed.http-requests.rate"));
metrics.add(new Metric("queries.rate"));
metrics.add(new Metric("query_container_latency.max"));
metrics.add(new Metric("query_container_latency.sum"));
metrics.add(new Metric("query_container_latency.count"));
metrics.add(new Metric("query_container_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("query_latency.max"));
metrics.add(new Metric("query_latency.sum"));
metrics.add(new Metric("query_latency.count"));
metrics.add(new Metric("query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("query_latency.95percentile"));
metrics.add(new Metric("query_latency.99percentile"));
metrics.add(new Metric("failed_queries.rate"));
metrics.add(new Metric("degraded_queries.rate"));
metrics.add(new Metric("hits_per_query.max"));
metrics.add(new Metric("hits_per_query.sum"));
metrics.add(new Metric("hits_per_query.count"));
metrics.add(new Metric("hits_per_query.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("hits_per_query.95percentile"));
metrics.add(new Metric("hits_per_query.99percentile"));
metrics.add(new Metric("query_hit_offset.max"));
metrics.add(new Metric("query_hit_offset.sum"));
metrics.add(new Metric("query_hit_offset.count"));
metrics.add(new Metric("documents_covered.count"));
metrics.add(new Metric("documents_total.count"));
metrics.add(new Metric("dispatch_internal.rate"));
metrics.add(new Metric("dispatch_fdispatch.rate"));
addMetric(metrics, "jdisc.render.latency", Set.of("min", "max", "count", "sum", "last", "average"));
metrics.add(new Metric("totalhits_per_query.max"));
metrics.add(new Metric("totalhits_per_query.sum"));
metrics.add(new Metric("totalhits_per_query.count"));
metrics.add(new Metric("totalhits_per_query.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("totalhits_per_query.95percentile"));
metrics.add(new Metric("totalhits_per_query.99percentile"));
metrics.add(new Metric("empty_results.rate"));
metrics.add(new Metric("requestsOverQuota.rate"));
metrics.add(new Metric("requestsOverQuota.count"));
metrics.add(new Metric("relevance.at_1.sum"));
metrics.add(new Metric("relevance.at_1.count"));
metrics.add(new Metric("relevance.at_1.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("relevance.at_3.sum"));
metrics.add(new Metric("relevance.at_3.count"));
metrics.add(new Metric("relevance.at_3.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("relevance.at_10.sum"));
metrics.add(new Metric("relevance.at_10.count"));
metrics.add(new Metric("relevance.at_10.average")); // TODO: Remove in Vespa 8
// Errors from qrserver
metrics.add(new Metric("error.timeout.rate"));
metrics.add(new Metric("error.backends_oos.rate"));
metrics.add(new Metric("error.plugin_failure.rate"));
metrics.add(new Metric("error.backend_communication_error.rate"));
metrics.add(new Metric("error.empty_document_summaries.rate"));
metrics.add(new Metric("error.invalid_query_parameter.rate"));
metrics.add(new Metric("error.internal_server_error.rate"));
metrics.add(new Metric("error.misconfigured_server.rate"));
metrics.add(new Metric("error.invalid_query_transformation.rate"));
metrics.add(new Metric("error.result_with_errors.rate"));
metrics.add(new Metric("error.unspecified.rate"));
metrics.add(new Metric("error.unhandled_exception.rate"));
return metrics;
}
private static void addSearchNodeExecutorMetrics(Set<Metric> metrics, String prefix) {
metrics.add(new Metric(prefix + ".queuesize.max"));
metrics.add(new Metric(prefix + ".queuesize.sum"));
metrics.add(new Metric(prefix + ".queuesize.count"));
metrics.add(new Metric(prefix + ".maxpending.last")); // TODO: Remove in Vespa 8
metrics.add(new Metric(prefix + ".accepted.rate"));
}
private static Set<Metric> getSearchNodeMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("content.proton.documentdb.documents.total.last"));
metrics.add(new Metric("content.proton.documentdb.documents.ready.last"));
metrics.add(new Metric("content.proton.documentdb.documents.active.last"));
metrics.add(new Metric("content.proton.documentdb.documents.removed.last"));
metrics.add(new Metric("content.proton.documentdb.index.docs_in_memory.last"));
metrics.add(new Metric("content.proton.documentdb.disk_usage.last"));
metrics.add(new Metric("content.proton.documentdb.memory_usage.allocated_bytes.max"));
metrics.add(new Metric("content.proton.documentdb.heart_beat_age.last"));
metrics.add(new Metric("content.proton.transport.query.count.rate"));
metrics.add(new Metric("content.proton.docsum.docs.rate"));
metrics.add(new Metric("content.proton.docsum.latency.max"));
metrics.add(new Metric("content.proton.docsum.latency.sum"));
metrics.add(new Metric("content.proton.docsum.latency.count"));
metrics.add(new Metric("content.proton.docsum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.transport.query.latency.max"));
metrics.add(new Metric("content.proton.transport.query.latency.sum"));
metrics.add(new Metric("content.proton.transport.query.latency.count"));
metrics.add(new Metric("content.proton.transport.query.latency.average")); // TODO: Remove in Vespa 8
// Search protocol
metrics.add(new Metric("content.proton.search_protocol.query.latency.max"));
metrics.add(new Metric("content.proton.search_protocol.query.latency.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.latency.count"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.max"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.count"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.max"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.requested_documents.count"));
// Executors shared between all document dbs
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.proton");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.flush");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.match");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.docsum");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.shared");
addSearchNodeExecutorMetrics(metrics, "content.proton.executor.warmup");
// jobs
metrics.add(new Metric("content.proton.documentdb.job.total.average"));
metrics.add(new Metric("content.proton.documentdb.job.attribute_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.memory_index_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.disk_index_fusion.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.bucket_move.average"));
metrics.add(new Metric("content.proton.documentdb.job.lid_space_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.removed_documents_prune.average"));
// Threading service (per document db)
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.master");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.summary");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index_field_inverter");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.index_field_writer");
addSearchNodeExecutorMetrics(metrics, "content.proton.documentdb.threading_service.attribute_field_writer");
// lid space
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.highest_used_lid.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.highest_used_lid.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.highest_used_lid.last"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.used_lids.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.used_lids.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.used_lids.last"));
// bucket move
metrics.add(new Metric("content.proton.documentdb.bucket_move.buckets_pending.last"));
// resource usage
metrics.add(new Metric("content.proton.resource_usage.disk.average"));
metrics.add(new Metric("content.proton.resource_usage.disk_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.transient_memory.average"));
metrics.add(new Metric("content.proton.resource_usage.transient_disk.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_mappings.max"));
metrics.add(new Metric("content.proton.resource_usage.open_file_descriptors.max"));
metrics.add(new Metric("content.proton.resource_usage.feeding_blocked.max"));
metrics.add(new Metric("content.proton.resource_usage.malloc_arena.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.address_space.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.last")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.max"));
// transaction log
metrics.add(new Metric("content.proton.transactionlog.entries.average"));
metrics.add(new Metric("content.proton.transactionlog.disk_usage.average"));
metrics.add(new Metric("content.proton.transactionlog.replay_time.last"));
// document store
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.onhold_bytes.average"));
// document store cache
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.memory_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.hit_rate.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.lookups.rate"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.invalidations.rate"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.memory_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.hit_rate.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.lookups.rate"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.invalidations.rate"));
// attribute
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.onhold_bytes.average"));
// index
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.onhold_bytes.average"));
// matching
metrics.add(new Metric("content.proton.documentdb.matching.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.soft_doomed_queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.max"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.count"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.max")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.sum")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.rate")); // TODO: Consider remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.max"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doomed_queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.min"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.max")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.sum")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.rate")); // TODO: Consider remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.limited_queries.rate"));
return metrics;
}
private static Set<Metric> getStorageMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// TODO: For the purpose of this file and likely elsewhere, all but the last aggregate specifier,
// TODO: such as 'average' and 'sum' in the metric names below are just confusing and can be mentally
// TODO: disregarded when considering metric names. Consider cleaning up for Vespa 8.
// TODO Vespa 8 all metrics with .sum in the name should have that removed.
metrics.add(new Metric("vds.datastored.alldisks.docs.average"));
metrics.add(new Metric("vds.datastored.alldisks.bytes.average"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.max"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.sum"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.count"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.max"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.sum"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.count"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergemetadatareadlatency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatareadlatency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.mergedatawritelatency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put_latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put_latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put_latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_latency.count"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.max"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.sum"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.count"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.created.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.failed.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.test_and_set_failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.request_size.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.request_size.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.request_size.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.test_and_set_failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.request_size.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.request_size.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.request_size.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.request_size.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.request_size.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.request_size.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.test_and_set_failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.request_size.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.request_size.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.request_size.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.splitbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.joinbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.setbucketstates.count.rate"));
//Distributor
metrics.add(new Metric("vds.idealstate.buckets_rechecking.average"));
metrics.add(new Metric("vds.idealstate.idealstate_diff.average"));
metrics.add(new Metric("vds.idealstate.buckets_toofewcopies.average"));
metrics.add(new Metric("vds.idealstate.buckets_toomanycopies.average"));
metrics.add(new Metric("vds.idealstate.buckets.average"));
metrics.add(new Metric("vds.idealstate.buckets_notrusted.average"));
metrics.add(new Metric("vds.idealstate.bucket_replicas_moving_out.average"));
metrics.add(new Metric("vds.idealstate.bucket_replicas_copying_out.average"));
metrics.add(new Metric("vds.idealstate.bucket_replicas_copying_in.average"));
metrics.add(new Metric("vds.idealstate.bucket_replicas_syncing.average"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.delete_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.split_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.join_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.garbage_collection.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.garbage_collection.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.garbage_collection.pending.average"));
metrics.add(new Metric("vds.idealstate.garbage_collection.documents_removed.count"));
metrics.add(new Metric("vds.idealstate.garbage_collection.documents_removed.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.max"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.count"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.puts.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.concurrent_mutations.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.max"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.count"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.removes.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.concurrent_mutations.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.max"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.count"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.updates.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.concurrent_mutations.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.diverging_timestamp_updates.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.max"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.count"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.gets.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.max"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.count"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.visitor.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.docsstored.average"));
metrics.add(new Metric("vds.distributor.bytesstored.average"));
metrics.add(new Metric("vds.bouncer.clock_skew_aborts.count"));
metrics.add(new Metric("vds.mergethrottler.averagequeuewaitingtime.max"));
metrics.add(new Metric("vds.mergethrottler.averagequeuewaitingtime.sum"));
metrics.add(new Metric("vds.mergethrottler.averagequeuewaitingtime.count"));
metrics.add(new Metric("vds.mergethrottler.queuesize.max"));
metrics.add(new Metric("vds.mergethrottler.queuesize.sum"));
metrics.add(new Metric("vds.mergethrottler.queuesize.count"));
metrics.add(new Metric("vds.mergethrottler.bounced_due_to_back_pressure.rate"));
metrics.add(new Metric("vds.mergethrottler.locallyexecutedmerges.ok.rate"));
metrics.add(new Metric("vds.mergethrottler.mergechains.ok.rate"));
metrics.add(new Metric("vds.mergethrottler.mergechains.failures.busy.rate"));
metrics.add(new Metric("vds.mergethrottler.mergechains.failures.total.rate"));
return metrics;
}
private static void addMetric(Set<Metric> metrics, String metricName, Iterable<String> aggregateSuffices) {
for (String suffix : aggregateSuffices) {
metrics.add(new Metric(metricName + "." + suffix));
}
}
}
| Expose more merge bucket related metrics.
| config-model/src/main/java/com/yahoo/vespa/model/admin/monitoring/VespaMetricSet.java | Expose more merge bucket related metrics. |
|
Java | apache-2.0 | 5f778296776fb255da963af73dc168504f62edf5 | 0 | boalang/compiler,boalang/compiler,boalang/compiler,boalang/compiler,boalang/compiler | /*
* Copyright 2016, Hridesh Rajan, Robert Dyer, Hoan Nguyen
* Iowa State University of Science and Technology
* and Bowling Green State University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boa.datagen.scm;
import java.io.*;
import java.util.*;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.dom.*;
import org.eclipse.wst.jsdt.core.JavaScriptCore;
import org.eclipse.wst.jsdt.core.dom.JavaScriptUnit;
import boa.types.Ast.ASTRoot;
import boa.types.Code.Revision;
import boa.types.Diff.ChangedFile;
import boa.types.Diff.ChangedFile.Builder;
import boa.types.Diff.ChangedFile.FileKind;
import boa.types.Shared.ChangeKind;
import boa.types.Shared.Person;
import boa.datagen.util.FileIO;
import boa.datagen.util.JavaScriptErrorCheckVisitor;
import boa.datagen.util.JavaScriptVisitor;
import boa.datagen.util.Java7Visitor;
import boa.datagen.util.Java8Visitor;
import boa.datagen.util.JavaErrorCheckVisitor;
import boa.datagen.util.Properties;
/**
* @author rdyer
*/
public abstract class AbstractCommit {
protected static final boolean debug = false; //util.Properties.getBoolean("debug", main.DefaultProperties.DEBUG);
protected AbstractConnector connector;
protected AbstractCommit(AbstractConnector cnn) {
this.connector = cnn;
}
protected String id = null;
public void setId(final String id) { this.id = id; }
protected String author;
public void setAuthor(final String author) { this.author = author; }
protected String committer;
public void setCommitter(final String committer) { this.committer = committer; }
protected String message;
public void setMessage(final String message) { this.message = message; }
protected Date date;
public void setDate(final Date date) { this.date = date; }
private Map<String, String> changedPaths = new HashMap<String, String>();
public void setChangedPaths(final Map<String, String> changedPaths) { this.changedPaths = changedPaths; }
private Map<String, String> addedPaths = new HashMap<String, String>();
public void setAddedPaths(final Map<String, String> addedPaths) { this.addedPaths = addedPaths; }
private Map<String, String> removedPaths = new HashMap<String, String>();
public void setRemovedPaths(final Map<String, String> removedPaths) { this.removedPaths = removedPaths; }
protected int[] parentIndices;
protected void setParentIndices(final int[] parentList) {
parentIndices = parentList;
}
protected int[] getParentIndices() {
return parentIndices;
}
protected static final ByteArrayOutputStream buffer = new ByteArrayOutputStream(4096);
protected abstract String getFileContents(final String path);
protected abstract Person parsePerson(final String s);
public Revision asProtobuf(final boolean parse, final Writer astWriter, final String revKey, final String keyDelim) {
final Revision.Builder revision = Revision.newBuilder();
revision.setId(id);
final Person author = parsePerson(this.author);
final Person committer = parsePerson(this.committer);
revision.setAuthor(author == null ? committer : author);
revision.setCommitter(committer);
long time = -1;
if (date != null)
time = date.getTime() * 1000;
revision.setCommitDate(time);
if (message != null)
revision.setLog(message);
else
revision.setLog("");
for (final String path : changedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, parse, astWriter, revKey, keyDelim);
fb.setChange(ChangeKind.MODIFIED);
//fb.setKey("");
revision.addFiles(fb.build());
}
for (final String path : addedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, parse, astWriter, revKey, keyDelim);
fb.setChange(ChangeKind.ADDED);
//fb.setKey("");
revision.addFiles(fb.build());
}
for (final String path : removedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, false, null, revKey, keyDelim);
fb.setChange(ChangeKind.DELETED);
//fb.setKey("");
revision.addFiles(fb.build());
}
return revision.build();
}
@SuppressWarnings("deprecation")
private Builder processChangeFile(String path, boolean parse, Writer astWriter, String revKey, String keyDelim) {
final ChangedFile.Builder fb = ChangedFile.newBuilder();
fb.setName(path);
fb.setKind(FileKind.OTHER);
final String lowerPath = path.toLowerCase();
if (lowerPath.endsWith(".txt"))
fb.setKind(FileKind.TEXT);
else if (lowerPath.endsWith(".xml"))
fb.setKind(FileKind.XML);
else if (lowerPath.endsWith(".jar") || lowerPath.endsWith(".class"))
fb.setKind(FileKind.BINARY);
else if (lowerPath.endsWith(".java") && parse) {
final String content = getFileContents(path);
fb.setKind(FileKind.SOURCE_JAVA_JLS2);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_4, AST.JLS2, false, astWriter, revKey + keyDelim + path)) {
if (debug)
System.err.println("Found JLS2 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS3);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_5, AST.JLS3, false, astWriter, revKey + keyDelim + path)) {
if (debug)
System.err.println("Found JLS3 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS4);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_7, AST.JLS4, false, astWriter, revKey + keyDelim + path)) {
if (debug)
System.err.println("Found JLS4 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS8);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_8, AST.JLS8, false, astWriter, revKey + keyDelim + path)) {
if (debug)
System.err.println("Found JLS8 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_ERROR);
try {
astWriter.append(new Text(revKey + keyDelim + fb.getName()), new BytesWritable(ASTRoot.newBuilder().build().toByteArray()));
} catch (IOException e) {
e.printStackTrace();
}
} else
if (debug)
System.err.println("Accepted JLS8: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS4: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS3: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS2: revision " + id + ": file " + path);
}
fb.setKey(revKey);
return fb;
}
private boolean parseJavaScriptFile(final String path,
final ChangedFile.Builder fb, final String content,
final String compliance, final int astLevel,
final boolean storeOnError, Writer astWriter, String key) {
try {
//System.out.println("parsing=" + (++count) + "\t" + path);
final org.eclipse.wst.jsdt.core.dom.ASTParser parser = org.eclipse.wst.jsdt.core.dom.ASTParser
.newParser(astLevel);
parser.setKind(ASTParser.K_COMPILATION_UNIT);
parser.setResolveBindings(true);
parser.setSource(content.toCharArray());
final Map options = JavaCore.getOptions();
JavaCore.setComplianceOptions(compliance, options);
parser.setCompilerOptions(options);
JavaScriptUnit cu;
try{
cu = (JavaScriptUnit) parser.createAST(null);
}catch(java.lang.IllegalArgumentException ex){
return false;
}
final JavaScriptErrorCheckVisitor errorCheck = new JavaScriptErrorCheckVisitor();
cu.accept(errorCheck);
if (!errorCheck.hasError || storeOnError) {
final ASTRoot.Builder ast = ASTRoot.newBuilder();
// final CommentsRoot.Builder comments =
// CommentsRoot.newBuilder();
final JavaScriptVisitor visitor = new JavaScriptVisitor(content);
try {
ast.addNamespaces(visitor.getNamespaces(cu));
// for (final String s : visitor.getImports())
// ast.addImports(s);
/*
* for (final Comment c : visitor.getComments())
* comments.addComments(c);
*/
} catch (final UnsupportedOperationException e) {
return false;
} catch (final Exception e) {
if (debug)
System.err.println("Error visiting: " + path);
//e.printStackTrace();
return false;
}
if (astWriter != null) {
try {
// System.out.println("writing=" + count + "\t" + path);
astWriter.append(new Text(key), new BytesWritable(ast
.build().toByteArray()));
} catch (IOException e) {
e.printStackTrace();
}
} else
fb.setAst(ast);
// fb.setComments(comments);
}
return !errorCheck.hasError;
} catch (final Exception e) {
e.printStackTrace();
return false;
}
}
public Revision asProtobuf(final boolean parse) {
final Revision.Builder revision = Revision.newBuilder();
revision.setId(id);
final Person author = parsePerson(this.author);
final Person committer = parsePerson(this.committer);
revision.setAuthor(author == null ? committer : author);
revision.setCommitter(committer);
long time = -1;
if (date != null)
time = date.getTime() * 1000;
revision.setCommitDate(time);
if (message != null)
revision.setLog(message);
else
revision.setLog("");
for (final String path : changedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, parse);
fb.setChange(ChangeKind.MODIFIED);
fb.setKey("");
revision.addFiles(fb.build());
}
for (final String path : addedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, parse);
fb.setChange(ChangeKind.ADDED);
fb.setKey("");
revision.addFiles(fb.build());
}
for (final String path : removedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, false);
fb.setChange(ChangeKind.DELETED);
fb.setKey("");
revision.addFiles(fb.build());
}
return revision.build();
}
public Map<String,String> getLOC() {
final Map<String,String> l = new HashMap<String,String>();
for (final String path : changedPaths.keySet())
l.put(path, processLOC(path));
for (final String path : addedPaths.keySet())
l.put(path, processLOC(path));
return l;
}
@SuppressWarnings("deprecation")
protected ChangedFile.Builder processChangeFile(final String path, final boolean attemptParse) {
final ChangedFile.Builder fb = ChangedFile.newBuilder();
fb.setName(path);
fb.setKind(FileKind.OTHER);
final String lowerPath = path.toLowerCase();
if (lowerPath.endsWith(".txt"))
fb.setKind(FileKind.TEXT);
else if (lowerPath.endsWith(".xml"))
fb.setKind(FileKind.XML);
else if (lowerPath.endsWith(".jar") || lowerPath.endsWith(".class"))
fb.setKind(FileKind.BINARY);
else if (lowerPath.endsWith(".java") && attemptParse) {
final String content = getFileContents(path);
fb.setKind(FileKind.SOURCE_JAVA_JLS2);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_4, AST.JLS2, false, null, null)) {
if (debug)
System.err.println("Found JLS2 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS3);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_5, AST.JLS3, false, null, null)) {
if (debug)
System.err.println("Found JLS3 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS4);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_7, AST.JLS4, false, null, null)) {
if (debug)
System.err.println("Found JLS4 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS8);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_8, AST.JLS8, false, null, null)) {
if (debug)
System.err.println("Found JLS8 parse error in: revision " + id + ": file " + path);
//fb.setContent(content);
fb.setKind(FileKind.SOURCE_JAVA_ERROR);
} else
if (debug)
System.err.println("Accepted JLS8: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS4: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS3: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS2: revision " + id + ": file " + path);
}
return fb;
}
private boolean parseJavaFile(final String path, final ChangedFile.Builder fb, final String content, final String compliance, final int astLevel, final boolean storeOnError, Writer astWriter, String key) {
try {
final ASTParser parser = ASTParser.newParser(astLevel);
parser.setKind(ASTParser.K_COMPILATION_UNIT);
parser.setResolveBindings(true);
parser.setSource(content.toCharArray());
final Map options = JavaCore.getOptions();
JavaCore.setComplianceOptions(compliance, options);
parser.setCompilerOptions(options);
final CompilationUnit cu = (CompilationUnit) parser.createAST(null);
final JavaErrorCheckVisitor errorCheck = new JavaErrorCheckVisitor();
cu.accept(errorCheck);
if (!errorCheck.hasError || storeOnError) {
final ASTRoot.Builder ast = ASTRoot.newBuilder();
//final CommentsRoot.Builder comments = CommentsRoot.newBuilder();
final Java7Visitor visitor;
if (astLevel == AST.JLS8)
visitor = new Java8Visitor(content, connector.nameIndices);
else
visitor = new Java7Visitor(content, connector.nameIndices);
try {
ast.addNamespaces(visitor.getNamespaces(cu));
for (final String s : visitor.getImports())
ast.addImports(s);
/*for (final Comment c : visitor.getComments())
comments.addComments(c);*/
} catch (final UnsupportedOperationException e) {
return false;
} catch (final Exception e) {
if (debug)
System.err.println("Error visiting: " + path);
e.printStackTrace();
return false;
}
if (astWriter != null) {
try {
astWriter.append(new Text(key), new BytesWritable(ast.build().toByteArray()));
} catch (IOException e) {
e.printStackTrace();
}
}
else
fb.setAst(ast);
//fb.setComments(comments);
}
return !errorCheck.hasError;
} catch (final Exception e) {
e.printStackTrace();
return false;
}
}
protected String processLOC(final String path) {
String loc = "";
final String lowerPath = path.toLowerCase();
if (!(lowerPath.endsWith(".txt") || lowerPath.endsWith(".xml") || lowerPath.endsWith(".java")))
return loc;
final String content = getFileContents(path);
final File dir = new File(new File(System.getProperty("java.io.tmpdir")), UUID.randomUUID().toString());
final File tmpPath = new File(dir, path.substring(0, path.lastIndexOf("/")));
tmpPath.mkdirs();
final File tmpFile = new File(tmpPath, path.substring(path.lastIndexOf("/") + 1));
FileIO.writeFileContents(tmpFile, content);
try {
final Process proc = Runtime.getRuntime().exec(new String[] {"/home/boa/ohcount/bin/ohcount", "-i", tmpFile.getPath()});
final BufferedReader outStream = new BufferedReader(new InputStreamReader(proc.getInputStream()));
String line = null;
while ((line = outStream.readLine()) != null)
loc += line;
outStream.close();
proc.waitFor();
} catch (final IOException e) {
e.printStackTrace();
} catch (final InterruptedException e) {
e.printStackTrace();
}
try {
FileIO.delete(dir);
} catch (final IOException e) {
e.printStackTrace();
}
return loc;
}
}
| src/java/boa/datagen/scm/AbstractCommit.java | /*
* Copyright 2016, Hridesh Rajan, Robert Dyer, Hoan Nguyen
* Iowa State University of Science and Technology
* and Bowling Green State University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boa.datagen.scm;
import java.io.*;
import java.util.*;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.dom.*;
import org.eclipse.wst.jsdt.core.JavaScriptCore;
import org.eclipse.wst.jsdt.core.dom.JavaScriptUnit;
import boa.types.Ast.ASTRoot;
import boa.types.Code.Revision;
import boa.types.Diff.ChangedFile;
import boa.types.Diff.ChangedFile.Builder;
import boa.types.Diff.ChangedFile.FileKind;
import boa.types.Shared.ChangeKind;
import boa.types.Shared.Person;
import boa.datagen.util.FileIO;
import boa.datagen.util.JavaScriptErrorCheckVisitor;
import boa.datagen.util.JavaScriptVisitor;
import boa.datagen.util.Java7Visitor;
import boa.datagen.util.Java8Visitor;
import boa.datagen.util.JavaErrorCheckVisitor;
import boa.datagen.util.Properties;
/**
* @author rdyer
*/
public abstract class AbstractCommit {
protected static final boolean debug = false; //util.Properties.getBoolean("debug", main.DefaultProperties.DEBUG);
protected AbstractConnector connector;
protected AbstractCommit(AbstractConnector cnn) {
this.connector = cnn;
}
protected String id = null;
public void setId(final String id) { this.id = id; }
protected String author;
public void setAuthor(final String author) { this.author = author; }
protected String committer;
public void setCommitter(final String committer) { this.committer = committer; }
protected String message;
public void setMessage(final String message) { this.message = message; }
protected Date date;
public void setDate(final Date date) { this.date = date; }
private Map<String, String> changedPaths = new HashMap<String, String>();
public void setChangedPaths(final Map<String, String> changedPaths) { this.changedPaths = changedPaths; }
private Map<String, String> addedPaths = new HashMap<String, String>();
public void setAddedPaths(final Map<String, String> addedPaths) { this.addedPaths = addedPaths; }
private Map<String, String> removedPaths = new HashMap<String, String>();
public void setRemovedPaths(final Map<String, String> removedPaths) { this.removedPaths = removedPaths; }
protected int[] parentIndices;
protected void setParentIndices(final int[] parentList) {
parentIndices = parentList;
}
protected int[] getParentIndices() {
return parentIndices;
}
protected static final ByteArrayOutputStream buffer = new ByteArrayOutputStream(4096);
protected abstract String getFileContents(final String path);
protected abstract Person parsePerson(final String s);
public Revision asProtobuf(final boolean parse, final Writer astWriter, final String revKey, final String keyDelim) {
final Revision.Builder revision = Revision.newBuilder();
revision.setId(id);
final Person author = parsePerson(this.author);
final Person committer = parsePerson(this.committer);
revision.setAuthor(author == null ? committer : author);
revision.setCommitter(committer);
long time = -1;
if (date != null)
time = date.getTime() * 1000;
revision.setCommitDate(time);
if (message != null)
revision.setLog(message);
else
revision.setLog("");
for (final String path : changedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, parse, astWriter, revKey, keyDelim);
fb.setChange(ChangeKind.MODIFIED);
//fb.setKey("");
revision.addFiles(fb.build());
}
for (final String path : addedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, parse, astWriter, revKey, keyDelim);
fb.setChange(ChangeKind.ADDED);
//fb.setKey("");
revision.addFiles(fb.build());
}
for (final String path : removedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, false, null, revKey, keyDelim);
fb.setChange(ChangeKind.DELETED);
//fb.setKey("");
revision.addFiles(fb.build());
}
return revision.build();
}
private Builder processChangeFile(String path, boolean parse, Writer astWriter, String revKey, String keyDelim) {
final ChangedFile.Builder fb = ChangedFile.newBuilder();
fb.setName(path);
fb.setKind(FileKind.OTHER);
final String lowerPath = path.toLowerCase();
if (lowerPath.endsWith(".txt"))
fb.setKind(FileKind.TEXT);
else if (lowerPath.endsWith(".xml"))
fb.setKind(FileKind.XML);
else if (lowerPath.endsWith(".jar") || lowerPath.endsWith(".class"))
fb.setKind(FileKind.BINARY);
else if (lowerPath.endsWith(".java") && parse) {
final String content = getFileContents(path);
fb.setKind(FileKind.SOURCE_JAVA_JLS2);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_4, AST.JLS2, false, astWriter, revKey + keyDelim + path)) {
if (debug)
System.err.println("Found JLS2 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS3);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_5, AST.JLS3, false, astWriter, revKey + keyDelim + path)) {
if (debug)
System.err.println("Found JLS3 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS4);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_7, AST.JLS4, false, astWriter, revKey + keyDelim + path)) {
if (debug)
System.err.println("Found JLS4 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS8);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_8, AST.JLS8, false, astWriter, revKey + keyDelim + path)) {
if (debug)
System.err.println("Found JLS8 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_ERROR);
try {
astWriter.append(new Text(revKey + keyDelim + fb.getName()), new BytesWritable(ASTRoot.newBuilder().build().toByteArray()));
} catch (IOException e) {
e.printStackTrace();
}
} else
if (debug)
System.err.println("Accepted JLS8: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS4: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS3: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS2: revision " + id + ": file " + path);
}
fb.setKey(revKey);
return fb;
}
private boolean parseJavaScriptFile(final String path,
final ChangedFile.Builder fb, final String content,
final String compliance, final int astLevel,
final boolean storeOnError, Writer astWriter, String key) {
try {
//System.out.println("parsing=" + (++count) + "\t" + path);
final org.eclipse.wst.jsdt.core.dom.ASTParser parser = org.eclipse.wst.jsdt.core.dom.ASTParser
.newParser(astLevel);
parser.setKind(ASTParser.K_COMPILATION_UNIT);
parser.setResolveBindings(true);
parser.setSource(content.toCharArray());
final Map options = JavaCore.getOptions();
JavaCore.setComplianceOptions(compliance, options);
parser.setCompilerOptions(options);
JavaScriptUnit cu;
try{
cu = (JavaScriptUnit) parser.createAST(null);
}catch(java.lang.IllegalArgumentException ex){
return false;
}
final JavaScriptErrorCheckVisitor errorCheck = new JavaScriptErrorCheckVisitor();
cu.accept(errorCheck);
if (!errorCheck.hasError || storeOnError) {
final ASTRoot.Builder ast = ASTRoot.newBuilder();
// final CommentsRoot.Builder comments =
// CommentsRoot.newBuilder();
final JavaScriptVisitor visitor = new JavaScriptVisitor(content);
try {
ast.addNamespaces(visitor.getNamespaces(cu));
// for (final String s : visitor.getImports())
// ast.addImports(s);
/*
* for (final Comment c : visitor.getComments())
* comments.addComments(c);
*/
} catch (final UnsupportedOperationException e) {
return false;
} catch (final Exception e) {
if (debug)
System.err.println("Error visiting: " + path);
//e.printStackTrace();
return false;
}
if (astWriter != null) {
try {
// System.out.println("writing=" + count + "\t" + path);
astWriter.append(new Text(key), new BytesWritable(ast
.build().toByteArray()));
} catch (IOException e) {
e.printStackTrace();
}
} else
fb.setAst(ast);
// fb.setComments(comments);
}
return !errorCheck.hasError;
} catch (final Exception e) {
e.printStackTrace();
return false;
}
}
public Revision asProtobuf(final boolean parse) {
final Revision.Builder revision = Revision.newBuilder();
revision.setId(id);
final Person author = parsePerson(this.author);
final Person committer = parsePerson(this.committer);
revision.setAuthor(author == null ? committer : author);
revision.setCommitter(committer);
long time = -1;
if (date != null)
time = date.getTime() * 1000;
revision.setCommitDate(time);
if (message != null)
revision.setLog(message);
else
revision.setLog("");
for (final String path : changedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, parse);
fb.setChange(ChangeKind.MODIFIED);
fb.setKey("");
revision.addFiles(fb.build());
}
for (final String path : addedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, parse);
fb.setChange(ChangeKind.ADDED);
fb.setKey("");
revision.addFiles(fb.build());
}
for (final String path : removedPaths.keySet()) {
final ChangedFile.Builder fb = processChangeFile(path, false);
fb.setChange(ChangeKind.DELETED);
fb.setKey("");
revision.addFiles(fb.build());
}
return revision.build();
}
public Map<String,String> getLOC() {
final Map<String,String> l = new HashMap<String,String>();
for (final String path : changedPaths.keySet())
l.put(path, processLOC(path));
for (final String path : addedPaths.keySet())
l.put(path, processLOC(path));
return l;
}
protected ChangedFile.Builder processChangeFile(final String path, final boolean attemptParse) {
final ChangedFile.Builder fb = ChangedFile.newBuilder();
fb.setName(path);
fb.setKind(FileKind.OTHER);
final String lowerPath = path.toLowerCase();
if (lowerPath.endsWith(".txt"))
fb.setKind(FileKind.TEXT);
else if (lowerPath.endsWith(".xml"))
fb.setKind(FileKind.XML);
else if (lowerPath.endsWith(".jar") || lowerPath.endsWith(".class"))
fb.setKind(FileKind.BINARY);
else if (lowerPath.endsWith(".java") && attemptParse) {
final String content = getFileContents(path);
fb.setKind(FileKind.SOURCE_JAVA_JLS2);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_4, AST.JLS2, false, null, null)) {
if (debug)
System.err.println("Found JLS2 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS3);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_5, AST.JLS3, false, null, null)) {
if (debug)
System.err.println("Found JLS3 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS4);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_7, AST.JLS4, false, null, null)) {
if (debug)
System.err.println("Found JLS4 parse error in: revision " + id + ": file " + path);
fb.setKind(FileKind.SOURCE_JAVA_JLS8);
if (!parseJavaFile(path, fb, content, JavaCore.VERSION_1_8, AST.JLS8, false, null, null)) {
if (debug)
System.err.println("Found JLS8 parse error in: revision " + id + ": file " + path);
//fb.setContent(content);
fb.setKind(FileKind.SOURCE_JAVA_ERROR);
} else
if (debug)
System.err.println("Accepted JLS8: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS4: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS3: revision " + id + ": file " + path);
} else
if (debug)
System.err.println("Accepted JLS2: revision " + id + ": file " + path);
}
return fb;
}
private boolean parseJavaFile(final String path, final ChangedFile.Builder fb, final String content, final String compliance, final int astLevel, final boolean storeOnError, Writer astWriter, String key) {
try {
final ASTParser parser = ASTParser.newParser(astLevel);
parser.setKind(ASTParser.K_COMPILATION_UNIT);
parser.setResolveBindings(true);
parser.setSource(content.toCharArray());
final Map options = JavaCore.getOptions();
JavaCore.setComplianceOptions(compliance, options);
parser.setCompilerOptions(options);
final CompilationUnit cu = (CompilationUnit) parser.createAST(null);
final JavaErrorCheckVisitor errorCheck = new JavaErrorCheckVisitor();
cu.accept(errorCheck);
if (!errorCheck.hasError || storeOnError) {
final ASTRoot.Builder ast = ASTRoot.newBuilder();
//final CommentsRoot.Builder comments = CommentsRoot.newBuilder();
final Java7Visitor visitor;
if (astLevel == AST.JLS8)
visitor = new Java8Visitor(content, connector.nameIndices);
else
visitor = new Java7Visitor(content, connector.nameIndices);
try {
ast.addNamespaces(visitor.getNamespaces(cu));
for (final String s : visitor.getImports())
ast.addImports(s);
/*for (final Comment c : visitor.getComments())
comments.addComments(c);*/
} catch (final UnsupportedOperationException e) {
return false;
} catch (final Exception e) {
if (debug)
System.err.println("Error visiting: " + path);
e.printStackTrace();
return false;
}
if (astWriter != null) {
try {
astWriter.append(new Text(key), new BytesWritable(ast.build().toByteArray()));
} catch (IOException e) {
e.printStackTrace();
}
}
else
fb.setAst(ast);
//fb.setComments(comments);
}
return !errorCheck.hasError;
} catch (final Exception e) {
e.printStackTrace();
return false;
}
}
protected String processLOC(final String path) {
String loc = "";
final String lowerPath = path.toLowerCase();
if (!(lowerPath.endsWith(".txt") || lowerPath.endsWith(".xml") || lowerPath.endsWith(".java")))
return loc;
final String content = getFileContents(path);
final File dir = new File(new File(System.getProperty("java.io.tmpdir")), UUID.randomUUID().toString());
final File tmpPath = new File(dir, path.substring(0, path.lastIndexOf("/")));
tmpPath.mkdirs();
final File tmpFile = new File(tmpPath, path.substring(path.lastIndexOf("/") + 1));
FileIO.writeFileContents(tmpFile, content);
try {
final Process proc = Runtime.getRuntime().exec(new String[] {"/home/boa/ohcount/bin/ohcount", "-i", tmpFile.getPath()});
final BufferedReader outStream = new BufferedReader(new InputStreamReader(proc.getInputStream()));
String line = null;
while ((line = outStream.readLine()) != null)
loc += line;
outStream.close();
proc.waitFor();
} catch (final IOException e) {
e.printStackTrace();
} catch (final InterruptedException e) {
e.printStackTrace();
}
try {
FileIO.delete(dir);
} catch (final IOException e) {
e.printStackTrace();
}
return loc;
}
}
| suppress deprecation warnings on the AST flags
| src/java/boa/datagen/scm/AbstractCommit.java | suppress deprecation warnings on the AST flags |
|
Java | apache-2.0 | 7d2330ac36355b8ae08b9c21ef05cab8c2fca2d1 | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.devkit.testAssistant;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.execution.Location;
import com.intellij.execution.junit.JUnitUtil;
import com.intellij.execution.junit2.PsiMemberParameterizedLocation;
import com.intellij.execution.testframework.TestTreeViewAction;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.testFramework.Parameterized;
import com.intellij.ui.awt.RelativePoint;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.devkit.DevKitBundle;
import org.jetbrains.uast.*;
import java.util.Collections;
import java.util.List;
/**
* @author yole
*/
public class NavigateToTestDataAction extends AnAction implements TestTreeViewAction {
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
final DataContext dataContext = e.getDataContext();
final Project project = e.getProject();
if (project == null) return;
final Editor editor = e.getData(CommonDataKeys.EDITOR);
final JBPopupFactory popupFactory = JBPopupFactory.getInstance();
final RelativePoint point = editor != null ? popupFactory.guessBestPopupLocation(editor) :
popupFactory.guessBestPopupLocation(dataContext);
List<TestDataFile> fileNames = findTestDataFiles(dataContext, project, true);
if (fileNames.isEmpty()) {
Notification notification = new Notification(
"testdata",
"Found no test data files",
"Cannot find test data files for class",
NotificationType.INFORMATION);
Notifications.Bus.notify(notification, project);
} else {
TestDataNavigationHandler.navigate(point, fileNames, project);
}
}
@NotNull
static List<TestDataFile> findTestDataFiles(@NotNull DataContext dataContext, @NotNull Project project, boolean shouldGuess) {
return ProgressManager.getInstance().runProcessWithProgressSynchronously(() -> {
List<TestDataFile> fileNames = tryFindTestDataFiles(dataContext);
if (fileNames.isEmpty() && shouldGuess) {
//noinspection RedundantTypeArguments
return ReadAction.<List<TestDataFile>, RuntimeException>compute(() -> {
PsiMethod method = findTargetMethod(dataContext);
return method == null ? Collections.emptyList() : TestDataGuessByExistingFilesUtil.guessTestDataName(method);
});
}
return fileNames;
}, DevKitBundle.message("testdata.searching"), true, project);
}
@NotNull
private static List<TestDataFile> tryFindTestDataFiles(@NotNull DataContext context) {
final PsiMethod method = ReadAction.compute(() -> findTargetMethod(context));
if (method == null) {
PsiClass parametrizedTestClass = ReadAction.compute(() -> findParametrizedClass(context));
return parametrizedTestClass == null ? Collections.emptyList() : TestDataGuessByTestDiscoveryUtil.collectTestDataByExistingFiles(parametrizedTestClass);
}
final String name = ReadAction.compute(() -> method.getName());
if (name.startsWith("test")) {
String testDataPath = ReadAction.compute(() -> TestDataLineMarkerProvider.getTestDataBasePath(method.getContainingClass()));
final TestDataReferenceCollector collector = new TestDataReferenceCollector(testDataPath, name.substring(4));
return collector.collectTestDataReferences(method);
}
return ReadAction.compute(() -> {
final Location<?> location = Location.DATA_KEY.getData(context);
if (location instanceof PsiMemberParameterizedLocation) {
PsiClass parametrizedTestClass = findParametrizedClass(context);
if (parametrizedTestClass != null) {
String testDataPath = TestDataLineMarkerProvider.getTestDataBasePath(parametrizedTestClass);
String paramSetName = ((PsiMemberParameterizedLocation)location).getParamSetName();
String baseFileName = StringUtil.trimEnd(StringUtil.trimStart(paramSetName, "["), "]");
return TestDataGuessByExistingFilesUtil.suggestTestDataFiles(baseFileName, testDataPath, parametrizedTestClass);
}
}
return Collections.emptyList();
});
}
@Override
public void update(@NotNull AnActionEvent e) {
e.getPresentation().setEnabledAndVisible(findTargetMethod(e.getDataContext()) != null || findParametrizedClass(e.getDataContext()) != null);
}
@Nullable
static PsiClass findParametrizedClass(@NotNull DataContext context) {
PsiElement element = context.getData(CommonDataKeys.PSI_ELEMENT);
UClass uClass = UastContextKt.getUastParentOfType(element, UClass.class);
if (uClass == null) return null;
final UAnnotation annotation = UastContextKt.toUElement(AnnotationUtil.findAnnotationInHierarchy(uClass.getJavaPsi(), Collections.singleton(JUnitUtil.RUN_WITH)), UAnnotation.class);
if (annotation == null) return null;
UExpression value = annotation.findAttributeValue("value");
if (!(value instanceof UClassLiteralExpression)) return null;
UClassLiteralExpression classLiteralExpression = (UClassLiteralExpression)value;
PsiType type = classLiteralExpression.getType();
return type != null && type.equalsToText(Parameterized.class.getName()) ? uClass.getJavaPsi() : null;
}
@Nullable
private static PsiMethod findTargetMethod(@NotNull DataContext context) {
final Location<?> location = Location.DATA_KEY.getData(context);
if (location != null) {
final PsiElement element = location.getPsiElement();
PsiMethod method = PsiTreeUtil.getParentOfType(element, PsiMethod.class, false);
if (method != null) {
return method;
}
}
final Editor editor = CommonDataKeys.EDITOR.getData(context);
final PsiFile file = CommonDataKeys.PSI_FILE.getData(context);
if (file != null && editor != null) {
return UastContextKt.findUElementAt(file, editor.getCaretModel().getOffset(), UMethod.class);
}
return null;
}
}
| plugins/devkit/devkit-core/src/testAssistant/NavigateToTestDataAction.java | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.devkit.testAssistant;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.execution.Location;
import com.intellij.execution.junit.JUnitUtil;
import com.intellij.execution.junit2.PsiMemberParameterizedLocation;
import com.intellij.execution.testframework.TestTreeViewAction;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.testFramework.Parameterized;
import com.intellij.ui.awt.RelativePoint;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.devkit.DevKitBundle;
import org.jetbrains.uast.*;
import java.util.Collections;
import java.util.List;
/**
* @author yole
*/
public class NavigateToTestDataAction extends AnAction implements TestTreeViewAction {
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
final DataContext dataContext = e.getDataContext();
final Project project = e.getProject();
if (project == null) return;
final Editor editor = e.getData(CommonDataKeys.EDITOR);
final JBPopupFactory popupFactory = JBPopupFactory.getInstance();
final RelativePoint point = editor != null ? popupFactory.guessBestPopupLocation(editor) :
popupFactory.guessBestPopupLocation(dataContext);
List<TestDataFile> fileNames = findTestDataFiles(dataContext, project, true);
if (fileNames.isEmpty()) {
Notification notification = new Notification(
"testdata",
"Found no test data files",
"Cannot find test data files for class",
NotificationType.INFORMATION);
Notifications.Bus.notify(notification, project);
} else {
TestDataNavigationHandler.navigate(point, fileNames, project);
}
}
@NotNull
static List<TestDataFile> findTestDataFiles(@NotNull DataContext dataContext, @NotNull Project project, boolean shouldGuess) {
return ProgressManager.getInstance().runProcessWithProgressSynchronously(() -> {
List<TestDataFile> fileNames = tryFindTestDataFiles(dataContext);
if (fileNames.isEmpty() && shouldGuess) {
//noinspection RedundantTypeArguments
return ReadAction.<List<TestDataFile>, RuntimeException>compute(() -> {
PsiMethod method = findTargetMethod(dataContext);
return method == null ? Collections.emptyList() : TestDataGuessByExistingFilesUtil.guessTestDataName(method);
});
}
return fileNames;
}, DevKitBundle.message("testdata.searching"), true, project);
}
@NotNull
private static List<TestDataFile> tryFindTestDataFiles(@NotNull DataContext context) {
final PsiMethod method = ReadAction.compute(() -> findTargetMethod(context));
if (method == null) {
PsiClass parametrizedTestClass = ReadAction.compute(() -> findParametrizedClass(context));
return parametrizedTestClass == null ? Collections.emptyList() : TestDataGuessByTestDiscoveryUtil.collectTestDataByExistingFiles(parametrizedTestClass);
}
final String name = ReadAction.compute(() -> method.getName());
if (name.startsWith("test")) {
String testDataPath = ReadAction.compute(() -> TestDataLineMarkerProvider.getTestDataBasePath(method.getContainingClass()));
final TestDataReferenceCollector collector = new TestDataReferenceCollector(testDataPath, name.substring(4));
return collector.collectTestDataReferences(method);
}
return ReadAction.compute(() -> {
final Location<?> location = Location.DATA_KEY.getData(context);
if (location instanceof PsiMemberParameterizedLocation) {
PsiClass parametrizedTestClass = findParametrizedClass(context);
if (parametrizedTestClass != null) {
String testDataPath = TestDataLineMarkerProvider.getTestDataBasePath(parametrizedTestClass);
String paramSetName = ((PsiMemberParameterizedLocation)location).getParamSetName();
String baseFileName = StringUtil.trimEnd(StringUtil.trimStart(paramSetName, "["), "]");
return TestDataGuessByExistingFilesUtil.suggestTestDataFiles(baseFileName, testDataPath, parametrizedTestClass);
}
}
return Collections.emptyList();
});
}
@Override
public void update(@NotNull AnActionEvent e) {
e.getPresentation().setEnabled(findTargetMethod(e.getDataContext()) != null || findParametrizedClass(e.getDataContext()) != null);
}
@Nullable
static PsiClass findParametrizedClass(@NotNull DataContext context) {
PsiElement element = context.getData(CommonDataKeys.PSI_ELEMENT);
UClass uClass = UastContextKt.getUastParentOfType(element, UClass.class);
if (uClass == null) return null;
final UAnnotation annotation = UastContextKt.toUElement(AnnotationUtil.findAnnotationInHierarchy(uClass.getJavaPsi(), Collections.singleton(JUnitUtil.RUN_WITH)), UAnnotation.class);
if (annotation == null) return null;
UExpression value = annotation.findAttributeValue("value");
if (!(value instanceof UClassLiteralExpression)) return null;
UClassLiteralExpression classLiteralExpression = (UClassLiteralExpression)value;
PsiType type = classLiteralExpression.getType();
return type != null && type.equalsToText(Parameterized.class.getName()) ? uClass.getJavaPsi() : null;
}
@Nullable
private static PsiMethod findTargetMethod(@NotNull DataContext context) {
final Location<?> location = Location.DATA_KEY.getData(context);
if (location != null) {
final PsiElement element = location.getPsiElement();
PsiMethod method = PsiTreeUtil.getParentOfType(element, PsiMethod.class, false);
if (method != null) {
return method;
}
}
final Editor editor = CommonDataKeys.EDITOR.getData(context);
final PsiFile file = CommonDataKeys.PSI_FILE.getData(context);
if (file != null && editor != null) {
return UastContextKt.findUElementAt(file, editor.getCaretModel().getOffset(), UMethod.class);
}
return null;
}
}
| tests view: hide "Navigate to Test Data" from popup
anyway, it can be available for IDEA's tests only
GitOrigin-RevId: 9c80cb6d8bfe266e7dd34074fd512a24fb39dc37
| plugins/devkit/devkit-core/src/testAssistant/NavigateToTestDataAction.java | tests view: hide "Navigate to Test Data" from popup |
|
Java | apache-2.0 | 51be288707ea7e82a1428964a75c3f414de68089 | 0 | Ensembl/ensj-healthcheck,Ensembl/ensj-healthcheck,Ensembl/ensj-healthcheck,thomasmaurel/ensj-healthcheck,thomasmaurel/ensj-healthcheck,Ensembl/ensj-healthcheck,thomasmaurel/ensj-healthcheck,thomasmaurel/ensj-healthcheck | /*
* Copyright (C) 2004 EBI, GRL
*
* This library is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License as published by the Free Software
* Foundation; either version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with
* this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place,
* Suite 330, Boston, MA 02111-1307 USA
*/
package org.ensembl.healthcheck.testcase.generic;
import java.sql.Connection;
import org.apache.commons.lang.StringUtils;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.DatabaseType;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase;
/**
* Check for mistakes relating to LRGs
*/
public class LRG extends SingleDatabaseTestCase {
/**
* Creates a new instance of LRG healthcheck
*/
public LRG() {
addToGroup("release");
addToGroup("lrg");
setDescription("Healthcheck for LRGs");
}
/**
* This only applies to core databases.
*/
public void types() {
removeAppliesToType(DatabaseType.OTHERFEATURES);
removeAppliesToType(DatabaseType.VEGA);
removeAppliesToType(DatabaseType.SANGER_VEGA);
removeAppliesToType(DatabaseType.RNASEQ);
}
/**
* Check that all seq_regions on the lrg coordinate system have gene and transcripts associated with them
*
* @param dbre
* The database to use.
* @return Result.
*/
public boolean run(DatabaseRegistryEntry dbre) {
boolean result = true;
Connection con = dbre.getConnection();
// Get all seq_region_ids on the lrg coordinate system
String stmt = new String("SELECT sr.seq_region_id FROM seq_region sr JOIN coord_system cs ON sr.coord_system_id = cs.coord_system_id WHERE cs.name LIKE 'lrg' ORDER BY sr.seq_region_id ASC");
String[] seq_regions = getColumnValues(con, stmt);
if (seq_regions.length == 0) {
logger.finest("No LRG seq_regions found, skipping test");
return true;
}
String idList = StringUtils.join(seq_regions, ",");
// Check that gene annotations exist
// TODO - this SQL may fail if there are a large number of LRGs, IN list might be exceeded
stmt = new String("SELECT g.seq_region_id, COUNT(*) FROM gene g WHERE g.seq_region_id IN (" + idList + ") GROUP BY g.seq_region_id");
int count = (seq_regions.length - getRowCount(con, stmt));
if (count != 0) {
ReportManager.problem(this, con, String.valueOf(count) + " LRG seq_regions do not have any gene annotations");
result = false;
}
// Check that transcript annotations exist
stmt = new String("SELECT t.seq_region_id, COUNT(*) FROM transcript t WHERE t.seq_region_id IN (" + idList + ") GROUP BY t.seq_region_id");
count = (seq_regions.length - getRowCount(con, stmt));
if (count != 0) {
ReportManager.problem(this, con, String.valueOf(count) + " LRG seq_regions do not have any transcript annotations");
result = false;
}
if (result) {
ReportManager.correct(this, con, "LRG healthcheck passed without any problem");
}
return result;
} // run
} // LRG
| src/org/ensembl/healthcheck/testcase/generic/LRG.java | /*
* Copyright (C) 2004 EBI, GRL
*
* This library is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License as published by the Free Software
* Foundation; either version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with
* this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place,
* Suite 330, Boston, MA 02111-1307 USA
*/
package org.ensembl.healthcheck.testcase.generic;
import java.sql.Connection;
import org.apache.commons.lang.StringUtils;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase;
/**
* Check for mistakes relating to LRGs
*/
public class LRG extends SingleDatabaseTestCase {
/**
* Creates a new instance of LRG healthcheck
*/
public LRG() {
addToGroup("release");
addToGroup("lrg");
setDescription("Healthcheck for LRGs");
}
/**
* Check that all seq_regions on the lrg coordinate system have gene and transcripts associated with them
*
* @param dbre
* The database to use.
* @return Result.
*/
public boolean run(DatabaseRegistryEntry dbre) {
boolean result = true;
Connection con = dbre.getConnection();
// Get all seq_region_ids on the lrg coordinate system
String stmt = new String("SELECT sr.seq_region_id FROM seq_region sr JOIN coord_system cs ON sr.coord_system_id = cs.coord_system_id WHERE cs.name LIKE 'lrg' ORDER BY sr.seq_region_id ASC");
String[] seq_regions = getColumnValues(con, stmt);
if (seq_regions.length == 0) {
logger.finest("No LRG seq_regions found, skipping test");
return true;
}
String idList = StringUtils.join(seq_regions, ",");
// Check that gene annotations exist
// TODO - this SQL may fail if there are a large number of LRGs, IN list might be exceeded
stmt = new String("SELECT g.seq_region_id, COUNT(*) FROM gene g WHERE g.seq_region_id IN (" + idList + ") GROUP BY g.seq_region_id");
int count = (seq_regions.length - getRowCount(con, stmt));
if (count != 0) {
ReportManager.problem(this, con, String.valueOf(count) + " LRG seq_regions do not have any gene annotations");
result = false;
}
// Check that transcript annotations exist
stmt = new String("SELECT t.seq_region_id, COUNT(*) FROM transcript t WHERE t.seq_region_id IN (" + idList + ") GROUP BY t.seq_region_id");
count = (seq_regions.length - getRowCount(con, stmt));
if (count != 0) {
ReportManager.problem(this, con, String.valueOf(count) + " LRG seq_regions do not have any transcript annotations");
result = false;
}
if (result) {
ReportManager.correct(this, con, "LRG healthcheck passed without any problem");
}
return result;
} // run
} // LRG
| Don't run on Vega databases.
| src/org/ensembl/healthcheck/testcase/generic/LRG.java | Don't run on Vega databases. |
|
Java | apache-2.0 | 7c8283f6542b1045377e88566d363b4c2c83088b | 0 | androidx/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package android.support.v17.leanback.widget;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.media.AudioManager;
import android.media.SoundPool;
import android.os.Bundle;
import android.os.Handler;
import android.os.SystemClock;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.Log;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.ViewGroup;
import android.view.inputmethod.CompletionInfo;
import android.view.inputmethod.EditorInfo;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageView;
import android.view.inputmethod.InputMethodManager;
import android.widget.RelativeLayout;
import android.support.v17.leanback.R;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
/**
* <p>SearchBar is a search widget.</p>
*
* <p>Note: Your application will need to request android.permission.RECORD_AUDIO</p>
*/
public class SearchBar extends RelativeLayout {
private static final String TAG = SearchBar.class.getSimpleName();
private static final boolean DEBUG = false;
private static final float FULL_LEFT_VOLUME = 1.0f;
private static final float FULL_RIGHT_VOLUME = 1.0f;
private static final int DEFAULT_PRIORITY = 1;
private static final int DO_NOT_LOOP = 0;
private static final float DEFAULT_RATE = 1.0f;
/**
* Listener for search query changes
*/
public interface SearchBarListener {
/**
* Method invoked when the search bar detects a change in the query.
*
* @param query The current full query.
*/
public void onSearchQueryChange(String query);
/**
* <p>Method invoked when the search query is submitted.</p>
*
* <p>This method can be called without a preceeding onSearchQueryChange,
* in particular in the case of a voice input.</p>
*
* @param query The query being submitted.
*/
public void onSearchQuerySubmit(String query);
/**
* Method invoked when the IME is being dismissed.
*
* @param query The query set in the search bar at the time the IME is being dismissed.
*/
public void onKeyboardDismiss(String query);
}
private AudioManager.OnAudioFocusChangeListener mAudioFocusChangeListener =
new AudioManager.OnAudioFocusChangeListener() {
@Override
public void onAudioFocusChange(int focusChange) {
stopRecognition();
}
};
private SearchBarListener mSearchBarListener;
private SearchEditText mSearchTextEditor;
private SpeechOrbView mSpeechOrbView;
private ImageView mBadgeView;
private String mSearchQuery;
private String mHint;
private String mTitle;
private Drawable mBadgeDrawable;
private final Handler mHandler = new Handler();
private final InputMethodManager mInputMethodManager;
private boolean mAutoStartRecognition = false;
private Drawable mBarBackground;
private final int mTextColor;
private final int mTextColorSpeechMode;
private final int mTextHintColor;
private final int mTextHintColorSpeechMode;
private int mBackgroundAlpha;
private int mBackgroundSpeechAlpha;
private int mBarHeight;
private SpeechRecognizer mSpeechRecognizer;
private SpeechRecognitionCallback mSpeechRecognitionCallback;
private boolean mListening;
private SoundPool mSoundPool;
private SparseIntArray mSoundMap = new SparseIntArray();
private boolean mRecognizing = false;
private final Context mContext;
private AudioManager mAudioManager;
public SearchBar(Context context) {
this(context, null);
}
public SearchBar(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public SearchBar(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
mContext = context;
Resources r = getResources();
LayoutInflater inflater = LayoutInflater.from(getContext());
inflater.inflate(R.layout.lb_search_bar, this, true);
mBarHeight = getResources().getDimensionPixelSize(R.dimen.lb_search_bar_height);
RelativeLayout.LayoutParams params = new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
mBarHeight);
params.addRule(ALIGN_PARENT_TOP, RelativeLayout.TRUE);
setLayoutParams(params);
setBackgroundColor(Color.TRANSPARENT);
setClipChildren(false);
mSearchQuery = "";
mInputMethodManager =
(InputMethodManager)context.getSystemService(Context.INPUT_METHOD_SERVICE);
mTextColorSpeechMode = r.getColor(R.color.lb_search_bar_text_speech_mode);
mTextColor = r.getColor(R.color.lb_search_bar_text);
mBackgroundSpeechAlpha = r.getInteger(R.integer.lb_search_bar_speech_mode_background_alpha);
mBackgroundAlpha = r.getInteger(R.integer.lb_search_bar_text_mode_background_alpha);
mTextHintColorSpeechMode = r.getColor(R.color.lb_search_bar_hint_speech_mode);
mTextHintColor = r.getColor(R.color.lb_search_bar_hint);
mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
}
@Override
protected void onFinishInflate() {
super.onFinishInflate();
RelativeLayout items = (RelativeLayout)findViewById(R.id.lb_search_bar_items);
mBarBackground = items.getBackground();
mSearchTextEditor = (SearchEditText)findViewById(R.id.lb_search_text_editor);
mBadgeView = (ImageView)findViewById(R.id.lb_search_bar_badge);
if (null != mBadgeDrawable) {
mBadgeView.setImageDrawable(mBadgeDrawable);
}
mSearchTextEditor.setOnFocusChangeListener(new OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean hasFocus) {
if (DEBUG) Log.v(TAG, "EditText.onFocusChange " + hasFocus);
if (hasFocus) {
showNativeKeyboard();
}
updateUi(hasFocus);
}
});
final Runnable mOnTextChangedRunnable = new Runnable() {
@Override
public void run() {
setSearchQueryInternal(mSearchTextEditor.getText().toString());
}
};
mSearchTextEditor.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) {
}
@Override
public void onTextChanged(CharSequence charSequence, int i, int i2, int i3) {
// don't propagate event during speech recognition.
if (mRecognizing) {
return;
}
// while IME opens, text editor becomes "" then restores to current value
mHandler.removeCallbacks(mOnTextChangedRunnable);
mHandler.post(mOnTextChangedRunnable);
}
@Override
public void afterTextChanged(Editable editable) {
}
});
mSearchTextEditor.setOnKeyboardDismissListener(
new SearchEditText.OnKeyboardDismissListener() {
@Override
public void onKeyboardDismiss() {
if (null != mSearchBarListener) {
mSearchBarListener.onKeyboardDismiss(mSearchQuery);
}
}
});
mSearchTextEditor.setOnEditorActionListener(new TextView.OnEditorActionListener() {
@Override
public boolean onEditorAction(TextView textView, int action, KeyEvent keyEvent) {
if (DEBUG) Log.v(TAG, "onEditorAction: " + action + " event: " + keyEvent);
boolean handled = true;
if ((EditorInfo.IME_ACTION_SEARCH == action ||
EditorInfo.IME_NULL == action) && null != mSearchBarListener) {
if (DEBUG) Log.v(TAG, "Action or enter pressed");
hideNativeKeyboard();
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (DEBUG) Log.v(TAG, "Delayed action handling (search)");
submitQuery();
}
}, 500);
} else if (EditorInfo.IME_ACTION_NONE == action && null != mSearchBarListener) {
if (DEBUG) Log.v(TAG, "Escaped North");
hideNativeKeyboard();
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (DEBUG) Log.v(TAG, "Delayed action handling (escape_north)");
mSearchBarListener.onKeyboardDismiss(mSearchQuery);
}
}, 500);
} else if (EditorInfo.IME_ACTION_GO == action) {
if (DEBUG) Log.v(TAG, "Voice Clicked");
hideNativeKeyboard();
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (DEBUG) Log.v(TAG, "Delayed action handling (voice_mode)");
mAutoStartRecognition = true;
mSpeechOrbView.requestFocus();
}
}, 500);
} else {
handled = false;
}
return handled;
}
});
mSearchTextEditor.setPrivateImeOptions("EscapeNorth=1;VoiceDismiss=1;");
mSpeechOrbView = (SpeechOrbView)findViewById(R.id.lb_search_bar_speech_orb);
mSpeechOrbView.setOnOrbClickedListener(new OnClickListener() {
@Override
public void onClick(View view) {
toggleRecognition();
}
});
mSpeechOrbView.setOnFocusChangeListener(new OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean hasFocus) {
if (DEBUG) Log.v(TAG, "SpeechOrb.onFocusChange " + hasFocus);
if (hasFocus) {
hideNativeKeyboard();
if (mAutoStartRecognition) {
startRecognition();
mAutoStartRecognition = false;
}
} else {
stopRecognition();
}
updateUi(hasFocus);
}
});
updateUi(hasFocus());
updateHint();
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
if (DEBUG) Log.v(TAG, "Loading soundPool");
mSoundPool = new SoundPool(2, AudioManager.STREAM_SYSTEM, 0);
loadSounds(mContext);
}
@Override
protected void onDetachedFromWindow() {
stopRecognition();
if (DEBUG) Log.v(TAG, "Releasing SoundPool");
mSoundPool.release();
super.onDetachedFromWindow();
}
/**
* Set a listener for when the term search changes
* @param listener
*/
public void setSearchBarListener(SearchBarListener listener) {
mSearchBarListener = listener;
}
/**
* Set the search query
* @param query the search query to use
*/
public void setSearchQuery(String query) {
stopRecognition();
mSearchTextEditor.setText(query);
setSearchQueryInternal(query);
}
private void setSearchQueryInternal(String query) {
if (DEBUG) Log.v(TAG, "setSearchQueryInternal " + query);
if (TextUtils.equals(mSearchQuery, query)) {
return;
}
mSearchQuery = query;
if (null != mSearchBarListener) {
mSearchBarListener.onSearchQueryChange(mSearchQuery);
}
}
/**
* Set the title text used in the hint shown in the search bar.
* @param title The hint to use.
*/
public void setTitle(String title) {
mTitle = title;
updateHint();
}
/**
* Returns the current title
*/
public String getTitle() {
return mTitle;
}
/**
* Returns the current search bar hint text.
*/
public CharSequence getHint() {
return mHint;
}
/**
* Set the badge drawable showing inside the search bar.
* @param drawable The drawable to be used in the search bar.
*/
public void setBadgeDrawable(Drawable drawable) {
mBadgeDrawable = drawable;
if (null != mBadgeView) {
mBadgeView.setImageDrawable(drawable);
if (null != drawable) {
mBadgeView.setVisibility(View.VISIBLE);
} else {
mBadgeView.setVisibility(View.GONE);
}
}
}
/**
* Returns the badge drawable
*/
public Drawable getBadgeDrawable() {
return mBadgeDrawable;
}
/**
* Update the completion list shown by the IME
*
* @param completions list of completions shown in the IME, can be null or empty to clear them
*/
public void displayCompletions(List<String> completions) {
List<CompletionInfo> infos = new ArrayList<CompletionInfo>();
if (null != completions) {
for (String completion : completions) {
infos.add(new CompletionInfo(infos.size(), infos.size(), completion));
}
}
mInputMethodManager.displayCompletions(mSearchTextEditor,
infos.toArray(new CompletionInfo[] {}));
}
/**
* Set the speech recognizer to be used when doing voice search. The Activity/Fragment is in
* charge of creating and destroying the recognizer with its own lifecycle.
*
* @param recognizer a SpeechRecognizer
*/
public void setSpeechRecognizer(SpeechRecognizer recognizer) {
stopRecognition();
if (null != mSpeechRecognizer) {
mSpeechRecognizer.setRecognitionListener(null);
if (mListening) {
mSpeechRecognizer.cancel();
mListening = false;
}
}
mSpeechRecognizer = recognizer;
if (mSpeechRecognizer != null) {
enforceAudioRecordPermission();
}
if (mSpeechRecognitionCallback != null && mSpeechRecognizer != null) {
throw new IllegalStateException("Can't have speech recognizer and request");
}
}
public void setSpeechRecognitionCallback(SpeechRecognitionCallback request) {
mSpeechRecognitionCallback = request;
if (mSpeechRecognitionCallback != null && mSpeechRecognizer != null) {
throw new IllegalStateException("Can't have speech recognizer and request");
}
}
private void hideNativeKeyboard() {
mInputMethodManager.hideSoftInputFromWindow(mSearchTextEditor.getWindowToken(),
InputMethodManager.RESULT_UNCHANGED_SHOWN);
}
private void showNativeKeyboard() {
mHandler.post(new Runnable() {
@Override
public void run() {
mSearchTextEditor.requestFocusFromTouch();
mSearchTextEditor.dispatchTouchEvent(MotionEvent.obtain(SystemClock.uptimeMillis(),
SystemClock.uptimeMillis(), MotionEvent.ACTION_DOWN,
mSearchTextEditor.getWidth(), mSearchTextEditor.getHeight(), 0));
mSearchTextEditor.dispatchTouchEvent(MotionEvent.obtain(SystemClock.uptimeMillis(),
SystemClock.uptimeMillis(), MotionEvent.ACTION_UP,
mSearchTextEditor.getWidth(), mSearchTextEditor.getHeight(), 0));
}
});
}
/**
* This will update the hint for the search bar properly depending on state and provided title
*/
private void updateHint() {
String title = getResources().getString(R.string.lb_search_bar_hint);
if (!TextUtils.isEmpty(mTitle)) {
if (isVoiceMode()) {
title = getResources().getString(R.string.lb_search_bar_hint_with_title_speech, mTitle);
} else {
title = getResources().getString(R.string.lb_search_bar_hint_with_title, mTitle);
}
} else if (isVoiceMode()) {
title = getResources().getString(R.string.lb_search_bar_hint_speech);
}
mHint = title;
if (mSearchTextEditor != null) {
mSearchTextEditor.setHint(mHint);
}
}
private void toggleRecognition() {
if (mRecognizing) {
stopRecognition();
} else {
startRecognition();
}
}
/**
* Stop the recognition if already started
*/
public void stopRecognition() {
if (DEBUG) Log.v(TAG, String.format("stopRecognition (listening: %s, recognizing: %s)",
mListening, mRecognizing));
if (!mRecognizing) return;
// Edit text content was cleared when starting recogition; ensure the content is restored
// in error cases
mSearchTextEditor.setText(mSearchQuery);
mSearchTextEditor.setHint(mHint);
mRecognizing = false;
if (mSpeechRecognitionCallback != null || null == mSpeechRecognizer) return;
mSpeechOrbView.showNotListening();
if (mListening) {
mSpeechRecognizer.cancel();
mListening = false;
mAudioManager.abandonAudioFocus(mAudioFocusChangeListener);
}
mSpeechRecognizer.setRecognitionListener(null);
}
/**
* Start the voice recognition
*/
public void startRecognition() {
if (DEBUG) Log.v(TAG, String.format("startRecognition (listening: %s, recognizing: %s)",
mListening, mRecognizing));
if (mRecognizing) return;
mRecognizing = true;
if (!hasFocus()) {
requestFocus();
}
if (mSpeechRecognitionCallback != null) {
mSearchTextEditor.setText("");
mSearchTextEditor.setHint("");
mSpeechRecognitionCallback.recognizeSpeech();
return;
}
if (null == mSpeechRecognizer) return;
// Request audio focus
int result = mAudioManager.requestAudioFocus(mAudioFocusChangeListener,
// Use the music stream.
AudioManager.STREAM_MUSIC,
// Request exclusive transient focus.
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);
if (result != AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
Log.w(TAG, "Could not get audio focus");
}
mSearchTextEditor.setText("");
Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
mSpeechRecognizer.setRecognitionListener(new RecognitionListener() {
@Override
public void onReadyForSpeech(Bundle bundle) {
if (DEBUG) Log.v(TAG, "onReadyForSpeech");
mSpeechOrbView.showListening();
playSearchOpen();
}
@Override
public void onBeginningOfSpeech() {
if (DEBUG) Log.v(TAG, "onBeginningOfSpeech");
}
@Override
public void onRmsChanged(float rmsdB) {
if (DEBUG) Log.v(TAG, "onRmsChanged " + rmsdB);
int level = rmsdB < 0 ? 0 : (int)(10 * rmsdB);
mSpeechOrbView.setSoundLevel(level);
}
@Override
public void onBufferReceived(byte[] bytes) {
if (DEBUG) Log.v(TAG, "onBufferReceived " + bytes.length);
}
@Override
public void onEndOfSpeech() {
if (DEBUG) Log.v(TAG, "onEndOfSpeech");
}
@Override
public void onError(int error) {
if (DEBUG) Log.v(TAG, "onError " + error);
switch (error) {
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
Log.w(TAG, "recognizer network timeout");
break;
case SpeechRecognizer.ERROR_NETWORK:
Log.w(TAG, "recognizer network error");
break;
case SpeechRecognizer.ERROR_AUDIO:
Log.w(TAG, "recognizer audio error");
break;
case SpeechRecognizer.ERROR_SERVER:
Log.w(TAG, "recognizer server error");
break;
case SpeechRecognizer.ERROR_CLIENT:
Log.w(TAG, "recognizer client error");
break;
case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
Log.w(TAG, "recognizer speech timeout");
break;
case SpeechRecognizer.ERROR_NO_MATCH:
Log.w(TAG, "recognizer no match");
break;
case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
Log.w(TAG, "recognizer busy");
break;
case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
Log.w(TAG, "recognizer insufficient permissions");
break;
default:
Log.d(TAG, "recognizer other error");
break;
}
stopRecognition();
playSearchFailure();
}
@Override
public void onResults(Bundle bundle) {
if (DEBUG) Log.v(TAG, "onResults");
final ArrayList<String> matches =
bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
if (matches != null) {
if (DEBUG) Log.v(TAG, "Got results" + matches);
mSearchQuery = matches.get(0);
mSearchTextEditor.setText(mSearchQuery);
submitQuery();
}
stopRecognition();
playSearchSuccess();
}
@Override
public void onPartialResults(Bundle bundle) {
ArrayList<String> results = bundle.getStringArrayList(
SpeechRecognizer.RESULTS_RECOGNITION);
if (DEBUG) Log.v(TAG, "onPartialResults " + bundle + " results " +
(results == null ? results : results.size()));
if (results == null || results.size() == 0) {
return;
}
// stableText: high confidence text from PartialResults, if any.
// Otherwise, existing stable text.
final String stableText = results.get(0);
if (DEBUG) Log.v(TAG, "onPartialResults stableText " + stableText);
// pendingText: low confidence text from PartialResults, if any.
// Otherwise, empty string.
final String pendingText = results.size() > 1 ? results.get(1) : null;
if (DEBUG) Log.v(TAG, "onPartialResults pendingText " + pendingText);
mSearchTextEditor.updateRecognizedText(stableText, pendingText);
}
@Override
public void onEvent(int i, Bundle bundle) {
}
});
mListening = true;
mSpeechRecognizer.startListening(recognizerIntent);
}
private void updateUi(boolean hasFocus) {
if (hasFocus) {
mBarBackground.setAlpha(mBackgroundSpeechAlpha);
if (isVoiceMode()) {
mSearchTextEditor.setTextColor(mTextHintColorSpeechMode);
mSearchTextEditor.setHintTextColor(mTextHintColorSpeechMode);
} else {
mSearchTextEditor.setTextColor(mTextColorSpeechMode);
mSearchTextEditor.setHintTextColor(mTextHintColorSpeechMode);
}
} else {
mBarBackground.setAlpha(mBackgroundAlpha);
mSearchTextEditor.setTextColor(mTextColor);
mSearchTextEditor.setHintTextColor(mTextHintColor);
}
updateHint();
}
private boolean isVoiceMode() {
return mSpeechOrbView.isFocused();
}
private void submitQuery() {
if (!TextUtils.isEmpty(mSearchQuery) && null != mSearchBarListener) {
mSearchBarListener.onSearchQuerySubmit(mSearchQuery);
}
}
private void enforceAudioRecordPermission() {
String permission = "android.permission.RECORD_AUDIO";
int res = getContext().checkCallingOrSelfPermission(permission);
if (PackageManager.PERMISSION_GRANTED != res) {
throw new IllegalStateException("android.permission.RECORD_AUDIO required for search");
}
}
private void loadSounds(Context context) {
int[] sounds = {
R.raw.lb_voice_failure,
R.raw.lb_voice_open,
R.raw.lb_voice_no_input,
R.raw.lb_voice_success,
};
for (int sound : sounds) {
mSoundMap.put(sound, mSoundPool.load(context, sound, 1));
}
}
private void play(final int resId) {
mHandler.post(new Runnable() {
@Override
public void run() {
int sound = mSoundMap.get(resId);
mSoundPool.play(sound, FULL_LEFT_VOLUME, FULL_RIGHT_VOLUME, DEFAULT_PRIORITY,
DO_NOT_LOOP, DEFAULT_RATE);
}
});
}
private void playSearchOpen() {
play(R.raw.lb_voice_open);
}
private void playSearchFailure() {
play(R.raw.lb_voice_failure);
}
private void playSearchNoInput() {
play(R.raw.lb_voice_no_input);
}
private void playSearchSuccess() {
play(R.raw.lb_voice_success);
}
@Override
public void setNextFocusDownId(int viewId) {
mSpeechOrbView.setNextFocusDownId(viewId);
mSearchTextEditor.setNextFocusDownId(viewId);
}
}
| v17/leanback/src/android/support/v17/leanback/widget/SearchBar.java | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package android.support.v17.leanback.widget;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.media.AudioManager;
import android.media.SoundPool;
import android.os.Bundle;
import android.os.Handler;
import android.os.SystemClock;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.Log;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.ViewGroup;
import android.view.inputmethod.CompletionInfo;
import android.view.inputmethod.EditorInfo;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageView;
import android.view.inputmethod.InputMethodManager;
import android.widget.RelativeLayout;
import android.support.v17.leanback.R;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
/**
* <p>SearchBar is a search widget.</p>
*
* <p>Note: Your application will need to request android.permission.RECORD_AUDIO</p>
*/
public class SearchBar extends RelativeLayout {
private static final String TAG = SearchBar.class.getSimpleName();
private static final boolean DEBUG = false;
private static final float FULL_LEFT_VOLUME = 1.0f;
private static final float FULL_RIGHT_VOLUME = 1.0f;
private static final int DEFAULT_PRIORITY = 1;
private static final int DO_NOT_LOOP = 0;
private static final float DEFAULT_RATE = 1.0f;
/**
* Listener for search query changes
*/
public interface SearchBarListener {
/**
* Method invoked when the search bar detects a change in the query.
*
* @param query The current full query.
*/
public void onSearchQueryChange(String query);
/**
* <p>Method invoked when the search query is submitted.</p>
*
* <p>This method can be called without a preceeding onSearchQueryChange,
* in particular in the case of a voice input.</p>
*
* @param query The query being submitted.
*/
public void onSearchQuerySubmit(String query);
/**
* Method invoked when the IME is being dismissed.
*
* @param query The query set in the search bar at the time the IME is being dismissed.
*/
public void onKeyboardDismiss(String query);
}
private AudioManager.OnAudioFocusChangeListener mAudioFocusChangeListener =
new AudioManager.OnAudioFocusChangeListener() {
@Override
public void onAudioFocusChange(int focusChange) {
// Do nothing.
}
};
private SearchBarListener mSearchBarListener;
private SearchEditText mSearchTextEditor;
private SpeechOrbView mSpeechOrbView;
private ImageView mBadgeView;
private String mSearchQuery;
private String mHint;
private String mTitle;
private Drawable mBadgeDrawable;
private final Handler mHandler = new Handler();
private final InputMethodManager mInputMethodManager;
private boolean mAutoStartRecognition = false;
private Drawable mBarBackground;
private final int mTextColor;
private final int mTextColorSpeechMode;
private final int mTextHintColor;
private final int mTextHintColorSpeechMode;
private int mBackgroundAlpha;
private int mBackgroundSpeechAlpha;
private int mBarHeight;
private SpeechRecognizer mSpeechRecognizer;
private SpeechRecognitionCallback mSpeechRecognitionCallback;
private boolean mListening;
private SoundPool mSoundPool;
private SparseIntArray mSoundMap = new SparseIntArray();
private boolean mRecognizing = false;
private final Context mContext;
private AudioManager mAudioManager;
public SearchBar(Context context) {
this(context, null);
}
public SearchBar(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public SearchBar(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
mContext = context;
Resources r = getResources();
LayoutInflater inflater = LayoutInflater.from(getContext());
inflater.inflate(R.layout.lb_search_bar, this, true);
mBarHeight = getResources().getDimensionPixelSize(R.dimen.lb_search_bar_height);
RelativeLayout.LayoutParams params = new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
mBarHeight);
params.addRule(ALIGN_PARENT_TOP, RelativeLayout.TRUE);
setLayoutParams(params);
setBackgroundColor(Color.TRANSPARENT);
setClipChildren(false);
mSearchQuery = "";
mInputMethodManager =
(InputMethodManager)context.getSystemService(Context.INPUT_METHOD_SERVICE);
mTextColorSpeechMode = r.getColor(R.color.lb_search_bar_text_speech_mode);
mTextColor = r.getColor(R.color.lb_search_bar_text);
mBackgroundSpeechAlpha = r.getInteger(R.integer.lb_search_bar_speech_mode_background_alpha);
mBackgroundAlpha = r.getInteger(R.integer.lb_search_bar_text_mode_background_alpha);
mTextHintColorSpeechMode = r.getColor(R.color.lb_search_bar_hint_speech_mode);
mTextHintColor = r.getColor(R.color.lb_search_bar_hint);
mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
}
@Override
protected void onFinishInflate() {
super.onFinishInflate();
RelativeLayout items = (RelativeLayout)findViewById(R.id.lb_search_bar_items);
mBarBackground = items.getBackground();
mSearchTextEditor = (SearchEditText)findViewById(R.id.lb_search_text_editor);
mBadgeView = (ImageView)findViewById(R.id.lb_search_bar_badge);
if (null != mBadgeDrawable) {
mBadgeView.setImageDrawable(mBadgeDrawable);
}
mSearchTextEditor.setOnFocusChangeListener(new OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean hasFocus) {
if (DEBUG) Log.v(TAG, "EditText.onFocusChange " + hasFocus);
if (hasFocus) {
showNativeKeyboard();
}
updateUi(hasFocus);
}
});
final Runnable mOnTextChangedRunnable = new Runnable() {
@Override
public void run() {
setSearchQueryInternal(mSearchTextEditor.getText().toString());
}
};
mSearchTextEditor.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) {
}
@Override
public void onTextChanged(CharSequence charSequence, int i, int i2, int i3) {
// don't propagate event during speech recognition.
if (mRecognizing) {
return;
}
// while IME opens, text editor becomes "" then restores to current value
mHandler.removeCallbacks(mOnTextChangedRunnable);
mHandler.post(mOnTextChangedRunnable);
}
@Override
public void afterTextChanged(Editable editable) {
}
});
mSearchTextEditor.setOnKeyboardDismissListener(
new SearchEditText.OnKeyboardDismissListener() {
@Override
public void onKeyboardDismiss() {
if (null != mSearchBarListener) {
mSearchBarListener.onKeyboardDismiss(mSearchQuery);
}
}
});
mSearchTextEditor.setOnEditorActionListener(new TextView.OnEditorActionListener() {
@Override
public boolean onEditorAction(TextView textView, int action, KeyEvent keyEvent) {
if (DEBUG) Log.v(TAG, "onEditorAction: " + action + " event: " + keyEvent);
boolean handled = true;
if ((EditorInfo.IME_ACTION_SEARCH == action ||
EditorInfo.IME_NULL == action) && null != mSearchBarListener) {
if (DEBUG) Log.v(TAG, "Action or enter pressed");
hideNativeKeyboard();
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (DEBUG) Log.v(TAG, "Delayed action handling (search)");
submitQuery();
}
}, 500);
} else if (EditorInfo.IME_ACTION_NONE == action && null != mSearchBarListener) {
if (DEBUG) Log.v(TAG, "Escaped North");
hideNativeKeyboard();
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (DEBUG) Log.v(TAG, "Delayed action handling (escape_north)");
mSearchBarListener.onKeyboardDismiss(mSearchQuery);
}
}, 500);
} else if (EditorInfo.IME_ACTION_GO == action) {
if (DEBUG) Log.v(TAG, "Voice Clicked");
hideNativeKeyboard();
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (DEBUG) Log.v(TAG, "Delayed action handling (voice_mode)");
mAutoStartRecognition = true;
mSpeechOrbView.requestFocus();
}
}, 500);
} else {
handled = false;
}
return handled;
}
});
mSearchTextEditor.setPrivateImeOptions("EscapeNorth=1;VoiceDismiss=1;");
mSpeechOrbView = (SpeechOrbView)findViewById(R.id.lb_search_bar_speech_orb);
mSpeechOrbView.setOnOrbClickedListener(new OnClickListener() {
@Override
public void onClick(View view) {
toggleRecognition();
}
});
mSpeechOrbView.setOnFocusChangeListener(new OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean hasFocus) {
if (DEBUG) Log.v(TAG, "SpeechOrb.onFocusChange " + hasFocus);
if (hasFocus) {
hideNativeKeyboard();
if (mAutoStartRecognition) {
startRecognition();
mAutoStartRecognition = false;
}
} else {
stopRecognition();
}
updateUi(hasFocus);
}
});
updateUi(hasFocus());
updateHint();
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
if (DEBUG) Log.v(TAG, "Loading soundPool");
mSoundPool = new SoundPool(2, AudioManager.STREAM_SYSTEM, 0);
loadSounds(mContext);
}
@Override
protected void onDetachedFromWindow() {
if (DEBUG) Log.v(TAG, "Releasing SoundPool");
mSoundPool.release();
super.onDetachedFromWindow();
}
/**
* Set a listener for when the term search changes
* @param listener
*/
public void setSearchBarListener(SearchBarListener listener) {
mSearchBarListener = listener;
}
/**
* Set the search query
* @param query the search query to use
*/
public void setSearchQuery(String query) {
stopRecognition();
mSearchTextEditor.setText(query);
setSearchQueryInternal(query);
}
private void setSearchQueryInternal(String query) {
if (DEBUG) Log.v(TAG, "setSearchQueryInternal " + query);
if (TextUtils.equals(mSearchQuery, query)) {
return;
}
mSearchQuery = query;
if (null != mSearchBarListener) {
mSearchBarListener.onSearchQueryChange(mSearchQuery);
}
}
/**
* Set the title text used in the hint shown in the search bar.
* @param title The hint to use.
*/
public void setTitle(String title) {
mTitle = title;
updateHint();
}
/**
* Returns the current title
*/
public String getTitle() {
return mTitle;
}
/**
* Returns the current search bar hint text.
*/
public CharSequence getHint() {
return mHint;
}
/**
* Set the badge drawable showing inside the search bar.
* @param drawable The drawable to be used in the search bar.
*/
public void setBadgeDrawable(Drawable drawable) {
mBadgeDrawable = drawable;
if (null != mBadgeView) {
mBadgeView.setImageDrawable(drawable);
if (null != drawable) {
mBadgeView.setVisibility(View.VISIBLE);
} else {
mBadgeView.setVisibility(View.GONE);
}
}
}
/**
* Returns the badge drawable
*/
public Drawable getBadgeDrawable() {
return mBadgeDrawable;
}
/**
* Update the completion list shown by the IME
*
* @param completions list of completions shown in the IME, can be null or empty to clear them
*/
public void displayCompletions(List<String> completions) {
List<CompletionInfo> infos = new ArrayList<CompletionInfo>();
if (null != completions) {
for (String completion : completions) {
infos.add(new CompletionInfo(infos.size(), infos.size(), completion));
}
}
mInputMethodManager.displayCompletions(mSearchTextEditor,
infos.toArray(new CompletionInfo[] {}));
}
/**
* Set the speech recognizer to be used when doing voice search. The Activity/Fragment is in
* charge of creating and destroying the recognizer with its own lifecycle.
*
* @param recognizer a SpeechRecognizer
*/
public void setSpeechRecognizer(SpeechRecognizer recognizer) {
if (null != mSpeechRecognizer) {
mSpeechRecognizer.setRecognitionListener(null);
if (mListening) {
mSpeechRecognizer.cancel();
mListening = false;
}
}
mSpeechRecognizer = recognizer;
if (mSpeechRecognizer != null) {
enforceAudioRecordPermission();
}
if (mSpeechRecognitionCallback != null && mSpeechRecognizer != null) {
throw new IllegalStateException("Can't have speech recognizer and request");
}
}
public void setSpeechRecognitionCallback(SpeechRecognitionCallback request) {
mSpeechRecognitionCallback = request;
if (mSpeechRecognitionCallback != null && mSpeechRecognizer != null) {
throw new IllegalStateException("Can't have speech recognizer and request");
}
}
private void hideNativeKeyboard() {
mInputMethodManager.hideSoftInputFromWindow(mSearchTextEditor.getWindowToken(),
InputMethodManager.RESULT_UNCHANGED_SHOWN);
}
private void showNativeKeyboard() {
mHandler.post(new Runnable() {
@Override
public void run() {
mSearchTextEditor.requestFocusFromTouch();
mSearchTextEditor.dispatchTouchEvent(MotionEvent.obtain(SystemClock.uptimeMillis(),
SystemClock.uptimeMillis(), MotionEvent.ACTION_DOWN,
mSearchTextEditor.getWidth(), mSearchTextEditor.getHeight(), 0));
mSearchTextEditor.dispatchTouchEvent(MotionEvent.obtain(SystemClock.uptimeMillis(),
SystemClock.uptimeMillis(), MotionEvent.ACTION_UP,
mSearchTextEditor.getWidth(), mSearchTextEditor.getHeight(), 0));
}
});
}
/**
* This will update the hint for the search bar properly depending on state and provided title
*/
private void updateHint() {
String title = getResources().getString(R.string.lb_search_bar_hint);
if (!TextUtils.isEmpty(mTitle)) {
if (isVoiceMode()) {
title = getResources().getString(R.string.lb_search_bar_hint_with_title_speech, mTitle);
} else {
title = getResources().getString(R.string.lb_search_bar_hint_with_title, mTitle);
}
} else if (isVoiceMode()) {
title = getResources().getString(R.string.lb_search_bar_hint_speech);
}
mHint = title;
if (mSearchTextEditor != null) {
mSearchTextEditor.setHint(mHint);
}
}
private void toggleRecognition() {
if (mRecognizing) {
stopRecognition();
} else {
startRecognition();
}
}
/**
* Stop the recognition if already started
*/
public void stopRecognition() {
if (DEBUG) Log.v(TAG, String.format("stopRecognition (listening: %s, recognizing: %s)",
mListening, mRecognizing));
if (!mRecognizing) return;
// Edit text content was cleared when starting recogition; ensure the content is restored
// in error cases
mSearchTextEditor.setText(mSearchQuery);
mSearchTextEditor.setHint(mHint);
mRecognizing = false;
if (mSpeechRecognitionCallback != null || null == mSpeechRecognizer) return;
mSpeechOrbView.showNotListening();
if (mListening) {
mSpeechRecognizer.cancel();
mListening = false;
mAudioManager.abandonAudioFocus(mAudioFocusChangeListener);
}
mSpeechRecognizer.setRecognitionListener(null);
}
/**
* Start the voice recognition
*/
public void startRecognition() {
if (DEBUG) Log.v(TAG, String.format("startRecognition (listening: %s, recognizing: %s)",
mListening, mRecognizing));
if (mRecognizing) return;
mRecognizing = true;
if (!hasFocus()) {
requestFocus();
}
if (mSpeechRecognitionCallback != null) {
mSearchTextEditor.setText("");
mSearchTextEditor.setHint("");
mSpeechRecognitionCallback.recognizeSpeech();
return;
}
if (null == mSpeechRecognizer) return;
// Request audio focus
int result = mAudioManager.requestAudioFocus(mAudioFocusChangeListener,
// Use the music stream.
AudioManager.STREAM_MUSIC,
// Request exclusive transient focus.
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);
if (result != AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
Log.w(TAG, "Could not get audio focus");
}
mSearchTextEditor.setText("");
Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
mSpeechRecognizer.setRecognitionListener(new RecognitionListener() {
@Override
public void onReadyForSpeech(Bundle bundle) {
if (DEBUG) Log.v(TAG, "onReadyForSpeech");
mSpeechOrbView.showListening();
playSearchOpen();
}
@Override
public void onBeginningOfSpeech() {
if (DEBUG) Log.v(TAG, "onBeginningOfSpeech");
}
@Override
public void onRmsChanged(float rmsdB) {
if (DEBUG) Log.v(TAG, "onRmsChanged " + rmsdB);
int level = rmsdB < 0 ? 0 : (int)(10 * rmsdB);
mSpeechOrbView.setSoundLevel(level);
}
@Override
public void onBufferReceived(byte[] bytes) {
if (DEBUG) Log.v(TAG, "onBufferReceived " + bytes.length);
}
@Override
public void onEndOfSpeech() {
if (DEBUG) Log.v(TAG, "onEndOfSpeech");
}
@Override
public void onError(int error) {
if (DEBUG) Log.v(TAG, "onError " + error);
switch (error) {
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
Log.w(TAG, "recognizer network timeout");
break;
case SpeechRecognizer.ERROR_NETWORK:
Log.w(TAG, "recognizer network error");
break;
case SpeechRecognizer.ERROR_AUDIO:
Log.w(TAG, "recognizer audio error");
break;
case SpeechRecognizer.ERROR_SERVER:
Log.w(TAG, "recognizer server error");
break;
case SpeechRecognizer.ERROR_CLIENT:
Log.w(TAG, "recognizer client error");
break;
case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
Log.w(TAG, "recognizer speech timeout");
break;
case SpeechRecognizer.ERROR_NO_MATCH:
Log.w(TAG, "recognizer no match");
break;
case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
Log.w(TAG, "recognizer busy");
break;
case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
Log.w(TAG, "recognizer insufficient permissions");
break;
default:
Log.d(TAG, "recognizer other error");
break;
}
stopRecognition();
playSearchFailure();
}
@Override
public void onResults(Bundle bundle) {
if (DEBUG) Log.v(TAG, "onResults");
final ArrayList<String> matches =
bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
if (matches != null) {
if (DEBUG) Log.v(TAG, "Got results" + matches);
mSearchQuery = matches.get(0);
mSearchTextEditor.setText(mSearchQuery);
submitQuery();
}
stopRecognition();
playSearchSuccess();
}
@Override
public void onPartialResults(Bundle bundle) {
ArrayList<String> results = bundle.getStringArrayList(
SpeechRecognizer.RESULTS_RECOGNITION);
if (DEBUG) Log.v(TAG, "onPartialResults " + bundle + " results " +
(results == null ? results : results.size()));
if (results == null || results.size() == 0) {
return;
}
// stableText: high confidence text from PartialResults, if any.
// Otherwise, existing stable text.
final String stableText = results.get(0);
if (DEBUG) Log.v(TAG, "onPartialResults stableText " + stableText);
// pendingText: low confidence text from PartialResults, if any.
// Otherwise, empty string.
final String pendingText = results.size() > 1 ? results.get(1) : null;
if (DEBUG) Log.v(TAG, "onPartialResults pendingText " + pendingText);
mSearchTextEditor.updateRecognizedText(stableText, pendingText);
}
@Override
public void onEvent(int i, Bundle bundle) {
}
});
mListening = true;
mSpeechRecognizer.startListening(recognizerIntent);
}
private void updateUi(boolean hasFocus) {
if (hasFocus) {
mBarBackground.setAlpha(mBackgroundSpeechAlpha);
if (isVoiceMode()) {
mSearchTextEditor.setTextColor(mTextHintColorSpeechMode);
mSearchTextEditor.setHintTextColor(mTextHintColorSpeechMode);
} else {
mSearchTextEditor.setTextColor(mTextColorSpeechMode);
mSearchTextEditor.setHintTextColor(mTextHintColorSpeechMode);
}
} else {
mBarBackground.setAlpha(mBackgroundAlpha);
mSearchTextEditor.setTextColor(mTextColor);
mSearchTextEditor.setHintTextColor(mTextHintColor);
}
updateHint();
}
private boolean isVoiceMode() {
return mSpeechOrbView.isFocused();
}
private void submitQuery() {
if (!TextUtils.isEmpty(mSearchQuery) && null != mSearchBarListener) {
mSearchBarListener.onSearchQuerySubmit(mSearchQuery);
}
}
private void enforceAudioRecordPermission() {
String permission = "android.permission.RECORD_AUDIO";
int res = getContext().checkCallingOrSelfPermission(permission);
if (PackageManager.PERMISSION_GRANTED != res) {
throw new IllegalStateException("android.permission.RECORD_AUDIO required for search");
}
}
private void loadSounds(Context context) {
int[] sounds = {
R.raw.lb_voice_failure,
R.raw.lb_voice_open,
R.raw.lb_voice_no_input,
R.raw.lb_voice_success,
};
for (int sound : sounds) {
mSoundMap.put(sound, mSoundPool.load(context, sound, 1));
}
}
private void play(final int resId) {
mHandler.post(new Runnable() {
@Override
public void run() {
int sound = mSoundMap.get(resId);
mSoundPool.play(sound, FULL_LEFT_VOLUME, FULL_RIGHT_VOLUME, DEFAULT_PRIORITY,
DO_NOT_LOOP, DEFAULT_RATE);
}
});
}
private void playSearchOpen() {
play(R.raw.lb_voice_open);
}
private void playSearchFailure() {
play(R.raw.lb_voice_failure);
}
private void playSearchNoInput() {
play(R.raw.lb_voice_no_input);
}
private void playSearchSuccess() {
play(R.raw.lb_voice_success);
}
@Override
public void setNextFocusDownId(int viewId) {
mSpeechOrbView.setNextFocusDownId(viewId);
mSearchTextEditor.setNextFocusDownId(viewId);
}
}
| Internal speech recognizer fixes.
- when SearchFragment pauses or SearchBar is detached, stop recognition
to release audio focus
- when audio focus is stolen, abort speech recognition
b/18194568
Change-Id: I31963f27886c909df25f0fb29a5f728697e8462f
| v17/leanback/src/android/support/v17/leanback/widget/SearchBar.java | Internal speech recognizer fixes. |
|
Java | apache-2.0 | 0d917bea4df6c1e1e76dfaf01d6a2a9a80f56f47 | 0 | subutai-io/Subutai,subutai-io/Subutai,subutai-io/base,subutai-io/Subutai,subutai-io/Subutai,subutai-io/base,subutai-io/base,subutai-io/base,subutai-io/Subutai,subutai-io/Subutai | package io.subutai.core.hubmanager.impl.requestor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.subutai.common.command.CommandException;
import io.subutai.common.peer.ContainerHost;
import io.subutai.common.peer.ResourceHost;
import io.subutai.common.settings.Common;
import io.subutai.core.desktop.api.DesktopManager;
import io.subutai.core.hubmanager.api.HubRequester;
import io.subutai.core.hubmanager.api.RestClient;
import io.subutai.core.hubmanager.api.RestResult;
import io.subutai.core.hubmanager.api.exception.HubManagerException;
import io.subutai.core.hubmanager.impl.HubManagerImpl;
import io.subutai.core.peer.api.PeerManager;
import io.subutai.hub.share.dto.environment.ContainerStateDto;
import io.subutai.hub.share.dto.environment.container.ContainerDesktopInfoDto;
import io.subutai.hub.share.dto.environment.container.ContainerEventDto;
public class ContainerEventProcessor extends HubRequester
{
private final Logger log = LoggerFactory.getLogger( getClass() );
private PeerManager peerManager;
private DesktopManager desktopManager;
public ContainerEventProcessor( final HubManagerImpl hubManager, final PeerManager peerManager,
final RestClient restClient, final DesktopManager desktopManager )
{
super( hubManager, restClient );
this.peerManager = peerManager;
this.desktopManager = desktopManager;
}
@Override
public void request() throws HubManagerException
{
process();
}
public void process() throws HubManagerException
{
try
{
for ( ResourceHost rh : peerManager.getLocalPeer().getResourceHosts() )
{
sendContainerStates( rh );
}
}
catch ( Exception e )
{
log.error( "Oops error: ", e.getMessage() );
}
}
private void sendContainerStates( ResourceHost rh ) throws HubManagerException
{
log.info( "ResourceHost: id={}, hostname={}, containers={}", rh.getId(), rh.getHostname(),
rh.getContainerHosts().size() );
for ( ContainerHost ch : rh.getContainerHosts() )
{
if ( !Common.MANAGEMENT_HOSTNAME.equals( ch.getContainerName() ) )
{
sendContainerState( ch );
}
}
}
private void sendContainerState( ContainerHost ch ) throws HubManagerException
{
log.info( "- ContainerHost: id={}, name={}, environmentId={}, state={}", ch.getId(), ch.getContainerName(),
ch.getEnvironmentId(), ch.getState() );
ContainerStateDto state = ContainerStateDto.valueOf( ch.getState().name() );
ContainerEventDto dto = new ContainerEventDto( ch.getId(), ch.getEnvironmentId().getId(), state );
Boolean isDesktop = null;
if ( !desktopManager.existInCache( ch.getId() ) )
{
try
{
//get information about desktop env and remote desktop server
String deskEnv = desktopManager.getDesktopEnvironmentInfo( ch );
String rDServer = desktopManager.getRDServerInfo( ch );
if ( !deskEnv.isEmpty() && !rDServer.isEmpty() )
{
//add to cache as a desktop container
ContainerDesktopInfoDto desktopInfo = new ContainerDesktopInfoDto( ch.getId(), deskEnv, rDServer );
dto.setDesktopInfo( desktopInfo );
try
{
desktopManager.createDesktopUser( ch );
}
catch ( Exception e )
{
log.error( e.getMessage() );
}
isDesktop = true;
}
else
{
//add to cache as not desktop container
isDesktop = false;
}
}
catch ( CommandException e )
{
log.error( e.getMessage() );
}
}
try
{
desktopManager.copyKeys( ch );
}
catch ( CommandException e )
{
log.error( "Could not copy SSH keys to x2go usr" );
}
RestResult res = doRequest( dto );
if ( isDesktop != null )
{
if ( isDesktop )
{
desktopManager.containerIsDesktop( ch.getId() );
}
else
{
desktopManager.containerIsNotDesktop( ch.getId() );
}
}
log.info( "Response status: {}", res.getStatus() );
}
private RestResult doRequest( ContainerEventDto dto ) throws HubManagerException
{
try
{
String path = String.format( "/rest/v1/containers/%s/events", dto.getContainerId() );
return restClient.post( path, dto );
}
catch ( Exception e )
{
throw new HubManagerException( e );
}
}
}
| management/server/core/hub-manager/hub-manager-impl/src/main/java/io/subutai/core/hubmanager/impl/requestor/ContainerEventProcessor.java | package io.subutai.core.hubmanager.impl.requestor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.subutai.common.command.CommandException;
import io.subutai.common.peer.ContainerHost;
import io.subutai.common.peer.ResourceHost;
import io.subutai.common.settings.Common;
import io.subutai.core.desktop.api.DesktopManager;
import io.subutai.core.hubmanager.api.HubRequester;
import io.subutai.core.hubmanager.api.RestClient;
import io.subutai.core.hubmanager.api.RestResult;
import io.subutai.core.hubmanager.api.exception.HubManagerException;
import io.subutai.core.hubmanager.impl.HubManagerImpl;
import io.subutai.core.peer.api.PeerManager;
import io.subutai.hub.share.dto.environment.ContainerStateDto;
import io.subutai.hub.share.dto.environment.container.ContainerDesktopInfoDto;
import io.subutai.hub.share.dto.environment.container.ContainerEventDto;
public class ContainerEventProcessor extends HubRequester
{
private final Logger log = LoggerFactory.getLogger( getClass() );
private PeerManager peerManager;
private DesktopManager desktopManager;
public ContainerEventProcessor( final HubManagerImpl hubManager, final PeerManager peerManager,
final RestClient restClient, final DesktopManager desktopManager )
{
super( hubManager, restClient );
this.peerManager = peerManager;
this.desktopManager = desktopManager;
}
@Override
public void request() throws HubManagerException
{
process();
}
public void process() throws HubManagerException
{
try
{
for ( ResourceHost rh : peerManager.getLocalPeer().getResourceHosts() )
{
sendContainerStates( rh );
}
}
catch ( Exception e )
{
log.error( "Oops error: ", e.getMessage() );
}
}
private void sendContainerStates( ResourceHost rh ) throws HubManagerException
{
log.info( "ResourceHost: id={}, hostname={}, containers={}", rh.getId(), rh.getHostname(),
rh.getContainerHosts().size() );
for ( ContainerHost ch : rh.getContainerHosts() )
{
if ( !Common.MANAGEMENT_HOSTNAME.equals( ch.getContainerName() ) )
{
sendContainerState( ch );
}
}
}
private void sendContainerState( ContainerHost ch ) throws HubManagerException
{
log.info( "- ContainerHost: id={}, name={}, environmentId={}, state={}", ch.getId(), ch.getContainerName(),
ch.getEnvironmentId(), ch.getState() );
ContainerStateDto state = ContainerStateDto.valueOf( ch.getState().name() );
ContainerEventDto dto = new ContainerEventDto( ch.getId(), ch.getEnvironmentId().getId(), state );
if ( !desktopManager.existInCache( ch.getId() ) )
{
try
{
//get information about desktop env and remote desktop server
String deskEnv = desktopManager.getDesktopEnvironmentInfo( ch );
String rDServer = desktopManager.getRDServerInfo( ch );
if ( !deskEnv.isEmpty() && !rDServer.isEmpty() )
{
//add to cache as a desktop container
desktopManager.containerIsDesktop( ch.getId() );
ContainerDesktopInfoDto desktopInfo = new ContainerDesktopInfoDto( ch.getId(), deskEnv, rDServer );
dto.setDesktopInfo( desktopInfo );
try
{
desktopManager.createDesktopUser( ch );
}
catch ( Exception e )
{
log.error( e.getMessage() );
}
}
else
{
//add to cache as not desktop container
desktopManager.containerIsNotDesktop( ch.getId() );
}
}
catch ( CommandException e )
{
log.error( e.getMessage() );
}
}
try
{
desktopManager.copyKeys( ch );
}
catch ( CommandException e )
{
log.error( "Could not copy SSH keys to x2go usr" );
}
RestResult res = doRequest( dto );
log.info( "Response status: {}", res.getStatus() );
}
private RestResult doRequest( ContainerEventDto dto ) throws HubManagerException
{
try
{
String path = String.format( "/rest/v1/containers/%s/events", dto.getContainerId() );
return restClient.post( path, dto );
}
catch ( Exception e )
{
throw new HubManagerException( e );
}
}
}
| #2340: save desktop info cache after sending data to Bazaar
| management/server/core/hub-manager/hub-manager-impl/src/main/java/io/subutai/core/hubmanager/impl/requestor/ContainerEventProcessor.java | #2340: save desktop info cache after sending data to Bazaar |
|
Java | apache-2.0 | d25c4ed71f33725ac2182abb5d36c9d5307a62cd | 0 | wbond/subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion | /**
* @copyright
* ====================================================================
* Licensed to the Subversion Corporation (SVN Corp.) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SVN Corp. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
* @endcopyright
*/
package org.tigris.subversion.javahl;
import org.tigris.subversion.javahl.*;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.ByteArrayOutputStream;
import java.text.ParseException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Map;
import junit.framework.Assert;
/**
* Tests the basic functionality of javahl binding (inspired by the
* tests in subversion/tests/cmdline/basic_tests.py).
*/
public class BasicTests extends SVNTests
{
/**
* Base name of all our tests.
*/
public final static String testName = "basic_test";
public BasicTests()
{
init();
}
public BasicTests(String name)
{
super(name);
init();
}
/**
* Initialize the testBaseName and the testCounter, if this is the
* first test of this class.
*/
private void init()
{
if (!testName.equals(testBaseName))
{
testCounter = 0;
testBaseName = testName;
}
}
/**
* Test LogDate().
* @throws Throwable
*/
public void testLogDate() throws Throwable
{
String goodDate = "2007-10-04T03:00:52.134992Z";
String badDate = "2008-01-14";
LogDate logDate;
try
{
logDate = new LogDate(goodDate);
assertEquals(1191466852134992L, logDate.getTimeMicros());
} catch (ParseException e) {
fail("Failed to parse date " + goodDate);
}
try
{
logDate = new LogDate(badDate);
fail("Failed to throw exception on bad date " + badDate);
} catch (ParseException e) {
}
}
/**
* Test SVNClient.getVersion().
* @throws Throwable
*/
public void testVersion() throws Throwable
{
try
{
Version version = client.getVersion();
String versionString = version.toString();
if (versionString == null || versionString.trim().length() == 0)
{
throw new Exception("Version string empty");
}
}
catch (Exception e)
{
fail("Version should always be available unless the " +
"native libraries failed to initialize: " + e);
}
}
/**
* Tests Subversion path validation.
*/
public void testPathValidation() throws Throwable
{
// Rather than segfaulting, JavaHL considers null an invalid path.
assertFalse("Path validation produced false-positive for null path",
Path.isValid(null));
String path = "valid-path";
assertTrue("Validation check of valid path '" + path +
"' should succeed", Path.isValid(path));
// File names cannot contain control characters.
path = "invalid-\u0001-path";
assertFalse("Validation check of invalid path '" + path +
"' (which contains control characters) should fail",
Path.isValid(path));
}
/**
* Tests Subversion path as URL predicate.
*/
public void testPathIsURL() throws Throwable
{
try
{
Path.isURL(null);
fail("A null path should raise an exception");
}
catch (IllegalArgumentException expected)
{
}
// Subversion "paths" which aren't URLs.
String[] paths = { "/path", "c:\\path" };
for (int i = 0; i < paths.length; i++)
{
assertFalse("'" + paths[i] + "' should not be considered a URL",
Path.isURL(paths[i]));
}
// Subversion "paths" which are URLs.
paths = new String[] { "http://example.com", "svn://example.com",
"svn+ssh://example.com", "file:///src/svn/" };
for (int i = 0; i < paths.length; i++)
{
assertTrue("'" + paths[i] + "' should be considered a URL",
Path.isURL(paths[i]));
}
}
/**
* Tests Mergeinfo and RevisionRange classes.
* @since 1.5
*/
public void testMergeinfoParser() throws Throwable
{
String mergeInfoPropertyValue =
"/trunk:1-300,305,307,400-405\n/branches/branch:308-400";
Mergeinfo info = new Mergeinfo(mergeInfoPropertyValue);
String[] paths = info.getPaths();
assertEquals(2, paths.length);
RevisionRange[] trunkRange = info.getRevisionRange("/trunk");
assertEquals(4, trunkRange.length);
assertEquals("1-300", trunkRange[0].toString());
assertEquals("305", trunkRange[1].toString());
assertEquals("307", trunkRange[2].toString());
assertEquals("400-405", trunkRange[3].toString());
RevisionRange[] branchRange =
info.getRevisionRange("/branches/branch");
assertEquals(1, branchRange.length);
}
/**
* Test the basic SVNClient.status functionality.
* @throws Throwable
*/
public void testBasicStatus() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
// check the status of the working copy
thisTest.checkStatus();
// Test status of non-existent file
File fileC = new File(thisTest.getWorkingCopy() + "/A", "foo.c");
Status s = client.singleStatus(fileToSVNPath(fileC, false), false);
if (s != null)
fail("File foo.c should not return a status.");
}
/**
* Test the "out of date" info from {@link
* org.tigris.subversion.javahl.SVNClient#status()}.
*
* @throws SubversionException
* @throws IOException
*/
public void testOODStatus() throws SubversionException, IOException
{
// build the test setup
OneTest thisTest = new OneTest();
// Make a whole slew of changes to a WC:
//
// (root) r7 - prop change
// iota
// A
// |__mu
// |
// |__B
// | |__lambda
// | |
// | |__E r12 - deleted
// | | |__alpha
// | | |__beta
// | |
// | |__F r9 - prop change
// | |__I r6 - added dir
// |
// |__C r5 - deleted
// |
// |__D
// |__gamma
// |
// |__G
// | |__pi r3 - deleted
// | |__rho r2 - modify text
// | |__tau r4 - modify text
// |
// |__H
// |__chi r10-11 replaced with file
// |__psi r13-14 replaced with dir
// |__omega
// |__nu r8 - added file
File file, dir;
PrintWriter pw;
Status status;
long rev; // Resulting rev from co or update
long expectedRev = 2; // Keeps track of the latest rev committed
// ----- r2: modify file A/D/G/rho --------------------------
file = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("modification to rho");
pw.close();
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[]{thisTest.getWCPath()},
"log msg", true), expectedRev++);
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", rev);
thisTest.getWc().setItemContent("A/D/G/rho",
thisTest.getWc().getItemContent("A/D/G/rho")
+ "modification to rho");
status = client.singleStatus(thisTest.getWCPath() + "/A/D/G/rho",
false);
long rhoCommitDate = status.getLastChangedDate().getTime();
long rhoCommitRev = rev;
String rhoAuthor = status.getLastCommitAuthor();
// ----- r3: delete file A/D/G/pi ---------------------------
client.remove(new String[] {thisTest.getWCPath() + "/A/D/G/pi"}, null,
false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/G/pi", NodeKind.file,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/D/G/pi");
thisTest.getWc().setItemWorkingCopyRevision("A/D/G", rev);
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath() + "/A/D/G",
null, true),
rev);
long GCommitRev = rev;
// ----- r4: modify file A/D/G/tau --------------------------
file = new File(thisTest.getWorkingCopy(), "A/D/G/tau");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("modification to tau");
pw.close();
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/G/tau",NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/tau", rev);
thisTest.getWc().setItemContent("A/D/G/tau",
thisTest.getWc().getItemContent("A/D/G/tau")
+ "modification to tau");
status = client.singleStatus(thisTest.getWCPath() + "/A/D/G/tau",
false);
long tauCommitDate = status.getLastChangedDate().getTime();
long tauCommitRev = rev;
String tauAuthor = status.getLastCommitAuthor();
// ----- r5: delete dir with no children A/C ---------------
client.remove(new String[] {thisTest.getWCPath() + "/A/C"}, null,
false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/C", NodeKind.dir,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/C");
long CCommitRev = rev;
// ----- r6: Add dir A/B/I ----------------------------------
dir = new File(thisTest.getWorkingCopy(), "A/B/I");
dir.mkdir();
client.add(dir.getAbsolutePath(), true);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/I", NodeKind.dir, CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().addItem("A/B/I", null);
status = client.singleStatus(thisTest.getWCPath() + "/A/B/I", false);
long ICommitDate = status.getLastChangedDate().getTime();
long ICommitRev = rev;
String IAuthor = status.getLastCommitAuthor();
// ----- r7: Update then commit prop change on root dir -----
thisTest.getWc().setRevision(rev);
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(), null, true), rev);
thisTest.checkStatus();
client.propertySet(thisTest.getWCPath(), "propname", "propval", false);
thisTest.getWc().setItemPropStatus("", Status.Kind.modified);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(), null,
NodeKind.dir, CommitItemStateFlags.PropMods);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().setItemWorkingCopyRevision("", rev);
thisTest.getWc().setItemPropStatus("", Status.Kind.normal);
// ----- r8: Add a file A/D/H/nu ----------------------------
file = new File(thisTest.getWorkingCopy(), "A/D/H/nu");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the file 'nu'.");
pw.close();
client.add(file.getAbsolutePath(), false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/nu", NodeKind.file,
CommitItemStateFlags.TextMods +
CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().addItem("A/D/H/nu", "This is the file 'nu'.");
status = client.singleStatus(thisTest.getWCPath() + "/A/D/H/nu",
false);
long nuCommitDate = status.getLastChangedDate().getTime();
long nuCommitRev = rev;
String nuAuthor = status.getLastCommitAuthor();
// ----- r9: Prop change on A/B/F ---------------------------
client.propertySet(thisTest.getWCPath() + "/A/B/F", "propname",
"propval", false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/F", NodeKind.dir,
CommitItemStateFlags.PropMods);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().setItemPropStatus("A/B/F", Status.Kind.normal);
thisTest.getWc().setItemWorkingCopyRevision("A/B/F", rev);
status = client.singleStatus(thisTest.getWCPath() + "/A/B/F", false);
long FCommitDate = status.getLastChangedDate().getTime();
long FCommitRev = rev;
String FAuthor = status.getLastCommitAuthor();
// ----- r10-11: Replace file A/D/H/chi with file -----------
client.remove(new String[] {thisTest.getWCPath() + "/A/D/H/chi"},
null, false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/chi", NodeKind.file,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/D/G/pi");
file = new File(thisTest.getWorkingCopy(), "A/D/H/chi");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the replacement file 'chi'.");
pw.close();
client.add(file.getAbsolutePath(), false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/chi", NodeKind.file,
CommitItemStateFlags.TextMods +
CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().addItem("A/D/H/chi",
"This is the replacement file 'chi'.");
status = client.singleStatus(thisTest.getWCPath() + "/A/D/H/chi",
false);
long chiCommitDate = status.getLastChangedDate().getTime();
long chiCommitRev = rev;
String chiAuthor = status.getLastCommitAuthor();
// ----- r12: Delete dir A/B/E with children ----------------
client.remove(new String[] {thisTest.getWCPath() + "/A/B/E"}, null,
false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/E", NodeKind.dir,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/B/E/alpha");
thisTest.getWc().removeItem("A/B/E/beta");
thisTest.getWc().removeItem("A/B/E");
thisTest.getWc().setItemWorkingCopyRevision("A/B", rev);
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath() + "/A/B", null, true),
rev);
Info Binfo = client.info(thisTest.getWCPath() + "/A/B");
long BCommitDate = Binfo.getLastChangedDate().getTime();
long BCommitRev = rev;
long ECommitRev = BCommitRev;
String BAuthor = Binfo.getAuthor();
// ----- r13-14: Replace file A/D/H/psi with dir ------------
client.remove(new String[]{thisTest.getWCPath() + "/A/D/H/psi"}, null,
false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/psi", NodeKind.file,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/D/H/psi");
thisTest.getWc().setRevision(rev);
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(), null, true), rev);
thisTest.getWc().addItem("A/D/H/psi", null);
dir = new File(thisTest.getWorkingCopy(), "A/D/H/psi");
dir.mkdir();
client.add(dir.getAbsolutePath(), true);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/psi", NodeKind.dir,
CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
status = client.singleStatus(thisTest.getWCPath() + "/A/D/H/psi",
false);
long psiCommitDate = status.getLastChangedDate().getTime();
long psiCommitRev = rev;
String psiAuthor = status.getLastCommitAuthor();
// ----- Check status of modfied WC then update it back
// ----- to rev 1 so it's out of date
thisTest.checkStatus();
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(),
Revision.getInstance(1), true),
1);
thisTest.getWc().setRevision(1);
thisTest.getWc().setItemOODInfo("A", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.getWc().setItemOODInfo("A/B", BCommitRev, BAuthor,
BCommitDate, NodeKind.dir);
thisTest.getWc().addItem("A/B/I", null);
thisTest.getWc().setItemOODInfo("A/B/I", ICommitRev, IAuthor,
ICommitDate, NodeKind.dir);
thisTest.getWc().setItemTextStatus("A/B/I", Status.Kind.none);
thisTest.getWc().setItemNodeKind("A/B/I", NodeKind.unknown);
thisTest.getWc().addItem("A/C", null);
thisTest.getWc().setItemReposLastCmtRevision("A/C", CCommitRev);
thisTest.getWc().setItemReposKind("A/C", NodeKind.dir);
thisTest.getWc().addItem("A/B/E", null);
thisTest.getWc().setItemReposLastCmtRevision("A/B/E", ECommitRev);
thisTest.getWc().setItemReposKind("A/B/E", NodeKind.dir);
thisTest.getWc().addItem("A/B/E/alpha", "This is the file 'alpha'.");
thisTest.getWc().addItem("A/B/E/beta", "This is the file 'beta'.");
thisTest.getWc().setItemPropStatus("A/B/F", Status.Kind.none);
thisTest.getWc().setItemOODInfo("A/B/F", FCommitRev, FAuthor,
FCommitDate, NodeKind.dir);
thisTest.getWc().setItemOODInfo("A/D", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.getWc().setItemOODInfo("A/D/G", tauCommitRev, tauAuthor,
tauCommitDate, NodeKind.dir);
thisTest.getWc().addItem("A/D/G/pi", "This is the file 'pi'.");
thisTest.getWc().setItemReposLastCmtRevision("A/D/G/pi", GCommitRev);
thisTest.getWc().setItemReposKind("A/D/G/pi", NodeKind.file);
thisTest.getWc().setItemContent("A/D/G/rho",
"This is the file 'rho'.");
thisTest.getWc().setItemOODInfo("A/D/G/rho", rhoCommitRev, rhoAuthor,
rhoCommitDate, NodeKind.file);
thisTest.getWc().setItemContent("A/D/G/tau",
"This is the file 'tau'.");
thisTest.getWc().setItemOODInfo("A/D/G/tau", tauCommitRev, tauAuthor,
tauCommitDate, NodeKind.file);
thisTest.getWc().setItemOODInfo("A/D/H", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.getWc().setItemWorkingCopyRevision("A/D/H/nu",
Revision.SVN_INVALID_REVNUM);
thisTest.getWc().setItemTextStatus("A/D/H/nu", Status.Kind.none);
thisTest.getWc().setItemNodeKind("A/D/H/nu", NodeKind.unknown);
thisTest.getWc().setItemOODInfo("A/D/H/nu", nuCommitRev, nuAuthor,
nuCommitDate, NodeKind.file);
thisTest.getWc().setItemContent("A/D/H/chi",
"This is the file 'chi'.");
thisTest.getWc().setItemOODInfo("A/D/H/chi", chiCommitRev, chiAuthor,
chiCommitDate, NodeKind.file);
thisTest.getWc().removeItem("A/D/H/psi");
thisTest.getWc().addItem("A/D/H/psi", "This is the file 'psi'.");
// psi was replaced with a directory
thisTest.getWc().setItemOODInfo("A/D/H/psi", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.getWc().setItemPropStatus("", Status.Kind.none);
thisTest.getWc().setItemOODInfo("", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.checkStatus(true);
}
/**
* Test the basic SVNClient.checkout functionality.
* @throws Throwable
*/
public void testBasicCheckout() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
try
{
// obstructed checkout must fail
client.checkout(thisTest.getUrl() + "/A", thisTest.getWCPath(),
null, true);
fail("missing exception");
}
catch (ClientException expected)
{
}
// modify file A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("appended mu text");
muWriter.close();
thisTest.getWc().setItemTextStatus("A/mu", Status.Kind.modified);
// delete A/B/lambda without svn
File lambda = new File(thisTest.getWorkingCopy(), "A/B/lambda");
lambda.delete();
thisTest.getWc().setItemTextStatus("A/B/lambda", Status.Kind.missing);
// remove A/D/G
client.remove(new String[]{thisTest.getWCPath() + "/A/D/G"}, null,
false);
thisTest.getWc().setItemTextStatus("A/D/G", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/pi", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/tau", Status.Kind.deleted);
// check the status of the working copy
thisTest.checkStatus();
// recheckout the working copy
client.checkout(thisTest.getUrl(), thisTest.getWCPath(), null, true);
// deleted file should reapear
thisTest.getWc().setItemTextStatus("A/B/lambda", Status.Kind.normal);
// check the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.commit functionality.
* @throws Throwable
*/
public void testBasicCommit() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
// modify file A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("appended mu text");
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu",
thisTest.getWc().getItemContent("A/mu") + "appended mu text");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
CommitItemStateFlags.TextMods);
// modify file A/D/G/rho
File rho = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
PrintWriter rhoWriter =
new PrintWriter(new FileOutputStream(rho, true));
rhoWriter.print("new appended text for rho");
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
thisTest.getWc().setItemContent("A/D/G/rho",
thisTest.getWc().getItemContent("A/D/G/rho")
+ "new appended text for rho");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho",NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
2);
// check the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic property setting/getting functionality.
* @throws Throwable
*/
public void testBasicProperties() throws Throwable
{
OneTest thisTest = new OneTest();
WC wc = thisTest.getWc();
// Check getting properties the non-callback way
String itemPath = fileToSVNPath(new File(thisTest.getWCPath(),
"iota"),
false);
client.propertySet(itemPath, "abc", "def", false);
PropertyData[] properties = client.properties(itemPath);
PropertyData prop = properties[0];
assertEquals("abc", prop.getName());
assertEquals("def", prop.getValue());
wc.setItemPropStatus("iota", Status.Kind.modified);
thisTest.checkStatus();
// Check getting properties the callback way
itemPath = fileToSVNPath(new File(thisTest.getWCPath(),
"/A/B/E/alpha"),
false);
client.propertyCreate(itemPath, "cqcq", "qrz", false, false);
ProplistCallbackImpl callback = new ProplistCallbackImpl();
client.properties(itemPath, null, null, Depth.empty, null, callback);
Map propMap = callback.getProperties(itemPath);
Iterator it = propMap.keySet().iterator();
while (it.hasNext())
{
String key = (String) it.next();
assertEquals("cqcq", key);
assertEquals("qrz", (String) propMap.get(key));
}
wc.setItemPropStatus("A/B/E/alpha", Status.Kind.modified);
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.update functionality.
* @throws Throwable
*/
public void testBasicUpdate() throws Throwable
{
// build the test setup. Used for the changes
OneTest thisTest = new OneTest();
// build the backup test setup. That is the one that will be updated
OneTest backupTest = thisTest.copy(".backup");
// modify A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("appended mu text");
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu",
thisTest.getWc().getItemContent("A/mu") + "appended mu text");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
CommitItemStateFlags.TextMods);
// modify A/D/G/rho
File rho = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
PrintWriter rhoWriter =
new PrintWriter(new FileOutputStream(rho, true));
rhoWriter.print("new appended text for rho");
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
thisTest.getWc().setItemContent("A/D/G/rho",
thisTest.getWc().getItemContent("A/D/G/rho")
+ "new appended text for rho");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho",NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
2);
// check the status of the working copy
thisTest.checkStatus();
// update the backup test
assertEquals("wrong revision number from update",
client.update(backupTest.getWCPath(), null, true),
2);
// set the expected working copy layout for the backup test
backupTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
backupTest.getWc().setItemContent("A/mu",
backupTest.getWc().getItemContent("A/mu") + "appended mu text");
backupTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
backupTest.getWc().setItemContent("A/D/G/rho",
backupTest.getWc().getItemContent("A/D/G/rho")
+ "new appended text for rho");
// check the status of the working copy of the backup test
backupTest.checkStatus();
}
/**
* Test basic SVNClient.mkdir with URL parameter functionality.
* @throws Throwable
*/
public void testBasicMkdirUrl() throws Throwable
{
// build the test setup.
OneTest thisTest = new OneTest();
// create Y and Y/Z directories in the repository
addExpectedCommitItem(null, thisTest.getUrl(), "Y", NodeKind.none,
CommitItemStateFlags.Add);
addExpectedCommitItem(null, thisTest.getUrl(), "Y/Z", NodeKind.none,
CommitItemStateFlags.Add);
client.mkdir(new String[]{thisTest.getUrl() + "/Y",
thisTest.getUrl() + "/Y/Z"}, "log_msg");
// add the new directories the expected working copy layout
thisTest.getWc().addItem("Y", null);
thisTest.getWc().setItemWorkingCopyRevision("Y", 2);
thisTest.getWc().addItem("Y/Z", null);
thisTest.getWc().setItemWorkingCopyRevision("Y/Z", 2);
// update the working copy
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(), null, true),
2);
// check the status of the working copy
thisTest.checkStatus();
}
/**
* Test the {@link SVNClientInterface.copy()} API.
* @since 1.5
*/
public void testCopy()
throws SubversionException, IOException
{
OneTest thisTest = new OneTest();
WC wc = thisTest.getWc();
final Revision firstRevision = Revision.getInstance(1);
final Revision pegRevision = null; // Defaults to Revision.HEAD.
// Copy files from A/B/E to A/B/F.
String[] srcPaths = { "alpha", "beta" };
CopySource[] sources = new CopySource[srcPaths.length];
for (int i = 0; i < srcPaths.length; i++)
{
String fileName = srcPaths[i];
sources[i] =
new CopySource(new File(thisTest.getWorkingCopy(),
"A/B/E/" + fileName).getPath(),
firstRevision, pegRevision);
wc.addItem("A/B/F/" + fileName,
wc.getItemContent("A/B/E/" + fileName));
wc.setItemWorkingCopyRevision("A/B/F/" + fileName, 2);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/F/" + fileName, NodeKind.file,
CommitItemStateFlags.Add |
CommitItemStateFlags.IsCopy);
}
client.copy(sources,
new File(thisTest.getWorkingCopy(), "A/B/F").getPath(),
null, true, false, null);
// Commit the changes, and check the state of the WC.
assertEquals("Unexpected WC revision number after commit",
client.commit(new String[] { thisTest.getWCPath() },
"Copy files", true),
2);
thisTest.checkStatus();
assertExpectedSuggestion(thisTest.getUrl() + "/A/B/E/alpha", "A/B/F/alpha", thisTest);
// Now test a WC to URL copy
CopySource wcSource[] = new CopySource[1];
wcSource[0] = new CopySource(new File(thisTest.getWorkingCopy(),
"A/B").getPath(), Revision.WORKING, Revision.WORKING);
client.commitMessageHandler(null);
client.copy(wcSource,
thisTest.getUrl() + "/parent/A/B",
"Copy WC to URL", true, true, null);
// update the WC to get new folder and confirm the copy
assertEquals("wrong revision number from update",
client.update(thisTest.getWCPath(), null, true),
3);
}
/**
* Test the {@link SVNClientInterface.move()} API.
* @since 1.5
*/
public void testMove()
throws SubversionException, IOException
{
OneTest thisTest = new OneTest();
WC wc = thisTest.getWc();
// Move files from A/B/E to A/B/F.
String[] srcPaths = { "alpha", "beta" };
for (int i = 0; i < srcPaths.length; i++)
{
String fileName = srcPaths[i];
srcPaths[i] = new File(thisTest.getWorkingCopy(),
"A/B/E/" + fileName).getPath();
wc.addItem("A/B/F/" + fileName,
wc.getItemContent("A/B/E/" + fileName));
wc.setItemWorkingCopyRevision("A/B/F/" + fileName, 2);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/F/" + fileName, NodeKind.file,
CommitItemStateFlags.Add |
CommitItemStateFlags.IsCopy);
wc.removeItem("A/B/E/" + fileName);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/E/" + fileName, NodeKind.file,
CommitItemStateFlags.Delete);
}
client.move(srcPaths,
new File(thisTest.getWorkingCopy(), "A/B/F").getPath(),
null, false, true, false, null);
// Commit the changes, and check the state of the WC.
assertEquals("Unexpected WC revision number after commit",
client.commit(new String[] { thisTest.getWCPath() },
"Move files", true), 2);
thisTest.checkStatus();
assertExpectedSuggestion(thisTest.getUrl() + "/A/B/E/alpha", "A/B/F/alpha", thisTest);
}
/**
* Assert that the first merge source suggested for
* <code>destPath</code> at {@link Revision#WORKING} and {@link
* Revision#HEAD} is equivalent to <code>expectedSrc</code>.
* @exception SubversionException If retrieval of the copy source fails.
* @since 1.5
*/
private void assertExpectedSuggestion(String expectedSrc,
String destPath, OneTest thisTest)
throws SubversionException
{
String wcPath = fileToSVNPath(new File(thisTest.getWCPath(),
destPath), false);
String[] suggestions = client.suggestMergeSources(wcPath,
Revision.WORKING);
assertNotNull(suggestions);
assertTrue(suggestions.length >= 1);
assertTrue("Unexpected copy source path, expected " +
expectedSrc + ", got " + suggestions[0],
expectedSrc.equals(suggestions[0]));
// Same test using URL
String url = thisTest.getUrl() + "/" + destPath;
suggestions = client.suggestMergeSources(url, Revision.HEAD);
assertNotNull(suggestions);
assertTrue(suggestions.length >= 1);
assertTrue("Unexpected copy source path, expected " +
expectedSrc + ", got " + suggestions[0],
expectedSrc.equals(suggestions[0]));
}
/**
* Tests that the passed start and end revision are contained
* within the array of revisions.
* @since 1.5
*/
private void assertExpectedMergeRange(long start, long end,
long[] revisions)
{
Arrays.sort(revisions);
for (int i = 0; i < revisions.length; i++) {
if (revisions[i] <= start) {
for (int j = i; j < revisions.length; j++)
{
if (end <= revisions[j])
return;
}
fail("End revision: " + end + " was not in range: " + revisions[0] +
" : " + revisions[revisions.length - 1]);
return;
}
}
fail("Start revision: " + start + " was not in range: " + revisions[0] +
" : " + revisions[revisions.length - 1]);
}
/**
* Test the basic SVNClient.update functionality with concurrent
* changes in the repository and the working copy.
* @throws Throwable
*/
public void testBasicMergingUpdate() throws Throwable
{
// build the first working copy
OneTest thisTest = new OneTest();
// append 10 lines to A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
String muContent = thisTest.getWc().getItemContent("A/mu");
for (int i = 2; i < 11; i++)
{
muWriter.print("\nThis is line " + i + " in mu");
muContent = muContent + "\nThis is line " + i + " in mu";
}
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu", muContent);
addExpectedCommitItem(thisTest.getWorkingCopy().getAbsolutePath(),
thisTest.getUrl(), "A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
// append 10 line to A/D/G/rho
File rho = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
PrintWriter rhoWriter =
new PrintWriter(new FileOutputStream(rho, true));
String rhoContent = thisTest.getWc().getItemContent("A/D/G/rho");
for (int i = 2; i < 11; i++)
{
rhoWriter.print("\nThis is line " + i + " in rho");
rhoContent = rhoContent + "\nThis is line " + i + " in rho";
}
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
thisTest.getWc().setItemContent("A/D/G/rho", rhoContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
2);
// check the status of the first working copy
thisTest.checkStatus();
// create a backup copy of the working copy
OneTest backupTest = thisTest.copy(".backup");
// change the last line of A/mu in the first working copy
muWriter = new PrintWriter(new FileOutputStream(mu, true));
muContent = thisTest.getWc().getItemContent("A/mu");
muWriter.print(" Appended to line 10 of mu");
muContent = muContent + " Appended to line 10 of mu";
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 3);
thisTest.getWc().setItemContent("A/mu", muContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
// change the last line of A/mu in the first working copy
rhoWriter = new PrintWriter(new FileOutputStream(rho, true));
rhoContent = thisTest.getWc().getItemContent("A/D/G/rho");
rhoWriter.print(" Appended to line 10 of rho");
rhoContent = rhoContent + " Appended to line 10 of rho";
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 3);
thisTest.getWc().setItemContent("A/D/G/rho", rhoContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
// commit these changes to the repository
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
3);
// check the status of the first working copy
thisTest.checkStatus();
// modify the first line of A/mu in the backup working copy
mu = new File(backupTest.getWorkingCopy(), "A/mu");
muWriter = new PrintWriter(new FileOutputStream(mu));
muWriter.print("This is the new line 1 in the backup copy of mu");
muContent = "This is the new line 1 in the backup copy of mu";
for (int i = 2; i < 11; i++)
{
muWriter.print("\nThis is line " + i + " in mu");
muContent = muContent + "\nThis is line " + i + " in mu";
}
muWriter.close();
backupTest.getWc().setItemWorkingCopyRevision("A/mu", 3);
muContent = muContent + " Appended to line 10 of mu";
backupTest.getWc().setItemContent("A/mu", muContent);
backupTest.getWc().setItemTextStatus("A/mu", Status.Kind.modified);
// modify the first line of A/D/G/rho in the backup working copy
rho = new File(backupTest.getWorkingCopy(), "A/D/G/rho");
rhoWriter = new PrintWriter(new FileOutputStream(rho));
rhoWriter.print("This is the new line 1 in the backup copy of rho");
rhoContent = "This is the new line 1 in the backup copy of rho";
for (int i = 2; i < 11; i++)
{
rhoWriter.print("\nThis is line " + i + " in rho");
rhoContent = rhoContent + "\nThis is line " + i + " in rho";
}
rhoWriter.close();
backupTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 3);
rhoContent = rhoContent + " Appended to line 10 of rho";
backupTest.getWc().setItemContent("A/D/G/rho", rhoContent);
backupTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.modified);
// update the backup working copy
assertEquals("wrong revision number from update",
client.update(backupTest.getWCPath(), null, true),
3);
// check the status of the backup working copy
backupTest.checkStatus();
}
/**
* Test the basic SVNClient.update functionality with concurrent
* changes in the repository and the working copy that generate
* conflicts.
* @throws Throwable
*/
public void testBasicConflict() throws Throwable
{
// build the first working copy
OneTest thisTest = new OneTest();
// copy the first working copy to the backup working copy
OneTest backupTest = thisTest.copy(".backup");
// append a line to A/mu in the first working copy
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
String muContent = thisTest.getWc().getItemContent("A/mu");
muWriter.print("\nOriginal appended text for mu");
muContent = muContent + "\nOriginal appended text for mu";
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu", muContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
// append a line to A/D/G/rho in the first working copy
File rho = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
PrintWriter rhoWriter =
new PrintWriter(new FileOutputStream(rho, true));
String rhoContent = thisTest.getWc().getItemContent("A/D/G/rho");
rhoWriter.print("\nOriginal appended text for rho");
rhoContent = rhoContent + "\nOriginal appended text for rho";
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
thisTest.getWc().setItemContent("A/D/G/rho", rhoContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes in the first working copy
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
2);
// test the status of the working copy after the commit
thisTest.checkStatus();
// append a different line to A/mu in the backup working copy
mu = new File(backupTest.getWorkingCopy(), "A/mu");
muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("\nConflicting appended text for mu");
muContent = "<<<<<<< .mine\nThis is the file 'mu'.\n"+
"Conflicting appended text for mu=======\n"+
"This is the file 'mu'.\n"+
"Original appended text for mu>>>>>>> .r2";
muWriter.close();
backupTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
backupTest.getWc().setItemContent("A/mu", muContent);
backupTest.getWc().setItemTextStatus("A/mu", Status.Kind.conflicted);
backupTest.getWc().addItem("A/mu.r1", "");
backupTest.getWc().setItemNodeKind("A/mu.r1", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/mu.r1",
Status.Kind.unversioned);
backupTest.getWc().addItem("A/mu.r2", "");
backupTest.getWc().setItemNodeKind("A/mu.r2", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/mu.r2",
Status.Kind.unversioned);
backupTest.getWc().addItem("A/mu.mine", "");
backupTest.getWc().setItemNodeKind("A/mu.mine", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/mu.mine",
Status.Kind.unversioned);
// append a different line to A/D/G/rho in the backup working copy
rho = new File(backupTest.getWorkingCopy(), "A/D/G/rho");
rhoWriter = new PrintWriter(new FileOutputStream(rho, true));
rhoWriter.print("\nConflicting appended text for rho");
rhoContent = "<<<<<<< .mine\nThis is the file 'rho'.\n"+
"Conflicting appended text for rho=======\n"+
"his is the file 'rho'.\n"+
"Original appended text for rho>>>>>>> .r2";
rhoWriter.close();
backupTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
backupTest.getWc().setItemContent("A/D/G/rho", rhoContent);
backupTest.getWc().setItemTextStatus("A/D/G/rho",
Status.Kind.conflicted);
backupTest.getWc().addItem("A/D/G/rho.r1", "");
backupTest.getWc().setItemNodeKind("A/D/G/rho.r1", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/D/G/rho.r1",
Status.Kind.unversioned);
backupTest.getWc().addItem("A/D/G/rho.r2", "");
backupTest.getWc().setItemNodeKind("A/D/G/rho.r2", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/D/G/rho.r2",
Status.Kind.unversioned);
backupTest.getWc().addItem("A/D/G/rho.mine", "");
backupTest.getWc().setItemNodeKind("A/D/G/rho.mine", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/D/G/rho.mine",
Status.Kind.unversioned);
// update the backup working copy from the repository
assertEquals("wrong revision number from update",
client.update(backupTest.getWCPath(), null, true),
2);
// check the status of the backup working copy
backupTest.checkStatus();
// flag A/mu as resolved
client.resolved(backupTest.getWCPath()+"/A/mu", false);
backupTest.getWc().setItemTextStatus("A/mu", Status.Kind.modified);
backupTest.getWc().removeItem("A/mu.r1");
backupTest.getWc().removeItem("A/mu.r2");
backupTest.getWc().removeItem("A/mu.mine");
// flag A/D/G/rho as resolved
client.resolved(backupTest.getWCPath()+"/A/D/G/rho", false);
backupTest.getWc().setItemTextStatus("A/D/G/rho",
Status.Kind.modified);
backupTest.getWc().removeItem("A/D/G/rho.r1");
backupTest.getWc().removeItem("A/D/G/rho.r2");
backupTest.getWc().removeItem("A/D/G/rho.mine");
// check the status after the conflicts are flaged as resolved
backupTest.checkStatus();
}
/**
* Test the basic SVNClient.cleanup functionality.
* @throws Throwable
*/
public void testBasicCleanup() throws Throwable
{
// create a test working copy
OneTest thisTest = new OneTest();
// create a lock file in A/B
File adminLock = new File(thisTest.getWorkingCopy(),"A/B/" +
getAdminDirectoryName() + "/lock");
PrintWriter pw = new PrintWriter(new FileOutputStream(adminLock));
pw.print("stop looking!");
pw.close();
thisTest.getWc().setItemIsLocked("A/B", true);
// create a lock file in A/D/G
adminLock = new File(thisTest.getWorkingCopy(),"A/D/G/" +
getAdminDirectoryName() + "/lock");
pw = new PrintWriter(new FileOutputStream(adminLock));
pw.print("stop looking!");
pw.close();
thisTest.getWc().setItemIsLocked("A/D/G", true);
// create a lock file in A/C
adminLock = new File(thisTest.getWorkingCopy(),"A/C/" +
getAdminDirectoryName() + "/lock");
pw = new PrintWriter(new FileOutputStream(adminLock));
pw.print("stop looking!");
pw.close();
thisTest.getWc().setItemIsLocked("A/C", true);
// test the status of the working copy
thisTest.checkStatus();
// run cleanup
client.cleanup(thisTest.getWCPath());
thisTest.getWc().setItemIsLocked("A/B", false);
thisTest.getWc().setItemIsLocked("A/D/G", false);
thisTest.getWc().setItemIsLocked("A/C", false);
// test the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.revert functionality.
* @throws Throwable
*/
public void testBasicRevert() throws Throwable
{
// create a test working copy
OneTest thisTest = new OneTest();
// modify A/B/E/beta
File file = new File(thisTest.getWorkingCopy(), "A/B/E/beta");
PrintWriter pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("Added some text to 'beta'.");
pw.close();
thisTest.getWc().setItemTextStatus("A/B/E/beta", Status.Kind.modified);
// modify iota
file = new File(thisTest.getWorkingCopy(), "iota");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("Added some text to 'iota'.");
pw.close();
thisTest.getWc().setItemTextStatus("iota", Status.Kind.modified);
// modify A/D/G/rho
file = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("Added some text to 'rho'.");
pw.close();
thisTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.modified);
// create new file A/D/H/zeta and add it to subversion
file = new File(thisTest.getWorkingCopy(), "A/D/H/zeta");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("Added some text to 'zeta'.");
pw.close();
thisTest.getWc().addItem("A/D/H/zeta", "Added some text to 'zeta'.");
thisTest.getWc().setItemTextStatus("A/D/H/zeta", Status.Kind.added);
client.add(file.getAbsolutePath(), false);
// test the status of the working copy
thisTest.checkStatus();
// revert the changes
client.revert(thisTest.getWCPath()+"/A/B/E/beta", false);
thisTest.getWc().setItemTextStatus("A/B/E/beta", Status.Kind.normal);
client.revert(thisTest.getWCPath()+"/iota", false);
thisTest.getWc().setItemTextStatus("iota", Status.Kind.normal);
client.revert(thisTest.getWCPath()+"/A/D/G/rho", false);
thisTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.normal);
client.revert(thisTest.getWCPath()+"/A/D/H/zeta", false);
thisTest.getWc().setItemTextStatus("A/D/H/zeta",
Status.Kind.unversioned);
thisTest.getWc().setItemNodeKind("A/D/H/zeta", NodeKind.unknown);
// test the status of the working copy
thisTest.checkStatus();
// delete A/B/E/beta and revert the change
file = new File(thisTest.getWorkingCopy(), "A/B/E/beta");
file.delete();
client.revert(file.getAbsolutePath(), false);
// resurected file should not be readonly
assertTrue("reverted file is not readonly",
file.canWrite()&& file.canRead());
// test the status of the working copy
thisTest.checkStatus();
// create & add the directory X
client.mkdir(new String[] {thisTest.getWCPath()+"/X"}, null);
thisTest.getWc().addItem("X", null);
thisTest.getWc().setItemTextStatus("X", Status.Kind.added);
// test the status of the working copy
thisTest.checkStatus();
// remove & revert X
removeDirOrFile(new File(thisTest.getWorkingCopy(), "X"));
client.revert(thisTest.getWCPath()+"/X", false);
thisTest.getWc().removeItem("X");
// test the status of the working copy
thisTest.checkStatus();
// delete the directory A/B/E
client.remove(new String[] {thisTest.getWCPath()+"/A/B/E"}, null, true);
removeDirOrFile(new File(thisTest.getWorkingCopy(), "A/B/E"));
thisTest.getWc().setItemTextStatus("A/B/E", Status.Kind.deleted);
thisTest.getWc().removeItem("A/B/E/alpha");
thisTest.getWc().removeItem("A/B/E/beta");
// test the status of the working copy
thisTest.checkStatus();
// revert A/B/E -> this will not resurect it
client.revert(thisTest.getWCPath()+"/A/B/E", true);
// test the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.switch functionality.
* @throws Throwable
*/
public void testBasicSwitch() throws Throwable
{
// create the test working copy
OneTest thisTest = new OneTest();
// switch iota to A/D/gamma
String iotaPath = thisTest.getWCPath() + "/iota";
String gammaUrl = thisTest.getUrl() + "/A/D/gamma";
thisTest.getWc().setItemContent("iota",
greekWC.getItemContent("A/D/gamma"));
thisTest.getWc().setItemIsSwitched("iota", true);
client.doSwitch(iotaPath, gammaUrl, null, true);
// check the status of the working copy
thisTest.checkStatus();
// switch A/D/H to /A/D/G
String adhPath = thisTest.getWCPath() + "/A/D/H";
String adgURL = thisTest.getUrl() + "/A/D/G";
thisTest.getWc().setItemIsSwitched("A/D/H",true);
thisTest.getWc().removeItem("A/D/H/chi");
thisTest.getWc().removeItem("A/D/H/omega");
thisTest.getWc().removeItem("A/D/H/psi");
thisTest.getWc().addItem("A/D/H/pi",
thisTest.getWc().getItemContent("A/D/G/pi"));
thisTest.getWc().addItem("A/D/H/rho",
thisTest.getWc().getItemContent("A/D/G/rho"));
thisTest.getWc().addItem("A/D/H/tau",
thisTest.getWc().getItemContent("A/D/G/tau"));
client.doSwitch(adhPath, adgURL, null, true);
// check the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.remove functionality.
* @throws Throwable
*/
public void testBasicDelete() throws Throwable
{
// create the test working copy
OneTest thisTest = new OneTest();
// modify A/D/H/chi
File file = new File(thisTest.getWorkingCopy(), "A/D/H/chi");
PrintWriter pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("added to chi");
pw.close();
thisTest.getWc().setItemTextStatus("A/D/H/chi", Status.Kind.modified);
// set a property on A/D/G/rho file
client.propertySet(thisTest.getWCPath()+"/A/D/G/rho", "abc", "def",
true);
thisTest.getWc().setItemPropStatus("A/D/G/rho", Status.Kind.modified);
// set a property on A/B/F directory
client.propertySet(thisTest.getWCPath()+"/A/B/F", "abc", "def", false);
thisTest.getWc().setItemPropStatus("A/B/F", Status.Kind.modified);
// create a unversioned A/C/sigma file
file = new File(thisTest.getWCPath(),"A/C/sigma");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("unversioned sigma");
pw.close();
thisTest.getWc().addItem("A/C/sigma", "unversioned sigma");
thisTest.getWc().setItemTextStatus("A/C/sigma", Status.Kind.unversioned);
thisTest.getWc().setItemNodeKind("A/C/sigma", NodeKind.unknown);
// create unversioned directory A/C/Q
file = new File(thisTest.getWCPath(), "A/C/Q");
file.mkdir();
thisTest.getWc().addItem("A/C/Q", null);
thisTest.getWc().setItemNodeKind("A/C/Q", NodeKind.unknown);
thisTest.getWc().setItemTextStatus("A/C/Q", Status.Kind.unversioned);
// create & add the directory A/B/X
file = new File(thisTest.getWCPath(), "A/B/X");
client.mkdir(new String[] {file.getAbsolutePath()}, null);
thisTest.getWc().addItem("A/B/X", null);
thisTest.getWc().setItemTextStatus("A/B/X", Status.Kind.added);
// create & add the file A/B/X/xi
file = new File(file, "xi");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("added xi");
pw.close();
client.add(file.getAbsolutePath(), false);
thisTest.getWc().addItem("A/B/X/xi", "added xi");
thisTest.getWc().setItemTextStatus("A/B/X/xi", Status.Kind.added);
// create & add the directory A/B/Y
file = new File(thisTest.getWCPath(), "A/B/Y");
client.mkdir(new String[] {file.getAbsolutePath()}, null);
thisTest.getWc().addItem("A/B/Y", null);
thisTest.getWc().setItemTextStatus("A/B/Y", Status.Kind.added);
// test the status of the working copy
thisTest.checkStatus();
// the following removes should all fail without force
try
{
// remove of A/D/H/chi without force should fail, because it is
// modified
client.remove(new String[] {thisTest.getWCPath()+"/A/D/H/chi"},
null, false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/D/H without force should fail, because A/D/H/chi is
// modified
client.remove(new String[] {thisTest.getWCPath()+"/A/D/H"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/D/G/rho without force should fail, because it has
// a new property
client.remove(new String[] {thisTest.getWCPath()+"/A/D/G/rho"},
null, false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/D/G without force should fail, because A/D/G/rho has
// a new property
client.remove(new String[] {thisTest.getWCPath()+"/A/D/G"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/B/F without force should fail, because it has
// a new property
client.remove(new String[] {thisTest.getWCPath()+"/A/B/F"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/B without force should fail, because A/B/F has
// a new property
client.remove(new String[] {thisTest.getWCPath()+"/A/B"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/C/sigma without force should fail, because it is
// unversioned
client.remove(new String[] {thisTest.getWCPath()+"/A/C/sigma"},
null, false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/C without force should fail, because A/C/sigma is
// unversioned
client.remove(new String[] {thisTest.getWCPath()+"/A/C"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/B/X without force should fail, because it is new
client.remove(new String[] {thisTest.getWCPath()+"/A/B/X"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
// check the status of the working copy
thisTest.checkStatus();
// the following removes should all work
client.remove(new String[] {thisTest.getWCPath()+"/A/B/E"}, null,
false);
thisTest.getWc().setItemTextStatus("A/B/E",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/B/E/alpha",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/B/E/beta",Status.Kind.deleted);
client.remove(new String[] {thisTest.getWCPath()+"/A/D/H"}, null, true);
thisTest.getWc().setItemTextStatus("A/D/H",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/chi",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/omega",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/psi",Status.Kind.deleted);
client.remove(new String[] {thisTest.getWCPath()+"/A/D/G"}, null, true);
thisTest.getWc().setItemTextStatus("A/D/G",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/rho",Status.Kind.deleted);
thisTest.getWc().setItemPropStatus("A/D/G/rho", Status.Kind.none);
thisTest.getWc().setItemTextStatus("A/D/G/pi",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/tau",Status.Kind.deleted);
client.remove(new String[] {thisTest.getWCPath()+"/A/B/F"}, null, true);
thisTest.getWc().setItemTextStatus("A/B/F",Status.Kind.deleted);
thisTest.getWc().setItemPropStatus("A/B/F", Status.Kind.none);
client.remove(new String[] {thisTest.getWCPath()+"/A/C"}, null, true);
thisTest.getWc().setItemTextStatus("A/C",Status.Kind.deleted);
client.remove(new String[] {thisTest.getWCPath()+"/A/B/X"}, null, true);
file = new File(thisTest.getWorkingCopy(), "iota");
file.delete();
client.remove(new String[] {file.getAbsolutePath()}, null, true);
thisTest.getWc().setItemTextStatus("iota",Status.Kind.deleted);
file = new File(thisTest.getWorkingCopy(), "A/D/gamma");
file.delete();
client.remove(new String[] {file.getAbsolutePath()}, null, false);
thisTest.getWc().setItemTextStatus("A/D/gamma",Status.Kind.deleted);
client.remove(new String[] {file.getAbsolutePath()}, null, true);
client.remove(new String[] {thisTest.getWCPath()+"/A/B/E"}, null,
false);
thisTest.getWc().removeItem("A/B/X");
thisTest.getWc().removeItem("A/B/X/xi");
thisTest.getWc().removeItem("A/C/sigma");
thisTest.getWc().removeItem("A/C/Q");
thisTest.checkStatus();
client.remove(new String[] {thisTest.getWCPath()+"/A/D"},null, true);
thisTest.getWc().setItemTextStatus("A/D", Status.Kind.deleted);
thisTest.getWc().removeItem("A/D/Y");
// check the status of the working copy
thisTest.checkStatus();
// confirm that the file are realy deleted
assertFalse("failed to remove text modified file",
new File(thisTest.getWorkingCopy(), "A/D/G/rho").exists());
assertFalse("failed to remove prop modified file",
new File(thisTest.getWorkingCopy(), "A/D/H/chi").exists());
assertFalse("failed to remove unversioned file",
new File(thisTest.getWorkingCopy(), "A/C/sigma").exists());
assertFalse("failed to remove unmodified file",
new File(thisTest.getWorkingCopy(), "A/B/E/alpha").exists());
file = new File(thisTest.getWorkingCopy(),"A/B/F");
assertTrue("removed versioned dir", file.exists()
&& file.isDirectory());
assertFalse("failed to remove unversioned dir",
new File(thisTest.getWorkingCopy(), "A/C/Q").exists());
assertFalse("failed to remove added dir",
new File(thisTest.getWorkingCopy(), "A/B/X").exists());
// delete unversioned file foo
file = new File(thisTest.getWCPath(),"foo");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("unversioned foo");
pw.close();
client.remove(new String[] {file.getAbsolutePath()}, null, true);
assertFalse("failed to remove unversioned file foo", file.exists());
try
{
// delete non-existant file foo
client.remove(new String[] {file.getAbsolutePath()}, null, true);
fail("missing exception");
}
catch(ClientException expected)
{
}
// delete file iota in the repository
addExpectedCommitItem(null, thisTest.getUrl(), "iota", NodeKind.none,
CommitItemStateFlags.Delete);
client.remove(new String[] {thisTest.getUrl()+"/iota"},
"delete iota URL", false);
}
public void testBasicCheckoutDeleted() throws Throwable
{
// create working copy
OneTest thisTest = new OneTest();
// delete A/D and its content
client.remove(new String[] {thisTest.getWCPath()+"/A/D"}, null, true);
thisTest.getWc().setItemTextStatus("A/D", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/pi", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/tau", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/chi", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/psi", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/omega", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/gamma", Status.Kind.deleted);
// check the working copy status
thisTest.checkStatus();
// commit the change
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D", NodeKind.dir,
CommitItemStateFlags.Delete);
assertEquals("wrong revision from commit",
client.commit(new String[]{thisTest.getWCPath()}, "log message",
true),2);
thisTest.getWc().removeItem("A/D");
thisTest.getWc().removeItem("A/D/G");
thisTest.getWc().removeItem("A/D/G/rho");
thisTest.getWc().removeItem("A/D/G/pi");
thisTest.getWc().removeItem("A/D/G/tau");
thisTest.getWc().removeItem("A/D/H");
thisTest.getWc().removeItem("A/D/H/chi");
thisTest.getWc().removeItem("A/D/H/psi");
thisTest.getWc().removeItem("A/D/H/omega");
thisTest.getWc().removeItem("A/D/gamma");
// check the working copy status
thisTest.checkStatus();
// check out the previous revision
client.checkout(thisTest.getUrl()+"/A/D", thisTest.getWCPath()+"/new_D",
new Revision.Number(1), true);
}
/**
* Test if Subversion will detect the change of a file to a
* direcory.
* @throws Throwable
*/
public void testBasicNodeKindChange() throws Throwable
{
// create working copy
OneTest thisTest = new OneTest();
// remove A/D/gamma
client.remove(new String[] {thisTest.getWCPath()+"/A/D/gamma"}, null,
false);
thisTest.getWc().setItemTextStatus("A/D/gamma", Status.Kind.deleted);
// check the working copy status
thisTest.checkStatus();
try
{
// creating a directory in the place of the deleted file should
// fail
client.mkdir(new String[] {thisTest.getWCPath()+"/A/D/gamma"},
null);
fail("can change node kind");
}
catch(ClientException e)
{
}
// check the working copy status
thisTest.checkStatus();
// commit the deletion
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/gamma", NodeKind.file,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},"log message",
true), 2);
thisTest.getWc().removeItem("A/D/gamma");
// check the working copy status
thisTest.checkStatus();
try
{
// creating a directory in the place of the deleted file should
// still fail
client.mkdir(
new String[] {thisTest.getWCPath()+"/A/D/gamma"}, null);
fail("can change node kind");
}
catch(ClientException e)
{
}
// check the working copy status
thisTest.checkStatus();
// update the working copy
client.update(thisTest.getWCPath(), null, true);
// check the working copy status
thisTest.checkStatus();
// now creating the directory should succeed
client.mkdir(new String[] {thisTest.getWCPath()+"/A/D/gamma"}, null);
thisTest.getWc().addItem("A/D/gamma", null);
thisTest.getWc().setItemTextStatus("A/D/gamma", Status.Kind.added);
// check the working copy status
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.import functionality.
* @throws Throwable
*/
public void testBasicImport() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// create new_file
File file = new File(thisTest.getWCPath(),"new_file");
PrintWriter pw = new PrintWriter(new FileOutputStream(file));
pw.print("some text");
pw.close();
// import new_file info dirA/dirB/newFile
addExpectedCommitItem(thisTest.getWCPath(),
null, "new_file", NodeKind.none, CommitItemStateFlags.Add);
client.doImport(file.getAbsolutePath(),
thisTest.getUrl()+"/dirA/dirB/new_file",
"log message for new import", true);
// delete new_file
file.delete();
// update the working
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(), null, true),2);
thisTest.getWc().addItem("dirA", null);
thisTest.getWc().setItemWorkingCopyRevision("dirA",2);
thisTest.getWc().addItem("dirA/dirB", null);
thisTest.getWc().setItemWorkingCopyRevision("dirA/dirB",2);
thisTest.getWc().addItem("dirA/dirB/new_file", "some text");
thisTest.getWc().setItemWorkingCopyRevision("dirA/dirB/new_file",2);
// test the working copy status
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.fileContent functionality.
* @throws Throwable
*/
public void testBasicCat() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// modify A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter pw = new PrintWriter(new FileOutputStream(mu, true));
pw.print("some text");
pw.close();
// get the content from the repository
byte[] content = client.fileContent(thisTest.getWCPath()+"/A/mu", null);
byte[] testContent = thisTest.getWc().getItemContent("A/mu").getBytes();
// the content should be the same
assertTrue("content changed", Arrays.equals(content, testContent));
}
/**
* Test the basic SVNClient.fileContent functionality.
* @throws Throwable
*/
public void testBasicCatStream() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// modify A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter pw = new PrintWriter(new FileOutputStream(mu, true));
pw.print("some text");
pw.close();
// get the content from the repository
ByteArrayOutputStream baos = new ByteArrayOutputStream();
client.streamFileContent(thisTest.getWCPath() + "/A/mu", null, null,
100, baos);
byte[] content = baos.toByteArray();
byte[] testContent = thisTest.getWc().getItemContent("A/mu").getBytes();
// the content should be the same
assertTrue("content changed", Arrays.equals(content, testContent));
}
/**
* Test the basic SVNClient.list functionality.
* @throws Throwable
*/
public void testBasicLs() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// list the repository root dir
DirEntry[] entries = client.list(thisTest.getWCPath(), null, false);
thisTest.getWc().check(entries, "", false);
// list directory A
entries = client.list(thisTest.getWCPath() + "/A", null, false);
thisTest.getWc().check(entries, "A", false);
// list directory A in BASE revision
entries = client.list(thisTest.getWCPath() + "/A", Revision.BASE,
false);
thisTest.getWc().check(entries, "A", false);
// list file A/mu
entries = client.list(thisTest.getWCPath() + "/A/mu", null, false);
thisTest.getWc().check(entries, "A/mu");
}
/**
* Test the basis SVNClient.add functionality with files that
* should be ignored.
* @throws Throwable
*/
public void testBasicAddIgnores() throws Throwable
{
// create working copy
OneTest thisTest = new OneTest();
// create dir
File dir = new File(thisTest.getWorkingCopy(), "dir");
dir.mkdir();
// create dir/foo.c
File fileC = new File(dir, "foo.c");
new FileOutputStream(fileC).close();
// create dir/foo.o (should be ignored)
File fileO = new File(dir, "foo.o");
new FileOutputStream(fileO).close();
// add dir
client.add(dir.getAbsolutePath(), true);
thisTest.getWc().addItem("dir", null);
thisTest.getWc().setItemTextStatus("dir",Status.Kind.added);
thisTest.getWc().addItem("dir/foo.c", "");
thisTest.getWc().setItemTextStatus("dir/foo.c",Status.Kind.added);
thisTest.getWc().addItem("dir/foo.o", "");
thisTest.getWc().setItemTextStatus("dir/foo.o",Status.Kind.ignored);
thisTest.getWc().setItemNodeKind("dir/foo.o", NodeKind.unknown);
// test the working copy status
thisTest.checkStatus();
}
/**
* Test the basis SVNClient.import functionality with files that
* should be ignored.
* @throws Throwable
*/
public void testBasicImportIgnores() throws Throwable
{
// create working copy
OneTest thisTest = new OneTest();
// create dir
File dir = new File(thisTest.getWorkingCopy(), "dir");
dir.mkdir();
// create dir/foo.c
File fileC = new File(dir, "foo.c");
new FileOutputStream(fileC).close();
// create dir/foo.o (should be ignored)
File fileO = new File(dir, "foo.o");
new FileOutputStream(fileO).close();
// import dir
addExpectedCommitItem(thisTest.getWCPath(),
null, "dir", NodeKind.none, CommitItemStateFlags.Add);
client.doImport(dir.getAbsolutePath(), thisTest.getUrl()+"/dir",
"log message for import", true);
// remove dir
removeDirOrFile(dir);
// udpate the working copy
assertEquals("wrong revision from update", 2,
client.update(thisTest.getWCPath(), null, true));
thisTest.getWc().addItem("dir", null);
thisTest.getWc().addItem("dir/foo.c", "");
// test the working copy status
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.info functionality.
* @throws Throwable
*/
public void testBasicInfo() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// get the item information and test it
Info info = client.info(thisTest.getWCPath()+"/A/mu");
assertEquals("wrong revision from info", 1,
info.getLastChangedRevision());
assertEquals("wrong schedule kind from info", ScheduleKind.normal,
info.getSchedule());
assertEquals("wrong node kind from info", NodeKind.file,
info.getNodeKind());
}
/**
* Test the basic SVNClient.logMessages functionality.
* @throws Throwable
*/
public void testBasicLogMessage() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// get the commit message of the initial import and test it
LogMessage lm[] = client.logMessages(thisTest.getWCPath(), null,
null, false, true);
assertEquals("wrong number of objects", 1, lm.length);
assertEquals("wrong message", "Log Message", lm[0].getMessage());
assertEquals("wrong revision", 1, lm[0].getRevisionNumber());
assertEquals("wrong user", "jrandom", lm[0].getAuthor());
assertNotNull("changed paths set", lm[0].getChangedPaths());
ChangePath cp[] = lm[0].getChangedPaths();
assertEquals("wrong number of chang pathes", 20, cp.length);
assertEquals("wrong path", "/A", cp[0].getPath());
assertEquals("wrong copy source rev", -1, cp[0].getCopySrcRevision());
assertNull("wrong copy source path", cp[0].getCopySrcPath());
assertEquals("wrong action", 'A', cp[0].getAction());
assertEquals("wrong time with getTimeMicros()",
lm[0].getTimeMicros()/1000,
lm[0].getDate().getTime());
assertEquals("wrong time with getTimeMillis()",
lm[0].getTimeMillis(),
lm[0].getDate().getTime());
assertEquals("wrong date with getTimeMicros()",
lm[0].getDate(),
new java.util.Date(lm[0].getTimeMicros()/1000));
assertEquals("wrong date with getTimeMillis()",
lm[0].getDate(),
new java.util.Date(lm[0].getTimeMillis()));
}
/**
* Test the basic SVNClient.getVersionInfo functionality.
* @throws Throwable
* @since 1.2
*/
public void testBasicVersionInfo() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
assertEquals("wrong version info",
"1",
client.getVersionInfo(thisTest.getWCPath(), null, false));
}
/**
* Test the basic SVNClient locking functionality.
* @throws Throwable
* @since 1.2
*/
public void testBasicLocking() throws Throwable
{
// build the first working copy
OneTest thisTest = new OneTest();
client.propertySet(thisTest.getWCPath()+"/A/mu",
PropertyData.NEEDS_LOCK, "*", false);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
CommitItemStateFlags.PropMods);
assertEquals("bad revision number on commit", 2,
client.commit(new String[] {thisTest.getWCPath()},
"message", true));
File f = new File(thisTest.getWCPath()+"/A/mu");
assertEquals("file should be read only now", false, f.canWrite());
client.lock(new String[] {thisTest.getWCPath()+"/A/mu"},
"comment", false);
assertEquals("file should be read write now", true, f.canWrite());
client.unlock(new String[]{thisTest.getWCPath()+"/A/mu"}, false);
assertEquals("file should be read only now", false, f.canWrite());
client.lock(new String[]{thisTest.getWCPath()+"/A/mu"},
"comment", false);
assertEquals("file should be read write now", true, f.canWrite());
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
0);
assertEquals("rev number from commit", -1,
client.commit(new String[]{thisTest.getWCPath()},
"message", true));
assertEquals("file should be read write now", true, f.canWrite());
try
{
// Attempt to lock an invalid path
client.lock(new String[]{thisTest.getWCPath()+"/A/mu2"}, "comment",
false);
fail("missing exception");
}
catch (ClientException expected)
{
}
}
/**
* Test the basic SVNClient.info2 functionality.
* @throws Throwable
* @since 1.2
*/
public void testBasicInfo2() throws Throwable
{
// build the first working copy
OneTest thisTest = new OneTest();
final String failureMsg = "Incorrect number of info objects";
Info2[] infos = client.info2(thisTest.getWCPath(), null, null, false);
assertEquals(failureMsg, 1, infos.length);
infos = client.info2(thisTest.getWCPath(), null, null, true);
assertEquals(failureMsg, 21, infos.length);
for (int i = 0; i < infos.length; i++)
{
Info2 info = infos[i];
assertNull("Unexpected changelist present",
info.getChangelistName());
boolean isFile = info.getKind() == NodeKind.file;
assertTrue("Unexpected working file size " + info.getWorkingSize()
+ " for '" + info + '\'',
(isFile ? info.getWorkingSize() > -1 :
info.getWorkingSize() == -1));
// We shouldn't know the repository file size when only
// examining the WC.
assertEquals("Unexpected repos file size for '" + info + '\'',
-1, info.getReposSize());
// Examine depth
assertEquals(Depth.infinity, info.getDepth());
}
// Create wc with a depth of Depth.empty
String secondWC = thisTest.getWCPath() + ".empty";
removeDirOrFile(new File(secondWC));
client.checkout(thisTest.getUrl(), secondWC, null, null, Depth.empty,
false, true);
infos = client.info2(secondWC, null, null, false);
// Examine that depth is Depth.empty
assertEquals(Depth.empty, infos[0].getDepth());
}
/**
* Test basic changelist functionality.
* @throws Throwable
* @since 1.5
*/
public void testBasicChangelist() throws Throwable
{
// build the working copy
OneTest thisTest = new OneTest();
String changelistName = "changelist1";
String[] changelists = new String[] { changelistName };
MyChangelistCallback clCallback = new MyChangelistCallback();
String[] paths = new String[]
{thisTest.getWCPath() + "/iota"};
// Add a path to a changelist, and check to see if it got added
client.addToChangelist(paths, changelistName, Depth.infinity, null);
String[] cl = new String[1];
client.getChangelists(thisTest.getWCPath(), changelists,
Depth.infinity, clCallback);
cl[0] = (String) clCallback.get(paths[0]).get(0);
assertTrue(java.util.Arrays.equals(cl, changelists));
// Does status report this changelist?
Status[] status = client.status(paths[0], false, false, false, false,
false);
assertEquals(status[0].getChangelist(), changelistName);
// Remove the path from the changelist, and check to see if the path is
// actually removed.
client.removeFromChangelists(paths, Depth.infinity, changelists);
clCallback.clear();
client.getChangelists(thisTest.getWCPath(), changelists,
Depth.infinity, clCallback);
assertTrue(clCallback.isEmpty());
}
/**
* Helper method for testing mergeinfo retrieval. Assumes
* that <code>targetPath</code> has both merge history and
* available merges.
* @param expectedMergedStart The expected start revision from the
* merge history for <code>mergeSrc</code>.
* @param expectedMergedEnd The expected end revision from the
* merge history for <code>mergeSrc</code>.
* @param expectedAvailableStart The expected start available revision
* from the merge history for <code>mergeSrc</code>. Zero if no need
* to test the available range.
* @param expectedAvailableEnd The expected end available revision
* from the merge history for <code>mergeSrc</code>.
* @param targetPath The path for which to acquire mergeinfo.
* @param mergeSrc The URL from which to consider merges.
*/
private void acquireMergeinfoAndAssertEquals(long expectedMergeStart,
long expectedMergeEnd,
long expectedAvailableStart,
long expectedAvailableEnd,
String targetPath,
String mergeSrc)
throws SubversionException
{
// Verify expected merge history.
Mergeinfo mergeInfo = client.getMergeinfo(targetPath, Revision.HEAD);
assertNotNull("Missing merge info on '" + targetPath + '\'',
mergeInfo);
List ranges = mergeInfo.getRevisions(mergeSrc);
assertTrue("Missing merge info for source '" + mergeSrc + "' on '" +
targetPath + '\'', ranges != null && !ranges.isEmpty());
RevisionRange range = (RevisionRange) ranges.get(0);
String expectedMergedRevs = expectedMergeStart + "-" + expectedMergeEnd;
assertEquals("Unexpected first merged revision range for '" +
mergeSrc + "' on '" + targetPath + '\'',
expectedMergedRevs, range.toString());
// Verify expected available merges.
if (expectedAvailableStart > 0)
{
long[] availableRevs =
getMergeinfoRevisions(MergeinfoLogKind.eligible, targetPath,
Revision.HEAD, mergeSrc,
Revision.HEAD);
assertNotNull("Missing eligible merge info on '"+targetPath + '\'',
availableRevs);
assertExpectedMergeRange(expectedAvailableStart,
expectedAvailableEnd, availableRevs);
}
}
/**
* Calls the API to get mergeinfo revisions and returns
* the revision numbers in a sorted array, or null if there
* are no revisions to return.
* @since 1.5
*/
private long[] getMergeinfoRevisions(int kind, String pathOrUrl,
Revision pegRevision,
String mergeSourceUrl,
Revision srcPegRevision) {
class Callback implements LogMessageCallback {
List revList = new ArrayList();
public void singleMessage(ChangePath[] changedPaths, long revision,
Map revprops, boolean hasChildren) {
revList.add(new Long(revision));
}
public long[] getRevisions() {
long[] revisions = new long[revList.size()];
int i = 0;
for (Iterator iter = revList.iterator(); iter.hasNext();) {
Long revision = (Long) iter.next();
revisions[i] = revision.longValue();
i++;
}
return revisions;
}
}
try {
Callback callback = new Callback();
client.getMergeinfoLog(kind, pathOrUrl, pegRevision, mergeSourceUrl,
srcPegRevision, false, null, callback);
return callback.getRevisions();
} catch (ClientException e) {
return null;
}
}
/**
* Append the text <code>toAppend</code> to the WC file at
* <code>path</code>, and update the expected WC state
* accordingly.
*
* @param thisTest The test whose expected WC to tweak.
* @param path The working copy-relative path to change.
* @param toAppend The text to append to <code>path</code>.
* @param rev The expected revision number for thisTest's WC.
* @return The file created during the setup.
* @since 1.5
*/
private File appendText(OneTest thisTest, String path, String toAppend,
int rev)
throws FileNotFoundException
{
File f = new File(thisTest.getWorkingCopy(), path);
PrintWriter writer = new PrintWriter(new FileOutputStream(f, true));
writer.print(toAppend);
writer.close();
if (rev > 0)
{
WC wc = thisTest.getWc();
wc.setItemWorkingCopyRevision(path, rev);
wc.setItemContent(path, wc.getItemContent(path) + toAppend);
}
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(), path,
NodeKind.file, CommitItemStateFlags.TextMods);
return f;
}
/**
* Test the basic functionality of SVNClient.merge().
* @throws Throwable
* @since 1.2
*/
public void testBasicMerge() throws Throwable
{
OneTest thisTest = setupAndPerformMerge();
// Verify that there are now potential merge sources.
String[] suggestedSrcs =
client.suggestMergeSources(thisTest.getWCPath() + "/branches/A",
Revision.WORKING);
assertNotNull(suggestedSrcs);
assertEquals(1, suggestedSrcs.length);
// Test that getMergeinfo() returns null.
assertNull(client.getMergeinfo(new File(thisTest.getWCPath(), "A")
.toString(), Revision.HEAD));
// Merge and commit some changes (r4).
appendText(thisTest, "A/mu", "xxx", 4);
appendText(thisTest, "A/D/G/rho", "yyy", 4);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
4);
// Add a "begin merge" notification handler.
final Revision[] actualRange = new Revision[2];
Notify2 notify = new Notify2()
{
public void onNotify(NotifyInformation info)
{
if (info.getAction() == NotifyAction.merge_begin)
{
RevisionRange r = info.getMergeRange();
actualRange[0] = r.getFromRevision();
actualRange[1] = r.getToRevision();
}
}
};
client.notification2(notify);
// merge changes in A to branches/A
String branchPath = thisTest.getWCPath() + "/branches/A";
String modUrl = thisTest.getUrl() + "/A";
// test --dry-run
client.merge(modUrl, new Revision.Number(2), modUrl, Revision.HEAD,
branchPath, false, true, false, true);
assertEquals("Notification of beginning of merge reported incorrect " +
"start revision", new Revision.Number(2), actualRange[0]);
assertEquals("Notification of beginning of merge reported incorrect " +
"end revision", new Revision.Number(4), actualRange[1]);
// now do the real merge
client.merge(modUrl, new Revision.Number(2), modUrl, Revision.HEAD,
branchPath, false, true, false, false);
assertEquals("Notification of beginning of merge reported incorrect " +
"start revision", new Revision.Number(2), actualRange[0]);
assertEquals("Notification of beginning of merge reported incorrect " +
"end revision", new Revision.Number(4), actualRange[1]);
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A", NodeKind.dir,
CommitItemStateFlags.PropMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 5);
// Merge and commit some more changes (r6).
appendText(thisTest, "A/mu", "xxxr6", 6);
appendText(thisTest, "A/D/G/rho", "yyyr6", 6);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
6);
// Test retrieval of mergeinfo from a WC path.
String targetPath =
new File(thisTest.getWCPath(), "branches/A/mu").getPath();
final String mergeSrc = thisTest.getUrl() + "/A/mu";
acquireMergeinfoAndAssertEquals(2, 4, 6, 6, targetPath, mergeSrc);
// Test retrieval of mergeinfo from the repository.
targetPath = thisTest.getUrl() + "/branches/A/mu";
acquireMergeinfoAndAssertEquals(2, 4, 6, 6, targetPath, mergeSrc);
}
/**
* Test merge with automatic source and revision determination
* (e.g. 'svn merge -g').
* @throws Throwable
* @since 1.5
*/
public void testMergeUsingHistory() throws Throwable
{
OneTest thisTest = setupAndPerformMerge();
// Test that getMergeinfo() returns null.
assertNull(client.getMergeinfo(new File(thisTest.getWCPath(), "A")
.toString(), Revision.HEAD));
// Merge and commit some changes (r4).
appendText(thisTest, "A/mu", "xxx", 4);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
4);
String branchPath = thisTest.getWCPath() + "/branches/A";
String modUrl = thisTest.getUrl() + "/A";
Revision unspec = new Revision(RevisionKind.unspecified);
client.merge(modUrl, Revision.HEAD,
new RevisionRange[] { new RevisionRange(unspec, unspec) },
branchPath, true, Depth.infinity, false, false, false);
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A", NodeKind.dir,
CommitItemStateFlags.PropMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 5);
}
/**
* Test reintegrating a branch with trunk
* (e.g. 'svn merge --reintegrate').
* @throws Throwable
* @since 1.5
*/
public void testMergeReintegrate() throws Throwable
{
OneTest thisTest = setupAndPerformMerge();
// Test that getMergeinfo() returns null.
assertNull(client.getMergeinfo(new File(thisTest.getWCPath(), "A")
.toString(), Revision.HEAD));
// Merge and commit some changes to main (r4).
appendText(thisTest, "A/mu", "xxx", 4);
assertEquals("wrong revision number from main commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
4);
// Merge and commit some changes to branch (r5).
appendText(thisTest, "branches/A/D/G/rho", "yyy", -1);
assertEquals("wrong revision number from branch commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
5);
// update the branch WC (to r5) before merge
client.update(thisTest.getWCPath() + "/branches", Revision.HEAD, true);
String branchPath = thisTest.getWCPath() + "/branches/A";
String modUrl = thisTest.getUrl() + "/A";
Revision unspec = new Revision(RevisionKind.unspecified);
client.merge(modUrl, Revision.HEAD,
new RevisionRange[] { new RevisionRange(unspec, unspec) },
branchPath, true, Depth.infinity, false, false, false);
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A", NodeKind.dir,
CommitItemStateFlags.PropMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 6);
// now we --reintegrate the branch with main
String branchUrl = thisTest.getUrl() + "/branches/A";
try
{
client.mergeReintegrate(branchUrl, Revision.HEAD,
thisTest.getWCPath() + "/A", false);
fail("reintegrate merged into a mixed-revision WC");
}
catch(ClientException e)
{
// update the WC (to r6) and try again
client.update(thisTest.getWCPath(), Revision.HEAD, true);
client.mergeReintegrate(branchUrl, Revision.HEAD,
thisTest.getWCPath() + "/A", false);
}
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A", NodeKind.dir,
CommitItemStateFlags.PropMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 7);
}
/**
* Test automatic merge conflict resolution.
* @throws Throwable
* @since 1.5
*/
public void testMergeConflictResolution() throws Throwable
{
// Add a conflict resolution callback which always chooses the
// user's version of a conflicted file.
client.setConflictResolver(new ConflictResolverCallback()
{
public ConflictResult resolve(ConflictDescriptor descrip)
{
return new ConflictResult(ConflictResult.chooseTheirsFull,
null);
}
});
OneTest thisTest = new OneTest();
String originalContents = thisTest.getWc().getItemContent("A/mu");
String expectedContents = originalContents + "xxx";
// Merge and commit a change (r2).
File mu = appendText(thisTest, "A/mu", "xxx", 2);
assertEquals("wrong revision number from commit", 2,
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true));
// Backdate the WC to the previous revision (r1).
client.update(thisTest.getWCPath(), Revision.getInstance(1), true);
// Prep for a merge conflict by changing A/mu in a different
// way.
mu = appendText(thisTest, "A/mu", "yyy", 1);
// Merge in the previous changes to A/mu (from r2).
RevisionRange[] ranges = new RevisionRange[1];
ranges[0] = new RevisionRange(new Revision.Number(1),
new Revision.Number(2));
client.merge(thisTest.getUrl(), Revision.HEAD, ranges,
thisTest.getWCPath(), false, Depth.infinity, false,
false, false);
assertFileContentsEquals("Unexpected conflict resolution",
expectedContents, mu);
}
/**
* Test merge --record-only
* @throws Throwable
* @since 1.5
*/
public void testRecordOnlyMerge() throws Throwable
{
OneTest thisTest = setupAndPerformMerge();
// Verify that there are now potential merge sources.
String[] suggestedSrcs =
client.suggestMergeSources(thisTest.getWCPath() + "/branches/A",
Revision.WORKING);
assertNotNull(suggestedSrcs);
assertEquals(1, suggestedSrcs.length);
// Test that getMergeinfo() returns null.
assertNull(client.getMergeinfo(new File(thisTest.getWCPath(), "A")
.toString(), Revision.HEAD));
// Merge and commit some changes (r4).
appendText(thisTest, "A/mu", "xxx", 4);
appendText(thisTest, "A/D/G/rho", "yyy", 4);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
4);
// --record-only merge changes in A to branches/A
String branchPath = thisTest.getWCPath() + "/branches/A";
String modUrl = thisTest.getUrl() + "/A";
RevisionRange[] ranges = new RevisionRange[1];
ranges[0] = new RevisionRange(new Revision.Number(2),
new Revision.Number(4));
client.merge(modUrl, Revision.HEAD, ranges,
branchPath, true, Depth.infinity, false, false, true);
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A", NodeKind.dir,
CommitItemStateFlags.PropMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 5);
// Test retrieval of mergeinfo from a WC path.
String targetPath =
new File(thisTest.getWCPath(), "branches/A").getPath();
final String mergeSrc = thisTest.getUrl() + "/A";
acquireMergeinfoAndAssertEquals(2, 4, 0, 0, targetPath, mergeSrc);
}
/**
* Setup a test with a WC. In the repository, create a
* "/branches" directory, with a branch of "/A" underneath it.
* Update the WC to reflect these modifications.
* @return This test.
*/
private OneTest setupAndPerformMerge()
throws Exception
{
OneTest thisTest = new OneTest();
// Verify that there are initially no potential merge sources.
String[] suggestedSrcs =
client.suggestMergeSources(thisTest.getWCPath(),
Revision.WORKING);
assertNotNull(suggestedSrcs);
assertEquals(0, suggestedSrcs.length);
// create branches directory in the repository (r2)
addExpectedCommitItem(null, thisTest.getUrl(), "branches",
NodeKind.none, CommitItemStateFlags.Add);
client.mkdir(new String[]{thisTest.getUrl() + "/branches"}, "log_msg");
// copy A to branches (r3)
addExpectedCommitItem(null, thisTest.getUrl(), "branches/A",
NodeKind.none, CommitItemStateFlags.Add);
client.copy(thisTest.getUrl() + "/A", thisTest.getUrl() +
"/branches/A", "create A branch", Revision.HEAD);
// update the WC (to r3) so that it has the branches folder
client.update(thisTest.getWCPath(), Revision.HEAD, true);
return thisTest;
}
/**
* Test the {@link SVNClientInterface.diff()} APIs.
* @since 1.5
*/
public void testDiff()
throws SubversionException, IOException
{
OneTest thisTest = new OneTest(true);
File diffOutput = new File(super.localTmp, thisTest.testName);
final String NL = System.getProperty("line.separator");
final String sepLine =
"===================================================================" + NL;
final String underSepLine =
"___________________________________________________________________" + NL;
final String expectedDiffBody =
"@@ -1 +1 @@" + NL +
"-This is the file 'iota'." + NL +
"\\ No newline at end of file" + NL +
"+This is the file 'mu'." + NL +
"\\ No newline at end of file" + NL;
final String iotaPath = thisTest.getWCPath().replace('\\', '/') + "/iota";
final String wcPath = fileToSVNPath(new File(thisTest.getWCPath()),
false);
// make edits to iota
PrintWriter writer = new PrintWriter(new FileOutputStream(iotaPath));
writer.print("This is the file 'mu'.");
writer.flush();
writer.close();
/*
* This test does tests with and without svn:eol-style set to native
* We will first run all of the tests where this does not matter so
* that they are not run twice.
*/
// Two-path diff of URLs.
String expectedDiffOutput = "Index: iota" + NL + sepLine +
"--- iota\t(.../iota)\t(revision 1)" + NL +
"+++ iota\t(.../A/mu)\t(revision 1)" + NL +
expectedDiffBody;
client.diff(thisTest.getUrl() + "/iota", Revision.HEAD,
thisTest.getUrl() + "/A/mu", Revision.HEAD,
diffOutput.getPath(), false, true, true, false);
assertFileContentsEquals("Unexpected diff output in file '" +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
// Test relativeToDir fails with urls. */
try
{
client.diff(thisTest.getUrl() + "/iota", Revision.HEAD,
thisTest.getUrl() + "/A/mu", Revision.HEAD,
thisTest.getUrl(), diffOutput.getPath(),
Depth.infinity, null, true, true, false);
fail("This test should fail becaus the relativeToDir parameter " +
"does not work with URLs");
}
catch (Exception ignored)
{
}
/* Testing the expected failure when relativeToDir is not a parent
path of the target. */
try
{
client.diff(iotaPath, Revision.BASE, iotaPath, Revision.WORKING,
"/non/existent/path", diffOutput.getPath(),
Depth.infinity, null, true, true, false);
fail("This test should fail because iotaPath is not a child of " +
"the relativeToDir parameter");
}
catch (Exception ignored)
{
}
// Test diff with a relative path on a directory with prop
// changes.
String aPath = fileToSVNPath(new File(thisTest.getWCPath() + "/A"),
false);
expectedDiffOutput = NL + "Property changes on: A" + NL +
underSepLine +
"Added: testprop" + NL +
"## -0,0 +1 ##" + NL +
"+Test property value." + NL;
client.propertySet(aPath, "testprop", "Test property value.", false);
client.diff(aPath, Revision.BASE, aPath, Revision.WORKING, wcPath,
diffOutput.getPath(), Depth.infinity, null, true, true,
false);
assertFileContentsEquals("Unexpected diff output in file '" +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
// Test diff where relativeToDir and path are the same.
expectedDiffOutput = NL + "Property changes on: ." + NL +
underSepLine +
"Added: testprop" + NL +
"## -0,0 +1 ##" + NL +
"+Test property value." + NL;
client.propertySet(aPath, "testprop", "Test property value.", false);
client.diff(aPath, Revision.BASE, aPath, Revision.WORKING, aPath,
diffOutput.getPath(), Depth.infinity, null, true, true,
false);
assertFileContentsEquals("Unexpected diff output in file '" +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
/*
* The rest of these tests are run twice. The first time
* without svn:eol-style set and the second time with the
* property set to native. This is tracked by the int named
* operativeRevision. It will have a value = 2 after the
* commit which sets the property
*/
for (int operativeRevision = 1; operativeRevision < 3; operativeRevision++)
{
String revisionPrefix = "While processing operativeRevison=" + operativeRevision + ". ";
String assertPrefix = revisionPrefix + "Unexpected diff output in file '";
// Undo previous edits to working copy
client.revert(wcPath, true);
if (operativeRevision == 2) {
// Set svn:eol-style=native on iota
client.propertyCreate(iotaPath, "svn:eol-style", "native", false);
String[] paths = new String[] {iotaPath};
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "iota",NodeKind.file,
CommitItemStateFlags.PropMods);
client.commit(paths, "Set svn:eol-style to native", false);
}
// make edits to iota and set expected output.
writer = new PrintWriter(new FileOutputStream(iotaPath));
writer.print("This is the file 'mu'.");
writer.flush();
writer.close();
expectedDiffOutput = "Index: " + iotaPath + NL + sepLine +
"--- " + iotaPath + "\t(revision " + operativeRevision + ")" + NL +
"+++ " + iotaPath + "\t(working copy)" + NL +
expectedDiffBody;
try
{
// Two-path diff of WC paths.
client.diff(iotaPath, Revision.BASE,
iotaPath, Revision.WORKING,
diffOutput.getPath(), false, true, true, false);
assertFileContentsEquals(assertPrefix +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
diffOutput.delete();
}
catch (ClientException e)
{
fail(revisionPrefix + e.getMessage());
}
try
{
// Peg revision diff of a single file.
client.diff(thisTest.getUrl() + "/iota", Revision.HEAD,
new Revision.Number(operativeRevision), Revision.HEAD,
diffOutput.getPath(), false, true, true, false);
assertFileContentsEquals(assertPrefix +
diffOutput.getPath() + '\'',
"", diffOutput);
diffOutput.delete();
}
catch (ClientException e)
{
fail(revisionPrefix + e.getMessage());
}
// Test svn diff with a relative path.
expectedDiffOutput = "Index: iota" + NL + sepLine +
"--- iota\t(revision " + operativeRevision + ")" + NL +
"+++ iota\t(working copy)" + NL +
expectedDiffBody;
try
{
client.diff(iotaPath, Revision.BASE, iotaPath,
Revision.WORKING, wcPath, diffOutput.getPath(),
Depth.infinity, null, true, true, false);
assertFileContentsEquals(assertPrefix +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
diffOutput.delete();
}
catch (ClientException e)
{
fail(revisionPrefix + e.getMessage());
}
try
{
// Test svn diff with a relative path and trailing slash.
client.diff(iotaPath, Revision.BASE, iotaPath,
Revision.WORKING, wcPath + "/",
diffOutput.getPath(), Depth.infinity, null,
true, true, false);
assertFileContentsEquals(assertPrefix +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
diffOutput.delete();
}
catch (ClientException e)
{
fail(revisionPrefix + e.getMessage());
}
}
}
private void assertFileContentsEquals(String msg, String expected,
File actual)
throws IOException
{
FileReader reader = new FileReader(actual);
StringBuffer buf = new StringBuffer();
int ch;
while ((ch = reader.read()) != -1)
{
buf.append((char) ch);
}
assertEquals(msg, expected, buf.toString());
}
/**
* Test the {@link SVNClientInterface.diffSummarize()} API.
* @since 1.5
*/
public void testDiffSummarize()
throws SubversionException, IOException
{
OneTest thisTest = new OneTest(false);
DiffSummaries summaries = new DiffSummaries();
// Perform a recursive diff summary, ignoring ancestry.
client.diffSummarize(thisTest.getUrl(), new Revision.Number(0),
thisTest.getUrl(), Revision.HEAD, Depth.infinity,
null, false, summaries);
assertExpectedDiffSummaries(summaries);
summaries.clear();
// Perform a recursive diff summary with a peg revision,
// ignoring ancestry.
client.diffSummarize(thisTest.getUrl(), Revision.HEAD,
new Revision.Number(0), Revision.HEAD,
Depth.infinity, null, false, summaries);
assertExpectedDiffSummaries(summaries);
}
private void assertExpectedDiffSummaries(DiffSummaries summaries)
{
assertEquals("Wrong number of diff summary descriptors", 20,
summaries.size());
// Rigorously inspect one of our DiffSummary notifications.
final String BETA_PATH = "A/B/E/beta";
DiffSummary betaDiff = (DiffSummary) summaries.get(BETA_PATH);
assertNotNull("No diff summary for " + BETA_PATH, betaDiff);
assertEquals("Incorrect path for " + BETA_PATH, BETA_PATH,
betaDiff.getPath());
assertTrue("Incorrect diff kind for " + BETA_PATH,
DiffSummary.DiffKind.ADDED.equals(betaDiff.getDiffKind()));
assertEquals("Incorrect props changed notice for " + BETA_PATH,
false, betaDiff.propsChanged());
assertEquals("Incorrect node kind for " + BETA_PATH, 1,
betaDiff.getNodeKind());
}
/**
* test the basic SVNClient.isAdminDirectory functionality
* @throws Throwable
* @since 1.2
*/
public void testBasicIsAdminDirectory() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
Notify2 notify = new Notify2()
{
public void onNotify(NotifyInformation info)
{
client.isAdminDirectory(".svn");
}
};
client.notification2(notify);
// update the test
assertEquals("wrong revision number from update",
client.update(thisTest.getWCPath(), null, true), 1);
}
public void testBasicCancelOperation() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
Notify2 notify = new Notify2()
{
public void onNotify(NotifyInformation info)
{
try
{
client.cancelOperation();
}
catch (ClientException e)
{
fail(e.getMessage());
}
}
};
client.notification2(notify);
// update the test to try to cancel an operation
try
{
client.update(thisTest.getWCPath(), null, true);
fail("missing exception for canceled operation");
}
catch (ClientException e)
{
// this is expected
}
}
public void testDataTransferProgressReport() throws Throwable
{
// ### FIXME: This isn't working over ra_local, because
// ### ra_local is not invoking the progress callback.
if (SVNTests.rootUrl.startsWith("file://"))
return;
// build the test setup
OneTest thisTest = new OneTest();
ProgressListener listener = new ProgressListener()
{
public void onProgress(ProgressEvent event)
{
// TODO: Examine the byte counts from "event".
throw new RuntimeException("Progress reported as expected");
}
};
client.setProgressListener(listener);
// Perform an update to exercise the progress notification.
try
{
client.update(thisTest.getWCPath(), null, true);
fail("No progress reported");
}
catch (RuntimeException progressReported)
{
}
}
/**
* Test the basic tree conflict functionality.
* @throws Throwable
*/
public void testTreeConflict() throws Throwable
{
// build the test setup. Used for the changes
OneTest thisTest = new OneTest();
WC wc = thisTest.getWc();
// build the backup test setup. That is the one that will be updated
OneTest tcTest = thisTest.copy(".tree-conflict");
// Move files from A/B/E to A/B/F.
String[] srcPaths = { "alpha" };
for (int i = 0; i < srcPaths.length; i++)
{
String fileName = srcPaths[i];
srcPaths[i] = new File(thisTest.getWorkingCopy(),
"A/B/E/" + fileName).getPath();
wc.addItem("A/B/F/" + fileName,
wc.getItemContent("A/B/E/" + fileName));
wc.setItemWorkingCopyRevision("A/B/F/" + fileName, 2);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/F/" + fileName, NodeKind.file,
CommitItemStateFlags.Add |
CommitItemStateFlags.IsCopy);
wc.removeItem("A/B/E/" + fileName);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/E/" + fileName, NodeKind.file,
CommitItemStateFlags.Delete);
}
client.move(srcPaths,
new File(thisTest.getWorkingCopy(), "A/B/F").getPath(),
null, false, true, false, null);
// Commit the changes, and check the state of the WC.
assertEquals("Unexpected WC revision number after commit",
client.commit(new String[] { thisTest.getWCPath() },
"Move files", true), 2);
thisTest.checkStatus();
// modify A/B/E/alpha in second working copy
File alpha = new File(tcTest.getWorkingCopy(), "A/B/E/alpha");
PrintWriter alphaWriter = new PrintWriter(new FileOutputStream(alpha, true));
alphaWriter.print("appended alpha text");
alphaWriter.close();
// update the tc test
assertEquals("wrong revision number from update",
client.update(tcTest.getWCPath(), null, true),
2);
// set the expected working copy layout for the tc test
tcTest.getWc().addItem("A/B/F/alpha",
tcTest.getWc().getItemContent("A/B/E/alpha"));
tcTest.getWc().setItemWorkingCopyRevision("A/B/F/alpha", 2);
// we expect the tree conflict to turn the existing item into
// a scheduled-add with history. We expect the modifications in
// the local file to have been copied to the new file.
tcTest.getWc().setItemTextStatus("A/B/E/alpha", StatusKind.added);
tcTest.getWc().setItemTextStatus("A/B/F/alpha", StatusKind.modified);
// check the status of the working copy of the tc test
tcTest.checkStatus();
// get the Info2 of the tree conflict
MyInfoCallback callback = new MyInfoCallback();
client.info2(tcTest.getWCPath() + "/A/B/E/alpha", null,
null, Depth.unknown, null, callback);
ConflictDescriptor conflict = callback.getInfo().getConflictDescriptor();
assertNotNull("Conflict should not be null", conflict);
assertEquals(conflict.getSrcLeftVersion().getNodeKind(), NodeKind.file);
assertEquals(conflict.getSrcLeftVersion().getReposURL() + "/" +
conflict.getSrcLeftVersion().getPathInRepos(), tcTest.getUrl() + "/A/B/E/alpha");
assertEquals(conflict.getSrcLeftVersion().getPegRevision(), 1L);
assertEquals(conflict.getSrcRightVersion().getNodeKind(), NodeKind.none);
assertEquals(conflict.getSrcRightVersion().getReposURL(), tcTest.getUrl());
assertEquals(conflict.getSrcRightVersion().getPegRevision(), 2L);
}
/**
* Test tolerance of unversioned obstructions when adding paths with
* {@link org.tigris.subversion.javahl.SVNClient#checkout()},
* {@link org.tigris.subversion.javahl.SVNClient#update()}, and
* {@link org.tigris.subversion.javahl.SVNClient#doSwitch()}
* @throws IOException
* @throws SubversionException
*/
public void testObstructionTolerance()
throws SubversionException, IOException
{
// build the test setup
OneTest thisTest = new OneTest();
File file;
PrintWriter pw;
// ----- TEST CHECKOUT -----
// Use export to make unversioned obstructions for a second
// WC checkout (deleting export target from previous tests
// first if it exists).
String secondWC = thisTest.getWCPath() + ".backup1";
removeDirOrFile(new File(secondWC));
client.doExport(thisTest.getUrl(), secondWC, null, false);
// Make an obstructing file that conflicts with add coming from repos
file = new File(secondWC, "A/B/lambda");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the conflicting obstructiong file 'lambda'.");
pw.close();
// Attempt to checkout backup WC without "--force"...
try
{
// ...should fail
client.checkout(thisTest.getUrl(), secondWC, null, null,
Depth.infinity, false, false);
fail("obstructed checkout should fail by default");
}
catch (ClientException expected)
{
}
// Attempt to checkout backup WC with "--force"
// so obstructions are tolerated
client.checkout(thisTest.getUrl(), secondWC, null, null,
Depth.infinity, false, true);
// Check the WC status, the only status should be a text
// mod to lambda. All the other obstructing files were identical
Status[] secondWCStatus = client.status(secondWC, true, false,
false, false, false);
if (!(secondWCStatus.length == 1 &&
secondWCStatus[0].getPath().endsWith("A/B/lambda") &&
secondWCStatus[0].getTextStatus() == StatusKind.modified &&
secondWCStatus[0].getPropStatus() == StatusKind.none))
{
fail("Unexpected WC status after co with " +
"unversioned obstructions");
}
// Make a third WC to test obstruction tolerance of sw and up.
OneTest backupTest = thisTest.copy(".backup2");
// ----- TEST UPDATE -----
// r2: Add a file A/D/H/nu
file = new File(thisTest.getWorkingCopy(), "A/D/H/nu");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the file 'nu'.");
pw.close();
client.add(file.getAbsolutePath(), false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/nu", NodeKind.file,
CommitItemStateFlags.TextMods +
CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
client.commit(new String[] {thisTest.getWCPath()},
"log msg", true), 2);
thisTest.getWc().addItem("A/D/H/nu", "This is the file 'nu'.");
Status status = client.singleStatus(thisTest.getWCPath() +
"/A/D/H/nu",
false);
// Add an unversioned file A/D/H/nu to the backup WC
file = new File(backupTest.getWorkingCopy(), "A/D/H/nu");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the file 'nu'.");
pw.close();
// Attempt to update backup WC without "--force"
try
{
// obstructed update should fail
client.update(backupTest.getWCPath(), null, true);
fail("obstructed update should fail by default");
}
catch (ClientException expected)
{
}
// Attempt to update backup WC with "--force"
assertEquals("wrong revision from update",
client.update(backupTest.getWCPath(),
null, Depth.infinity, false, false, true),
2);
// ----- TEST SWITCH -----
// Add an unversioned file A/B/E/nu to the backup WC
// The file differs from A/D/H/nu
file = new File(backupTest.getWorkingCopy(), "A/B/E/nu");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is yet another file 'nu'.");
pw.close();
// Add an unversioned file A/B/E/chi to the backup WC
// The file is identical to A/D/H/chi.
file = new File(backupTest.getWorkingCopy(), "A/B/E/chi");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the file 'chi'.");
pw.close();
// Attempt to switch A/B/E to A/D/H without "--force"
try
{
// obstructed switch should fail
client.doSwitch(backupTest.getWCPath() + "/A/B/E",
backupTest.getUrl() + "/A/D/H",
null, true);
fail("obstructed switch should fail by default");
}
catch (ClientException expected)
{
}
// Complete the switch using "--force" and check the status
client.doSwitch(backupTest.getWCPath() + "/A/B/E",
backupTest.getUrl() + "/A/D/H",
Revision.HEAD, Revision.HEAD, Depth.infinity,
false, false, true);
backupTest.getWc().setItemIsSwitched("A/B/E",true);
backupTest.getWc().removeItem("A/B/E/alpha");
backupTest.getWc().removeItem("A/B/E/beta");
backupTest.getWc().addItem("A/B/E/nu",
"This is yet another file 'nu'.");
backupTest.getWc().setItemTextStatus("A/B/E/nu", Status.Kind.modified);
backupTest.getWc().addItem("A/D/H/nu",
"This is the file 'nu'.");
backupTest.getWc().addItem("A/B/E/chi",
backupTest.getWc().getItemContent("A/D/H/chi"));
backupTest.getWc().addItem("A/B/E/psi",
backupTest.getWc().getItemContent("A/D/H/psi"));
backupTest.getWc().addItem("A/B/E/omega",
backupTest.getWc().getItemContent("A/D/H/omega"));
backupTest.checkStatus();
}
/**
* Test basic blame functionality. This test marginally tests blame
* correctness, mainly just that the blame APIs link correctly.
* @throws Throwable
* @since 1.5
*/
public void testBasicBlame() throws Throwable
{
OneTest thisTest = new OneTest();
// Test the old interface to be sure it still works
byte[] result = client.blame(thisTest.getWCPath() + "/iota", Revision
.getInstance(1), Revision.getInstance(1));
assertEquals(" 1 jrandom This is the file 'iota'.\n",
new String(result));
// Test the current interface
BlameCallbackImpl callback = new BlameCallbackImpl();
client.blame(thisTest.getWCPath() + "/iota", Revision.getInstance(1),
Revision.getInstance(1), callback);
assertEquals(1, callback.numberOfLines());
BlameCallbackImpl.BlameLine line = callback.getBlameLine(0);
if (line != null)
{
assertEquals(1, line.getRevision());
assertEquals("jrandom", line.getAuthor());
}
}
/**
* Test commit of arbitrary revprops.
* @throws Throwable
* @since 1.5
*/
public void testCommitRevprops() throws Throwable
{
class RevpropLogCallback implements LogMessageCallback
{
Map revprops;
public void singleMessage(ChangePath[] changedPaths,
long revision,
Map revprops,
boolean hasChildren)
{
this.revprops = revprops;
}
public Map getRevprops()
{
return revprops;
}
}
// build the test setup
OneTest thisTest = new OneTest();
// modify file A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("appended mu text");
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu",
thisTest.getWc().getItemContent("A/mu") + "appended mu text");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes, with some extra revprops
Map revprops = new HashMap();
revprops.put("kfogel", "rockstar");
revprops.put("cmpilato", "theman");
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg", Depth.infinity, true, true,
null, revprops),
2);
// check the status of the working copy
thisTest.checkStatus();
// Fetch our revprops from the server
RevpropLogCallback callback = new RevpropLogCallback();
client.logMessages(thisTest.getWCPath(), Revision.getInstance(2),
Revision.getInstance(2),
Revision.getInstance(2), false, false, false,
new String[] {"kfogel", "cmpilato"}, 0,
callback);
Map fetchedProps = callback.getRevprops();
assertEquals("wrong number of fetched revprops", revprops.size(),
fetchedProps.size());
Set keys = fetchedProps.keySet();
for (Iterator it = keys.iterator(); it.hasNext(); )
{
String key = (String) it.next();
assertEquals("revprops check", revprops.get(key),
fetchedProps.get(key));
}
}
/**
* @return <code>file</code> converted into a -- possibly
* <code>canonical</code>-ized -- Subversion-internal path
* representation.
*/
private String fileToSVNPath(File file, boolean canonical)
{
// JavaHL need paths with '/' separators
if (canonical)
{
try
{
return file.getCanonicalPath().replace('\\', '/');
}
catch (IOException e)
{
return null;
}
}
else
{
return file.getPath().replace('\\', '/');
}
}
/**
* A DiffSummaryReceiver implementation which collects all DiffSummary
* notifications.
*/
private static class DiffSummaries extends HashMap
implements DiffSummaryReceiver
{
// Update the serialVersionUID when there is a incompatible
// change made to this class.
private static final long serialVersionUID = 1L;
public void onSummary(DiffSummary descriptor)
{
super.put(descriptor.getPath(), descriptor);
}
}
private class MyChangelistCallback extends HashMap
implements ChangelistCallback
{
public void doChangelist(String path, String changelist)
{
if (super.containsKey(path))
{
// Append the changelist to the existing list
List changelistList = (List) super.get(path);
changelistList.add(changelist);
}
else
{
// Create a new changelist with that list
List changelistList = new ArrayList();
changelistList.add(changelist);
super.put(path, changelistList);
}
}
public List get(String path)
{
return (List) super.get(path);
}
}
private class MyInfoCallback implements InfoCallback {
private Info2 info;
public void singleInfo(Info2 info) {
this.info = info;
}
public Info2 getInfo() {
return info;
}
}
}
| subversion/bindings/javahl/tests/org/tigris/subversion/javahl/BasicTests.java | /**
* @copyright
* ====================================================================
* Licensed to the Subversion Corporation (SVN Corp.) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SVN Corp. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
* @endcopyright
*/
package org.tigris.subversion.javahl;
import org.tigris.subversion.javahl.*;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.ByteArrayOutputStream;
import java.text.ParseException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Map;
import junit.framework.Assert;
/**
* Tests the basic functionality of javahl binding (inspired by the
* tests in subversion/tests/cmdline/basic_tests.py).
*/
public class BasicTests extends SVNTests
{
/**
* Base name of all our tests.
*/
public final static String testName = "basic_test";
public BasicTests()
{
init();
}
public BasicTests(String name)
{
super(name);
init();
}
/**
* Initialize the testBaseName and the testCounter, if this is the
* first test of this class.
*/
private void init()
{
if (!testName.equals(testBaseName))
{
testCounter = 0;
testBaseName = testName;
}
}
/**
* Test LogDate().
* @throws Throwable
*/
public void testLogDate() throws Throwable
{
String goodDate = "2007-10-04T03:00:52.134992Z";
String badDate = "2008-01-14";
LogDate logDate;
try
{
logDate = new LogDate(goodDate);
assertEquals(1191466852134992L, logDate.getTimeMicros());
} catch (ParseException e) {
fail("Failed to parse date " + goodDate);
}
try
{
logDate = new LogDate(badDate);
fail("Failed to throw exception on bad date " + badDate);
} catch (ParseException e) {
}
}
/**
* Test SVNClient.getVersion().
* @throws Throwable
*/
public void testVersion() throws Throwable
{
try
{
Version version = client.getVersion();
String versionString = version.toString();
if (versionString == null || versionString.trim().length() == 0)
{
throw new Exception("Version string empty");
}
}
catch (Exception e)
{
fail("Version should always be available unless the " +
"native libraries failed to initialize: " + e);
}
}
/**
* Tests Subversion path validation.
*/
public void testPathValidation() throws Throwable
{
// Rather than segfaulting, JavaHL considers null an invalid path.
assertFalse("Path validation produced false-positive for null path",
Path.isValid(null));
String path = "valid-path";
assertTrue("Validation check of valid path '" + path +
"' should succeed", Path.isValid(path));
// File names cannot contain control characters.
path = "invalid-\u0001-path";
assertFalse("Validation check of invalid path '" + path +
"' (which contains control characters) should fail",
Path.isValid(path));
}
/**
* Tests Subversion path as URL predicate.
*/
public void testPathIsURL() throws Throwable
{
try
{
Path.isURL(null);
fail("A null path should raise an exception");
}
catch (IllegalArgumentException expected)
{
}
// Subversion "paths" which aren't URLs.
String[] paths = { "/path", "c:\\path" };
for (int i = 0; i < paths.length; i++)
{
assertFalse("'" + paths[i] + "' should not be considered a URL",
Path.isURL(paths[i]));
}
// Subversion "paths" which are URLs.
paths = new String[] { "http://example.com", "svn://example.com",
"svn+ssh://example.com", "file:///src/svn/" };
for (int i = 0; i < paths.length; i++)
{
assertTrue("'" + paths[i] + "' should be considered a URL",
Path.isURL(paths[i]));
}
}
/**
* Tests Mergeinfo and RevisionRange classes.
* @since 1.5
*/
public void testMergeinfoParser() throws Throwable
{
String mergeInfoPropertyValue =
"/trunk:1-300,305,307,400-405\n/branches/branch:308-400";
Mergeinfo info = new Mergeinfo(mergeInfoPropertyValue);
String[] paths = info.getPaths();
assertEquals(2, paths.length);
RevisionRange[] trunkRange = info.getRevisionRange("/trunk");
assertEquals(4, trunkRange.length);
assertEquals("1-300", trunkRange[0].toString());
assertEquals("305", trunkRange[1].toString());
assertEquals("307", trunkRange[2].toString());
assertEquals("400-405", trunkRange[3].toString());
RevisionRange[] branchRange =
info.getRevisionRange("/branches/branch");
assertEquals(1, branchRange.length);
}
/**
* Test the basic SVNClient.status functionality.
* @throws Throwable
*/
public void testBasicStatus() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
// check the status of the working copy
thisTest.checkStatus();
// Test status of non-existent file
File fileC = new File(thisTest.getWorkingCopy() + "/A", "foo.c");
Status s = client.singleStatus(fileToSVNPath(fileC, false), false);
if (s != null)
fail("File foo.c should not return a status.");
}
/**
* Test the "out of date" info from {@link
* org.tigris.subversion.javahl.SVNClient#status()}.
*
* @throws SubversionException
* @throws IOException
*/
public void testOODStatus() throws SubversionException, IOException
{
// build the test setup
OneTest thisTest = new OneTest();
// Make a whole slew of changes to a WC:
//
// (root) r7 - prop change
// iota
// A
// |__mu
// |
// |__B
// | |__lambda
// | |
// | |__E r12 - deleted
// | | |__alpha
// | | |__beta
// | |
// | |__F r9 - prop change
// | |__I r6 - added dir
// |
// |__C r5 - deleted
// |
// |__D
// |__gamma
// |
// |__G
// | |__pi r3 - deleted
// | |__rho r2 - modify text
// | |__tau r4 - modify text
// |
// |__H
// |__chi r10-11 replaced with file
// |__psi r13-14 replaced with dir
// |__omega
// |__nu r8 - added file
File file, dir;
PrintWriter pw;
Status status;
long rev; // Resulting rev from co or update
long expectedRev = 2; // Keeps track of the latest rev committed
// ----- r2: modify file A/D/G/rho --------------------------
file = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("modification to rho");
pw.close();
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[]{thisTest.getWCPath()},
"log msg", true), expectedRev++);
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", rev);
thisTest.getWc().setItemContent("A/D/G/rho",
thisTest.getWc().getItemContent("A/D/G/rho")
+ "modification to rho");
status = client.singleStatus(thisTest.getWCPath() + "/A/D/G/rho",
false);
long rhoCommitDate = status.getLastChangedDate().getTime();
long rhoCommitRev = rev;
String rhoAuthor = status.getLastCommitAuthor();
// ----- r3: delete file A/D/G/pi ---------------------------
client.remove(new String[] {thisTest.getWCPath() + "/A/D/G/pi"}, null,
false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/G/pi", NodeKind.file,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/D/G/pi");
thisTest.getWc().setItemWorkingCopyRevision("A/D/G", rev);
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath() + "/A/D/G",
null, true),
rev);
long GCommitRev = rev;
// ----- r4: modify file A/D/G/tau --------------------------
file = new File(thisTest.getWorkingCopy(), "A/D/G/tau");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("modification to tau");
pw.close();
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/G/tau",NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/tau", rev);
thisTest.getWc().setItemContent("A/D/G/tau",
thisTest.getWc().getItemContent("A/D/G/tau")
+ "modification to tau");
status = client.singleStatus(thisTest.getWCPath() + "/A/D/G/tau",
false);
long tauCommitDate = status.getLastChangedDate().getTime();
long tauCommitRev = rev;
String tauAuthor = status.getLastCommitAuthor();
// ----- r5: delete dir with no children A/C ---------------
client.remove(new String[] {thisTest.getWCPath() + "/A/C"}, null,
false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/C", NodeKind.dir,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/C");
long CCommitRev = rev;
// ----- r6: Add dir A/B/I ----------------------------------
dir = new File(thisTest.getWorkingCopy(), "A/B/I");
dir.mkdir();
client.add(dir.getAbsolutePath(), true);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/I", NodeKind.dir, CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().addItem("A/B/I", null);
status = client.singleStatus(thisTest.getWCPath() + "/A/B/I", false);
long ICommitDate = status.getLastChangedDate().getTime();
long ICommitRev = rev;
String IAuthor = status.getLastCommitAuthor();
// ----- r7: Update then commit prop change on root dir -----
thisTest.getWc().setRevision(rev);
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(), null, true), rev);
thisTest.checkStatus();
client.propertySet(thisTest.getWCPath(), "propname", "propval", false);
thisTest.getWc().setItemPropStatus("", Status.Kind.modified);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(), null,
NodeKind.dir, CommitItemStateFlags.PropMods);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().setItemWorkingCopyRevision("", rev);
thisTest.getWc().setItemPropStatus("", Status.Kind.normal);
// ----- r8: Add a file A/D/H/nu ----------------------------
file = new File(thisTest.getWorkingCopy(), "A/D/H/nu");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the file 'nu'.");
pw.close();
client.add(file.getAbsolutePath(), false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/nu", NodeKind.file,
CommitItemStateFlags.TextMods +
CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().addItem("A/D/H/nu", "This is the file 'nu'.");
status = client.singleStatus(thisTest.getWCPath() + "/A/D/H/nu",
false);
long nuCommitDate = status.getLastChangedDate().getTime();
long nuCommitRev = rev;
String nuAuthor = status.getLastCommitAuthor();
// ----- r9: Prop change on A/B/F ---------------------------
client.propertySet(thisTest.getWCPath() + "/A/B/F", "propname",
"propval", false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/F", NodeKind.dir,
CommitItemStateFlags.PropMods);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().setItemPropStatus("A/B/F", Status.Kind.normal);
thisTest.getWc().setItemWorkingCopyRevision("A/B/F", rev);
status = client.singleStatus(thisTest.getWCPath() + "/A/B/F", false);
long FCommitDate = status.getLastChangedDate().getTime();
long FCommitRev = rev;
String FAuthor = status.getLastCommitAuthor();
// ----- r10-11: Replace file A/D/H/chi with file -----------
client.remove(new String[] {thisTest.getWCPath() + "/A/D/H/chi"},
null, false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/chi", NodeKind.file,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/D/G/pi");
file = new File(thisTest.getWorkingCopy(), "A/D/H/chi");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the replacement file 'chi'.");
pw.close();
client.add(file.getAbsolutePath(), false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/chi", NodeKind.file,
CommitItemStateFlags.TextMods +
CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().addItem("A/D/H/chi",
"This is the replacement file 'chi'.");
status = client.singleStatus(thisTest.getWCPath() + "/A/D/H/chi",
false);
long chiCommitDate = status.getLastChangedDate().getTime();
long chiCommitRev = rev;
String chiAuthor = status.getLastCommitAuthor();
// ----- r12: Delete dir A/B/E with children ----------------
client.remove(new String[] {thisTest.getWCPath() + "/A/B/E"}, null,
false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/E", NodeKind.dir,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/B/E/alpha");
thisTest.getWc().removeItem("A/B/E/beta");
thisTest.getWc().removeItem("A/B/E");
thisTest.getWc().setItemWorkingCopyRevision("A/B", rev);
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath() + "/A/B", null, true),
rev);
Info Binfo = client.info(thisTest.getWCPath() + "/A/B");
long BCommitDate = Binfo.getLastChangedDate().getTime();
long BCommitRev = rev;
long ECommitRev = BCommitRev;
String BAuthor = Binfo.getAuthor();
// ----- r13-14: Replace file A/D/H/psi with dir ------------
client.remove(new String[]{thisTest.getWCPath() + "/A/D/H/psi"}, null,
false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/psi", NodeKind.file,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
thisTest.getWc().removeItem("A/D/H/psi");
thisTest.getWc().setRevision(rev);
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(), null, true), rev);
thisTest.getWc().addItem("A/D/H/psi", null);
dir = new File(thisTest.getWorkingCopy(), "A/D/H/psi");
dir.mkdir();
client.add(dir.getAbsolutePath(), true);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/psi", NodeKind.dir,
CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
rev = client.commit(new String[] {thisTest.getWCPath()},
"log msg", true),
expectedRev++);
status = client.singleStatus(thisTest.getWCPath() + "/A/D/H/psi",
false);
long psiCommitDate = status.getLastChangedDate().getTime();
long psiCommitRev = rev;
String psiAuthor = status.getLastCommitAuthor();
// ----- Check status of modfied WC then update it back
// ----- to rev 1 so it's out of date
thisTest.checkStatus();
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(),
Revision.getInstance(1), true),
1);
thisTest.getWc().setRevision(1);
thisTest.getWc().setItemOODInfo("A", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.getWc().setItemOODInfo("A/B", BCommitRev, BAuthor,
BCommitDate, NodeKind.dir);
thisTest.getWc().addItem("A/B/I", null);
thisTest.getWc().setItemOODInfo("A/B/I", ICommitRev, IAuthor,
ICommitDate, NodeKind.dir);
thisTest.getWc().setItemTextStatus("A/B/I", Status.Kind.none);
thisTest.getWc().setItemNodeKind("A/B/I", NodeKind.unknown);
thisTest.getWc().addItem("A/C", null);
thisTest.getWc().setItemReposLastCmtRevision("A/C", CCommitRev);
thisTest.getWc().setItemReposKind("A/C", NodeKind.dir);
thisTest.getWc().addItem("A/B/E", null);
thisTest.getWc().setItemReposLastCmtRevision("A/B/E", ECommitRev);
thisTest.getWc().setItemReposKind("A/B/E", NodeKind.dir);
thisTest.getWc().addItem("A/B/E/alpha", "This is the file 'alpha'.");
thisTest.getWc().addItem("A/B/E/beta", "This is the file 'beta'.");
thisTest.getWc().setItemPropStatus("A/B/F", Status.Kind.none);
thisTest.getWc().setItemOODInfo("A/B/F", FCommitRev, FAuthor,
FCommitDate, NodeKind.dir);
thisTest.getWc().setItemOODInfo("A/D", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.getWc().setItemOODInfo("A/D/G", tauCommitRev, tauAuthor,
tauCommitDate, NodeKind.dir);
thisTest.getWc().addItem("A/D/G/pi", "This is the file 'pi'.");
thisTest.getWc().setItemReposLastCmtRevision("A/D/G/pi", GCommitRev);
thisTest.getWc().setItemReposKind("A/D/G/pi", NodeKind.file);
thisTest.getWc().setItemContent("A/D/G/rho",
"This is the file 'rho'.");
thisTest.getWc().setItemOODInfo("A/D/G/rho", rhoCommitRev, rhoAuthor,
rhoCommitDate, NodeKind.file);
thisTest.getWc().setItemContent("A/D/G/tau",
"This is the file 'tau'.");
thisTest.getWc().setItemOODInfo("A/D/G/tau", tauCommitRev, tauAuthor,
tauCommitDate, NodeKind.file);
thisTest.getWc().setItemOODInfo("A/D/H", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.getWc().setItemWorkingCopyRevision("A/D/H/nu",
Revision.SVN_INVALID_REVNUM);
thisTest.getWc().setItemTextStatus("A/D/H/nu", Status.Kind.none);
thisTest.getWc().setItemNodeKind("A/D/H/nu", NodeKind.unknown);
thisTest.getWc().setItemOODInfo("A/D/H/nu", nuCommitRev, nuAuthor,
nuCommitDate, NodeKind.file);
thisTest.getWc().setItemContent("A/D/H/chi",
"This is the file 'chi'.");
thisTest.getWc().setItemOODInfo("A/D/H/chi", chiCommitRev, chiAuthor,
chiCommitDate, NodeKind.file);
thisTest.getWc().removeItem("A/D/H/psi");
thisTest.getWc().addItem("A/D/H/psi", "This is the file 'psi'.");
// psi was replaced with a directory
thisTest.getWc().setItemOODInfo("A/D/H/psi", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.getWc().setItemPropStatus("", Status.Kind.none);
thisTest.getWc().setItemOODInfo("", psiCommitRev, psiAuthor,
psiCommitDate, NodeKind.dir);
thisTest.checkStatus(true);
}
/**
* Test the basic SVNClient.checkout functionality.
* @throws Throwable
*/
public void testBasicCheckout() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
try
{
// obstructed checkout must fail
client.checkout(thisTest.getUrl() + "/A", thisTest.getWCPath(),
null, true);
fail("missing exception");
}
catch (ClientException expected)
{
}
// modify file A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("appended mu text");
muWriter.close();
thisTest.getWc().setItemTextStatus("A/mu", Status.Kind.modified);
// delete A/B/lambda without svn
File lambda = new File(thisTest.getWorkingCopy(), "A/B/lambda");
lambda.delete();
thisTest.getWc().setItemTextStatus("A/B/lambda", Status.Kind.missing);
// remove A/D/G
client.remove(new String[]{thisTest.getWCPath() + "/A/D/G"}, null,
false);
thisTest.getWc().setItemTextStatus("A/D/G", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/pi", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/tau", Status.Kind.deleted);
// check the status of the working copy
thisTest.checkStatus();
// recheckout the working copy
client.checkout(thisTest.getUrl(), thisTest.getWCPath(), null, true);
// deleted file should reapear
thisTest.getWc().setItemTextStatus("A/B/lambda", Status.Kind.normal);
// check the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.commit functionality.
* @throws Throwable
*/
public void testBasicCommit() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
// modify file A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("appended mu text");
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu",
thisTest.getWc().getItemContent("A/mu") + "appended mu text");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
CommitItemStateFlags.TextMods);
// modify file A/D/G/rho
File rho = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
PrintWriter rhoWriter =
new PrintWriter(new FileOutputStream(rho, true));
rhoWriter.print("new appended text for rho");
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
thisTest.getWc().setItemContent("A/D/G/rho",
thisTest.getWc().getItemContent("A/D/G/rho")
+ "new appended text for rho");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho",NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
2);
// check the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic property setting/getting functionality.
* @throws Throwable
*/
public void testBasicProperties() throws Throwable
{
OneTest thisTest = new OneTest();
WC wc = thisTest.getWc();
// Check getting properties the non-callback way
String itemPath = fileToSVNPath(new File(thisTest.getWCPath(),
"iota"),
false);
client.propertySet(itemPath, "abc", "def", false);
PropertyData[] properties = client.properties(itemPath);
PropertyData prop = properties[0];
assertEquals("abc", prop.getName());
assertEquals("def", prop.getValue());
wc.setItemPropStatus("iota", Status.Kind.modified);
thisTest.checkStatus();
// Check getting properties the callback way
itemPath = fileToSVNPath(new File(thisTest.getWCPath(),
"/A/B/E/alpha"),
false);
client.propertyCreate(itemPath, "cqcq", "qrz", false, false);
ProplistCallbackImpl callback = new ProplistCallbackImpl();
client.properties(itemPath, null, null, Depth.empty, null, callback);
Map propMap = callback.getProperties(itemPath);
Iterator it = propMap.keySet().iterator();
while (it.hasNext())
{
String key = (String) it.next();
assertEquals("cqcq", key);
assertEquals("qrz", (String) propMap.get(key));
}
wc.setItemPropStatus("A/B/E/alpha", Status.Kind.modified);
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.update functionality.
* @throws Throwable
*/
public void testBasicUpdate() throws Throwable
{
// build the test setup. Used for the changes
OneTest thisTest = new OneTest();
// build the backup test setup. That is the one that will be updated
OneTest backupTest = thisTest.copy(".backup");
// modify A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("appended mu text");
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu",
thisTest.getWc().getItemContent("A/mu") + "appended mu text");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
CommitItemStateFlags.TextMods);
// modify A/D/G/rho
File rho = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
PrintWriter rhoWriter =
new PrintWriter(new FileOutputStream(rho, true));
rhoWriter.print("new appended text for rho");
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
thisTest.getWc().setItemContent("A/D/G/rho",
thisTest.getWc().getItemContent("A/D/G/rho")
+ "new appended text for rho");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho",NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
2);
// check the status of the working copy
thisTest.checkStatus();
// update the backup test
assertEquals("wrong revision number from update",
client.update(backupTest.getWCPath(), null, true),
2);
// set the expected working copy layout for the backup test
backupTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
backupTest.getWc().setItemContent("A/mu",
backupTest.getWc().getItemContent("A/mu") + "appended mu text");
backupTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
backupTest.getWc().setItemContent("A/D/G/rho",
backupTest.getWc().getItemContent("A/D/G/rho")
+ "new appended text for rho");
// check the status of the working copy of the backup test
backupTest.checkStatus();
}
/**
* Test basic SVNClient.mkdir with URL parameter functionality.
* @throws Throwable
*/
public void testBasicMkdirUrl() throws Throwable
{
// build the test setup.
OneTest thisTest = new OneTest();
// create Y and Y/Z directories in the repository
addExpectedCommitItem(null, thisTest.getUrl(), "Y", NodeKind.none,
CommitItemStateFlags.Add);
addExpectedCommitItem(null, thisTest.getUrl(), "Y/Z", NodeKind.none,
CommitItemStateFlags.Add);
client.mkdir(new String[]{thisTest.getUrl() + "/Y",
thisTest.getUrl() + "/Y/Z"}, "log_msg");
// add the new directories the expected working copy layout
thisTest.getWc().addItem("Y", null);
thisTest.getWc().setItemWorkingCopyRevision("Y", 2);
thisTest.getWc().addItem("Y/Z", null);
thisTest.getWc().setItemWorkingCopyRevision("Y/Z", 2);
// update the working copy
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(), null, true),
2);
// check the status of the working copy
thisTest.checkStatus();
}
/**
* Test the {@link SVNClientInterface.copy()} API.
* @since 1.5
*/
public void testCopy()
throws SubversionException, IOException
{
OneTest thisTest = new OneTest();
WC wc = thisTest.getWc();
final Revision firstRevision = Revision.getInstance(1);
final Revision pegRevision = null; // Defaults to Revision.HEAD.
// Copy files from A/B/E to A/B/F.
String[] srcPaths = { "alpha", "beta" };
CopySource[] sources = new CopySource[srcPaths.length];
for (int i = 0; i < srcPaths.length; i++)
{
String fileName = srcPaths[i];
sources[i] =
new CopySource(new File(thisTest.getWorkingCopy(),
"A/B/E/" + fileName).getPath(),
firstRevision, pegRevision);
wc.addItem("A/B/F/" + fileName,
wc.getItemContent("A/B/E/" + fileName));
wc.setItemWorkingCopyRevision("A/B/F/" + fileName, 2);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/F/" + fileName, NodeKind.file,
CommitItemStateFlags.Add |
CommitItemStateFlags.IsCopy);
}
client.copy(sources,
new File(thisTest.getWorkingCopy(), "A/B/F").getPath(),
null, true, false, null);
// Commit the changes, and check the state of the WC.
assertEquals("Unexpected WC revision number after commit",
client.commit(new String[] { thisTest.getWCPath() },
"Copy files", true),
2);
thisTest.checkStatus();
assertExpectedSuggestion(thisTest.getUrl() + "/A/B/E/alpha", "A/B/F/alpha", thisTest);
// Now test a WC to URL copy
CopySource wcSource[] = new CopySource[1];
wcSource[0] = new CopySource(new File(thisTest.getWorkingCopy(),
"A/B").getPath(), Revision.WORKING, Revision.WORKING);
client.commitMessageHandler(null);
client.copy(wcSource,
thisTest.getUrl() + "/parent/A/B",
"Copy WC to URL", true, true, null);
// update the WC to get new folder and confirm the copy
assertEquals("wrong revision number from update",
client.update(thisTest.getWCPath(), null, true),
3);
}
/**
* Test the {@link SVNClientInterface.move()} API.
* @since 1.5
*/
public void testMove()
throws SubversionException, IOException
{
OneTest thisTest = new OneTest();
WC wc = thisTest.getWc();
// Move files from A/B/E to A/B/F.
String[] srcPaths = { "alpha", "beta" };
for (int i = 0; i < srcPaths.length; i++)
{
String fileName = srcPaths[i];
srcPaths[i] = new File(thisTest.getWorkingCopy(),
"A/B/E/" + fileName).getPath();
wc.addItem("A/B/F/" + fileName,
wc.getItemContent("A/B/E/" + fileName));
wc.setItemWorkingCopyRevision("A/B/F/" + fileName, 2);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/F/" + fileName, NodeKind.file,
CommitItemStateFlags.Add |
CommitItemStateFlags.IsCopy);
wc.removeItem("A/B/E/" + fileName);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/E/" + fileName, NodeKind.file,
CommitItemStateFlags.Delete);
}
client.move(srcPaths,
new File(thisTest.getWorkingCopy(), "A/B/F").getPath(),
null, false, true, false, null);
// Commit the changes, and check the state of the WC.
assertEquals("Unexpected WC revision number after commit",
client.commit(new String[] { thisTest.getWCPath() },
"Move files", true), 2);
thisTest.checkStatus();
assertExpectedSuggestion(thisTest.getUrl() + "/A/B/E/alpha", "A/B/F/alpha", thisTest);
}
/**
* Assert that the first merge source suggested for
* <code>destPath</code> at {@link Revision#WORKING} and {@link
* Revision#HEAD} is equivalent to <code>expectedSrc</code>.
* @exception SubversionException If retrieval of the copy source fails.
* @since 1.5
*/
private void assertExpectedSuggestion(String expectedSrc,
String destPath, OneTest thisTest)
throws SubversionException
{
String wcPath = fileToSVNPath(new File(thisTest.getWCPath(),
destPath), false);
String[] suggestions = client.suggestMergeSources(wcPath,
Revision.WORKING);
assertNotNull(suggestions);
assertTrue(suggestions.length >= 1);
assertTrue("Unexpected copy source path, expected " +
expectedSrc + ", got " + suggestions[0],
expectedSrc.equals(suggestions[0]));
// Same test using URL
String url = thisTest.getUrl() + "/" + destPath;
suggestions = client.suggestMergeSources(url, Revision.HEAD);
assertNotNull(suggestions);
assertTrue(suggestions.length >= 1);
assertTrue("Unexpected copy source path, expected " +
expectedSrc + ", got " + suggestions[0],
expectedSrc.equals(suggestions[0]));
}
/**
* Tests that the passed start and end revision are contained
* within the array of revisions.
* @since 1.5
*/
private void assertExpectedMergeRange(long start, long end,
long[] revisions)
{
Arrays.sort(revisions);
for (int i = 0; i < revisions.length; i++) {
if (revisions[i] <= start) {
for (int j = i; j < revisions.length; j++)
{
if (end <= revisions[j])
return;
}
fail("End revision: " + end + " was not in range: " + revisions[0] +
" : " + revisions[revisions.length - 1]);
return;
}
}
fail("Start revision: " + start + " was not in range: " + revisions[0] +
" : " + revisions[revisions.length - 1]);
}
/**
* Test the basic SVNClient.update functionality with concurrent
* changes in the repository and the working copy.
* @throws Throwable
*/
public void testBasicMergingUpdate() throws Throwable
{
// build the first working copy
OneTest thisTest = new OneTest();
// append 10 lines to A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
String muContent = thisTest.getWc().getItemContent("A/mu");
for (int i = 2; i < 11; i++)
{
muWriter.print("\nThis is line " + i + " in mu");
muContent = muContent + "\nThis is line " + i + " in mu";
}
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu", muContent);
addExpectedCommitItem(thisTest.getWorkingCopy().getAbsolutePath(),
thisTest.getUrl(), "A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
// append 10 line to A/D/G/rho
File rho = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
PrintWriter rhoWriter =
new PrintWriter(new FileOutputStream(rho, true));
String rhoContent = thisTest.getWc().getItemContent("A/D/G/rho");
for (int i = 2; i < 11; i++)
{
rhoWriter.print("\nThis is line " + i + " in rho");
rhoContent = rhoContent + "\nThis is line " + i + " in rho";
}
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
thisTest.getWc().setItemContent("A/D/G/rho", rhoContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
2);
// check the status of the first working copy
thisTest.checkStatus();
// create a backup copy of the working copy
OneTest backupTest = thisTest.copy(".backup");
// change the last line of A/mu in the first working copy
muWriter = new PrintWriter(new FileOutputStream(mu, true));
muContent = thisTest.getWc().getItemContent("A/mu");
muWriter.print(" Appended to line 10 of mu");
muContent = muContent + " Appended to line 10 of mu";
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 3);
thisTest.getWc().setItemContent("A/mu", muContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
// change the last line of A/mu in the first working copy
rhoWriter = new PrintWriter(new FileOutputStream(rho, true));
rhoContent = thisTest.getWc().getItemContent("A/D/G/rho");
rhoWriter.print(" Appended to line 10 of rho");
rhoContent = rhoContent + " Appended to line 10 of rho";
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 3);
thisTest.getWc().setItemContent("A/D/G/rho", rhoContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
// commit these changes to the repository
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
3);
// check the status of the first working copy
thisTest.checkStatus();
// modify the first line of A/mu in the backup working copy
mu = new File(backupTest.getWorkingCopy(), "A/mu");
muWriter = new PrintWriter(new FileOutputStream(mu));
muWriter.print("This is the new line 1 in the backup copy of mu");
muContent = "This is the new line 1 in the backup copy of mu";
for (int i = 2; i < 11; i++)
{
muWriter.print("\nThis is line " + i + " in mu");
muContent = muContent + "\nThis is line " + i + " in mu";
}
muWriter.close();
backupTest.getWc().setItemWorkingCopyRevision("A/mu", 3);
muContent = muContent + " Appended to line 10 of mu";
backupTest.getWc().setItemContent("A/mu", muContent);
backupTest.getWc().setItemTextStatus("A/mu", Status.Kind.modified);
// modify the first line of A/D/G/rho in the backup working copy
rho = new File(backupTest.getWorkingCopy(), "A/D/G/rho");
rhoWriter = new PrintWriter(new FileOutputStream(rho));
rhoWriter.print("This is the new line 1 in the backup copy of rho");
rhoContent = "This is the new line 1 in the backup copy of rho";
for (int i = 2; i < 11; i++)
{
rhoWriter.print("\nThis is line " + i + " in rho");
rhoContent = rhoContent + "\nThis is line " + i + " in rho";
}
rhoWriter.close();
backupTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 3);
rhoContent = rhoContent + " Appended to line 10 of rho";
backupTest.getWc().setItemContent("A/D/G/rho", rhoContent);
backupTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.modified);
// update the backup working copy
assertEquals("wrong revision number from update",
client.update(backupTest.getWCPath(), null, true),
3);
// check the status of the backup working copy
backupTest.checkStatus();
}
/**
* Test the basic SVNClient.update functionality with concurrent
* changes in the repository and the working copy that generate
* conflicts.
* @throws Throwable
*/
public void testBasicConflict() throws Throwable
{
// build the first working copy
OneTest thisTest = new OneTest();
// copy the first working copy to the backup working copy
OneTest backupTest = thisTest.copy(".backup");
// append a line to A/mu in the first working copy
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
String muContent = thisTest.getWc().getItemContent("A/mu");
muWriter.print("\nOriginal appended text for mu");
muContent = muContent + "\nOriginal appended text for mu";
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu", muContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
// append a line to A/D/G/rho in the first working copy
File rho = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
PrintWriter rhoWriter =
new PrintWriter(new FileOutputStream(rho, true));
String rhoContent = thisTest.getWc().getItemContent("A/D/G/rho");
rhoWriter.print("\nOriginal appended text for rho");
rhoContent = rhoContent + "\nOriginal appended text for rho";
rhoWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
thisTest.getWc().setItemContent("A/D/G/rho", rhoContent);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes in the first working copy
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg",
true),
2);
// test the status of the working copy after the commit
thisTest.checkStatus();
// append a different line to A/mu in the backup working copy
mu = new File(backupTest.getWorkingCopy(), "A/mu");
muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("\nConflicting appended text for mu");
muContent = "<<<<<<< .mine\nThis is the file 'mu'.\n"+
"Conflicting appended text for mu=======\n"+
"This is the file 'mu'.\n"+
"Original appended text for mu>>>>>>> .r2";
muWriter.close();
backupTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
backupTest.getWc().setItemContent("A/mu", muContent);
backupTest.getWc().setItemTextStatus("A/mu", Status.Kind.conflicted);
backupTest.getWc().addItem("A/mu.r1", "");
backupTest.getWc().setItemNodeKind("A/mu.r1", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/mu.r1",
Status.Kind.unversioned);
backupTest.getWc().addItem("A/mu.r2", "");
backupTest.getWc().setItemNodeKind("A/mu.r2", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/mu.r2",
Status.Kind.unversioned);
backupTest.getWc().addItem("A/mu.mine", "");
backupTest.getWc().setItemNodeKind("A/mu.mine", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/mu.mine",
Status.Kind.unversioned);
// append a different line to A/D/G/rho in the backup working copy
rho = new File(backupTest.getWorkingCopy(), "A/D/G/rho");
rhoWriter = new PrintWriter(new FileOutputStream(rho, true));
rhoWriter.print("\nConflicting appended text for rho");
rhoContent = "<<<<<<< .mine\nThis is the file 'rho'.\n"+
"Conflicting appended text for rho=======\n"+
"his is the file 'rho'.\n"+
"Original appended text for rho>>>>>>> .r2";
rhoWriter.close();
backupTest.getWc().setItemWorkingCopyRevision("A/D/G/rho", 2);
backupTest.getWc().setItemContent("A/D/G/rho", rhoContent);
backupTest.getWc().setItemTextStatus("A/D/G/rho",
Status.Kind.conflicted);
backupTest.getWc().addItem("A/D/G/rho.r1", "");
backupTest.getWc().setItemNodeKind("A/D/G/rho.r1", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/D/G/rho.r1",
Status.Kind.unversioned);
backupTest.getWc().addItem("A/D/G/rho.r2", "");
backupTest.getWc().setItemNodeKind("A/D/G/rho.r2", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/D/G/rho.r2",
Status.Kind.unversioned);
backupTest.getWc().addItem("A/D/G/rho.mine", "");
backupTest.getWc().setItemNodeKind("A/D/G/rho.mine", NodeKind.unknown);
backupTest.getWc().setItemTextStatus("A/D/G/rho.mine",
Status.Kind.unversioned);
// update the backup working copy from the repository
assertEquals("wrong revision number from update",
client.update(backupTest.getWCPath(), null, true),
2);
// check the status of the backup working copy
backupTest.checkStatus();
// flag A/mu as resolved
client.resolved(backupTest.getWCPath()+"/A/mu", false);
backupTest.getWc().setItemTextStatus("A/mu", Status.Kind.modified);
backupTest.getWc().removeItem("A/mu.r1");
backupTest.getWc().removeItem("A/mu.r2");
backupTest.getWc().removeItem("A/mu.mine");
// flag A/D/G/rho as resolved
client.resolved(backupTest.getWCPath()+"/A/D/G/rho", false);
backupTest.getWc().setItemTextStatus("A/D/G/rho",
Status.Kind.modified);
backupTest.getWc().removeItem("A/D/G/rho.r1");
backupTest.getWc().removeItem("A/D/G/rho.r2");
backupTest.getWc().removeItem("A/D/G/rho.mine");
// check the status after the conflicts are flaged as resolved
backupTest.checkStatus();
}
/**
* Test the basic SVNClient.cleanup functionality.
* @throws Throwable
*/
public void testBasicCleanup() throws Throwable
{
// create a test working copy
OneTest thisTest = new OneTest();
// create a lock file in A/B
File adminLock = new File(thisTest.getWorkingCopy(),"A/B/" +
getAdminDirectoryName() + "/lock");
PrintWriter pw = new PrintWriter(new FileOutputStream(adminLock));
pw.print("stop looking!");
pw.close();
thisTest.getWc().setItemIsLocked("A/B", true);
// create a lock file in A/D/G
adminLock = new File(thisTest.getWorkingCopy(),"A/D/G/" +
getAdminDirectoryName() + "/lock");
pw = new PrintWriter(new FileOutputStream(adminLock));
pw.print("stop looking!");
pw.close();
thisTest.getWc().setItemIsLocked("A/D/G", true);
// create a lock file in A/C
adminLock = new File(thisTest.getWorkingCopy(),"A/C/" +
getAdminDirectoryName() + "/lock");
pw = new PrintWriter(new FileOutputStream(adminLock));
pw.print("stop looking!");
pw.close();
thisTest.getWc().setItemIsLocked("A/C", true);
// test the status of the working copy
thisTest.checkStatus();
// run cleanup
client.cleanup(thisTest.getWCPath());
thisTest.getWc().setItemIsLocked("A/B", false);
thisTest.getWc().setItemIsLocked("A/D/G", false);
thisTest.getWc().setItemIsLocked("A/C", false);
// test the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.revert functionality.
* @throws Throwable
*/
public void testBasicRevert() throws Throwable
{
// create a test working copy
OneTest thisTest = new OneTest();
// modify A/B/E/beta
File file = new File(thisTest.getWorkingCopy(), "A/B/E/beta");
PrintWriter pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("Added some text to 'beta'.");
pw.close();
thisTest.getWc().setItemTextStatus("A/B/E/beta", Status.Kind.modified);
// modify iota
file = new File(thisTest.getWorkingCopy(), "iota");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("Added some text to 'iota'.");
pw.close();
thisTest.getWc().setItemTextStatus("iota", Status.Kind.modified);
// modify A/D/G/rho
file = new File(thisTest.getWorkingCopy(), "A/D/G/rho");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("Added some text to 'rho'.");
pw.close();
thisTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.modified);
// create new file A/D/H/zeta and add it to subversion
file = new File(thisTest.getWorkingCopy(), "A/D/H/zeta");
pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("Added some text to 'zeta'.");
pw.close();
thisTest.getWc().addItem("A/D/H/zeta", "Added some text to 'zeta'.");
thisTest.getWc().setItemTextStatus("A/D/H/zeta", Status.Kind.added);
client.add(file.getAbsolutePath(), false);
// test the status of the working copy
thisTest.checkStatus();
// revert the changes
client.revert(thisTest.getWCPath()+"/A/B/E/beta", false);
thisTest.getWc().setItemTextStatus("A/B/E/beta", Status.Kind.normal);
client.revert(thisTest.getWCPath()+"/iota", false);
thisTest.getWc().setItemTextStatus("iota", Status.Kind.normal);
client.revert(thisTest.getWCPath()+"/A/D/G/rho", false);
thisTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.normal);
client.revert(thisTest.getWCPath()+"/A/D/H/zeta", false);
thisTest.getWc().setItemTextStatus("A/D/H/zeta",
Status.Kind.unversioned);
thisTest.getWc().setItemNodeKind("A/D/H/zeta", NodeKind.unknown);
// test the status of the working copy
thisTest.checkStatus();
// delete A/B/E/beta and revert the change
file = new File(thisTest.getWorkingCopy(), "A/B/E/beta");
file.delete();
client.revert(file.getAbsolutePath(), false);
// resurected file should not be readonly
assertTrue("reverted file is not readonly",
file.canWrite()&& file.canRead());
// test the status of the working copy
thisTest.checkStatus();
// create & add the directory X
client.mkdir(new String[] {thisTest.getWCPath()+"/X"}, null);
thisTest.getWc().addItem("X", null);
thisTest.getWc().setItemTextStatus("X", Status.Kind.added);
// test the status of the working copy
thisTest.checkStatus();
// remove & revert X
removeDirOrFile(new File(thisTest.getWorkingCopy(), "X"));
client.revert(thisTest.getWCPath()+"/X", false);
thisTest.getWc().removeItem("X");
// test the status of the working copy
thisTest.checkStatus();
// delete the directory A/B/E
client.remove(new String[] {thisTest.getWCPath()+"/A/B/E"}, null, true);
removeDirOrFile(new File(thisTest.getWorkingCopy(), "A/B/E"));
thisTest.getWc().setItemTextStatus("A/B/E", Status.Kind.deleted);
thisTest.getWc().removeItem("A/B/E/alpha");
thisTest.getWc().removeItem("A/B/E/beta");
// test the status of the working copy
thisTest.checkStatus();
// revert A/B/E -> this will not resurect it
client.revert(thisTest.getWCPath()+"/A/B/E", true);
// test the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.switch functionality.
* @throws Throwable
*/
public void testBasicSwitch() throws Throwable
{
// create the test working copy
OneTest thisTest = new OneTest();
// switch iota to A/D/gamma
String iotaPath = thisTest.getWCPath() + "/iota";
String gammaUrl = thisTest.getUrl() + "/A/D/gamma";
thisTest.getWc().setItemContent("iota",
greekWC.getItemContent("A/D/gamma"));
thisTest.getWc().setItemIsSwitched("iota", true);
client.doSwitch(iotaPath, gammaUrl, null, true);
// check the status of the working copy
thisTest.checkStatus();
// switch A/D/H to /A/D/G
String adhPath = thisTest.getWCPath() + "/A/D/H";
String adgURL = thisTest.getUrl() + "/A/D/G";
thisTest.getWc().setItemIsSwitched("A/D/H",true);
thisTest.getWc().removeItem("A/D/H/chi");
thisTest.getWc().removeItem("A/D/H/omega");
thisTest.getWc().removeItem("A/D/H/psi");
thisTest.getWc().addItem("A/D/H/pi",
thisTest.getWc().getItemContent("A/D/G/pi"));
thisTest.getWc().addItem("A/D/H/rho",
thisTest.getWc().getItemContent("A/D/G/rho"));
thisTest.getWc().addItem("A/D/H/tau",
thisTest.getWc().getItemContent("A/D/G/tau"));
client.doSwitch(adhPath, adgURL, null, true);
// check the status of the working copy
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.remove functionality.
* @throws Throwable
*/
public void testBasicDelete() throws Throwable
{
// create the test working copy
OneTest thisTest = new OneTest();
// modify A/D/H/chi
File file = new File(thisTest.getWorkingCopy(), "A/D/H/chi");
PrintWriter pw = new PrintWriter(new FileOutputStream(file, true));
pw.print("added to chi");
pw.close();
thisTest.getWc().setItemTextStatus("A/D/H/chi", Status.Kind.modified);
// set a property on A/D/G/rho file
client.propertySet(thisTest.getWCPath()+"/A/D/G/rho", "abc", "def",
true);
thisTest.getWc().setItemPropStatus("A/D/G/rho", Status.Kind.modified);
// set a property on A/B/F directory
client.propertySet(thisTest.getWCPath()+"/A/B/F", "abc", "def", false);
thisTest.getWc().setItemPropStatus("A/B/F", Status.Kind.modified);
// create a unversioned A/C/sigma file
file = new File(thisTest.getWCPath(),"A/C/sigma");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("unversioned sigma");
pw.close();
thisTest.getWc().addItem("A/C/sigma", "unversioned sigma");
thisTest.getWc().setItemTextStatus("A/C/sigma", Status.Kind.unversioned);
thisTest.getWc().setItemNodeKind("A/C/sigma", NodeKind.unknown);
// create unversioned directory A/C/Q
file = new File(thisTest.getWCPath(), "A/C/Q");
file.mkdir();
thisTest.getWc().addItem("A/C/Q", null);
thisTest.getWc().setItemNodeKind("A/C/Q", NodeKind.unknown);
thisTest.getWc().setItemTextStatus("A/C/Q", Status.Kind.unversioned);
// create & add the directory A/B/X
file = new File(thisTest.getWCPath(), "A/B/X");
client.mkdir(new String[] {file.getAbsolutePath()}, null);
thisTest.getWc().addItem("A/B/X", null);
thisTest.getWc().setItemTextStatus("A/B/X", Status.Kind.added);
// create & add the file A/B/X/xi
file = new File(file, "xi");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("added xi");
pw.close();
client.add(file.getAbsolutePath(), false);
thisTest.getWc().addItem("A/B/X/xi", "added xi");
thisTest.getWc().setItemTextStatus("A/B/X/xi", Status.Kind.added);
// create & add the directory A/B/Y
file = new File(thisTest.getWCPath(), "A/B/Y");
client.mkdir(new String[] {file.getAbsolutePath()}, null);
thisTest.getWc().addItem("A/B/Y", null);
thisTest.getWc().setItemTextStatus("A/B/Y", Status.Kind.added);
// test the status of the working copy
thisTest.checkStatus();
// the following removes should all fail without force
try
{
// remove of A/D/H/chi without force should fail, because it is
// modified
client.remove(new String[] {thisTest.getWCPath()+"/A/D/H/chi"},
null, false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/D/H without force should fail, because A/D/H/chi is
// modified
client.remove(new String[] {thisTest.getWCPath()+"/A/D/H"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/D/G/rho without force should fail, because it has
// a new property
client.remove(new String[] {thisTest.getWCPath()+"/A/D/G/rho"},
null, false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/D/G without force should fail, because A/D/G/rho has
// a new property
client.remove(new String[] {thisTest.getWCPath()+"/A/D/G"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/B/F without force should fail, because it has
// a new property
client.remove(new String[] {thisTest.getWCPath()+"/A/B/F"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/B without force should fail, because A/B/F has
// a new property
client.remove(new String[] {thisTest.getWCPath()+"/A/B"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/C/sigma without force should fail, because it is
// unversioned
client.remove(new String[] {thisTest.getWCPath()+"/A/C/sigma"},
null, false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/C without force should fail, because A/C/sigma is
// unversioned
client.remove(new String[] {thisTest.getWCPath()+"/A/C"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
try
{
// remove of A/B/X without force should fail, because it is new
client.remove(new String[] {thisTest.getWCPath()+"/A/B/X"}, null,
false);
fail("missing exception");
}
catch(ClientException expected)
{
}
// check the status of the working copy
thisTest.checkStatus();
// the following removes should all work
client.remove(new String[] {thisTest.getWCPath()+"/A/B/E"}, null,
false);
thisTest.getWc().setItemTextStatus("A/B/E",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/B/E/alpha",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/B/E/beta",Status.Kind.deleted);
client.remove(new String[] {thisTest.getWCPath()+"/A/D/H"}, null, true);
thisTest.getWc().setItemTextStatus("A/D/H",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/chi",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/omega",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/psi",Status.Kind.deleted);
client.remove(new String[] {thisTest.getWCPath()+"/A/D/G"}, null, true);
thisTest.getWc().setItemTextStatus("A/D/G",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/rho",Status.Kind.deleted);
thisTest.getWc().setItemPropStatus("A/D/G/rho", Status.Kind.none);
thisTest.getWc().setItemTextStatus("A/D/G/pi",Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/tau",Status.Kind.deleted);
client.remove(new String[] {thisTest.getWCPath()+"/A/B/F"}, null, true);
thisTest.getWc().setItemTextStatus("A/B/F",Status.Kind.deleted);
thisTest.getWc().setItemPropStatus("A/B/F", Status.Kind.none);
client.remove(new String[] {thisTest.getWCPath()+"/A/C"}, null, true);
thisTest.getWc().setItemTextStatus("A/C",Status.Kind.deleted);
client.remove(new String[] {thisTest.getWCPath()+"/A/B/X"}, null, true);
file = new File(thisTest.getWorkingCopy(), "iota");
file.delete();
client.remove(new String[] {file.getAbsolutePath()}, null, true);
thisTest.getWc().setItemTextStatus("iota",Status.Kind.deleted);
file = new File(thisTest.getWorkingCopy(), "A/D/gamma");
file.delete();
client.remove(new String[] {file.getAbsolutePath()}, null, false);
thisTest.getWc().setItemTextStatus("A/D/gamma",Status.Kind.deleted);
client.remove(new String[] {file.getAbsolutePath()}, null, true);
client.remove(new String[] {thisTest.getWCPath()+"/A/B/E"}, null,
false);
thisTest.getWc().removeItem("A/B/X");
thisTest.getWc().removeItem("A/B/X/xi");
thisTest.getWc().removeItem("A/C/sigma");
thisTest.getWc().removeItem("A/C/Q");
thisTest.checkStatus();
client.remove(new String[] {thisTest.getWCPath()+"/A/D"},null, true);
thisTest.getWc().setItemTextStatus("A/D", Status.Kind.deleted);
thisTest.getWc().removeItem("A/D/Y");
// check the status of the working copy
thisTest.checkStatus();
// confirm that the file are realy deleted
assertFalse("failed to remove text modified file",
new File(thisTest.getWorkingCopy(), "A/D/G/rho").exists());
assertFalse("failed to remove prop modified file",
new File(thisTest.getWorkingCopy(), "A/D/H/chi").exists());
assertFalse("failed to remove unversioned file",
new File(thisTest.getWorkingCopy(), "A/C/sigma").exists());
assertFalse("failed to remove unmodified file",
new File(thisTest.getWorkingCopy(), "A/B/E/alpha").exists());
file = new File(thisTest.getWorkingCopy(),"A/B/F");
assertTrue("removed versioned dir", file.exists()
&& file.isDirectory());
assertFalse("failed to remove unversioned dir",
new File(thisTest.getWorkingCopy(), "A/C/Q").exists());
assertFalse("failed to remove added dir",
new File(thisTest.getWorkingCopy(), "A/B/X").exists());
// delete unversioned file foo
file = new File(thisTest.getWCPath(),"foo");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("unversioned foo");
pw.close();
client.remove(new String[] {file.getAbsolutePath()}, null, true);
assertFalse("failed to remove unversioned file foo", file.exists());
try
{
// delete non-existant file foo
client.remove(new String[] {file.getAbsolutePath()}, null, true);
fail("missing exception");
}
catch(ClientException expected)
{
}
// delete file iota in the repository
addExpectedCommitItem(null, thisTest.getUrl(), "iota", NodeKind.none,
CommitItemStateFlags.Delete);
client.remove(new String[] {thisTest.getUrl()+"/iota"},
"delete iota URL", false);
}
public void testBasicCheckoutDeleted() throws Throwable
{
// create working copy
OneTest thisTest = new OneTest();
// delete A/D and its content
client.remove(new String[] {thisTest.getWCPath()+"/A/D"}, null, true);
thisTest.getWc().setItemTextStatus("A/D", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/rho", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/pi", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/G/tau", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/chi", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/psi", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/H/omega", Status.Kind.deleted);
thisTest.getWc().setItemTextStatus("A/D/gamma", Status.Kind.deleted);
// check the working copy status
thisTest.checkStatus();
// commit the change
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D", NodeKind.dir,
CommitItemStateFlags.Delete);
assertEquals("wrong revision from commit",
client.commit(new String[]{thisTest.getWCPath()}, "log message",
true),2);
thisTest.getWc().removeItem("A/D");
thisTest.getWc().removeItem("A/D/G");
thisTest.getWc().removeItem("A/D/G/rho");
thisTest.getWc().removeItem("A/D/G/pi");
thisTest.getWc().removeItem("A/D/G/tau");
thisTest.getWc().removeItem("A/D/H");
thisTest.getWc().removeItem("A/D/H/chi");
thisTest.getWc().removeItem("A/D/H/psi");
thisTest.getWc().removeItem("A/D/H/omega");
thisTest.getWc().removeItem("A/D/gamma");
// check the working copy status
thisTest.checkStatus();
// check out the previous revision
client.checkout(thisTest.getUrl()+"/A/D", thisTest.getWCPath()+"/new_D",
new Revision.Number(1), true);
}
/**
* Test if Subversion will detect the change of a file to a
* direcory.
* @throws Throwable
*/
public void testBasicNodeKindChange() throws Throwable
{
// create working copy
OneTest thisTest = new OneTest();
// remove A/D/gamma
client.remove(new String[] {thisTest.getWCPath()+"/A/D/gamma"}, null,
false);
thisTest.getWc().setItemTextStatus("A/D/gamma", Status.Kind.deleted);
// check the working copy status
thisTest.checkStatus();
try
{
// creating a directory in the place of the deleted file should
// fail
client.mkdir(new String[] {thisTest.getWCPath()+"/A/D/gamma"},
null);
fail("can change node kind");
}
catch(ClientException e)
{
}
// check the working copy status
thisTest.checkStatus();
// commit the deletion
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/D/gamma", NodeKind.file,
CommitItemStateFlags.Delete);
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},"log message",
true), 2);
thisTest.getWc().removeItem("A/D/gamma");
// check the working copy status
thisTest.checkStatus();
try
{
// creating a directory in the place of the deleted file should
// still fail
client.mkdir(
new String[] {thisTest.getWCPath()+"/A/D/gamma"}, null);
fail("can change node kind");
}
catch(ClientException e)
{
}
// check the working copy status
thisTest.checkStatus();
// update the working copy
client.update(thisTest.getWCPath(), null, true);
// check the working copy status
thisTest.checkStatus();
// now creating the directory should succeed
client.mkdir(new String[] {thisTest.getWCPath()+"/A/D/gamma"}, null);
thisTest.getWc().addItem("A/D/gamma", null);
thisTest.getWc().setItemTextStatus("A/D/gamma", Status.Kind.added);
// check the working copy status
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.import functionality.
* @throws Throwable
*/
public void testBasicImport() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// create new_file
File file = new File(thisTest.getWCPath(),"new_file");
PrintWriter pw = new PrintWriter(new FileOutputStream(file));
pw.print("some text");
pw.close();
// import new_file info dirA/dirB/newFile
addExpectedCommitItem(thisTest.getWCPath(),
null, "new_file", NodeKind.none, CommitItemStateFlags.Add);
client.doImport(file.getAbsolutePath(),
thisTest.getUrl()+"/dirA/dirB/new_file",
"log message for new import", true);
// delete new_file
file.delete();
// update the working
assertEquals("wrong revision from update",
client.update(thisTest.getWCPath(), null, true),2);
thisTest.getWc().addItem("dirA", null);
thisTest.getWc().setItemWorkingCopyRevision("dirA",2);
thisTest.getWc().addItem("dirA/dirB", null);
thisTest.getWc().setItemWorkingCopyRevision("dirA/dirB",2);
thisTest.getWc().addItem("dirA/dirB/new_file", "some text");
thisTest.getWc().setItemWorkingCopyRevision("dirA/dirB/new_file",2);
// test the working copy status
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.fileContent functionality.
* @throws Throwable
*/
public void testBasicCat() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// modify A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter pw = new PrintWriter(new FileOutputStream(mu, true));
pw.print("some text");
pw.close();
// get the content from the repository
byte[] content = client.fileContent(thisTest.getWCPath()+"/A/mu", null);
byte[] testContent = thisTest.getWc().getItemContent("A/mu").getBytes();
// the content should be the same
assertTrue("content changed", Arrays.equals(content, testContent));
}
/**
* Test the basic SVNClient.fileContent functionality.
* @throws Throwable
*/
public void testBasicCatStream() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// modify A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter pw = new PrintWriter(new FileOutputStream(mu, true));
pw.print("some text");
pw.close();
// get the content from the repository
ByteArrayOutputStream baos = new ByteArrayOutputStream();
client.streamFileContent(thisTest.getWCPath() + "/A/mu", null, null,
100, baos);
byte[] content = baos.toByteArray();
byte[] testContent = thisTest.getWc().getItemContent("A/mu").getBytes();
// the content should be the same
assertTrue("content changed", Arrays.equals(content, testContent));
}
/**
* Test the basic SVNClient.list functionality.
* @throws Throwable
*/
public void testBasicLs() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// list the repository root dir
DirEntry[] entries = client.list(thisTest.getWCPath(), null, false);
thisTest.getWc().check(entries, "", false);
// list directory A
entries = client.list(thisTest.getWCPath() + "/A", null, false);
thisTest.getWc().check(entries, "A", false);
// list directory A in BASE revision
entries = client.list(thisTest.getWCPath() + "/A", Revision.BASE,
false);
thisTest.getWc().check(entries, "A", false);
// list file A/mu
entries = client.list(thisTest.getWCPath() + "/A/mu", null, false);
thisTest.getWc().check(entries, "A/mu");
}
/**
* Test the basis SVNClient.add functionality with files that
* should be ignored.
* @throws Throwable
*/
public void testBasicAddIgnores() throws Throwable
{
// create working copy
OneTest thisTest = new OneTest();
// create dir
File dir = new File(thisTest.getWorkingCopy(), "dir");
dir.mkdir();
// create dir/foo.c
File fileC = new File(dir, "foo.c");
new FileOutputStream(fileC).close();
// create dir/foo.o (should be ignored)
File fileO = new File(dir, "foo.o");
new FileOutputStream(fileO).close();
// add dir
client.add(dir.getAbsolutePath(), true);
thisTest.getWc().addItem("dir", null);
thisTest.getWc().setItemTextStatus("dir",Status.Kind.added);
thisTest.getWc().addItem("dir/foo.c", "");
thisTest.getWc().setItemTextStatus("dir/foo.c",Status.Kind.added);
thisTest.getWc().addItem("dir/foo.o", "");
thisTest.getWc().setItemTextStatus("dir/foo.o",Status.Kind.ignored);
thisTest.getWc().setItemNodeKind("dir/foo.o", NodeKind.unknown);
// test the working copy status
thisTest.checkStatus();
}
/**
* Test the basis SVNClient.import functionality with files that
* should be ignored.
* @throws Throwable
*/
public void testBasicImportIgnores() throws Throwable
{
// create working copy
OneTest thisTest = new OneTest();
// create dir
File dir = new File(thisTest.getWorkingCopy(), "dir");
dir.mkdir();
// create dir/foo.c
File fileC = new File(dir, "foo.c");
new FileOutputStream(fileC).close();
// create dir/foo.o (should be ignored)
File fileO = new File(dir, "foo.o");
new FileOutputStream(fileO).close();
// import dir
addExpectedCommitItem(thisTest.getWCPath(),
null, "dir", NodeKind.none, CommitItemStateFlags.Add);
client.doImport(dir.getAbsolutePath(), thisTest.getUrl()+"/dir",
"log message for import", true);
// remove dir
removeDirOrFile(dir);
// udpate the working copy
assertEquals("wrong revision from update", 2,
client.update(thisTest.getWCPath(), null, true));
thisTest.getWc().addItem("dir", null);
thisTest.getWc().addItem("dir/foo.c", "");
// test the working copy status
thisTest.checkStatus();
}
/**
* Test the basic SVNClient.info functionality.
* @throws Throwable
*/
public void testBasicInfo() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// get the item information and test it
Info info = client.info(thisTest.getWCPath()+"/A/mu");
assertEquals("wrong revision from info", 1,
info.getLastChangedRevision());
assertEquals("wrong schedule kind from info", ScheduleKind.normal,
info.getSchedule());
assertEquals("wrong node kind from info", NodeKind.file,
info.getNodeKind());
}
/**
* Test the basic SVNClient.logMessages functionality.
* @throws Throwable
*/
public void testBasicLogMessage() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
// get the commit message of the initial import and test it
LogMessage lm[] = client.logMessages(thisTest.getWCPath(), null,
null, false, true);
assertEquals("wrong number of objects", 1, lm.length);
assertEquals("wrong message", "Log Message", lm[0].getMessage());
assertEquals("wrong revision", 1, lm[0].getRevisionNumber());
assertEquals("wrong user", "jrandom", lm[0].getAuthor());
assertNotNull("changed paths set", lm[0].getChangedPaths());
ChangePath cp[] = lm[0].getChangedPaths();
assertEquals("wrong number of chang pathes", 20, cp.length);
assertEquals("wrong path", "/A", cp[0].getPath());
assertEquals("wrong copy source rev", -1, cp[0].getCopySrcRevision());
assertNull("wrong copy source path", cp[0].getCopySrcPath());
assertEquals("wrong action", 'A', cp[0].getAction());
assertEquals("wrong time with getTimeMicros()",
lm[0].getTimeMicros()/1000,
lm[0].getDate().getTime());
assertEquals("wrong time with getTimeMillis()",
lm[0].getTimeMillis(),
lm[0].getDate().getTime());
assertEquals("wrong date with getTimeMicros()",
lm[0].getDate(),
new java.util.Date(lm[0].getTimeMicros()/1000));
assertEquals("wrong date with getTimeMillis()",
lm[0].getDate(),
new java.util.Date(lm[0].getTimeMillis()));
}
/**
* Test the basic SVNClient.getVersionInfo functionality.
* @throws Throwable
* @since 1.2
*/
public void testBasicVersionInfo() throws Throwable
{
// create the working copy
OneTest thisTest = new OneTest();
assertEquals("wrong version info",
"1",
client.getVersionInfo(thisTest.getWCPath(), null, false));
}
/**
* Test the basic SVNClient locking functionality.
* @throws Throwable
* @since 1.2
*/
public void testBasicLocking() throws Throwable
{
// build the first working copy
OneTest thisTest = new OneTest();
client.propertySet(thisTest.getWCPath()+"/A/mu",
PropertyData.NEEDS_LOCK, "*", false);
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
CommitItemStateFlags.PropMods);
assertEquals("bad revision number on commit", 2,
client.commit(new String[] {thisTest.getWCPath()},
"message", true));
File f = new File(thisTest.getWCPath()+"/A/mu");
assertEquals("file should be read only now", false, f.canWrite());
client.lock(new String[] {thisTest.getWCPath()+"/A/mu"},
"comment", false);
assertEquals("file should be read write now", true, f.canWrite());
client.unlock(new String[]{thisTest.getWCPath()+"/A/mu"}, false);
assertEquals("file should be read only now", false, f.canWrite());
client.lock(new String[]{thisTest.getWCPath()+"/A/mu"},
"comment", false);
assertEquals("file should be read write now", true, f.canWrite());
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
0);
assertEquals("rev number from commit", -1,
client.commit(new String[]{thisTest.getWCPath()},
"message", true));
assertEquals("file should be read write now", true, f.canWrite());
try
{
// Attempt to lock an invalid path
client.lock(new String[]{thisTest.getWCPath()+"/A/mu2"}, "comment",
false);
fail("missing exception");
}
catch (ClientException expected)
{
}
}
/**
* Test the basic SVNClient.info2 functionality.
* @throws Throwable
* @since 1.2
*/
public void testBasicInfo2() throws Throwable
{
// build the first working copy
OneTest thisTest = new OneTest();
final String failureMsg = "Incorrect number of info objects";
Info2[] infos = client.info2(thisTest.getWCPath(), null, null, false);
assertEquals(failureMsg, 1, infos.length);
infos = client.info2(thisTest.getWCPath(), null, null, true);
assertEquals(failureMsg, 21, infos.length);
for (int i = 0; i < infos.length; i++)
{
Info2 info = infos[i];
assertNull("Unexpected changelist present",
info.getChangelistName());
boolean isFile = info.getKind() == NodeKind.file;
assertTrue("Unexpected working file size " + info.getWorkingSize()
+ " for '" + info + '\'',
(isFile ? info.getWorkingSize() > -1 :
info.getWorkingSize() == -1));
// We shouldn't know the repository file size when only
// examining the WC.
assertEquals("Unexpected repos file size for '" + info + '\'',
-1, info.getReposSize());
// Examine depth
assertEquals(Depth.infinity, info.getDepth());
}
// Create wc with a depth of Depth.empty
String secondWC = thisTest.getWCPath() + ".empty";
removeDirOrFile(new File(secondWC));
client.checkout(thisTest.getUrl(), secondWC, null, null, Depth.empty,
false, true);
infos = client.info2(secondWC, null, null, false);
// Examine that depth is Depth.empty
assertEquals(Depth.empty, infos[0].getDepth());
}
/**
* Test basic changelist functionality.
* @throws Throwable
* @since 1.5
*/
public void testBasicChangelist() throws Throwable
{
// build the working copy
OneTest thisTest = new OneTest();
String changelistName = "changelist1";
String[] changelists = new String[] { changelistName };
MyChangelistCallback clCallback = new MyChangelistCallback();
String[] paths = new String[]
{thisTest.getWCPath() + "/iota"};
// Add a path to a changelist, and check to see if it got added
client.addToChangelist(paths, changelistName, Depth.infinity, null);
String[] cl = new String[1];
client.getChangelists(thisTest.getWCPath(), changelists,
Depth.infinity, clCallback);
cl[0] = (String) clCallback.get(paths[0]).get(0);
assertTrue(java.util.Arrays.equals(cl, changelists));
// Does status report this changelist?
Status[] status = client.status(paths[0], false, false, false, false,
false);
assertEquals(status[0].getChangelist(), changelistName);
// Remove the path from the changelist, and check to see if the path is
// actually removed.
client.removeFromChangelists(paths, Depth.infinity, changelists);
clCallback.clear();
client.getChangelists(thisTest.getWCPath(), changelists,
Depth.infinity, clCallback);
assertTrue(clCallback.isEmpty());
}
/**
* Helper method for testing mergeinfo retrieval. Assumes
* that <code>targetPath</code> has both merge history and
* available merges.
* @param expectedMergedStart The expected start revision from the
* merge history for <code>mergeSrc</code>.
* @param expectedMergedEnd The expected end revision from the
* merge history for <code>mergeSrc</code>.
* @param expectedAvailableStart The expected start available revision
* from the merge history for <code>mergeSrc</code>. Zero if no need
* to test the available range.
* @param expectedAvailableEnd The expected end available revision
* from the merge history for <code>mergeSrc</code>.
* @param targetPath The path for which to acquire mergeinfo.
* @param mergeSrc The URL from which to consider merges.
*/
private void acquireMergeinfoAndAssertEquals(long expectedMergeStart,
long expectedMergeEnd,
long expectedAvailableStart,
long expectedAvailableEnd,
String targetPath,
String mergeSrc)
throws SubversionException
{
// Verify expected merge history.
Mergeinfo mergeInfo = client.getMergeinfo(targetPath, Revision.HEAD);
assertNotNull("Missing merge info on '" + targetPath + '\'',
mergeInfo);
List ranges = mergeInfo.getRevisions(mergeSrc);
assertTrue("Missing merge info for source '" + mergeSrc + "' on '" +
targetPath + '\'', ranges != null && !ranges.isEmpty());
RevisionRange range = (RevisionRange) ranges.get(0);
String expectedMergedRevs = expectedMergeStart + "-" + expectedMergeEnd;
assertEquals("Unexpected first merged revision range for '" +
mergeSrc + "' on '" + targetPath + '\'',
expectedMergedRevs, range.toString());
// Verify expected available merges.
if (expectedAvailableStart > 0)
{
long[] availableRevs =
getMergeinfoRevisions(MergeinfoLogKind.eligible, targetPath,
Revision.HEAD, mergeSrc,
Revision.HEAD);
assertNotNull("Missing eligible merge info on '"+targetPath + '\'',
availableRevs);
assertExpectedMergeRange(expectedAvailableStart,
expectedAvailableEnd, availableRevs);
}
}
/**
* Calls the API to get mergeinfo revisions and returns
* the revision numbers in a sorted array, or null if there
* are no revisions to return.
* @since 1.5
*/
private long[] getMergeinfoRevisions(int kind, String pathOrUrl,
Revision pegRevision,
String mergeSourceUrl,
Revision srcPegRevision) {
class Callback implements LogMessageCallback {
List revList = new ArrayList();
public void singleMessage(ChangePath[] changedPaths, long revision,
Map revprops, boolean hasChildren) {
revList.add(new Long(revision));
}
public long[] getRevisions() {
long[] revisions = new long[revList.size()];
int i = 0;
for (Iterator iter = revList.iterator(); iter.hasNext();) {
Long revision = (Long) iter.next();
revisions[i] = revision.longValue();
i++;
}
return revisions;
}
}
try {
Callback callback = new Callback();
client.getMergeinfoLog(kind, pathOrUrl, pegRevision, mergeSourceUrl,
srcPegRevision, false, null, callback);
return callback.getRevisions();
} catch (ClientException e) {
return null;
}
}
/**
* Append the text <code>toAppend</code> to the WC file at
* <code>path</code>, and update the expected WC state
* accordingly.
*
* @param thisTest The test whose expected WC to tweak.
* @param path The working copy-relative path to change.
* @param toAppend The text to append to <code>path</code>.
* @param rev The expected revision number for thisTest's WC.
* @return The file created during the setup.
* @since 1.5
*/
private File appendText(OneTest thisTest, String path, String toAppend,
int rev)
throws FileNotFoundException
{
File f = new File(thisTest.getWorkingCopy(), path);
PrintWriter writer = new PrintWriter(new FileOutputStream(f, true));
writer.print(toAppend);
writer.close();
if (rev > 0)
{
WC wc = thisTest.getWc();
wc.setItemWorkingCopyRevision(path, rev);
wc.setItemContent(path, wc.getItemContent(path) + toAppend);
}
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(), path,
NodeKind.file, CommitItemStateFlags.TextMods);
return f;
}
/**
* Test the basic functionality of SVNClient.merge().
* @throws Throwable
* @since 1.2
*/
public void testBasicMerge() throws Throwable
{
OneTest thisTest = setupAndPerformMerge();
// Verify that there are now potential merge sources.
String[] suggestedSrcs =
client.suggestMergeSources(thisTest.getWCPath() + "/branches/A",
Revision.WORKING);
assertNotNull(suggestedSrcs);
assertEquals(1, suggestedSrcs.length);
// Test that getMergeinfo() returns null.
assertNull(client.getMergeinfo(new File(thisTest.getWCPath(), "A")
.toString(), Revision.HEAD));
// Merge and commit some changes (r4).
appendText(thisTest, "A/mu", "xxx", 4);
appendText(thisTest, "A/D/G/rho", "yyy", 4);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
4);
// Add a "begin merge" notification handler.
final Revision[] actualRange = new Revision[2];
Notify2 notify = new Notify2()
{
public void onNotify(NotifyInformation info)
{
if (info.getAction() == NotifyAction.merge_begin)
{
RevisionRange r = info.getMergeRange();
actualRange[0] = r.getFromRevision();
actualRange[1] = r.getToRevision();
}
}
};
client.notification2(notify);
// merge changes in A to branches/A
String branchPath = thisTest.getWCPath() + "/branches/A";
String modUrl = thisTest.getUrl() + "/A";
// test --dry-run
client.merge(modUrl, new Revision.Number(2), modUrl, Revision.HEAD,
branchPath, false, true, false, true);
assertEquals("Notification of beginning of merge reported incorrect " +
"start revision", new Revision.Number(2), actualRange[0]);
assertEquals("Notification of beginning of merge reported incorrect " +
"end revision", new Revision.Number(4), actualRange[1]);
// now do the real merge
client.merge(modUrl, new Revision.Number(2), modUrl, Revision.HEAD,
branchPath, false, true, false, false);
assertEquals("Notification of beginning of merge reported incorrect " +
"start revision", new Revision.Number(2), actualRange[0]);
assertEquals("Notification of beginning of merge reported incorrect " +
"end revision", new Revision.Number(4), actualRange[1]);
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A", NodeKind.dir,
CommitItemStateFlags.PropMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 5);
// Merge and commit some more changes (r6).
appendText(thisTest, "A/mu", "xxxr6", 6);
appendText(thisTest, "A/D/G/rho", "yyyr6", 6);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
6);
// Test retrieval of mergeinfo from a WC path.
String targetPath =
new File(thisTest.getWCPath(), "branches/A/mu").getPath();
final String mergeSrc = thisTest.getUrl() + "/A/mu";
acquireMergeinfoAndAssertEquals(2, 4, 6, 6, targetPath, mergeSrc);
// Test retrieval of mergeinfo from the repository.
targetPath = thisTest.getUrl() + "/branches/A/mu";
acquireMergeinfoAndAssertEquals(2, 4, 6, 6, targetPath, mergeSrc);
}
/**
* Test merge with automatic source and revision determination
* (e.g. 'svn merge -g').
* @throws Throwable
* @since 1.5
*/
public void testMergeUsingHistory() throws Throwable
{
OneTest thisTest = setupAndPerformMerge();
// Test that getMergeinfo() returns null.
assertNull(client.getMergeinfo(new File(thisTest.getWCPath(), "A")
.toString(), Revision.HEAD));
// Merge and commit some changes (r4).
appendText(thisTest, "A/mu", "xxx", 4);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
4);
String branchPath = thisTest.getWCPath() + "/branches/A";
String modUrl = thisTest.getUrl() + "/A";
Revision unspec = new Revision(RevisionKind.unspecified);
client.merge(modUrl, Revision.HEAD,
new RevisionRange[] { new RevisionRange(unspec, unspec) },
branchPath, true, Depth.infinity, false, false, false);
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A", NodeKind.dir,
CommitItemStateFlags.PropMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 5);
}
/**
* Test reintegrating a branch with trunk
* (e.g. 'svn merge --reintegrate').
* @throws Throwable
* @since 1.5
*/
public void testMergeReintegrate() throws Throwable
{
OneTest thisTest = setupAndPerformMerge();
// Test that getMergeinfo() returns null.
assertNull(client.getMergeinfo(new File(thisTest.getWCPath(), "A")
.toString(), Revision.HEAD));
// Merge and commit some changes to main (r4).
appendText(thisTest, "A/mu", "xxx", 4);
assertEquals("wrong revision number from main commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
4);
// Merge and commit some changes to branch (r5).
appendText(thisTest, "branches/A/D/G/rho", "yyy", -1);
assertEquals("wrong revision number from branch commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
5);
// update the branch WC (to r5) before merge
client.update(thisTest.getWCPath() + "/branches", Revision.HEAD, true);
String branchPath = thisTest.getWCPath() + "/branches/A";
String modUrl = thisTest.getUrl() + "/A";
Revision unspec = new Revision(RevisionKind.unspecified);
client.merge(modUrl, Revision.HEAD,
new RevisionRange[] { new RevisionRange(unspec, unspec) },
branchPath, true, Depth.infinity, false, false, false);
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A", NodeKind.dir,
CommitItemStateFlags.PropMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A/mu", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 6);
// now we --reintegrate the branch with main
String branchUrl = thisTest.getUrl() + "/branches/A";
try
{
client.mergeReintegrate(branchUrl, Revision.HEAD,
thisTest.getWCPath() + "/A", false);
fail("reintegrate merged into a mixed-revision WC");
}
catch(ClientException e)
{
// update the WC (to r6) and try again
client.update(thisTest.getWCPath(), Revision.HEAD, true);
client.mergeReintegrate(branchUrl, Revision.HEAD,
thisTest.getWCPath() + "/A", false);
}
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A", NodeKind.dir,
CommitItemStateFlags.PropMods);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/G/rho", NodeKind.file,
CommitItemStateFlags.TextMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 7);
}
/**
* Test automatic merge conflict resolution.
* @throws Throwable
* @since 1.5
*/
public void testMergeConflictResolution() throws Throwable
{
// Add a conflict resolution callback which always chooses the
// user's version of a conflicted file.
client.setConflictResolver(new ConflictResolverCallback()
{
public ConflictResult resolve(ConflictDescriptor descrip)
{
return new ConflictResult(ConflictResult.chooseTheirsFull,
null);
}
});
OneTest thisTest = new OneTest();
String originalContents = thisTest.getWc().getItemContent("A/mu");
String expectedContents = originalContents + "xxx";
// Merge and commit a change (r2).
File mu = appendText(thisTest, "A/mu", "xxx", 2);
assertEquals("wrong revision number from commit", 2,
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true));
// Backdate the WC to the previous revision (r1).
client.update(thisTest.getWCPath(), Revision.getInstance(1), true);
// Prep for a merge conflict by changing A/mu in a different
// way.
mu = appendText(thisTest, "A/mu", "yyy", 1);
// Merge in the previous changes to A/mu (from r2).
RevisionRange[] ranges = new RevisionRange[1];
ranges[0] = new RevisionRange(new Revision.Number(1),
new Revision.Number(2));
client.merge(thisTest.getUrl(), Revision.HEAD, ranges,
thisTest.getWCPath(), false, Depth.infinity, false,
false, false);
assertFileContentsEquals("Unexpected conflict resolution",
expectedContents, mu);
}
/**
* Test merge --record-only
* @throws Throwable
* @since 1.5
*/
public void testRecordOnlyMerge() throws Throwable
{
OneTest thisTest = setupAndPerformMerge();
// Verify that there are now potential merge sources.
String[] suggestedSrcs =
client.suggestMergeSources(thisTest.getWCPath() + "/branches/A",
Revision.WORKING);
assertNotNull(suggestedSrcs);
assertEquals(1, suggestedSrcs.length);
// Test that getMergeinfo() returns null.
assertNull(client.getMergeinfo(new File(thisTest.getWCPath(), "A")
.toString(), Revision.HEAD));
// Merge and commit some changes (r4).
appendText(thisTest, "A/mu", "xxx", 4);
appendText(thisTest, "A/D/G/rho", "yyy", 4);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true),
4);
// --record-only merge changes in A to branches/A
String branchPath = thisTest.getWCPath() + "/branches/A";
String modUrl = thisTest.getUrl() + "/A";
RevisionRange[] ranges = new RevisionRange[1];
ranges[0] = new RevisionRange(new Revision.Number(2),
new Revision.Number(4));
client.merge(modUrl, Revision.HEAD, ranges,
branchPath, true, Depth.infinity, false, false, true);
// commit the changes so that we can verify merge
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"branches/A", NodeKind.dir,
CommitItemStateFlags.PropMods);
assertEquals("wrong revision number from commit",
client.commit(new String[] { thisTest.getWCPath() },
"log msg", true), 5);
// Test retrieval of mergeinfo from a WC path.
String targetPath =
new File(thisTest.getWCPath(), "branches/A").getPath();
final String mergeSrc = thisTest.getUrl() + "/A";
acquireMergeinfoAndAssertEquals(2, 4, 0, 0, targetPath, mergeSrc);
}
/**
* Setup a test with a WC. In the repository, create a
* "/branches" directory, with a branch of "/A" underneath it.
* Update the WC to reflect these modifications.
* @return This test.
*/
private OneTest setupAndPerformMerge()
throws Exception
{
OneTest thisTest = new OneTest();
// Verify that there are initially no potential merge sources.
String[] suggestedSrcs =
client.suggestMergeSources(thisTest.getWCPath(),
Revision.WORKING);
assertNotNull(suggestedSrcs);
assertEquals(0, suggestedSrcs.length);
// create branches directory in the repository (r2)
addExpectedCommitItem(null, thisTest.getUrl(), "branches",
NodeKind.none, CommitItemStateFlags.Add);
client.mkdir(new String[]{thisTest.getUrl() + "/branches"}, "log_msg");
// copy A to branches (r3)
addExpectedCommitItem(null, thisTest.getUrl(), "branches/A",
NodeKind.none, CommitItemStateFlags.Add);
client.copy(thisTest.getUrl() + "/A", thisTest.getUrl() +
"/branches/A", "create A branch", Revision.HEAD);
// update the WC (to r3) so that it has the branches folder
client.update(thisTest.getWCPath(), Revision.HEAD, true);
return thisTest;
}
/**
* Test the {@link SVNClientInterface.diff()} APIs.
* @since 1.5
*/
public void testDiff()
throws SubversionException, IOException
{
OneTest thisTest = new OneTest(true);
File diffOutput = new File(super.localTmp, thisTest.testName);
final String NL = System.getProperty("line.separator");
final String sepLine =
"===================================================================" + NL;
final String underSepLine =
"___________________________________________________________________" + NL;
final String expectedDiffBody =
"@@ -1 +1 @@" + NL +
"-This is the file 'iota'." + NL +
"\\ No newline at end of file" + NL +
"+This is the file 'mu'." + NL +
"\\ No newline at end of file" + NL;
final String iotaPath = thisTest.getWCPath().replace('\\', '/') + "/iota";
final String wcPath = fileToSVNPath(new File(thisTest.getWCPath()),
false);
// make edits to iota
PrintWriter writer = new PrintWriter(new FileOutputStream(iotaPath));
writer.print("This is the file 'mu'.");
writer.flush();
writer.close();
/*
* This test does tests with and without svn:eol-style set to native
* We will first run all of the tests where this does not matter so
* that they are not run twice.
*/
// Two-path diff of URLs.
String expectedDiffOutput = "Index: iota" + NL + sepLine +
"--- iota\t(.../iota)\t(revision 1)" + NL +
"+++ iota\t(.../A/mu)\t(revision 1)" + NL +
expectedDiffBody;
client.diff(thisTest.getUrl() + "/iota", Revision.HEAD,
thisTest.getUrl() + "/A/mu", Revision.HEAD,
diffOutput.getPath(), false, true, true, false);
assertFileContentsEquals("Unexpected diff output in file '" +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
// Test relativeToDir fails with urls. */
try
{
client.diff(thisTest.getUrl() + "/iota", Revision.HEAD,
thisTest.getUrl() + "/A/mu", Revision.HEAD,
thisTest.getUrl(), diffOutput.getPath(),
Depth.infinity, null, true, true, false);
fail("This test should fail becaus the relativeToDir parameter " +
"does not work with URLs");
}
catch (Exception ignored)
{
}
/* Testing the expected failure when relativeToDir is not a parent
path of the target. */
try
{
client.diff(iotaPath, Revision.BASE, iotaPath, Revision.WORKING,
"/non/existent/path", diffOutput.getPath(),
Depth.infinity, null, true, true, false);
fail("This test should fail because iotaPath is not a child of " +
"the relativeToDir parameter");
}
catch (Exception ignored)
{
}
// Test diff with a relative path on a directory with prop
// changes.
String aPath = fileToSVNPath(new File(thisTest.getWCPath() + "/A"),
false);
expectedDiffOutput = NL + "Property changes on: A" + NL +
underSepLine +
"Added: testprop" + NL +
" + Test property value." + NL + NL;
client.propertySet(aPath, "testprop", "Test property value.", false);
client.diff(aPath, Revision.BASE, aPath, Revision.WORKING, wcPath,
diffOutput.getPath(), Depth.infinity, null, true, true,
false);
assertFileContentsEquals("Unexpected diff output in file '" +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
// Test diff where relativeToDir and path are the same.
expectedDiffOutput = NL + "Property changes on: ." + NL +
underSepLine +
"Added: testprop" + NL +
" + Test property value." + NL + NL;
client.propertySet(aPath, "testprop", "Test property value.", false);
client.diff(aPath, Revision.BASE, aPath, Revision.WORKING, aPath,
diffOutput.getPath(), Depth.infinity, null, true, true,
false);
assertFileContentsEquals("Unexpected diff output in file '" +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
/*
* The rest of these tests are run twice. The first time
* without svn:eol-style set and the second time with the
* property set to native. This is tracked by the int named
* operativeRevision. It will have a value = 2 after the
* commit which sets the property
*/
for (int operativeRevision = 1; operativeRevision < 3; operativeRevision++)
{
String revisionPrefix = "While processing operativeRevison=" + operativeRevision + ". ";
String assertPrefix = revisionPrefix + "Unexpected diff output in file '";
// Undo previous edits to working copy
client.revert(wcPath, true);
if (operativeRevision == 2) {
// Set svn:eol-style=native on iota
client.propertyCreate(iotaPath, "svn:eol-style", "native", false);
String[] paths = new String[] {iotaPath};
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "iota",NodeKind.file,
CommitItemStateFlags.PropMods);
client.commit(paths, "Set svn:eol-style to native", false);
}
// make edits to iota and set expected output.
writer = new PrintWriter(new FileOutputStream(iotaPath));
writer.print("This is the file 'mu'.");
writer.flush();
writer.close();
expectedDiffOutput = "Index: " + iotaPath + NL + sepLine +
"--- " + iotaPath + "\t(revision " + operativeRevision + ")" + NL +
"+++ " + iotaPath + "\t(working copy)" + NL +
expectedDiffBody;
try
{
// Two-path diff of WC paths.
client.diff(iotaPath, Revision.BASE,
iotaPath, Revision.WORKING,
diffOutput.getPath(), false, true, true, false);
assertFileContentsEquals(assertPrefix +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
diffOutput.delete();
}
catch (ClientException e)
{
fail(revisionPrefix + e.getMessage());
}
try
{
// Peg revision diff of a single file.
client.diff(thisTest.getUrl() + "/iota", Revision.HEAD,
new Revision.Number(operativeRevision), Revision.HEAD,
diffOutput.getPath(), false, true, true, false);
assertFileContentsEquals(assertPrefix +
diffOutput.getPath() + '\'',
"", diffOutput);
diffOutput.delete();
}
catch (ClientException e)
{
fail(revisionPrefix + e.getMessage());
}
// Test svn diff with a relative path.
expectedDiffOutput = "Index: iota" + NL + sepLine +
"--- iota\t(revision " + operativeRevision + ")" + NL +
"+++ iota\t(working copy)" + NL +
expectedDiffBody;
try
{
client.diff(iotaPath, Revision.BASE, iotaPath,
Revision.WORKING, wcPath, diffOutput.getPath(),
Depth.infinity, null, true, true, false);
assertFileContentsEquals(assertPrefix +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
diffOutput.delete();
}
catch (ClientException e)
{
fail(revisionPrefix + e.getMessage());
}
try
{
// Test svn diff with a relative path and trailing slash.
client.diff(iotaPath, Revision.BASE, iotaPath,
Revision.WORKING, wcPath + "/",
diffOutput.getPath(), Depth.infinity, null,
true, true, false);
assertFileContentsEquals(assertPrefix +
diffOutput.getPath() + '\'',
expectedDiffOutput, diffOutput);
diffOutput.delete();
}
catch (ClientException e)
{
fail(revisionPrefix + e.getMessage());
}
}
}
private void assertFileContentsEquals(String msg, String expected,
File actual)
throws IOException
{
FileReader reader = new FileReader(actual);
StringBuffer buf = new StringBuffer();
int ch;
while ((ch = reader.read()) != -1)
{
buf.append((char) ch);
}
assertEquals(msg, expected, buf.toString());
}
/**
* Test the {@link SVNClientInterface.diffSummarize()} API.
* @since 1.5
*/
public void testDiffSummarize()
throws SubversionException, IOException
{
OneTest thisTest = new OneTest(false);
DiffSummaries summaries = new DiffSummaries();
// Perform a recursive diff summary, ignoring ancestry.
client.diffSummarize(thisTest.getUrl(), new Revision.Number(0),
thisTest.getUrl(), Revision.HEAD, Depth.infinity,
null, false, summaries);
assertExpectedDiffSummaries(summaries);
summaries.clear();
// Perform a recursive diff summary with a peg revision,
// ignoring ancestry.
client.diffSummarize(thisTest.getUrl(), Revision.HEAD,
new Revision.Number(0), Revision.HEAD,
Depth.infinity, null, false, summaries);
assertExpectedDiffSummaries(summaries);
}
private void assertExpectedDiffSummaries(DiffSummaries summaries)
{
assertEquals("Wrong number of diff summary descriptors", 20,
summaries.size());
// Rigorously inspect one of our DiffSummary notifications.
final String BETA_PATH = "A/B/E/beta";
DiffSummary betaDiff = (DiffSummary) summaries.get(BETA_PATH);
assertNotNull("No diff summary for " + BETA_PATH, betaDiff);
assertEquals("Incorrect path for " + BETA_PATH, BETA_PATH,
betaDiff.getPath());
assertTrue("Incorrect diff kind for " + BETA_PATH,
DiffSummary.DiffKind.ADDED.equals(betaDiff.getDiffKind()));
assertEquals("Incorrect props changed notice for " + BETA_PATH,
false, betaDiff.propsChanged());
assertEquals("Incorrect node kind for " + BETA_PATH, 1,
betaDiff.getNodeKind());
}
/**
* test the basic SVNClient.isAdminDirectory functionality
* @throws Throwable
* @since 1.2
*/
public void testBasicIsAdminDirectory() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
Notify2 notify = new Notify2()
{
public void onNotify(NotifyInformation info)
{
client.isAdminDirectory(".svn");
}
};
client.notification2(notify);
// update the test
assertEquals("wrong revision number from update",
client.update(thisTest.getWCPath(), null, true), 1);
}
public void testBasicCancelOperation() throws Throwable
{
// build the test setup
OneTest thisTest = new OneTest();
Notify2 notify = new Notify2()
{
public void onNotify(NotifyInformation info)
{
try
{
client.cancelOperation();
}
catch (ClientException e)
{
fail(e.getMessage());
}
}
};
client.notification2(notify);
// update the test to try to cancel an operation
try
{
client.update(thisTest.getWCPath(), null, true);
fail("missing exception for canceled operation");
}
catch (ClientException e)
{
// this is expected
}
}
public void testDataTransferProgressReport() throws Throwable
{
// ### FIXME: This isn't working over ra_local, because
// ### ra_local is not invoking the progress callback.
if (SVNTests.rootUrl.startsWith("file://"))
return;
// build the test setup
OneTest thisTest = new OneTest();
ProgressListener listener = new ProgressListener()
{
public void onProgress(ProgressEvent event)
{
// TODO: Examine the byte counts from "event".
throw new RuntimeException("Progress reported as expected");
}
};
client.setProgressListener(listener);
// Perform an update to exercise the progress notification.
try
{
client.update(thisTest.getWCPath(), null, true);
fail("No progress reported");
}
catch (RuntimeException progressReported)
{
}
}
/**
* Test the basic tree conflict functionality.
* @throws Throwable
*/
public void testTreeConflict() throws Throwable
{
// build the test setup. Used for the changes
OneTest thisTest = new OneTest();
WC wc = thisTest.getWc();
// build the backup test setup. That is the one that will be updated
OneTest tcTest = thisTest.copy(".tree-conflict");
// Move files from A/B/E to A/B/F.
String[] srcPaths = { "alpha" };
for (int i = 0; i < srcPaths.length; i++)
{
String fileName = srcPaths[i];
srcPaths[i] = new File(thisTest.getWorkingCopy(),
"A/B/E/" + fileName).getPath();
wc.addItem("A/B/F/" + fileName,
wc.getItemContent("A/B/E/" + fileName));
wc.setItemWorkingCopyRevision("A/B/F/" + fileName, 2);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/F/" + fileName, NodeKind.file,
CommitItemStateFlags.Add |
CommitItemStateFlags.IsCopy);
wc.removeItem("A/B/E/" + fileName);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/B/E/" + fileName, NodeKind.file,
CommitItemStateFlags.Delete);
}
client.move(srcPaths,
new File(thisTest.getWorkingCopy(), "A/B/F").getPath(),
null, false, true, false, null);
// Commit the changes, and check the state of the WC.
assertEquals("Unexpected WC revision number after commit",
client.commit(new String[] { thisTest.getWCPath() },
"Move files", true), 2);
thisTest.checkStatus();
// modify A/B/E/alpha in second working copy
File alpha = new File(tcTest.getWorkingCopy(), "A/B/E/alpha");
PrintWriter alphaWriter = new PrintWriter(new FileOutputStream(alpha, true));
alphaWriter.print("appended alpha text");
alphaWriter.close();
// update the tc test
assertEquals("wrong revision number from update",
client.update(tcTest.getWCPath(), null, true),
2);
// set the expected working copy layout for the tc test
tcTest.getWc().addItem("A/B/F/alpha",
tcTest.getWc().getItemContent("A/B/E/alpha"));
tcTest.getWc().setItemWorkingCopyRevision("A/B/F/alpha", 2);
// we expect the tree conflict to turn the existing item into
// a scheduled-add with history. We expect the modifications in
// the local file to have been copied to the new file.
tcTest.getWc().setItemTextStatus("A/B/E/alpha", StatusKind.added);
tcTest.getWc().setItemTextStatus("A/B/F/alpha", StatusKind.modified);
// check the status of the working copy of the tc test
tcTest.checkStatus();
// get the Info2 of the tree conflict
MyInfoCallback callback = new MyInfoCallback();
client.info2(tcTest.getWCPath() + "/A/B/E/alpha", null,
null, Depth.unknown, null, callback);
ConflictDescriptor conflict = callback.getInfo().getConflictDescriptor();
assertNotNull("Conflict should not be null", conflict);
assertEquals(conflict.getSrcLeftVersion().getNodeKind(), NodeKind.file);
assertEquals(conflict.getSrcLeftVersion().getReposURL() + "/" +
conflict.getSrcLeftVersion().getPathInRepos(), tcTest.getUrl() + "/A/B/E/alpha");
assertEquals(conflict.getSrcLeftVersion().getPegRevision(), 1L);
assertEquals(conflict.getSrcRightVersion().getNodeKind(), NodeKind.none);
assertEquals(conflict.getSrcRightVersion().getReposURL(), tcTest.getUrl());
assertEquals(conflict.getSrcRightVersion().getPegRevision(), 2L);
}
/**
* Test tolerance of unversioned obstructions when adding paths with
* {@link org.tigris.subversion.javahl.SVNClient#checkout()},
* {@link org.tigris.subversion.javahl.SVNClient#update()}, and
* {@link org.tigris.subversion.javahl.SVNClient#doSwitch()}
* @throws IOException
* @throws SubversionException
*/
public void testObstructionTolerance()
throws SubversionException, IOException
{
// build the test setup
OneTest thisTest = new OneTest();
File file;
PrintWriter pw;
// ----- TEST CHECKOUT -----
// Use export to make unversioned obstructions for a second
// WC checkout (deleting export target from previous tests
// first if it exists).
String secondWC = thisTest.getWCPath() + ".backup1";
removeDirOrFile(new File(secondWC));
client.doExport(thisTest.getUrl(), secondWC, null, false);
// Make an obstructing file that conflicts with add coming from repos
file = new File(secondWC, "A/B/lambda");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the conflicting obstructiong file 'lambda'.");
pw.close();
// Attempt to checkout backup WC without "--force"...
try
{
// ...should fail
client.checkout(thisTest.getUrl(), secondWC, null, null,
Depth.infinity, false, false);
fail("obstructed checkout should fail by default");
}
catch (ClientException expected)
{
}
// Attempt to checkout backup WC with "--force"
// so obstructions are tolerated
client.checkout(thisTest.getUrl(), secondWC, null, null,
Depth.infinity, false, true);
// Check the WC status, the only status should be a text
// mod to lambda. All the other obstructing files were identical
Status[] secondWCStatus = client.status(secondWC, true, false,
false, false, false);
if (!(secondWCStatus.length == 1 &&
secondWCStatus[0].getPath().endsWith("A/B/lambda") &&
secondWCStatus[0].getTextStatus() == StatusKind.modified &&
secondWCStatus[0].getPropStatus() == StatusKind.none))
{
fail("Unexpected WC status after co with " +
"unversioned obstructions");
}
// Make a third WC to test obstruction tolerance of sw and up.
OneTest backupTest = thisTest.copy(".backup2");
// ----- TEST UPDATE -----
// r2: Add a file A/D/H/nu
file = new File(thisTest.getWorkingCopy(), "A/D/H/nu");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the file 'nu'.");
pw.close();
client.add(file.getAbsolutePath(), false);
addExpectedCommitItem(thisTest.getWCPath(), thisTest.getUrl(),
"A/D/H/nu", NodeKind.file,
CommitItemStateFlags.TextMods +
CommitItemStateFlags.Add);
assertEquals("wrong revision number from commit",
client.commit(new String[] {thisTest.getWCPath()},
"log msg", true), 2);
thisTest.getWc().addItem("A/D/H/nu", "This is the file 'nu'.");
Status status = client.singleStatus(thisTest.getWCPath() +
"/A/D/H/nu",
false);
// Add an unversioned file A/D/H/nu to the backup WC
file = new File(backupTest.getWorkingCopy(), "A/D/H/nu");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the file 'nu'.");
pw.close();
// Attempt to update backup WC without "--force"
try
{
// obstructed update should fail
client.update(backupTest.getWCPath(), null, true);
fail("obstructed update should fail by default");
}
catch (ClientException expected)
{
}
// Attempt to update backup WC with "--force"
assertEquals("wrong revision from update",
client.update(backupTest.getWCPath(),
null, Depth.infinity, false, false, true),
2);
// ----- TEST SWITCH -----
// Add an unversioned file A/B/E/nu to the backup WC
// The file differs from A/D/H/nu
file = new File(backupTest.getWorkingCopy(), "A/B/E/nu");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is yet another file 'nu'.");
pw.close();
// Add an unversioned file A/B/E/chi to the backup WC
// The file is identical to A/D/H/chi.
file = new File(backupTest.getWorkingCopy(), "A/B/E/chi");
pw = new PrintWriter(new FileOutputStream(file));
pw.print("This is the file 'chi'.");
pw.close();
// Attempt to switch A/B/E to A/D/H without "--force"
try
{
// obstructed switch should fail
client.doSwitch(backupTest.getWCPath() + "/A/B/E",
backupTest.getUrl() + "/A/D/H",
null, true);
fail("obstructed switch should fail by default");
}
catch (ClientException expected)
{
}
// Complete the switch using "--force" and check the status
client.doSwitch(backupTest.getWCPath() + "/A/B/E",
backupTest.getUrl() + "/A/D/H",
Revision.HEAD, Revision.HEAD, Depth.infinity,
false, false, true);
backupTest.getWc().setItemIsSwitched("A/B/E",true);
backupTest.getWc().removeItem("A/B/E/alpha");
backupTest.getWc().removeItem("A/B/E/beta");
backupTest.getWc().addItem("A/B/E/nu",
"This is yet another file 'nu'.");
backupTest.getWc().setItemTextStatus("A/B/E/nu", Status.Kind.modified);
backupTest.getWc().addItem("A/D/H/nu",
"This is the file 'nu'.");
backupTest.getWc().addItem("A/B/E/chi",
backupTest.getWc().getItemContent("A/D/H/chi"));
backupTest.getWc().addItem("A/B/E/psi",
backupTest.getWc().getItemContent("A/D/H/psi"));
backupTest.getWc().addItem("A/B/E/omega",
backupTest.getWc().getItemContent("A/D/H/omega"));
backupTest.checkStatus();
}
/**
* Test basic blame functionality. This test marginally tests blame
* correctness, mainly just that the blame APIs link correctly.
* @throws Throwable
* @since 1.5
*/
public void testBasicBlame() throws Throwable
{
OneTest thisTest = new OneTest();
// Test the old interface to be sure it still works
byte[] result = client.blame(thisTest.getWCPath() + "/iota", Revision
.getInstance(1), Revision.getInstance(1));
assertEquals(" 1 jrandom This is the file 'iota'.\n",
new String(result));
// Test the current interface
BlameCallbackImpl callback = new BlameCallbackImpl();
client.blame(thisTest.getWCPath() + "/iota", Revision.getInstance(1),
Revision.getInstance(1), callback);
assertEquals(1, callback.numberOfLines());
BlameCallbackImpl.BlameLine line = callback.getBlameLine(0);
if (line != null)
{
assertEquals(1, line.getRevision());
assertEquals("jrandom", line.getAuthor());
}
}
/**
* Test commit of arbitrary revprops.
* @throws Throwable
* @since 1.5
*/
public void testCommitRevprops() throws Throwable
{
class RevpropLogCallback implements LogMessageCallback
{
Map revprops;
public void singleMessage(ChangePath[] changedPaths,
long revision,
Map revprops,
boolean hasChildren)
{
this.revprops = revprops;
}
public Map getRevprops()
{
return revprops;
}
}
// build the test setup
OneTest thisTest = new OneTest();
// modify file A/mu
File mu = new File(thisTest.getWorkingCopy(), "A/mu");
PrintWriter muWriter = new PrintWriter(new FileOutputStream(mu, true));
muWriter.print("appended mu text");
muWriter.close();
thisTest.getWc().setItemWorkingCopyRevision("A/mu", 2);
thisTest.getWc().setItemContent("A/mu",
thisTest.getWc().getItemContent("A/mu") + "appended mu text");
addExpectedCommitItem(thisTest.getWCPath(),
thisTest.getUrl(), "A/mu",NodeKind.file,
CommitItemStateFlags.TextMods);
// commit the changes, with some extra revprops
Map revprops = new HashMap();
revprops.put("kfogel", "rockstar");
revprops.put("cmpilato", "theman");
assertEquals("wrong revision number from commit",
client.commit(new String[]{thisTest.getWCPath()},
"log msg", Depth.infinity, true, true,
null, revprops),
2);
// check the status of the working copy
thisTest.checkStatus();
// Fetch our revprops from the server
RevpropLogCallback callback = new RevpropLogCallback();
client.logMessages(thisTest.getWCPath(), Revision.getInstance(2),
Revision.getInstance(2),
Revision.getInstance(2), false, false, false,
new String[] {"kfogel", "cmpilato"}, 0,
callback);
Map fetchedProps = callback.getRevprops();
assertEquals("wrong number of fetched revprops", revprops.size(),
fetchedProps.size());
Set keys = fetchedProps.keySet();
for (Iterator it = keys.iterator(); it.hasNext(); )
{
String key = (String) it.next();
assertEquals("revprops check", revprops.get(key),
fetchedProps.get(key));
}
}
/**
* @return <code>file</code> converted into a -- possibly
* <code>canonical</code>-ized -- Subversion-internal path
* representation.
*/
private String fileToSVNPath(File file, boolean canonical)
{
// JavaHL need paths with '/' separators
if (canonical)
{
try
{
return file.getCanonicalPath().replace('\\', '/');
}
catch (IOException e)
{
return null;
}
}
else
{
return file.getPath().replace('\\', '/');
}
}
/**
* A DiffSummaryReceiver implementation which collects all DiffSummary
* notifications.
*/
private static class DiffSummaries extends HashMap
implements DiffSummaryReceiver
{
// Update the serialVersionUID when there is a incompatible
// change made to this class.
private static final long serialVersionUID = 1L;
public void onSummary(DiffSummary descriptor)
{
super.put(descriptor.getPath(), descriptor);
}
}
private class MyChangelistCallback extends HashMap
implements ChangelistCallback
{
public void doChangelist(String path, String changelist)
{
if (super.containsKey(path))
{
// Append the changelist to the existing list
List changelistList = (List) super.get(path);
changelistList.add(changelist);
}
else
{
// Create a new changelist with that list
List changelistList = new ArrayList();
changelistList.add(changelist);
super.put(path, changelistList);
}
}
public List get(String path)
{
return (List) super.get(path);
}
}
private class MyInfoCallback implements InfoCallback {
private Info2 info;
public void singleInfo(Info2 info) {
this.info = info;
}
public Info2 getInfo() {
return info;
}
}
}
| JavaHL: Adjust expectations for test of property diff output.
[ in subversion/bindings/javahl/ ]
* tests/org/tigris/subversion/javahl/BasicTests.java:
(testDiff): property diffs now show in unidiff output format.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@878705 13f79535-47bb-0310-9956-ffa450edef68
| subversion/bindings/javahl/tests/org/tigris/subversion/javahl/BasicTests.java | JavaHL: Adjust expectations for test of property diff output. |
|
Java | apache-2.0 | 72ea82ace11b2eb0bafce079dc79dc9616aebb22 | 0 | gradle/gradle,gstevey/gradle,lsmaira/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,gstevey/gradle,gradle/gradle,gstevey/gradle,gstevey/gradle,blindpirate/gradle,gstevey/gradle,lsmaira/gradle,lsmaira/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,lsmaira/gradle,gstevey/gradle,robinverduijn/gradle,lsmaira/gradle,lsmaira/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,gstevey/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,lsmaira/gradle,robinverduijn/gradle,gradle/gradle | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.ivyservice.resolveengine.store;
import com.google.common.collect.MapMaker;
import org.gradle.api.internal.artifacts.configurations.ConfigurationInternal;
import org.gradle.api.internal.cache.BinaryStore;
import org.gradle.api.internal.cache.Store;
import org.gradle.api.internal.file.TemporaryFileProvider;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.internal.CompositeStoppable;
import org.gradle.util.Clock;
import java.io.*;
import java.util.Map;
public class ResolutionResultsStoreFactory implements Closeable {
private final static Logger LOG = Logging.getLogger(ResolutionResultsStoreFactory.class);
private final TemporaryFileProvider temp;
private CachedStoreFactory oldModelCache;
private CachedStoreFactory newModelCache;
public ResolutionResultsStoreFactory(TemporaryFileProvider temp) {
this.temp = temp;
}
private final Map<String, DefaultBinaryStore> stores = new MapMaker().makeMap();
private final Object lock = new Object();
public BinaryStore createBinaryStore(String id) {
String storeKey = Thread.currentThread().getId() + id; //one store per thread
if (stores.containsKey(storeKey)) {
return stores.get(storeKey);
}
synchronized (lock) {
DefaultBinaryStore store = stores.get(storeKey);
if (store == null) {
File storeFile = temp.createTemporaryFile("gradle", ".bin");
storeFile.deleteOnExit();
store = new DefaultBinaryStore(storeFile);
stores.put(storeKey, store);
}
return store;
}
}
public void close() throws IOException {
Clock clock = new Clock();
new CompositeStoppable()
.add(stores.values())
.add(oldModelCache)
.add(newModelCache)
.stop();
LOG.debug("Deleted {} resolution results binary files in {}", stores.size(), clock.getTime());
oldModelCache = null;
newModelCache = null;
}
public <T> Store<T> createOldModelCache(ConfigurationInternal configuration) {
if (oldModelCache == null) {
oldModelCache = new CachedStoreFactory("Resolution result");
}
return oldModelCache.createCachedStore(configuration.getPath());
}
public <T> Store<T> createNewModelCache(ConfigurationInternal configuration) {
if (newModelCache == null) {
newModelCache = new CachedStoreFactory("Resolved configuration");
}
return newModelCache.createCachedStore(configuration.getPath());
}
} | subprojects/core-impl/src/main/groovy/org/gradle/api/internal/artifacts/ivyservice/resolveengine/store/ResolutionResultsStoreFactory.java | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.ivyservice.resolveengine.store;
import com.google.common.collect.MapMaker;
import org.gradle.api.internal.artifacts.configurations.ConfigurationInternal;
import org.gradle.api.internal.cache.BinaryStore;
import org.gradle.api.internal.cache.Store;
import org.gradle.api.internal.file.TemporaryFileProvider;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.internal.CompositeStoppable;
import org.gradle.util.Clock;
import java.io.*;
import java.util.concurrent.ConcurrentMap;
public class ResolutionResultsStoreFactory implements Closeable {
private final static Logger LOG = Logging.getLogger(ResolutionResultsStoreFactory.class);
private final TemporaryFileProvider temp;
private final CachedStoreFactory oldModelCache =
new CachedStoreFactory("Resolution result");
private final CachedStoreFactory newModelCache =
new CachedStoreFactory("Resolved configuration");
public ResolutionResultsStoreFactory(TemporaryFileProvider temp) {
this.temp = temp;
}
private final ConcurrentMap<String, DefaultBinaryStore> stores = new MapMaker().makeMap();
private final Object lock = new Object();
public BinaryStore createBinaryStore(String id) {
String storeKey = Thread.currentThread().getId() + id; //one store per thread
if (stores.containsKey(storeKey)) {
return stores.get(storeKey);
}
synchronized (lock) {
DefaultBinaryStore store = stores.get(storeKey);
if (store == null) {
File storeFile = temp.createTemporaryFile("gradle", ".bin");
storeFile.deleteOnExit();
store = new DefaultBinaryStore(storeFile);
stores.put(storeKey, store);
}
return store;
}
}
public void close() throws IOException {
Clock clock = new Clock();
new CompositeStoppable()
.add(stores.values())
.add(oldModelCache)
.add(newModelCache)
.stop();
LOG.debug("Deleted {} resolution results binary files in {}", stores.size(), clock.getTime());
}
public <T> Store<T> createOldModelCache(ConfigurationInternal configuration) {
return oldModelCache.createCachedStore(configuration.getPath());
}
public <T> Store<T> createNewModelCache(ConfigurationInternal configuration) {
return newModelCache.createCachedStore(configuration.getPath());
}
} | Small refactoring, lazily create the caches.
(cherry picked from commit 853fa39)
| subprojects/core-impl/src/main/groovy/org/gradle/api/internal/artifacts/ivyservice/resolveengine/store/ResolutionResultsStoreFactory.java | Small refactoring, lazily create the caches. (cherry picked from commit 853fa39) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.