lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | 7278fe73e8685e92df082d308c3d7b8e66c9d441 | 0 | ceylon/ceylon-spec,lucaswerkmeister/ceylon-spec,jvasileff/ceylon-spec,jvasileff/ceylon-spec,ceylon/ceylon-spec,jvasileff/ceylon-spec,ceylon/ceylon-spec | package com.redhat.ceylon.compiler.typechecker.model;
import static com.redhat.ceylon.compiler.typechecker.model.Util.addToIntersection;
import static com.redhat.ceylon.compiler.typechecker.model.Util.intersectionType;
import static com.redhat.ceylon.compiler.typechecker.model.Util.isTypeUnknown;
import static com.redhat.ceylon.compiler.typechecker.model.Util.producedType;
import static com.redhat.ceylon.compiler.typechecker.model.Util.unionType;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.Identifier;
/**
 * A compilation unit: a single source file together with its
 * imports and the declarations it introduces. Identity is based
 * on package plus filename (see equals()/hashCode()).
 */
public class Unit {

    // The package this unit belongs to; null until setPackage() is called.
    private Package pkg;
    // Imports declared by this unit.
    private List<Import> imports = new ArrayList<Import>();
    // Declarations introduced by this unit; all access goes through
    // getDeclarations()/addDeclaration(), which synchronize on this list.
    private List<Declaration> declarations = new ArrayList<Declaration>();
    // Simple name of the source file.
    private String filename;
    // Import lists appearing in this unit's syntax tree.
    private List<ImportList> importLists = new ArrayList<ImportList>();
    // Identifiers that could not be resolved to a declaration.
    private Set<Identifier> unresolvedReferences = new HashSet<Identifier>();
    // Declarations found to be duplicated within this unit.
    private Set<Declaration> duplicateDeclarations = new HashSet<Declaration>();
    // Presumably the names of units depending on this one — populated elsewhere.
    private final Set<String> dependentsOf = new HashSet<String>();
    // Full filesystem path of the source file.
    private String fullPath;
    // Path of the source file relative to some root — TODO confirm which root.
    private String relativePath;
/** Imports declared by this unit. */
public List<Import> getImports() {
    return this.imports;
}

/** Import lists appearing in this unit. */
public List<ImportList> getImportLists() {
    return this.importLists;
}

/**
 * @return the dependentsOf set
 */
public Set<String> getDependentsOf() {
    return this.dependentsOf;
}

/** Identifiers that could not be resolved to a declaration. */
public Set<Identifier> getUnresolvedReferences() {
    return this.unresolvedReferences;
}

/** Declarations found to be duplicated within this unit. */
public Set<Declaration> getDuplicateDeclarations() {
    return this.duplicateDeclarations;
}
public Package getPackage() {
    return pkg;
}

public void setPackage(Package p) {
    pkg = p;
}

/**
 * Returns a defensive copy of the declaration list, so callers
 * may iterate it while other threads call addDeclaration().
 */
public List<Declaration> getDeclarations() {
    synchronized (declarations) {
        return new ArrayList<Declaration>(declarations);
    }
}

public void addDeclaration(Declaration declaration) {
    //uses the same monitor as getDeclarations()
    synchronized (declarations) {
        declarations.add(declaration);
    }
}
/** The simple name of the source file. */
public String getFilename() {
    return this.filename;
}

public void setFilename(String filename) {
    this.filename = filename;
}

/** The full filesystem path of the source file. */
public String getFullPath() {
    return this.fullPath;
}

public void setFullPath(String fullPath) {
    this.fullPath = fullPath;
}

/** The relative path of the source file. */
public String getRelativePath() {
    return this.relativePath;
}

public void setRelativePath(String relativePath) {
    this.relativePath = relativePath;
}

@Override
public String toString() {
    return "Unit[" + this.filename + "]";
}
/**
 * Finds the import with the given alias that does not qualify
 * a type (no type declaration attached), if any.
 *
 * @return the matching import, or null if none matches
 */
public Import getImport(String name) {
    for (Import imp: getImports()) {
        if (imp.getTypeDeclaration()!=null) {
            continue; //qualified member import: not considered here
        }
        if (imp.getAlias().equals(name)) {
            return imp;
        }
    }
    return null;
}
/**
 * Search the imports of a compilation unit
 * for the named toplevel declaration.
 *
 * @param name      the alias under which the declaration was imported
 * @param signature argument types used to select among overloads (may be null)
 * @param ellipsis  whether a trailing spread argument is present
 * @return the imported declaration, or null if no import matches
 */
public Declaration getImportedDeclaration(String name,
        List<ProducedType> signature, boolean ellipsis) {
    for (Import i: getImports()) {
        if (i.getAlias().equals(name)) {
            //in case of an overloaded member, this will
            //be the "abstraction", so search for the
            //correct overloaded version
            Declaration d = i.getDeclaration();
            return d.getContainer().getMember(d.getName(), signature, ellipsis);
        }
    }
    return null;
}
/**
 * Search the imports of a compilation unit
 * for the named member declaration.
 *
 * @param td        the type whose member was imported
 * @param name      the alias under which the member was imported
 * @param signature argument types used to select among overloads (may be null)
 * @param ellipsis  whether a trailing spread argument is present
 * @return the imported member declaration, or null if no import matches
 */
public Declaration getImportedDeclaration(TypeDeclaration td, String name,
        List<ProducedType> signature, boolean ellipsis) {
    for (Import i: getImports()) {
        TypeDeclaration itd = i.getTypeDeclaration();
        //only member imports qualified by exactly this type
        if (itd!=null && itd.equals(td) &&
                i.getAlias().equals(name)) {
            //in case of an overloaded member, this will
            //be the "abstraction", so search for the
            //correct overloaded version
            Declaration d = i.getDeclaration();
            return d.getContainer().getMember(d.getName(), signature, ellipsis);
        }
    }
    return null;
}
/**
 * Collects the imports of this unit whose alias starts (case-
 * insensitively) with the given prefix, keyed by alias.
 *
 * @param startingWith the alias prefix to match
 * @param proximity    the proximity value recorded for each match
 * @return a sorted map from alias to the matching import
 */
public Map<String, DeclarationWithProximity> getMatchingImportedDeclarations(String startingWith, int proximity) {
    Map<String, DeclarationWithProximity> result = new TreeMap<String, DeclarationWithProximity>();
    //hoisted out of the loop: the lower-cased prefix is loop-invariant
    String prefix = startingWith.toLowerCase();
    //iterate a snapshot in case the import list is mutated concurrently
    for (Import i: new ArrayList<Import>(getImports())) {
        if (i.getAlias()!=null &&
                i.getAlias().toLowerCase().startsWith(prefix)) {
            result.put(i.getAlias(), new DeclarationWithProximity(i, proximity));
        }
    }
    return result;
}
/**
 * Two units are equal when they belong to the same package and
 * have the same filename. Null-safe: a freshly created unit may
 * not yet have a package or filename assigned, and the original
 * comparison would throw NullPointerException in that case.
 */
@Override
public boolean equals(Object obj) {
    if (obj instanceof Unit) {
        Unit that = (Unit) obj;
        return equalsOrBothNull(that.getPackage(), getPackage())
            && equalsOrBothNull(that.getFilename(), getFilename());
    }
    else {
        return false;
    }
}

//null-safe equality used by equals() above
private static boolean equalsOrBothNull(Object x, Object y) {
    return x==null ? y==null : x.equals(y);
}

/**
 * Hash on filename only (a strict subset of the fields used by
 * equals(), which keeps the equals/hashCode contract). Null-safe.
 */
@Override
public int hashCode() {
    String fn = getFilename();
    return fn==null ? 0 : fn.hashCode();
}
/**
 * Search for a declaration in the language module.
 *
 * @param name the declaration name to look up
 * @return the matching shared declaration, or null when the
 *         language module is absent/unavailable or has no
 *         shared member with this name
 */
public Declaration getLanguageModuleDeclaration(String name) {
    //all elements in ceylon.language are auto-imported
    //traverse all default module packages provided they have not been traversed yet
    Module languageModule = getPackage().getModule().getLanguageModule();
    if ( languageModule != null && languageModule.isAvailable() ) {
        //Bottom is synthesized rather than looked up in the module
        if ("Bottom".equals(name)) {
            return getBottomDeclaration();
        }
        for (Package languageScope : languageModule.getPackages() ) {
            Declaration d = languageScope.getMember(name, null, false);
            //only shared members are visible to other units
            if (d != null && d.isShared()) {
                return d;
            }
        }
    }
    return null;
}
//--- typed accessors for well-known language-module declarations ---

public Interface getCorrespondenceDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Correspondence");
    return (Interface) d;
}

public Class getVoidDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Void");
    return (Class) d;
}

public Class getNothingDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Nothing");
    return (Class) d;
}

public Value getNullDeclaration() {
    Declaration d = getLanguageModuleDeclaration("null");
    return (Value) d;
}

public Interface getEmptyDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Empty");
    return (Interface) d;
}

public Interface getSequenceDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Sequence");
    return (Interface) d;
}

public Class getObjectDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Object");
    return (Class) d;
}

public Class getIdentifiableObjectDeclaration() {
    Declaration d = getLanguageModuleDeclaration("IdentifiableObject");
    return (Class) d;
}

public Interface getIdentifiableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Identifiable");
    return (Interface) d;
}

public Class getExceptionDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Exception");
    return (Class) d;
}

public Interface getCategoryDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Category");
    return (Interface) d;
}

public Interface getIterableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Iterable");
    return (Interface) d;
}

public Interface getSequentialDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Sequential");
    return (Interface) d;
}

public Interface getListDeclaration() {
    Declaration d = getLanguageModuleDeclaration("List");
    return (Interface) d;
}

/**
 * Gets the declaration of {@code Iterator}
 * @return The declaration
 */
public Interface getIteratorDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Iterator");
    return (Interface) d;
}
// public Interface getFixedSizedDeclaration() {
// return (Interface) getLanguageModuleDeclaration("FixedSized");
// }
//
// public Interface getSomeDeclaration() {
// return (Interface) getLanguageModuleDeclaration("Some");
// }
//
// public Interface getNoneDeclaration() {
// return (Interface) getLanguageModuleDeclaration("None");
// }
public Interface getCallableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Callable");
    return (Interface) d;
}

public Interface getCastableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Castable");
    return (Interface) d;
}

public Interface getSummableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Summable");
    return (Interface) d;
}

public Interface getNumericDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Numeric");
    return (Interface) d;
}

public Interface getIntegralDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Integral");
    return (Interface) d;
}

public Interface getInvertableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Invertable");
    return (Interface) d;
}

public Interface getExponentiableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Exponentiable");
    return (Interface) d;
}

public Interface getSetDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Set");
    return (Interface) d;
}

public TypeDeclaration getComparisonDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Comparison");
    return (TypeDeclaration) d;
}

public TypeDeclaration getBooleanDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Boolean");
    return (TypeDeclaration) d;
}

public TypeDeclaration getStringDeclaration() {
    Declaration d = getLanguageModuleDeclaration("String");
    return (TypeDeclaration) d;
}

public TypeDeclaration getFloatDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Float");
    return (TypeDeclaration) d;
}

public TypeDeclaration getIntegerDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Integer");
    return (TypeDeclaration) d;
}

public TypeDeclaration getCharacterDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Character");
    return (TypeDeclaration) d;
}

public Interface getComparableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Comparable");
    return (Interface) d;
}

public Interface getCloseableDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Closeable");
    return (Interface) d;
}

public Interface getOrdinalDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Ordinal");
    return (Interface) d;
}

public Class getRangeDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Range");
    return (Class) d;
}

public Class getTupleDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Tuple");
    return (Class) d;
}

public TypeDeclaration getArrayDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Array");
    //cast preserved from the original: Array is expected to be a Class
    return (Class) d;
}

public Interface getRangedDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Ranged");
    return (Interface) d;
}

public Class getEntryDeclaration() {
    Declaration d = getLanguageModuleDeclaration("Entry");
    return (Class) d;
}
/**
 * Builds the Callable type for the given reference. For a
 * functional declaration the parameter lists are folded
 * right-to-left, so multiple parameter lists produce a nested
 * Callable<Callable<...>,...> with the first list outermost.
 *
 * @param ref the reference whose Callable type is required
 * @param rt  the return type used as the innermost result type
 */
ProducedType getCallableType(ProducedReference ref, ProducedType rt) {
    if ( isTypeUnknown(ref.getType())) {
        //special case for forward reference to member
        //with inferred type TODO: something better
        return new UnknownType(this).getType();
    }
    ProducedType result = rt;
    if (ref.getDeclaration() instanceof Functional) {
        List<ParameterList> pls = ((Functional) ref.getDeclaration()).getParameterLists();
        boolean hasSequenced = false;
        //NOTE(review): hasSequenced is never reset between parameter
        //lists, so a sequenced parameter in a later (inner) list also
        //affects earlier (outer) tuple types — confirm this is intended
        for (int i=pls.size()-1; i>=0; i--) {
            List<ProducedType> args = new ArrayList<ProducedType>();
            for (Parameter p: pls.get(i).getParameters()) {
                ProducedTypedReference np = ref.getTypedParameter(p);
                ProducedType npt = np.getType();
                if (np.getDeclaration() instanceof Functional) {
                    //functional parameter: recurse to build its Callable type
                    args.add(getCallableType(np, npt));
                }
                else if (p.isSequenced()) {
                    //sequenced parameter: contribute its element type
                    args.add(getIteratedType(npt));
                    hasSequenced = true;
                }
                else {
                    args.add(npt);
                }
            }
            //wrap the accumulated result in one more Callable layer
            result = producedType(getCallableDeclaration(), result,
                    getTupleType(args, hasSequenced));
        }
    }
    return result;
}
/**
 * Builds a Tuple type over the given element types, folding
 * right-to-left from Empty. The {@code union} accumulator holds
 * the union of all element types from position i to the end,
 * which becomes the first type argument of each Tuple layer.
 *
 * @param elemTypes the element types, leftmost first
 * @param sequenced whether the last element is a sequenced
 *                  (rest) parameter, modeled as Sequential
 */
public ProducedType getTupleType(List<ProducedType> elemTypes, boolean sequenced) {
    ProducedType result = getEmptyDeclaration().getType();
    ProducedType union = getBottomDeclaration().getType();
    int last = elemTypes.size()-1;
    for (int i=last; i>=0; i--) {
        ProducedType elemType = elemTypes.get(i);
        union = unionType(union, elemType, this);
        if (sequenced && i==last) {
            //sequenced rest element: Sequential<E> instead of a Tuple layer
            result = getSequentialType(elemType);
        }
        else {
            result = producedType(getTupleDeclaration(), union, elemType, result);
        }
    }
    return result;
}
/**
 * Returns the union {@code pt|Empty}, or null for a null argument.
 */
public ProducedType getEmptyType(ProducedType pt) {
    return pt==null ? null :
        unionType(pt, getEmptyDeclaration().getType(), this);
    /*else if (isEmptyType(pt)) {
        //Nothing|Nothing|T == Nothing|T
        return pt;
    }
    else if (pt.getDeclaration() instanceof BottomType) {
        //Nothing|0 == Nothing
        return getEmptyDeclaration().getType();
    }
    else {
        UnionType ut = new UnionType();
        List<ProducedType> types = new ArrayList<ProducedType>();
        addToUnion(types,getEmptyDeclaration().getType());
        addToUnion(types,pt);
        ut.setCaseTypes(types);
        return ut.getType();
    }*/
}
/**
 * Returns the union of pt with Sequential&lt;Void&gt;, or null
 * for a null argument.
 */
public ProducedType getPossiblyNoneType(ProducedType pt) {
    if (pt==null) {
        return null;
    }
    ProducedType sequentialOfVoid = producedType(getSequentialDeclaration(),
            getVoidDeclaration().getType());
    return unionType(pt, sequentialOfVoid, this);
}
/**
 * Returns the optional type {@code pt|Nothing}, or null for a
 * null argument.
 */
public ProducedType getOptionalType(ProducedType pt) {
    return pt==null ? null :
        unionType(pt, getNothingDeclaration().getType(), this);
    /*else if (isOptionalType(pt)) {
        //Nothing|Nothing|T == Nothing|T
        return pt;
    }
    else if (pt.getDeclaration() instanceof BottomType) {
        //Nothing|0 == Nothing
        return getNothingDeclaration().getType();
    }
    else {
        UnionType ut = new UnionType();
        List<ProducedType> types = new ArrayList<ProducedType>();
        addToUnion(types,getNothingDeclaration().getType());
        addToUnion(types,pt);
        ut.setCaseTypes(types);
        return ut.getType();
    }*/
}
/** Applies Sequence to the given element type. */
public ProducedType getSequenceType(ProducedType et) {
    Interface seq = getSequenceDeclaration();
    return producedType(seq, et);
}

/** Applies Sequential to the given element type. */
public ProducedType getSequentialType(ProducedType et) {
    Interface seql = getSequentialDeclaration();
    return producedType(seql, et);
}

/** Applies Iterable to the given element type. */
public ProducedType getIterableType(ProducedType et) {
    Interface iterable = getIterableDeclaration();
    return producedType(iterable, et);
}

/** Applies Set to the given element type. */
public ProducedType getSetType(ProducedType et) {
    Interface set = getSetDeclaration();
    return producedType(set, et);
}
/**
 * Returns a ProducedType corresponding to {@code Iterator<T>}
 * @param et The ProducedType corresponding to {@code T}
 * @return The ProducedType corresponding to {@code Iterator<T>}
 */
public ProducedType getIteratorType(ProducedType et) {
    //use the statically imported producedType() like every other
    //method of this class, instead of the Util.-qualified form
    return producedType(getIteratorDeclaration(), et);
}

/**
 * Returns a ProducedType corresponding to {@code Range<T>}
 * @param rt The ProducedType corresponding to {@code T}
 * @return The ProducedType corresponding to {@code Range<T>}
 */
public ProducedType getRangeType(ProducedType rt) {
    return producedType(getRangeDeclaration(), rt);
}
/** Applies Castable to the given type. */
public ProducedType getCastableType(ProducedType et) {
    Interface castable = getCastableDeclaration();
    return producedType(castable, et);
}

/** Builds the Entry type for the given key and item types. */
public ProducedType getEntryType(ProducedType kt, ProducedType vt) {
    Class entry = getEntryDeclaration();
    return producedType(entry, kt, vt);
}
/**
 * The key type of an Entry supertype of the given type, or null
 * if the type is not an Entry.
 */
public ProducedType getKeyType(ProducedType type) {
    ProducedType entry = type.getSupertype(getEntryDeclaration());
    if (entry==null || entry.getTypeArguments().size()!=2) {
        return null;
    }
    return entry.getTypeArgumentList().get(0);
}

/**
 * The item type of an Entry supertype of the given type, or null
 * if the type is not an Entry.
 */
public ProducedType getValueType(ProducedType type) {
    ProducedType entry = type.getSupertype(getEntryDeclaration());
    if (entry==null || entry.getTypeArguments().size()!=2) {
        return null;
    }
    return entry.getTypeArgumentList().get(1);
}

/**
 * The element type of an Iterable supertype of the given type,
 * or null if the type is not Iterable.
 */
public ProducedType getIteratedType(ProducedType type) {
    ProducedType iterable = type.getSupertype(getIterableDeclaration());
    if (iterable==null || iterable.getTypeArguments().size()!=1) {
        return null;
    }
    return iterable.getTypeArgumentList().get(0);
}

/**
 * The element type of a Set supertype of the given type, or null
 * if the type is not a Set.
 */
public ProducedType getSetElementType(ProducedType type) {
    ProducedType set = type.getSupertype(getSetDeclaration());
    if (set==null || set.getTypeArguments().size()!=1) {
        return null;
    }
    return set.getTypeArgumentList().get(0);
}

/**
 * The element type of a Sequential supertype of the given type,
 * or null if the type is not Sequential.
 */
public ProducedType getFixedSizedElementType(ProducedType type) {
    ProducedType seql = type.getSupertype(getSequentialDeclaration());
    if (seql==null || seql.getTypeArguments().size()!=1) {
        return null;
    }
    return seql.getTypeArgumentList().get(0);
}
/**
 * The definite type of pt: the intersection pt&amp;Object,
 * stripping null-ness.
 */
public ProducedType getDefiniteType(ProducedType pt) {
    return intersectionType(getObjectDeclaration().getType(),
            pt, pt.getDeclaration().getUnit());
    /*if (pt.getDeclaration().equals(getVoidDeclaration())) {
        return getObjectDeclaration().getType();
    }
    else {
        return pt.minus(getNothingDeclaration());
    }*/
}

/**
 * The nonempty type of pt: the intersection of pt with
 * Sequence&lt;E&gt;, where E is pt's sequential element type.
 */
public ProducedType getNonemptyType(ProducedType pt) {
    return intersectionType(producedType(getSequenceDeclaration(),
            getFixedSizedElementType(pt)), pt,
            pt.getDeclaration().getUnit());
    /*if (pt.getDeclaration().equals(getVoidDeclaration())) {
        return getObjectDeclaration().getType();
    }
    else {
        return pt.minus(getNothingDeclaration());
    }*/
}

/** Strips both null-ness and empty-ness from the given type. */
public ProducedType getNonemptyDefiniteType(ProducedType pt) {
    return getNonemptyType(getDefiniteType(pt));
}
/** The Sequence supertype of pt with Empty subtracted, if any. */
public ProducedType getNonemptySequenceType(ProducedType pt) {
    ProducedType nonempty = pt.minus(getEmptyDeclaration());
    return nonempty.getSupertype(getSequenceDeclaration());
}

/** The Iterable supertype of pt with Empty subtracted, if any. */
public ProducedType getNonemptyIterableType(ProducedType pt) {
    ProducedType nonempty = pt.minus(getEmptyDeclaration());
    return nonempty.getSupertype(getIterableDeclaration());
}

/** The List supertype of pt with Empty subtracted, if any. */
public ProducedType getNonemptyListType(ProducedType pt) {
    ProducedType nonempty = pt.minus(getEmptyDeclaration());
    return nonempty.getSupertype(getListDeclaration());
}

/** The Sequential supertype of pt with Empty subtracted, if any. */
public ProducedType getNonemptySequentialType(ProducedType pt) {
    ProducedType nonempty = pt.minus(getEmptyDeclaration());
    return nonempty.getSupertype(getSequentialDeclaration());
}
/** Whether the given type has an Entry supertype. */
public boolean isEntryType(ProducedType pt) {
    ProducedType st = pt.getSupertype(getEntryDeclaration());
    return st!=null;
}

/** Whether the given type has an Iterable supertype. */
public boolean isIterableType(ProducedType pt) {
    ProducedType st = pt.getSupertype(getIterableDeclaration());
    return st!=null;
}

/** Whether the given type has a Sequential supertype. */
public boolean isSequentialType(ProducedType pt) {
    ProducedType st = pt.getSupertype(getSequentialDeclaration());
    return st!=null;
}
/**
 * A type is optional when it intersects both Nothing and Object
 * non-trivially (neither intersection reduces to Bottom).
 */
public boolean isOptionalType(ProducedType pt) {
    //must have non-empty intersection with Nothing
    //and non-empty intersection with Object
    return !(intersectionType(getNothingDeclaration().getType(), pt, this)
            .getDeclaration() instanceof BottomType) &&
            !(intersectionType(getObjectDeclaration().getType(), pt, this)
            .getDeclaration() instanceof BottomType);
}

/**
 * A type is possibly-empty when it is a subtype of
 * Sequential&lt;Void&gt;|Nothing and intersects both Empty and
 * Sequence&lt;Bottom&gt; non-trivially.
 */
public boolean isEmptyType(ProducedType pt) {
    //must be a subtype of Sequential<Void>
    return getOptionalType(producedType(getSequentialDeclaration(),
            getVoidDeclaration().getType()))
            .isSupertypeOf(pt) &&
            //must have non-empty intersection with Empty
            //and non-empty intersection with Sequence<Bottom>
            !(intersectionType(getEmptyDeclaration().getType(), pt, this)
            .getDeclaration() instanceof BottomType) &&
            !(intersectionType(getSequenceType(getBottomDeclaration().getType()), pt, this)
            .getDeclaration() instanceof BottomType);
}

/** Whether the given type (possibly null) has a Callable supertype. */
public boolean isCallableType(ProducedType pt) {
    return pt!=null && pt.getSupertype(getCallableDeclaration())!=null;
}
/**
 * The Bottom type declaration. A fresh instance is created on
 * every call, as in the original implementation.
 */
public BottomType getBottomDeclaration() {
    BottomType bottom = new BottomType(this);
    return bottom;
}
/**
 * Replaces a non-denotable anonymous class type with a denotable
 * approximation: the intersection of its extended type and all
 * its satisfied types. Any other type (including null) passes
 * through unchanged.
 */
public ProducedType denotableType(ProducedType pt) {
    if ( pt!=null && pt.getDeclaration()!=null &&
            pt.getDeclaration().isAnonymous() ) {
        List<ProducedType> list = new ArrayList<ProducedType>();
        //start from the supertype instantiation of the extended class
        addToIntersection(list, pt.getSupertype(pt.getDeclaration().getExtendedTypeDeclaration()), this);
        //then intersect with each satisfied interface instantiation
        for (TypeDeclaration td: pt.getDeclaration().getSatisfiedTypeDeclarations()) {
            addToIntersection(list, pt.getSupertype(td), this);
        }
        IntersectionType it = new IntersectionType(this);
        it.setSatisfiedTypes(list);
        return it.getType();
    }
    else {
        return pt;
    }
}
}
| src/com/redhat/ceylon/compiler/typechecker/model/Unit.java | package com.redhat.ceylon.compiler.typechecker.model;
import static com.redhat.ceylon.compiler.typechecker.model.Util.addToIntersection;
import static com.redhat.ceylon.compiler.typechecker.model.Util.intersectionType;
import static com.redhat.ceylon.compiler.typechecker.model.Util.isTypeUnknown;
import static com.redhat.ceylon.compiler.typechecker.model.Util.producedType;
import static com.redhat.ceylon.compiler.typechecker.model.Util.unionType;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.Identifier;
public class Unit {
private Package pkg;
private List<Import> imports = new ArrayList<Import>();
private List<Declaration> declarations = new ArrayList<Declaration>();
private String filename;
private List<ImportList> importLists = new ArrayList<ImportList>();
private Set<Identifier> unresolvedReferences = new HashSet<Identifier>();
private Set<Declaration> duplicateDeclarations = new HashSet<Declaration>();
private final Set<String> dependentsOf = new HashSet<String>();
private String fullPath;
private String relativePath;
public List<Import> getImports() {
return imports;
}
public List<ImportList> getImportLists() {
return importLists;
}
/**
* @return the dependentsOf
*/
public Set<String> getDependentsOf() {
return dependentsOf;
}
public Set<Identifier> getUnresolvedReferences() {
return unresolvedReferences;
}
public Set<Declaration> getDuplicateDeclarations() {
return duplicateDeclarations;
}
public Package getPackage() {
return pkg;
}
public void setPackage(Package p) {
pkg = p;
}
public List<Declaration> getDeclarations() {
synchronized (declarations) {
return new ArrayList<Declaration>(declarations);
}
}
public void addDeclaration(Declaration declaration) {
synchronized (declarations) {
declarations.add(declaration);
}
}
public String getFilename() {
return filename;
}
public void setFilename(String filename) {
this.filename = filename;
}
public String getFullPath() {
return fullPath;
}
public void setFullPath(String fullPath) {
this.fullPath = fullPath;
}
public String getRelativePath() {
return relativePath;
}
public void setRelativePath(String relativePath) {
this.relativePath = relativePath;
}
@Override
public String toString() {
return "Unit[" + filename + "]";
}
public Import getImport(String name) {
for (Import i: getImports()) {
if (i.getTypeDeclaration()==null &&
i.getAlias().equals(name)) {
return i;
}
}
return null;
}
/**
* Search the imports of a compilation unit
* for the named toplevel declaration.
*/
public Declaration getImportedDeclaration(String name,
List<ProducedType> signature, boolean ellipsis) {
for (Import i: getImports()) {
if (i.getAlias().equals(name)) {
//in case of an overloaded member, this will
//be the "abstraction", so search for the
//correct overloaded version
Declaration d = i.getDeclaration();
return d.getContainer().getMember(d.getName(), signature, ellipsis);
}
}
return null;
}
/**
* Search the imports of a compilation unit
* for the named member declaration.
*/
public Declaration getImportedDeclaration(TypeDeclaration td, String name,
List<ProducedType> signature, boolean ellipsis) {
for (Import i: getImports()) {
TypeDeclaration itd = i.getTypeDeclaration();
if (itd!=null && itd.equals(td) &&
i.getAlias().equals(name)) {
//in case of an overloaded member, this will
//be the "abstraction", so search for the
//correct overloaded version
Declaration d = i.getDeclaration();
return d.getContainer().getMember(d.getName(), signature, ellipsis);
}
}
return null;
}
public Map<String, DeclarationWithProximity> getMatchingImportedDeclarations(String startingWith, int proximity) {
Map<String, DeclarationWithProximity> result = new TreeMap<String, DeclarationWithProximity>();
for (Import i: new ArrayList<Import>(getImports())) {
if (i.getAlias()!=null &&
i.getAlias().toLowerCase().startsWith(startingWith.toLowerCase())) {
result.put(i.getAlias(), new DeclarationWithProximity(i, proximity));
}
}
return result;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Unit) {
Unit that = (Unit) obj;
return that.getPackage().equals(getPackage())
&& that.getFilename().equals(getFilename());
}
else {
return false;
}
}
@Override
public int hashCode() {
return getFilename().hashCode();
}
/**
* Search for a declaration in the language module.
*/
public Declaration getLanguageModuleDeclaration(String name) {
//all elements in ceylon.language are auto-imported
//traverse all default module packages provided they have not been traversed yet
Module languageModule = getPackage().getModule().getLanguageModule();
if ( languageModule != null && languageModule.isAvailable() ) {
if ("Bottom".equals(name)) {
return getBottomDeclaration();
}
for (Package languageScope : languageModule.getPackages() ) {
Declaration d = languageScope.getMember(name, null, false);
if (d != null && d.isShared()) {
return d;
}
}
}
return null;
}
public Interface getCorrespondenceDeclaration() {
return (Interface) getLanguageModuleDeclaration("Correspondence");
}
public Class getVoidDeclaration() {
return (Class) getLanguageModuleDeclaration("Void");
}
public Class getNothingDeclaration() {
return (Class) getLanguageModuleDeclaration("Nothing");
}
public Value getNullDeclaration() {
return (Value) getLanguageModuleDeclaration("null");
}
public Interface getEmptyDeclaration() {
return (Interface) getLanguageModuleDeclaration("Empty");
}
public Interface getSequenceDeclaration() {
return (Interface) getLanguageModuleDeclaration("Sequence");
}
public Class getObjectDeclaration() {
return (Class) getLanguageModuleDeclaration("Object");
}
public Class getIdentifiableObjectDeclaration() {
return (Class) getLanguageModuleDeclaration("IdentifiableObject");
}
public Interface getIdentifiableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Identifiable");
}
public Class getExceptionDeclaration() {
return (Class) getLanguageModuleDeclaration("Exception");
}
public Interface getCategoryDeclaration() {
return (Interface) getLanguageModuleDeclaration("Category");
}
public Interface getIterableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Iterable");
}
public Interface getSequentialDeclaration() {
return (Interface) getLanguageModuleDeclaration("Sequential");
}
public Interface getListDeclaration() {
return (Interface) getLanguageModuleDeclaration("List");
}
/**
* Gets the declaration of {@code Iterator}
* @return The declaration
*/
public Interface getIteratorDeclaration() {
return (Interface) getLanguageModuleDeclaration("Iterator");
}
// public Interface getFixedSizedDeclaration() {
// return (Interface) getLanguageModuleDeclaration("FixedSized");
// }
//
// public Interface getSomeDeclaration() {
// return (Interface) getLanguageModuleDeclaration("Some");
// }
//
// public Interface getNoneDeclaration() {
// return (Interface) getLanguageModuleDeclaration("None");
// }
public Interface getCallableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Callable");
}
public Interface getCastableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Castable");
}
public Interface getSummableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Summable");
}
public Interface getNumericDeclaration() {
return (Interface) getLanguageModuleDeclaration("Numeric");
}
public Interface getIntegralDeclaration() {
return (Interface) getLanguageModuleDeclaration("Integral");
}
public Interface getInvertableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Invertable");
}
public Interface getExponentiableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Exponentiable");
}
public Interface getSetDeclaration() {
return (Interface) getLanguageModuleDeclaration("Set");
}
public TypeDeclaration getComparisonDeclaration() {
return (TypeDeclaration) getLanguageModuleDeclaration("Comparison");
}
public TypeDeclaration getBooleanDeclaration() {
return (TypeDeclaration) getLanguageModuleDeclaration("Boolean");
}
public TypeDeclaration getStringDeclaration() {
return (TypeDeclaration) getLanguageModuleDeclaration("String");
}
public TypeDeclaration getFloatDeclaration() {
return (TypeDeclaration) getLanguageModuleDeclaration("Float");
}
public TypeDeclaration getIntegerDeclaration() {
return (TypeDeclaration) getLanguageModuleDeclaration("Integer");
}
public TypeDeclaration getCharacterDeclaration() {
return (TypeDeclaration) getLanguageModuleDeclaration("Character");
}
public Interface getComparableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Comparable");
}
public Interface getCloseableDeclaration() {
return (Interface) getLanguageModuleDeclaration("Closeable");
}
public Interface getOrdinalDeclaration() {
return (Interface) getLanguageModuleDeclaration("Ordinal");
}
public Class getRangeDeclaration() {
return (Class) getLanguageModuleDeclaration("Range");
}
public Class getTupleDeclaration() {
return (Class) getLanguageModuleDeclaration("Tuple");
}
public TypeDeclaration getArrayDeclaration() {
return (Class) getLanguageModuleDeclaration("Array");
}
public Interface getRangedDeclaration() {
return (Interface) getLanguageModuleDeclaration("Ranged");
}
public Class getEntryDeclaration() {
return (Class) getLanguageModuleDeclaration("Entry");
}
ProducedType getCallableType(ProducedReference ref, ProducedType rt) {
if ( isTypeUnknown(ref.getType())) {
//special case for forward reference to member
//with inferred type TODO: something better
return new UnknownType(this).getType();
}
ProducedType result = rt;
if (ref.getDeclaration() instanceof Functional) {
List<ParameterList> pls = ((Functional) ref.getDeclaration()).getParameterLists();
boolean hasSequenced = false;
for (int i=pls.size()-1; i>=0; i--) {
List<ProducedType> args = new ArrayList<ProducedType>();
for (Parameter p: pls.get(i).getParameters()) {
ProducedTypedReference np = ref.getTypedParameter(p);
ProducedType npt = np.getType();
if (np.getDeclaration() instanceof Functional) {
args.add(getCallableType(np, npt));
}
else if (p.isSequenced()) {
args.add(getIteratedType(npt));
hasSequenced = true;
}
else {
args.add(npt);
}
}
result = producedType(getCallableDeclaration(), result,
getTupleType(args, hasSequenced));
}
}
return result;
}
public ProducedType getTupleType(List<ProducedType> elemTypes, boolean sequenced) {
ProducedType result = getEmptyDeclaration().getType();
ProducedType union = getBottomDeclaration().getType();
int last = elemTypes.size()-1;
for (int i=last; i>=0; i--) {
ProducedType elemType = elemTypes.get(i);
union = unionType(union, elemType, this);
if (sequenced && i==last) {
result = getSequentialType(elemType);
}
else {
result = producedType(getTupleDeclaration(), union, elemType, result);
}
}
return result;
}
public ProducedType getEmptyType(ProducedType pt) {
return pt==null ? null :
unionType(pt, getEmptyDeclaration().getType(), this);
/*else if (isEmptyType(pt)) {
//Nothing|Nothing|T == Nothing|T
return pt;
}
else if (pt.getDeclaration() instanceof BottomType) {
//Nothing|0 == Nothing
return getEmptyDeclaration().getType();
}
else {
UnionType ut = new UnionType();
List<ProducedType> types = new ArrayList<ProducedType>();
addToUnion(types,getEmptyDeclaration().getType());
addToUnion(types,pt);
ut.setCaseTypes(types);
return ut.getType();
}*/
}
public ProducedType getPossiblyNoneType(ProducedType pt) {
return pt==null ? null :
unionType(pt, producedType(getSequentialDeclaration(),
getVoidDeclaration().getType()), this);
}
public ProducedType getOptionalType(ProducedType pt) {
return pt==null ? null :
unionType(pt, getNothingDeclaration().getType(), this);
/*else if (isOptionalType(pt)) {
//Nothing|Nothing|T == Nothing|T
return pt;
}
else if (pt.getDeclaration() instanceof BottomType) {
//Nothing|0 == Nothing
return getNothingDeclaration().getType();
}
else {
UnionType ut = new UnionType();
List<ProducedType> types = new ArrayList<ProducedType>();
addToUnion(types,getNothingDeclaration().getType());
addToUnion(types,pt);
ut.setCaseTypes(types);
return ut.getType();
}*/
}
public ProducedType getSequenceType(ProducedType et) {
return producedType(getSequenceDeclaration(), et);
}
public ProducedType getSequentialType(ProducedType et) {
return producedType(getSequentialDeclaration(), et);
}
public ProducedType getIterableType(ProducedType et) {
return producedType(getIterableDeclaration(), et);
}
public ProducedType getSetType(ProducedType et) {
return producedType(getSetDeclaration(), et);
}
/**
* Returns a ProducedType corresponding to {@code Iterator<T>}
* @param et The ProducedType corresponding to {@code T}
* @return The ProducedType corresponding to {@code Iterator<T>}
*/
public ProducedType getIteratorType(ProducedType et) {
return Util.producedType(getIteratorDeclaration(), et);
}
/**
* Returns a ProducedType corresponding to {@code Range<T>}
* @param rt The ProducedType corresponding to {@code T}
* @return The ProducedType corresponding to {@code Range<T>}
*/
public ProducedType getRangeType(ProducedType rt) {
return Util.producedType(getRangeDeclaration(), rt);
}
public ProducedType getCastableType(ProducedType et) {
return producedType(getCastableDeclaration(), et);
}
public ProducedType getEntryType(ProducedType kt, ProducedType vt) {
return producedType(getEntryDeclaration(), kt, vt);
}
public ProducedType getKeyType(ProducedType type) {
ProducedType st = type.getSupertype(getEntryDeclaration());
if (st!=null && st.getTypeArguments().size()==2) {
return st.getTypeArgumentList().get(0);
}
else {
return null;
}
}
public ProducedType getValueType(ProducedType type) {
ProducedType st = type.getSupertype(getEntryDeclaration());
if (st!=null && st.getTypeArguments().size()==2) {
return st.getTypeArgumentList().get(1);
}
else {
return null;
}
}
public ProducedType getIteratedType(ProducedType type) {
ProducedType st = type.getSupertype(getIterableDeclaration());
if (st!=null && st.getTypeArguments().size()==1) {
return st.getTypeArgumentList().get(0);
}
else {
return null;
}
}
public ProducedType getSetElementType(ProducedType type) {
ProducedType st = type.getSupertype(getSetDeclaration());
if (st!=null && st.getTypeArguments().size()==1) {
return st.getTypeArgumentList().get(0);
}
else {
return null;
}
}
public ProducedType getFixedSizedElementType(ProducedType type) {
ProducedType st = type.getSupertype(getSequentialDeclaration());
if (st!=null && st.getTypeArguments().size()==1) {
return st.getTypeArgumentList().get(0);
}
else {
return null;
}
}
public ProducedType getDefiniteType(ProducedType pt) {
return intersectionType(getObjectDeclaration().getType(),
pt, pt.getDeclaration().getUnit());
/*if (pt.getDeclaration().equals(getVoidDeclaration())) {
return getObjectDeclaration().getType();
}
else {
return pt.minus(getNothingDeclaration());
}*/
}
public ProducedType getNonemptyType(ProducedType pt) {
return intersectionType(producedType(getSequenceDeclaration(),
getFixedSizedElementType(pt)), pt,
pt.getDeclaration().getUnit());
/*if (pt.getDeclaration().equals(getVoidDeclaration())) {
return getObjectDeclaration().getType();
}
else {
return pt.minus(getNothingDeclaration());
}*/
}
public ProducedType getNonemptyDefiniteType(ProducedType pt) {
return getNonemptyType(getDefiniteType(pt));
}
public ProducedType getNonemptySequenceType(ProducedType pt) {
return pt.minus(getEmptyDeclaration()).getSupertype(getSequenceDeclaration());
}
public ProducedType getNonemptyIterableType(ProducedType pt) {
return pt.minus(getEmptyDeclaration()).getSupertype(getIterableDeclaration());
}
public ProducedType getNonemptyListType(ProducedType pt) {
return pt.minus(getEmptyDeclaration()).getSupertype(getListDeclaration());
}
    /** True if the type has {@code Entry} as a supertype. */
    public boolean isEntryType(ProducedType pt) {
        return pt.getSupertype(getEntryDeclaration())!=null;
    }

    /** True if the type has {@code Iterable} as a supertype. */
    public boolean isIterableType(ProducedType pt) {
        return pt.getSupertype(getIterableDeclaration())!=null;
    }

    /** True if the type has {@code Sequential} as a supertype. */
    public boolean isSequentialType(ProducedType pt) {
        return pt.getSupertype(getSequentialDeclaration())!=null;
    }

    /**
     * True if the type may, but need not, be null: it must intersect
     * both {@code Nothing} and {@code Object} non-trivially (an
     * intersection whose declaration is {@link BottomType} is empty).
     */
    public boolean isOptionalType(ProducedType pt) {
        //must have non-empty intersection with Nothing
        //and non-empty intersection with Object
        return !(intersectionType(getNothingDeclaration().getType(), pt, this)
                .getDeclaration() instanceof BottomType) &&
                !(intersectionType(getObjectDeclaration().getType(), pt, this)
                .getDeclaration() instanceof BottomType);
    }

    /**
     * True if the type may, but need not, be an empty sequence: it must
     * be a (possibly-optional) {@code Sequential<Void>}, and must
     * intersect both {@code Empty} and {@code Sequence<Bottom>}
     * non-trivially.
     */
    public boolean isEmptyType(ProducedType pt) {
        //must be a subtype of Sequential<Void>
        return getOptionalType(producedType(getSequentialDeclaration(),
                getVoidDeclaration().getType()))
                    .isSupertypeOf(pt) &&
                //must have non-empty intersection with Empty
                //and non-empty intersection with Sequence<Bottom>
                !(intersectionType(getEmptyDeclaration().getType(), pt, this)
                        .getDeclaration() instanceof BottomType) &&
                !(intersectionType(getSequenceType(getBottomDeclaration().getType()), pt, this)
                        .getDeclaration() instanceof BottomType);
    }

    /** True if the (non-null) type has {@code Callable} as a supertype. */
    public boolean isCallableType(ProducedType pt) {
        return pt!=null && pt.getSupertype(getCallableDeclaration())!=null;
    }

    /** Returns a fresh {@code Bottom} type declaration owned by this unit. */
    public BottomType getBottomDeclaration() {
        return new BottomType(this);
    }
    /**
     * Converts a type that may involve an anonymous class (which cannot
     * be written down in source code) into a denotable type: the
     * intersection of the anonymous class's extended type and all its
     * satisfied types. Non-anonymous types are returned unchanged;
     * null is passed through.
     */
    public ProducedType denotableType(ProducedType pt) {
        if ( pt!=null && pt.getDeclaration()!=null &&
                pt.getDeclaration().isAnonymous() ) {
            List<ProducedType> list = new ArrayList<ProducedType>();
            // intersect the supertype instantiations, not the raw
            // declarations, so type arguments are preserved
            addToIntersection(list, pt.getSupertype(pt.getDeclaration().getExtendedTypeDeclaration()), this);
            for (TypeDeclaration td: pt.getDeclaration().getSatisfiedTypeDeclarations()) {
                addToIntersection(list, pt.getSupertype(td), this);
            }
            IntersectionType it = new IntersectionType(this);
            it.setSatisfiedTypes(list);
            return it.getType();
        }
        else {
            return pt;
        }
    }
}
| Added getNonemptySequentialType()
| src/com/redhat/ceylon/compiler/typechecker/model/Unit.java | Added getNonemptySequentialType() |
|
Java | apache-2.0 | 90b4ea2d2748761e2364e7272fd787ba512362b9 | 0 | MCUpdater/MCU-API | package org.mcupdater.util;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.io.FileUtils;
import org.mcupdater.*;
import org.mcupdater.api.Version;
import org.mcupdater.downloadlib.*;
import org.mcupdater.instance.FileInfo;
import org.mcupdater.instance.Instance;
import org.mcupdater.model.*;
import org.mcupdater.mojang.AssetIndex;
import org.mcupdater.mojang.AssetIndex.Asset;
import org.mcupdater.mojang.Library;
import org.mcupdater.mojang.MinecraftVersion;
import javax.swing.*;
import java.awt.image.BufferedImage;
import java.io.*;
import java.math.BigInteger;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;
import java.util.Map.Entry;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
public class MCUpdater {
private final Path MCFolder;
private Path archiveFolder;
private Path instanceRoot;
private MCUApp parent;
private final String sep = System.getProperty("file.separator");
public MessageDigest md5;
public ImageIcon defaultIcon;
private String newestMC = "";
private final Map<String,String> versionMap = new HashMap<>();
public static Logger apiLogger;
private int timeoutLength = 5000;
private final Gson gson = new GsonBuilder().setPrettyPrinting().create();
private static MCUpdater INSTANCE;
	/**
	 * Returns the singleton, creating it with the given root folder on
	 * first call. The folder argument is ignored if the singleton
	 * already exists.
	 * NOTE(review): lazy init is not thread-safe — confirm all callers
	 * run on a single thread before first access.
	 */
	public static MCUpdater getInstance(File file) {
		if( INSTANCE == null ) {
			INSTANCE = new MCUpdater(file);
		}
		return INSTANCE;
	}

	/** Returns the singleton, creating it with the default root on first call. */
	public static MCUpdater getInstance() {
		if( INSTANCE == null ) {
			INSTANCE = new MCUpdater(null);
		}
		return INSTANCE;
	}

	/** Returns the platform classpath separator (";" on Windows, ":" elsewhere). */
	public static String cpDelimiter() {
		String osName = System.getProperty("os.name");
		if (osName.startsWith("Windows")) {
			return ";";
		} else {
			return ":";
		}
	}
	/**
	 * Builds the singleton: resolves the platform-specific .minecraft and
	 * MCUpdater archive folders, wires up file logging and the download
	 * cache, then fetches md5.dat to map jar MD5s to Minecraft versions.
	 *
	 * @param desiredRoot overrides the default archive folder when non-null
	 */
	private MCUpdater(File desiredRoot)
	{
		apiLogger = Logger.getLogger("MCU-API");
		apiLogger.setLevel(Level.ALL);
		// Pick per-OS locations for the Minecraft folder and our archive folder.
		if(System.getProperty("os.name").startsWith("Windows"))
		{
			MCFolder = new File(System.getenv("APPDATA")).toPath().resolve(".minecraft");
			archiveFolder = new File(System.getenv("APPDATA")).toPath().resolve(".MCUpdater");
		} else if(System.getProperty("os.name").startsWith("Mac"))
		{
			MCFolder = new File(System.getProperty("user.home")).toPath().resolve("Library").resolve("Application Support").resolve("minecraft");
			archiveFolder = new File(System.getProperty("user.home")).toPath().resolve("Library").resolve("Application Support").resolve("MCUpdater");
		}
		else
		{
			MCFolder = new File(System.getProperty("user.home")).toPath().resolve(".minecraft");
			archiveFolder = new File(System.getProperty("user.home")).toPath().resolve(".MCUpdater");
		}
		if (!(desiredRoot == null)) {
			archiveFolder = desiredRoot.toPath();
		}
		// Rotating file log (3 files) inside the archive folder.
		try {
			FileHandler apiHandler = new FileHandler(archiveFolder.resolve("MCU-API.log").toString(), 0, 3);
			apiHandler.setFormatter(new FMLStyleFormatter());
			apiLogger.addHandler(apiHandler);
		} catch (SecurityException | IOException e1) {
			e1.printStackTrace(); // Will only be thrown if there is a problem with logging.
		}
		try {
			md5 = MessageDigest.getInstance("MD5");
		} catch (NoSuchAlgorithmException e) {
			apiLogger.log(Level.SEVERE, "No MD5 support!", e);
		}
		// Fall back to a blank 32x32 icon if the bundled resource is missing.
		try {
			defaultIcon = new ImageIcon(MCUpdater.class.getResource("/minecraft.png"));
		} catch( NullPointerException e ) {
			_debug( "Unable to load default icon?!" );
			defaultIcon = new ImageIcon(new BufferedImage(32,32,BufferedImage.TYPE_INT_ARGB));
		}
		// configure the download cache
		try {
			DownloadCache.init(archiveFolder.resolve("cache").toFile());
		} catch (IllegalArgumentException e) {
			_debug( "Suppressed attempt to re-init download cache?!" );
		}
		// Load md5.dat ("md5|version" per line) into versionMap; the last
		// entry is treated as the newest Minecraft release.
		// NOTE(review): this network fetch happens in the constructor and
		// blocks for up to timeoutLength ms — confirm that is acceptable
		// for callers constructing the singleton on a UI thread.
		try {
			long start = System.currentTimeMillis();
			URL md5s = new URL("http://files.mcupdater.com/md5.dat");
			URLConnection md5Con = md5s.openConnection();
			md5Con.setConnectTimeout(this.timeoutLength);
			md5Con.setReadTimeout(this.timeoutLength);
			InputStreamReader input = new InputStreamReader(md5Con.getInputStream());
			BufferedReader buffer = new BufferedReader(input);
			String currentLine;
			while(true){
				currentLine = buffer.readLine();
				if(currentLine != null){
					String entry[] = currentLine.split("\\|");
					versionMap.put(entry[0], entry[1]);
					newestMC = entry[1]; // Most recent entry in md5.dat is the current release
				} else {
					break;
				}
			}
			buffer.close();
			input.close();
			apiLogger.fine("Took "+(System.currentTimeMillis()-start)+"ms to load md5.dat");
			apiLogger.fine("newest Minecraft in md5.dat: " + newestMC);
		} catch (MalformedURLException e) {
			apiLogger.log(Level.SEVERE, "Bad URL", e);
		} catch (IOException e) {
			apiLogger.log(Level.SEVERE, "I/O Error", e);
		}
	}
	/** Returns the owning application used for status/log callbacks and queue submission. */
	public MCUApp getParent() {
		return parent;
	}

	/** Sets the owning application; must be called before installMods is used. */
	public void setParent(MCUApp parent) {
		this.parent = parent;
	}

	/** Returns the platform-specific .minecraft folder resolved at construction. */
	public Path getMCFolder()
	{
		return MCFolder;
	}

	/** Returns the MCUpdater archive folder (cache, logs, assets). */
	public Path getArchiveFolder() {
		return archiveFolder;
	}

	/** Returns the root folder under which per-server instances are created. */
	public Path getInstanceRoot() {
		return instanceRoot;
	}

	/** Sets the root folder under which per-server instances are created. */
	public void setInstanceRoot(Path instanceRoot) {
		this.instanceRoot = instanceRoot;
	}
	/**
	 * Decides whether a file path is excluded from instance cleanup.
	 * Returns true when the file must be preserved; false when it may
	 * be deleted/overwritten. The order of the checks matters: earlier
	 * rules (e.g. the mcu folder, archived mods, bin/minecraft.jar)
	 * deliberately shadow later, broader rules.
	 *
	 * @param path      path to test (substring matching, using the
	 *                  platform separator)
	 * @param forDelete true when evaluating for deletion; currently only
	 *                  affects options.txt, which is kept on overwrite
	 *                  but deletable on clear
	 */
	private boolean getExcludedNames(String path, boolean forDelete) {
		if(path.contains("mcu" + sep)) {
			// never delete from the mcu folder
			return true;
		}
		if (path.contains("mods") && (path.contains(".zip") || path.contains(".jar"))) {
			// always delete mods in archive form
			return false;
		}
		if(path.contains("bin" + sep + "minecraft.jar")) {
			// always delete bin/minecraft.jar
			return false;
		}
		if(path.contains("bin" + sep)) {
			// never delete anything else in bin/
			return true;
		}
		if(path.contains("resources") && !path.contains("mods")) {
			// never delete resources unless it is under the mods directory
			return true;
		}
		if(path.contains("lib" + sep)) {
			// never delete the lib/ folder
			return true;
		}
		if(path.contains("saves")) {
			// never delete saves
			return true;
		}
		if(path.contains("screenshots")) {
			// never delete screenshots
			return true;
		}
		if(path.contains("stats")) {
			return true;
		}
		if(path.contains("texturepacks")) {
			return true;
		}
		if(path.contains("lastlogin")) {
			return true;
		}
		if(path.contains("mcuServers.dat")) {
			return true;
		}
		if(path.contains("instance.dat")) {
			return true;
		}
		if(path.contains("minecraft.jar")) {
			return true;
		}
		if(path.contains("options.txt")) {
			return forDelete;
		}
		if(path.contains("META-INF" + sep)) {
			return true;
		}
		// Temporary hardcoding of client specific mod configs (i.e. Don't clobber on update)
		if(path.contains("rei_minimap" + sep)) {
			return true;
		}
		if(path.contains("macros" + sep)) {
			return true;
		}
		if(path.contains("InvTweaks")) {
			return true;
		}
		if(path.contains("optionsof.txt")){
			return true;
		}
		if(path.contains("voxelMap")) {
			return true;
		}
		//
		return false;
	}
private List<File> recurseFolder(File folder, boolean includeFolders)
{
List<File> output = new ArrayList<>();
List<File> input = new ArrayList<>(Arrays.asList(folder.listFiles()));
Iterator<File> fi = input.iterator();
if(includeFolders) {
output.add(folder);
}
while(fi.hasNext())
{
File entry = fi.next();
if(entry.isDirectory())
{
List<File> subfolder = recurseFolder(entry, includeFolders);
for (File aSubfolder : subfolder) {
output.add(aSubfolder);
}
} else {
output.add(entry);
}
}
return output;
}
	/**
	 * Installs or updates a server pack into an instance folder:
	 * schedules download queues for assets, libraries, the (re)built
	 * Minecraft jar, mods and config files, then post-processes each
	 * queue (native extraction, jar rebuilding, zip-mod extraction,
	 * asset virtualization) and writes instance.json.
	 *
	 * @param server        pack definition being installed
	 * @param toInstall     modules selected for installation
	 * @param configs       config files to download
	 * @param instancePath  target instance folder
	 * @param clearExisting wipe non-excluded files and force a jar rebuild
	 * @param instData      instance metadata, updated in place
	 * @param side          CLIENT or SERVER
	 * @return false when a recorded error occurred or the side was invalid
	 */
	public boolean installMods(final ServerList server, List<GenericModule> toInstall, List<ConfigFile> configs, final Path instancePath, boolean clearExisting, final Instance instData, ModSide side) throws FileNotFoundException {
		if (Version.requestedFeatureLevel(server.getMCUVersion(), "2.2")) {
			// Sort mod list for InJar
			Collections.sort(toInstall, new ModuleComparator());
		}
		//final Path instancePath = instanceRoot.resolve(server.getServerId());
		Path binPath = instancePath.resolve("bin");
		final Path productionJar;
		//File jar = null;
		final File tmpFolder = instancePath.resolve("temp").toFile();
		tmpFolder.mkdirs();
		Set<Downloadable> jarMods = new HashSet<>();
		Set<Downloadable> generalFiles = new HashSet<>();
		DownloadQueue assetsQueue = null;
		DownloadQueue jarQueue;
		DownloadQueue generalQueue;
		DownloadQueue libraryQueue = null;
		final List<String> libExtract = new ArrayList<>();
		final Map<String,Boolean> modExtract = new HashMap<>();
		// keepMeta: jar-build fragment name -> preserve META-INF when extracting
		final Map<String,Boolean> keepMeta = new TreeMap<>();
		Downloadable baseJar = null;
		final MinecraftVersion version = MinecraftVersion.loadVersion(server.getVersion());
		// Side-specific setup: on CLIENT, queue assets and libraries and
		// target bin/minecraft.jar; on SERVER, only the server jar path.
		// NOTE(review): on SERVER, assetsQueue and libraryQueue remain
		// null, yet both are dereferenced unconditionally below
		// (libraryQueue.processQueue / assetsQueue.processQueue) — this
		// looks like a guaranteed NPE for the SERVER path; confirm.
		switch (side){
		case CLIENT:
			assetsQueue = parent.submitAssetsQueue("Assets", server.getServerId(), version);
			Set<Downloadable> libSet = new HashSet<>();
			for (Library lib : version.getLibraries()) {
				if (lib.validForOS()) {
					List<URL> urls = new ArrayList<>();
					try {
						urls.add(new URL(lib.getDownloadUrl()));
					} catch (MalformedURLException e) {
						apiLogger.log(Level.SEVERE, "Bad URL", e);
					}
					Downloadable entry = new Downloadable(lib.getName(),lib.getFilename(),"",100000,urls);
					libSet.add(entry);
					if (lib.hasNatives()) {
						libExtract.add(lib.getFilename());
					}
				}
			}
			libraryQueue = parent.submitNewQueue("Libraries", server.getServerId(), libSet, instancePath.resolve("lib").toFile(), DownloadCache.getDir());
			productionJar = binPath.resolve("minecraft.jar");
			List<URL> jarUrl = new ArrayList<>();
			try {
				jarUrl.add(new URL("https://s3.amazonaws.com/Minecraft.Download/versions/" + server.getVersion() + "/" + server.getVersion() + ".jar"));
			} catch (MalformedURLException e2) {
				apiLogger.log(Level.SEVERE, "Bad URL", e2);
			}
			// Reverse-lookup the expected MD5 for this Minecraft version
			// from the md5.dat map loaded in the constructor.
			String jarMD5 = "";
			for (Entry<String,String> entry : versionMap.entrySet()) {
				if (entry.getValue().equals(server.getVersion())) {
					jarMD5 = entry.getKey();
					break;
				}
			}
			// "0.jar" sorts first, so the vanilla jar is the base layer
			// when the jar-mod fragments are extracted in order.
			baseJar = new Downloadable("Minecraft jar","0.jar",jarMD5,3000000,jarUrl);
			keepMeta.put("0.jar", Version.requestedFeatureLevel(server.getVersion(), "1.6"));
			break;
		case SERVER:
			productionJar = instancePath.resolve("minecraft_server.jar");
			break;
		default:
			apiLogger.severe("Invalid API call to MCUpdater.installMods! (side cannot be " + side.toString() + ")");
			return false;
		}
		// Decide whether the jar must be rebuilt: forced by clearExisting,
		// a missing client jar, or any changed/added/removed jar mod.
		Boolean updateJar = clearExisting;
		if (side == ModSide.CLIENT) {
			if (!productionJar.toFile().exists()) {
				updateJar = true;
			}
		} else {
			//TODO:Server jar detection
		}
		Iterator<GenericModule> iMods = toInstall.iterator();
		List<String> modIds = new ArrayList<>();
		int jarModCount = 0;
		while (iMods.hasNext() && !updateJar) {
			GenericModule current = iMods.next();
			if (current.getModType() == ModType.Jar) {
				FileInfo jarMod = instData.findJarMod(current.getId());
				if (jarMod == null) {
					updateJar = true;
				} else if (current.getMD5().isEmpty() || (!current.getMD5().equalsIgnoreCase(jarMod.getMD5()))) {
					updateJar = true;
				}
				jarModCount++;
			} else {
				modIds.add(current.getId());
			}
		}
		if (jarModCount != instData.getJarMods().size()) {
			updateJar = true;
		}
		if (updateJar && baseJar != null) {
			jarMods.add(baseJar);
		}
		// Remove files belonging to mods no longer part of the pack.
		for (FileInfo entry : instData.getInstanceFiles()) {
			if (!modIds.contains(entry.getModId())) {
				instancePath.resolve(entry.getFilename()).toFile().delete();
			}
		}
		instData.setJarMods(new ArrayList<FileInfo>());
		instData.setInstanceFiles(new ArrayList<FileInfo>());
		jarModCount = 0;
		apiLogger.info("Instance path: " + instancePath.toString());
		List<File> contents = recurseFolder(instancePath.toFile(), true);
		if (clearExisting){
			parent.setStatus("Clearing existing configuration");
			parent.log("Clearing existing configuration...");
			for (File entry : new ArrayList<>(contents)) {
				if (getExcludedNames(entry.getPath(), true)) {
					contents.remove(entry);
				}
			}
			// Delete in reverse order so directory contents go before the
			// directories themselves.
			ListIterator<File> liClear = contents.listIterator(contents.size());
			while(liClear.hasPrevious()) {
				File entry = liClear.previous();
				entry.delete();
			}
		}
		Iterator<GenericModule> itMods = toInstall.iterator();
		final File buildJar = archiveFolder.resolve("build.jar").toFile();
		if(buildJar.exists()) {
			buildJar.delete();
		}
		int modCount = toInstall.size();
		int modsLoaded = 0;
		int errorCount = 0;
		// Classify each module into the jar-build set or the general
		// download set, recording it in the instance metadata.
		while(itMods.hasNext()) {
			GenericModule entry = itMods.next();
			parent.log("Mod: "+entry.getName());
			Collections.sort(entry.getPrioritizedUrls());
			String filename;
			switch (entry.getModType()) {
			case Jar:
				if (updateJar) {
					// NOTE(review): the Downloadable filename uses the raw
					// entry.getId() while the keepMeta key uses
					// cleanForFile(entry.getId()) — for ids containing
					// special characters these will not match; confirm.
					jarMods.add(new Downloadable(entry.getName(),String.valueOf(entry.getJarOrder()) + "-" + entry.getId() + ".jar",entry.getMD5(),100000,entry.getUrls()));
					keepMeta.put(String.valueOf(entry.getJarOrder()) + "-" + cleanForFile(entry.getId()) + ".jar", entry.getKeepMeta());
					instData.addJarMod(entry.getId(), entry.getMD5());
					jarModCount++;
				}
				break;
			case Coremod:
				filename = "coremods/" + cleanForFile(entry.getId()) + ".jar";
				generalFiles.add(new Downloadable(entry.getName(),filename,entry.getMD5(),100000,entry.getUrls()));
				instData.addMod(entry.getId(), entry.getMD5(), filename);
				break;
			case Library:
				filename = "lib/" + cleanForFile(entry.getId()) + ".jar";
				generalFiles.add(new Downloadable(entry.getName(),filename,entry.getMD5(),100000,entry.getUrls()));
				instData.addMod(entry.getId(), entry.getMD5(), filename);
				break;
			case Extract:
				generalFiles.add(new Downloadable(entry.getName(),cleanForFile(entry.getId()) + ".zip",entry.getMD5(),100000,entry.getUrls()));
				modExtract.put(cleanForFile(entry.getId()) + ".zip", entry.getInRoot());
				break;
			case Litemod:
				filename = entry.getPath().isEmpty() ? "mods/" + cleanForFile(entry.getId()) + ".litemod" : entry.getPath();
				generalFiles.add(new Downloadable(entry.getName(),filename,entry.getMD5(),100000,entry.getUrls()));
				instData.addMod(entry.getId(), entry.getMD5(), filename);
				break;
			case Regular:
				filename = entry.getPath().isEmpty() ? "mods/" + cleanForFile(entry.getId()) + ".jar" : entry.getPath();
				generalFiles.add(new Downloadable(entry.getName(),filename,entry.getMD5(),100000,entry.getUrls()));
				instData.addMod(entry.getId(), entry.getMD5(), filename);
				break;
			case Option:
				//TODO: Unimplemented
			}
			modsLoaded++;
			parent.log("  Done ("+modsLoaded+"/"+modCount+")");
		}
		// Queue config files, honoring noOverwrite for existing files.
		for (ConfigFile cfEntry : configs) {
			final File confFile = instancePath.resolve(cfEntry.getPath()).toFile();
			if (confFile.exists() && cfEntry.isNoOverwrite()) {
				continue;
			}
			List<URL> configUrl = new ArrayList<>();
			try {
				configUrl.add(new URL(cfEntry.getUrl()));
			} catch (MalformedURLException e) {
				++errorCount;
				apiLogger.log(Level.SEVERE, "General Error", e);
			}
			generalFiles.add(new Downloadable(cfEntry.getPath(), cfEntry.getPath(), cfEntry.getMD5(), 10000, configUrl));
		}
		generalQueue = parent.submitNewQueue("Instance files", server.getServerId(), generalFiles, instancePath.toFile(), DownloadCache.getDir());
		jarQueue = parent.submitNewQueue("Jar build files", server.getServerId(), jarMods, tmpFolder, DownloadCache.getDir());
		// After the library downloads finish, unpack native libraries.
		TaskableExecutor libExecutor = new TaskableExecutor(2, new Runnable(){

			@Override
			public void run() {
				for (String entry : libExtract){
					Archive.extractZip(instancePath.resolve("lib").resolve(entry).toFile(), instancePath.resolve("lib").resolve("natives").toFile(), false);
				}
			}});
		libraryQueue.processQueue(libExecutor);
		// fmlbranding.properties is injected into the jar so FML displays
		// the pack name and revision on the client.
		final File branding = new File(tmpFolder, "fmlbranding.properties");
		try {
			branding.createNewFile();
			Properties propBrand = new Properties();
			propBrand.setProperty("fmlbranding", "MCUpdater: " + server.getName() + " (rev " + server.getRevision() + ")");
			propBrand.store(new FileOutputStream(branding), "MCUpdater ServerPack branding");
		} catch (IOException e1) {
			apiLogger.log(Level.SEVERE, "I/O Error", e1);
		}
		final boolean doJarUpdate = updateJar;
		// After jar fragments download: either refresh branding in the
		// existing jar, or extract fragments in order, repackage, and copy
		// the rebuilt jar into the instance; then persist instance.json.
		TaskableExecutor jarExecutor = new TaskableExecutor(2, new Runnable() {
			
			@Override
			public void run() {
				if (!doJarUpdate) {
					try {
						Archive.updateArchive(productionJar.toFile(), new File[]{ branding });
					} catch (IOException e1) {
						apiLogger.log(Level.SEVERE, "I/O Error", e1);
					}
				} else {
					// keepMeta is a TreeMap, so fragments extract in
					// lexical order — "0.jar" (vanilla) first.
					for (Map.Entry<String,Boolean> entry : keepMeta.entrySet()) {
						File entryFile = new File(tmpFolder,entry.getKey());
						Archive.extractZip(entryFile, tmpFolder, entry.getValue());
						entryFile.delete();
					}
					try {
						buildJar.createNewFile();
					} catch (IOException e) {
						apiLogger.log(Level.SEVERE, "I/O Error", e);
					}
					boolean doManifest = true;
					List<File> buildList = recurseFolder(tmpFolder,true);
					for (File entry : new ArrayList<>(buildList)) {
						if (entry.getPath().contains("META-INF")) {
							doManifest = false;
						}
					}
					parent.log("Packaging updated jar...");
					try {
						Archive.createJar(buildJar, buildList, tmpFolder.getPath() + sep, doManifest);
					} catch (IOException e1) {
						parent.log("Failed to create jar!");
						apiLogger.log(Level.SEVERE, "I/O Error", e1);
					}
					try {
						Files.createDirectories(productionJar.getParent());
						Files.copy(buildJar.toPath(), productionJar, StandardCopyOption.REPLACE_EXISTING);
					} catch (IOException e) {
						apiLogger.log(Level.SEVERE, "Failed to copy new jar to instance!", e);
					}
				}
				// Clean out the temp folder (reverse order: files before dirs).
				List<File> tempFiles = recurseFolder(tmpFolder,true);
				ListIterator<File> li = tempFiles.listIterator(tempFiles.size());
				while(li.hasPrevious()) { 
					File entry = li.previous();
					entry.delete();
				}
				if (server.isGenerateList()) { writeMCServerFile(instancePath, server.getName(), server.getAddress()); }
				instData.setMCVersion(server.getVersion());
				instData.setRevision(server.getRevision());
				String jsonOut = gson.toJson(instData);
				try {
					BufferedWriter writer = Files.newBufferedWriter(getInstanceRoot().resolve(server.getServerId()).resolve("instance.json"), StandardCharsets.UTF_8);
					writer.append(jsonOut);
					writer.close();
				} catch (IOException e) {
					apiLogger.log(Level.SEVERE, "I/O error", e);
				}
			}
		});
		jarQueue.processQueue(jarExecutor);
		// After general downloads: unpack Extract-type modules into the
		// instance root or the mods folder, then discard the archives.
		TaskableExecutor genExecutor = new TaskableExecutor(12, new Runnable(){
			
			@Override
			public void run() {
				for (Map.Entry<String,Boolean> entry : modExtract.entrySet()) {
					if (entry.getValue()) {
						Archive.extractZip(instancePath.resolve(entry.getKey()).toFile(), instancePath.toFile());
					} else {
						Archive.extractZip(instancePath.resolve(entry.getKey()).toFile(), instancePath.resolve("mods").toFile());
					}
					instancePath.resolve(entry.getKey()).toFile().delete();
				}
			}
		});
		generalQueue.processQueue(genExecutor);
		// After asset downloads: for "virtual" asset indexes, materialize
		// the virtual tree via symlinks (or copies where links fail).
		TaskableExecutor assetsExecutor = new TaskableExecutor(8, new Runnable(){
			
			@Override
			public void run() {
				//check virtual
				Gson gson = new Gson();
				String indexName = version.getAssets();
				if (indexName == null) {
					indexName = "legacy";
				}
				File indexesPath = archiveFolder.resolve("assets").resolve("indexes").toFile();
				File indexFile = new File(indexesPath, indexName + ".json");
				String json;
				try {
					json = FileUtils.readFileToString(indexFile);
					AssetIndex index = gson.fromJson(json, AssetIndex.class);
					parent.log("Assets virtual: " + index.isVirtual());
					if (index.isVirtual()) {
						//Test symlink support
						boolean doLinks = true;
						try {
							java.nio.file.Files.createSymbolicLink(archiveFolder.resolve("linktest"), archiveFolder.resolve("MCUpdater.log.0"));
							archiveFolder.resolve("linktest").toFile().delete();
						} catch (Exception e) {
							doLinks = false;
						}
						Path assetsPath = archiveFolder.resolve("assets");
						Path virtualPath = assetsPath.resolve("virtual");
						for (Map.Entry<String, Asset> entry : index.getObjects().entrySet()) {
							Path target = virtualPath.resolve(entry.getKey());
							Path original = assetsPath.resolve("objects").resolve(entry.getValue().getHash().substring(0,2)).resolve(entry.getValue().getHash());
							if (!Files.exists(target)) {
								Files.createDirectories(target.getParent());
								if (doLinks) {
									Files.createSymbolicLink(target, original);
								} else {
									Files.copy(original, target);
								}
							}
						}
					}
				} catch (IOException e) {
					parent.baseLogger.log(Level.SEVERE, "Assets exception! " + e.getMessage());
				}				
			}
		});
		assetsQueue.processQueue(assetsExecutor);
		if( errorCount > 0 ) {
			parent.baseLogger.severe("Errors were detected with this update, please verify your files.  There may be a problem with the serverpack configuration or one of your download sites.");
			return false;
		}
		return true;
	}
private String cleanForFile(String id) {
return id.replaceAll("[^a-zA-Z_0-9\\-.]", "_");
}
public void writeMCServerFile(Path installPath, String name, String ip) {
byte[] header = new byte[]{
0x0A,0x00,0x00,0x09,0x00,0x07,0x73,0x65,0x72,0x76,0x65,0x72,0x73,0x0A,
0x00,0x00,0x00,0x01,0x01,0x00,0x0B,0x68,0x69,0x64,0x65,0x41,0x64,0x64,
0x72,0x65,0x73,0x73,0x01,0x08,0x00,0x04,0x6E,0x61,0x6D,0x65,0x00,
(byte) (name.length() + 12), (byte) 0xC2,(byte) 0xA7,0x41,0x5B,0x4D,0x43,0x55,0x5D,0x20,(byte) 0xC2,(byte) 0xA7,0x46
};
byte[] nameBytes = name.getBytes();
byte[] ipBytes = ip.getBytes();
byte[] middle = new byte[]{0x08,0x00,0x02,0x69,0x70,0x00,(byte) ip.length()};
byte[] end = new byte[]{0x00,0x00};
int size = header.length + nameBytes.length + middle.length + ipBytes.length + end.length;
byte[] full = new byte[size];
int pos = 0;
System.arraycopy(header, 0, full, pos, header.length);
pos += header.length;
System.arraycopy(nameBytes, 0, full, pos, nameBytes.length);
pos += nameBytes.length;
System.arraycopy(middle, 0, full, pos, middle.length);
pos += middle.length;
System.arraycopy(ipBytes, 0, full, pos, ipBytes.length);
pos += ipBytes.length;
System.arraycopy(end, 0, full, pos, end.length);
File serverFile = installPath.resolve("servers.dat").toFile();
try {
serverFile.createNewFile();
FileOutputStream fos = new FileOutputStream(serverFile);
fos.write(full,0,full.length);
fos.close();
} catch (IOException e) {
apiLogger.log(Level.SEVERE, "I/O Error", e);
}
}
	/** Logs a debug message at FINE level through the API logger. */
	private static void _debug(String msg) {
		apiLogger.fine(msg);
	}

	/** Sets the connect/read timeout (milliseconds) used for network fetches. */
	public void setTimeout(int timeout) {
		this.timeoutLength = timeout;
	}

	/** Returns the connect/read timeout (milliseconds) used for network fetches. */
	public int getTimeout() {
		return this.timeoutLength;
	}
public static String calculateGroupHash(Set<String> digests) {
BigInteger hash = BigInteger.valueOf(0);
for (String entry : digests) {
try {
BigInteger digest = new BigInteger(Hex.decodeHex(entry.toCharArray()));
hash = hash.xor(digest);
} catch (DecoderException e) {
//e.printStackTrace();
System.out.println("Entry '" + entry + "' is not a valid hexadecimal number");
}
}
return Hex.encodeHexString(hash.toByteArray());
}
}
| src/main/java/org/mcupdater/util/MCUpdater.java | package org.mcupdater.util;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils;
import org.mcupdater.*;
import org.mcupdater.api.Version;
import org.mcupdater.downloadlib.*;
import org.mcupdater.instance.FileInfo;
import org.mcupdater.instance.Instance;
import org.mcupdater.model.*;
import org.mcupdater.mojang.AssetIndex;
import org.mcupdater.mojang.AssetIndex.Asset;
import org.mcupdater.mojang.Library;
import org.mcupdater.mojang.MinecraftVersion;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.PBEParameterSpec;
import javax.swing.*;
import java.awt.image.BufferedImage;
import java.io.*;
import java.math.BigInteger;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;
import java.util.Map.Entry;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
//import j7compat.Files;
//import java.nio.file.StandardCopyOption;
//import java.nio.file.StandardOpenOption;
//import j7compat.Path;
public class MCUpdater {
//public static final ResourceBundle Customization = ResourceBundle.getBundle("customization");
//private List<Module> modList = new ArrayList<Module>();
private final Path MCFolder;
private Path archiveFolder;
private Path instanceRoot;
private MCUApp parent;
private final String sep = System.getProperty("file.separator");
public MessageDigest md5;
public ImageIcon defaultIcon;
private String newestMC = "";
private final Map<String,String> versionMap = new HashMap<>();
public static Logger apiLogger;
//private Path lwjglFolder;
private int timeoutLength = 5000;
private final Gson gson = new GsonBuilder().setPrettyPrinting().create();
private static MCUpdater INSTANCE;
	/**
	 * Returns the jar (or classes directory) MCUpdater was loaded from,
	 * or null if the code-source URI cannot be resolved.
	 */
	public static File getJarFile() {
		try {
			return new File(MCUpdater.class.getProtectionDomain().getCodeSource().getLocation().toURI());
		} catch (URISyntaxException e) {
			apiLogger.log(Level.SEVERE, "Error getting MCUpdater JAR URI", e);
		}
		return null;
	}
	
	/**
	 * Returns the singleton, creating it with the given root folder on
	 * first call (the argument is ignored afterwards).
	 * NOTE(review): lazy init is not thread-safe — confirm single-threaded
	 * first access.
	 */
	public static MCUpdater getInstance(File file) {
		if( INSTANCE == null ) {
			INSTANCE = new MCUpdater(file);
		}
		return INSTANCE;
	}
	
	/** Returns the singleton, creating it with the default root on first call. */
	public static MCUpdater getInstance() {
		if( INSTANCE == null ) {
			INSTANCE = new MCUpdater(null);
		}
		return INSTANCE;
	}
	
	/** Returns the platform classpath separator (";" on Windows, ":" elsewhere). */
	public static String cpDelimiter() {
		String osName = System.getProperty("os.name");
		if (osName.startsWith("Windows")) {
			return ";";
		} else {
			return ":";
		}
	}
private MCUpdater(File desiredRoot)
{
apiLogger = Logger.getLogger("MCU-API");
apiLogger.setLevel(Level.ALL);
//String[] nativeNames;
//String nativePrefix;
if(System.getProperty("os.name").startsWith("Windows"))
{
MCFolder = new File(System.getenv("APPDATA")).toPath().resolve(".minecraft");
archiveFolder = new File(System.getenv("APPDATA")).toPath().resolve(".MCUpdater");
//nativePrefix = "lwjgl-2.9.0/native/windows/";
//nativeNames = new String[] {"jinput-dx8.dll","jinput-dx8_64.dll","jinput-raw.dll","jinput-raw_64.dll","lwjgl.dll","lwjgl64.dll","OpenAL32.dll","OpenAL64.dll"};
} else if(System.getProperty("os.name").startsWith("Mac"))
{
MCFolder = new File(System.getProperty("user.home")).toPath().resolve("Library").resolve("Application Support").resolve("minecraft");
archiveFolder = new File(System.getProperty("user.home")).toPath().resolve("Library").resolve("Application Support").resolve("MCUpdater");
//nativePrefix = "lwjgl-2.9.0/native/macosx/";
//nativeNames = new String[] {"libjinput-osx.jnilib","liblwjgl.jnilib","openal.dylib"};
}
else
{
MCFolder = new File(System.getProperty("user.home")).toPath().resolve(".minecraft");
archiveFolder = new File(System.getProperty("user.home")).toPath().resolve(".MCUpdater");
//nativePrefix = "lwjgl-2.9.0/native/linux/";
//nativeNames = new String[] {"libjinput-linux.so","libjinput-linux64.so","liblwjgl.so","liblwjgl64.so","libopenal.so","libopenal64.so"};
}
if (!(desiredRoot == null)) {
archiveFolder = desiredRoot.toPath();
}
//lwjglFolder = this.archiveFolder.resolve("LWJGL");
try {
FileHandler apiHandler = new FileHandler(archiveFolder.resolve("MCU-API.log").toString(), 0, 3);
apiHandler.setFormatter(new FMLStyleFormatter());
apiLogger.addHandler(apiHandler);
} catch (SecurityException | IOException e1) {
e1.printStackTrace(); // Will only be thrown if there is a problem with logging.
}
try {
md5 = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException e) {
apiLogger.log(Level.SEVERE, "No MD5 support!", e);
}
try {
defaultIcon = new ImageIcon(MCUpdater.class.getResource("/minecraft.png"));
} catch( NullPointerException e ) {
_debug( "Unable to load default icon?!" );
defaultIcon = new ImageIcon(new BufferedImage(32,32,BufferedImage.TYPE_INT_ARGB));
}
// configure the download cache
try {
DownloadCache.init(archiveFolder.resolve("cache").toFile());
} catch (IllegalArgumentException e) {
_debug( "Suppressed attempt to re-init download cache?!" );
}
try {
long start = System.currentTimeMillis();
URL md5s = new URL("http://files.mcupdater.com/md5.dat");
URLConnection md5Con = md5s.openConnection();
md5Con.setConnectTimeout(this.timeoutLength);
md5Con.setReadTimeout(this.timeoutLength);
InputStreamReader input = new InputStreamReader(md5Con.getInputStream());
BufferedReader buffer = new BufferedReader(input);
String currentLine;
while(true){
currentLine = buffer.readLine();
if(currentLine != null){
String entry[] = currentLine.split("\\|");
versionMap.put(entry[0], entry[1]);
newestMC = entry[1]; // Most recent entry in md5.dat is the current release
} else {
break;
}
}
buffer.close();
input.close();
apiLogger.fine("Took "+(System.currentTimeMillis()-start)+"ms to load md5.dat");
apiLogger.fine("newest Minecraft in md5.dat: " + newestMC);
} catch (MalformedURLException e) {
apiLogger.log(Level.SEVERE, "Bad URL", e);
} catch (IOException e) {
apiLogger.log(Level.SEVERE, "I/O Error", e);
}
/* Download LWJGL
File tempFile = this.archiveFolder.resolve("lwjgl-2.9.0.zip").toFile();
if (!tempFile.exists()) {
try {
String jarPrefix = "lwjgl-2.9.0/jar/";
String[] jarNames = new String[] {"lwjgl.jar","lwjgl_util.jar","jinput.jar"};
URL lwjglURL = new URL("http://sourceforge.net/projects/java-game-lib/files/Official%20Releases/LWJGL%202.9.0/lwjgl-2.9.0.zip/download");
apiLogger.info("Downloading " + lwjglURL.getPath());
FileUtils.copyURLToFile(lwjglURL, tempFile);
Path nativePath = lwjglFolder.resolve("natives");
Files.createDirectories(nativePath);
ZipFile zf = new ZipFile(tempFile);
ZipEntry entry;
for (int index=0; index < jarNames.length; index++) {
entry = zf.getEntry(jarPrefix + jarNames[index]);
File outFile = lwjglFolder.resolve(jarNames[index]).toFile();
apiLogger.finest(" Extract: " + outFile.getPath());
FileOutputStream fos = new FileOutputStream(outFile);
InputStream zis = zf.getInputStream(entry);
int len;
byte[] buf = new byte[1024];
while((len = zis.read(buf, 0, 1024)) > -1) {
fos.write(buf, 0, len);
}
fos.close();
zis.close();
}
for (int index=0; index < nativeNames.length; index++) {
entry = zf.getEntry(nativePrefix + nativeNames[index]);
File outFile = nativePath.resolve(nativeNames[index]).toFile();
apiLogger.finest(" Extract: " + outFile.getPath());
FileOutputStream fos = new FileOutputStream(outFile);
InputStream zis = zf.getInputStream(entry);
int len;
byte[] buf = new byte[1024];
while((len = zis.read(buf, 0, 1024)) > -1) {
fos.write(buf, 0, len);
}
fos.close();
zis.close();
}
zf.close();
} catch (MalformedURLException e) {
apiLogger.log(Level.SEVERE, "Bad URL", e);
} catch (IOException e) {
apiLogger.log(Level.SEVERE, "I/O Error", e);
}
}
*/
}
/** Returns the parent application shell used for status, logging and download-queue callbacks. */
public MCUApp getParent() {
	return parent;
}
/** Sets the parent application shell that receives status, logging and download-queue callbacks. */
public void setParent(MCUApp parent) {
	this.parent = parent;
}
/**
 * Persists the known server-pack URLs to mcuServers.dat (one URL per line,
 * UTF-8). Pack URLs are de-duplicated via a Set before writing, so multiple
 * servers from the same pack produce a single line.
 *
 * @param serverlist server entries whose pack URLs should be remembered
 */
public void writeServerList(List<ServerList> serverlist)
{
	archiveFolder.toFile().mkdirs();
	// try-with-resources: the writer is closed even if a write fails mid-way
	// (the original leaked the writer on any IOException before close()).
	try (BufferedWriter writer = Files.newBufferedWriter(archiveFolder.resolve("mcuServers.dat"), StandardCharsets.UTF_8))
	{
		Set<String> urls = new HashSet<>();
		for (ServerList entry : serverlist)
		{
			urls.add(entry.getPackUrl());
		}
		for (String url : urls) {
			writer.write(url);
			writer.newLine();
		}
	}
	catch( IOException x)
	{
		apiLogger.log(Level.SEVERE, "I/O Error", x);
	}
}
/**
 * Loads the backup index from mcuBackups.dat; each line is
 * "description~~~~~filename".
 *
 * @return the recorded backups; an empty list if the file is absent or unreadable
 */
public List<Backup> loadBackupList() {
	List<Backup> bList = new ArrayList<>();
	// try-with-resources: the reader is closed even when a read fails
	// (the original leaked the reader on any IOException before close()).
	try (BufferedReader reader = Files.newBufferedReader(archiveFolder.resolve("mcuBackups.dat"), StandardCharsets.UTF_8)) {
		String entry = reader.readLine();
		while(entry != null) {
			String[] ele = entry.split("~~~~~");
			bList.add(new Backup(ele[0], ele[1]));
			entry = reader.readLine();
		}
	} catch(FileNotFoundException notfound) {
		apiLogger.log(Level.SEVERE, "File not found", notfound);
	} catch(IOException ioe) {
		// Also covers NoSuchFileException from Files.newBufferedReader
		// when the backup index has never been written.
		apiLogger.log(Level.SEVERE, "I/O Error", ioe);
	}
	return bList;
}
/**
 * Writes the backup index to mcuBackups.dat, one entry per line as
 * "description~~~~~filename" (the format read back by {@code loadBackupList}).
 *
 * @param backupList backups to record
 */
public void writeBackupList(List<Backup> backupList) {
	// try-with-resources closes the writer even if a write fails
	// (the original leaked the writer on any IOException before close()).
	try (BufferedWriter writer = Files.newBufferedWriter(archiveFolder.resolve("mcuBackups.dat"), StandardCharsets.UTF_8)) {
		for (Backup entry : backupList) {
			writer.write(entry.getDescription() + "~~~~~" + entry.getFilename());
			writer.newLine();
		}
	} catch(IOException ioe) {
		apiLogger.log(Level.SEVERE, "I/O Error", ioe);
	}
}
/**
 * Loads server definitions from every known pack URL. URLs come from
 * mcuServers.dat plus the supplied default, de-duplicated via a Set. Each URL's
 * XML is fetched and parsed: a root element named "ServerPack" may contain
 * multiple &lt;Server&gt; children (pack format 2.x+); any other root is treated as
 * a single legacy ("1.0") server definition.
 *
 * @param defaultUrl pack URL always included even if not yet saved
 * @return all server entries that could be parsed; empty on total failure
 */
public List<ServerList> loadServerList(String defaultUrl)
{
	List<ServerList> slList = new ArrayList<>();
	try
	{
		Set<String> urls = new HashSet<>();
		urls.add(defaultUrl);
		// try-with-resources: reader closed even when a read fails
		// (the original leaked the reader on any IOException before close()).
		try (BufferedReader reader = Files.newBufferedReader(archiveFolder.resolve("mcuServers.dat"), StandardCharsets.UTF_8))
		{
			String entry = reader.readLine();
			while(entry != null)
			{
				urls.add(entry);
				entry = reader.readLine();
			}
		}
		for (String serverUrl : urls) {
			try {
				Element docEle;
				Document serverHeader = ServerPackParser.readXmlFromUrl(serverUrl);
				if (!(serverHeader == null)) {
					Element parent = serverHeader.getDocumentElement();
					if (parent.getNodeName().equals("ServerPack")) {
						// Modern pack: version attribute on the root, multiple <Server> entries
						String mcuVersion = parent.getAttribute("version");
						NodeList servers = parent.getElementsByTagName("Server");
						for (int i = 0; i < servers.getLength(); i++) {
							docEle = (Element) servers.item(i);
							System.out.println(serverUrl + ": " + docEle.getAttribute("id"));
							ServerList sl = ServerList.fromElement(mcuVersion, serverUrl, docEle);
							slList.add(sl);
						}
					} else {
						// Legacy pack: the root element itself is the single server definition
						System.out.println(serverUrl + ": *** " + parent.getAttribute("id"));
						ServerList sl = ServerList.fromElement("1.0", serverUrl, parent);
						slList.add(sl);
					}
				} else {
					apiLogger.warning("Unable to get server information from " + serverUrl);
				}
			} catch (Exception e) {
				// One bad pack URL must not abort loading of the others.
				apiLogger.log(Level.SEVERE, "General Error", e);
			}
		}
		return slList;
	}
	catch( FileNotFoundException notfound)
	{
		apiLogger.log(Level.SEVERE, "File not found", notfound);
	}
	catch (IOException x)
	{
		apiLogger.log(Level.SEVERE, "I/O Error", x);
	}
	return slList;
}
/** Returns the platform-specific .minecraft folder resolved in the constructor. */
public Path getMCFolder()
{
	return MCFolder;
}
/** Returns MCUpdater's own data folder (.MCUpdater), home of caches, logs and backups. */
public Path getArchiveFolder() {
	return archiveFolder;
}
// public Path getLWJGLFolder() {
// return lwjglFolder;
// }
/** Returns the root folder under which per-server instances are created. */
public Path getInstanceRoot() {
	return instanceRoot;
}
/** Sets the root folder under which per-server instances are created. */
public void setInstanceRoot(Path instanceRoot) {
	this.instanceRoot = instanceRoot;
}
/**
 * Identifies the installed Minecraft client version by MD5-hashing
 * bin/minecraft.jar and looking the digest up in the md5.dat version map.
 * On a successful match, the jar is archived as mc-&lt;version&gt;.jar if no
 * backup copy exists yet.
 *
 * @return the version string; "Not found" if the jar is missing;
 *         "Error reading file" on I/O failure; "Unknown version" when the
 *         digest is not in the map
 */
public String getMCVersion() {
	File jar = MCFolder.resolve("bin").resolve("minecraft.jar").toFile();
	byte[] hash;
	// try-with-resources: the stream is closed even when hashing throws
	// (the original leaked the stream on the IOException path).
	try (InputStream is = new FileInputStream(jar)) {
		hash = DigestUtils.md5(is);
	} catch (FileNotFoundException e) {
		return "Not found";
	} catch (IOException e) {
		apiLogger.log(Level.SEVERE, "I/O Error", e);
		return "Error reading file";
	}
	String hashString = new String(Hex.encodeHex(hash));
	String version = lookupHash(hashString);
	if(!version.isEmpty()) {
		// Keep a pristine copy of this version's jar for future rebuilds.
		File backupJar = archiveFolder.resolve("mc-" + version + ".jar").toFile();
		if(!backupJar.exists()) {
			backupJar.getParentFile().mkdirs();
			copyFile(jar, backupJar);
		}
		return version;
	} else {
		return "Unknown version";
	}
}
/**
 * Maps an MD5 digest to its Minecraft version name using the md5.dat map.
 *
 * @param hash lowercase hex MD5 digest of a minecraft.jar
 * @return the matching version, or the empty string when unknown
 */
private String lookupHash(String hash) {
	final String version = versionMap.get(hash);
	return (version != null) ? version : "";
}
/**
 * Copies {@code jar} to {@code backupJar}, overwriting any existing file.
 * I/O failures are logged, not propagated.
 *
 * @param jar       source file
 * @param backupJar destination file
 */
private void copyFile(File jar, File backupJar) {
	// try-with-resources closes both streams even on failure
	// (the original leaked both streams when read/write threw).
	try (InputStream in = new FileInputStream(jar);
	     OutputStream out = new FileOutputStream(backupJar)) {
		byte[] buf = new byte[1024];
		int len;
		while ((len = in.read(buf)) > 0) {
			out.write(buf, 0, len);
		}
	} catch(IOException ioe) {
		apiLogger.log(Level.SEVERE, "I/O Error", ioe);
	}
}
/**
 * Creates a zip backup of the Minecraft folder (excluding protected paths and
 * anything under "temp") under a random UUID filename in the archive folder,
 * then appends a {@code Backup(description, filename)} entry to the backup index.
 *
 * @param description human-readable label stored alongside the backup filename
 */
public void saveConfig(String description) {
	File folder = MCFolder.toFile();
	List<File> contents = recurseFolder(folder, false);
	try {
		String uniqueName = UUID.randomUUID().toString() + ".zip";
		// Iterate over a copy so removal from 'contents' is safe mid-loop.
		for (File entry : new ArrayList<>(contents)) {
			if (getExcludedNames(entry.getPath(), false) || entry.getPath().contains("temp")) {
				contents.remove(entry);
			}
		}
		Archive.createZip(archiveFolder.resolve(uniqueName).toFile(), contents, MCFolder, parent);
		Backup entry = new Backup(description, uniqueName);
		_debug("DEBUG: LoadBackupList");
		List<Backup> bList = loadBackupList();
		_debug("DEBUG: add");
		bList.add(entry);
		_debug("DEBUG: writeBackupList");
		writeBackupList(bList);
	} catch (IOException e) {
		apiLogger.log(Level.SEVERE, "I/O Error", e);
	}
}
/**
 * Decides whether a path is excluded from the current backup/delete sweep.
 * Returning {@code true} means "leave this file alone"; {@code false} means
 * "include it" (i.e. it may be deleted/overwritten).
 *
 * NOTE(review): rule ORDER matters — e.g. archives under "mods" are matched
 * before the broader "resources"/"lib" rules. Do not reorder these checks.
 *
 * @param path      path string to test (substring matching, using the
 *                  platform separator stored in {@code sep})
 * @param forDelete true when evaluating for deletion; currently only affects
 *                  options.txt, which is kept during updates but removable
 *                  during deletes
 * @return true if the path must be preserved
 */
private boolean getExcludedNames(String path, boolean forDelete) {
	if(path.contains("mcu" + sep)) {
		// never delete from the mcu folder
		return true;
	}
	if (path.contains("mods") && (path.contains(".zip") || path.contains(".jar"))) {
		// always delete mods in archive form
		return false;
	}
	if(path.contains("bin" + sep + "minecraft.jar")) {
		// always delete bin/minecraft.jar
		return false;
	}
	if(path.contains("bin" + sep)) {
		// never delete anything else in bin/
		return true;
	}
	if(path.contains("resources") && !path.contains("mods")) {
		// never delete resources unless it is under the mods directory
		return true;
	}
	if(path.contains("lib" + sep)) {
		// never delete the lib/ folder
		return true;
	}
	if(path.contains("saves")) {
		// never delete saves
		return true;
	}
	if(path.contains("screenshots")) {
		// never delete screenshots
		return true;
	}
	if(path.contains("stats")) {
		// never delete stats
		return true;
	}
	if(path.contains("texturepacks")) {
		// never delete texture packs
		return true;
	}
	if(path.contains("lastlogin")) {
		// never delete saved credentials
		return true;
	}
	if(path.contains("mcuServers.dat")) {
		// never delete MCUpdater's own server list
		return true;
	}
	if(path.contains("instance.dat")) {
		// never delete instance metadata
		return true;
	}
	if(path.contains("minecraft.jar")) {
		// never delete a loose minecraft.jar outside bin/
		return true;
	}
	if(path.contains("options.txt")) {
		// keep user options during updates; allow removal when deleting
		return forDelete;
	}
	if(path.contains("META-INF" + sep)) {
		// never delete jar metadata
		return true;
	}
	// Temporary hardcoding of client specific mod configs (i.e. Don't clobber on update)
	if(path.contains("rei_minimap" + sep)) {
		return true;
	}
	if(path.contains("macros" + sep)) {
		return true;
	}
	if(path.contains("InvTweaks")) {
		return true;
	}
	if(path.contains("optionsof.txt")){
		return true;
	}
	if(path.contains("voxelMap")) {
		return true;
	}
	// Anything not matched above is fair game.
	return false;
}
/**
 * Recursively collects all files under {@code folder}, depth-first, parents
 * before children.
 *
 * @param folder         directory to walk
 * @param includeFolders when true, directories themselves (including
 *                       {@code folder}) are included in the result
 * @return the collected files; empty if {@code folder} is not listable
 */
private List<File> recurseFolder(File folder, boolean includeFolders)
{
	List<File> output = new ArrayList<>();
	if(includeFolders) {
		output.add(folder);
	}
	// listFiles() returns null for non-directories or on I/O error; the
	// original dereferenced it unconditionally and could NPE.
	File[] entries = folder.listFiles();
	if (entries == null) {
		return output;
	}
	for (File entry : entries)
	{
		if(entry.isDirectory())
		{
			output.addAll(recurseFolder(entry, includeFolders));
		} else {
			output.add(entry);
		}
	}
	return output;
}
/**
 * Restores a backup zip over the Minecraft folder: first deletes every
 * non-protected file (children before parents, via the reverse iteration, so
 * emptied directories can be removed), then extracts the archive in place.
 *
 * @param archive backup zip previously produced by {@code saveConfig}
 */
public void restoreBackup(File archive) {
	File folder = MCFolder.toFile();
	List<File> contents = recurseFolder(folder, true);
	// Iterate over a copy so removal from 'contents' is safe mid-loop.
	for (File entry : new ArrayList<>(contents)) {
		if (getExcludedNames(entry.getPath(), true)) {
			contents.remove(entry);
		}
	}
	// Walk backwards so files are deleted before their parent directories.
	ListIterator<File> liClear = contents.listIterator(contents.size());
	while(liClear.hasPrevious()) {
		File entry = liClear.previous();
		entry.delete();
	}
	Archive.extractZip(archive, MCFolder.toFile());
}
/**
 * Reports whether an archived copy of the client jar for this server's
 * Minecraft version (mc-&lt;version&gt;.jar) exists in the archive folder.
 */
public boolean checkForBackup(ServerList server) {
	File jar = archiveFolder.resolve("mc-" + server.getVersion() + ".jar").toFile();
	return jar.exists();
}
/**
 * Installs or updates a server-pack instance. High-level flow:
 * 1. (client) queue library + asset downloads and determine the base jar;
 * 2. decide whether the client jar must be rebuilt (jar-mod changes);
 * 3. optionally wipe the existing non-protected instance files;
 * 4. queue downloads per mod type (Jar/Coremod/Library/Extract/Litemod/Regular)
 *    and per config file;
 * 5. attach post-download tasks: extract natives, rebuild/brand the jar,
 *    unpack "Extract" mods, materialize virtual assets, write instance.json.
 *
 * @param server        pack/server definition being installed
 * @param toInstall     modules selected for installation
 * @param configs       config files to place into the instance
 * @param clearExisting when true, wipe non-protected instance files first
 * @param instData      instance state record, updated with installed mods
 * @param side          CLIENT or SERVER install
 * @return false on invalid side or when download/config errors were counted;
 *         true otherwise (queues may still be processing asynchronously)
 * @throws FileNotFoundException declared for historical reasons (jar backup lookup)
 *
 * NOTE(review): on the SERVER path, libraryQueue and assetsQueue remain null,
 * yet processQueue is invoked on both unconditionally below — this looks like
 * a latent NPE for server-side installs; confirm against callers.
 */
public boolean installMods(final ServerList server, List<GenericModule> toInstall, List<ConfigFile> configs, boolean clearExisting, final Instance instData, ModSide side) throws FileNotFoundException {
	if (Version.requestedFeatureLevel(server.getMCUVersion(), "2.2")) {
		// Sort mod list for InJar
		Collections.sort(toInstall, new ModuleComparator());
	}
	final Path instancePath = instanceRoot.resolve(server.getServerId());
	Path binPath = instancePath.resolve("bin");
	final Path productionJar;
	//File jar = null;
	final File tmpFolder = instancePath.resolve("temp").toFile();
	tmpFolder.mkdirs();
	Set<Downloadable> jarMods = new HashSet<>();
	Set<Downloadable> generalFiles = new HashSet<>();
	DownloadQueue assetsQueue = null;
	DownloadQueue jarQueue;
	DownloadQueue generalQueue;
	DownloadQueue libraryQueue = null;
	final List<String> libExtract = new ArrayList<>();
	final Map<String,Boolean> modExtract = new HashMap<>();
	final Map<String,Boolean> keepMeta = new TreeMap<>();
	Downloadable baseJar = null;
	final MinecraftVersion version = MinecraftVersion.loadVersion(server.getVersion());
	// Side-specific setup: which jar is produced, which support files are queued.
	switch (side){
	case CLIENT:
		assetsQueue = parent.submitAssetsQueue("Assets", server.getServerId(), version);
		//executor = new ThreadPoolExecutor(0, 8, 30000, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());
//		jar = archiveFolder.resolve("mc-" + server.getVersion() + ".jar").toFile();
//		if(!jar.exists()) {
//			parent.log("! Unable to find a backup copy of minecraft.jar for "+server.getVersion());
//			throw new FileNotFoundException("A backup copy of minecraft.jar for version " + server.getVersion() + " was not found.");
//		}
		Set<Downloadable> libSet = new HashSet<>();
		for (Library lib : version.getLibraries()) {
			if (lib.validForOS()) {
				List<URL> urls = new ArrayList<>();
				try {
					urls.add(new URL(lib.getDownloadUrl()));
				} catch (MalformedURLException e) {
					apiLogger.log(Level.SEVERE, "Bad URL", e);
				}
				Downloadable entry = new Downloadable(lib.getName(),lib.getFilename(),"",100000,urls);
				libSet.add(entry);
				if (lib.hasNatives()) {
					// Remember native-bearing jars for extraction after download.
					libExtract.add(lib.getFilename());
				}
			}
		}
		libraryQueue = parent.submitNewQueue("Libraries", server.getServerId(), libSet, instancePath.resolve("lib").toFile(), DownloadCache.getDir());
		productionJar = binPath.resolve("minecraft.jar");
		List<URL> jarUrl = new ArrayList<>();
		try {
			jarUrl.add(new URL("https://s3.amazonaws.com/Minecraft.Download/versions/" + server.getVersion() + "/" + server.getVersion() + ".jar"));
		} catch (MalformedURLException e2) {
			apiLogger.log(Level.SEVERE, "Bad URL", e2);
		}
		// Reverse-lookup the expected MD5 for this version from the digest map.
		String jarMD5 = "";
		for (Entry<String,String> entry : versionMap.entrySet()) {
			if (entry.getValue().equals(server.getVersion())) {
				jarMD5 = entry.getKey();
				break;
			}
		}
		// "0.jar" sorts first so the vanilla jar is the base layer of the rebuild.
		baseJar = new Downloadable("Minecraft jar","0.jar",jarMD5,3000000,jarUrl);
		keepMeta.put("0.jar", Version.requestedFeatureLevel(server.getVersion(), "1.6"));
		break;
	case SERVER:
		//jar = archiveFolder.resolve("mc-server-" + server.getVersion() + ".jar").toFile();
		productionJar = instancePath.resolve("minecraft_server.jar");
		break;
	default:
		apiLogger.severe("Invalid API call to MCUpdater.installMods! (side cannot be " + side.toString() + ")");
		return false;
	}
	// Determine whether the client jar must be rebuilt: forced by clearExisting,
	// a missing jar, or any added/changed/removed jar-mod.
	Boolean updateJar = clearExisting;
	if (side == ModSide.CLIENT) {
		if (!productionJar.toFile().exists()) {
			updateJar = true;
		}
	} else {
		//TODO:Server jar detection
	}
	Iterator<GenericModule> iMods = toInstall.iterator();
	List<String> modIds = new ArrayList<>();
	int jarModCount = 0;
	while (iMods.hasNext() && !updateJar) {
		GenericModule current = iMods.next();
		if (current.getModType() == ModType.Jar) {
			FileInfo jarMod = instData.findJarMod(current.getId());
			if (jarMod == null) {
				updateJar = true;
			} else if (current.getMD5().isEmpty() || (!current.getMD5().equalsIgnoreCase(jarMod.getMD5()))) {
				updateJar = true;
			}
			jarModCount++;
		} else {
			modIds.add(current.getId());
		}
	}
	if (jarModCount != instData.getJarMods().size()) {
		// A jar-mod was removed since the last install.
		updateJar = true;
	}
	if (updateJar && baseJar != null) {
		jarMods.add(baseJar);
	}
	// Delete files belonging to mods that are no longer part of the pack.
	for (FileInfo entry : instData.getInstanceFiles()) {
		if (!modIds.contains(entry.getModId())) {
			instancePath.resolve(entry.getFilename()).toFile().delete();
		}
	}
	instData.setJarMods(new ArrayList<FileInfo>());
	instData.setInstanceFiles(new ArrayList<FileInfo>());
	jarModCount = 0;
	apiLogger.info("Instance path: " + instancePath.toString());
	List<File> contents = recurseFolder(instancePath.toFile(), true);
	if (clearExisting){
		parent.setStatus("Clearing existing configuration");
		parent.log("Clearing existing configuration...");
		for (File entry : new ArrayList<>(contents)) {
			if (getExcludedNames(entry.getPath(), true)) {
				contents.remove(entry);
			}
		}
		// Reverse order: delete files before their parent directories.
		ListIterator<File> liClear = contents.listIterator(contents.size());
		while(liClear.hasPrevious()) {
			File entry = liClear.previous();
			entry.delete();
		}
	}
	Iterator<GenericModule> itMods = toInstall.iterator();
	final File buildJar = archiveFolder.resolve("build.jar").toFile();
	if(buildJar.exists()) {
		buildJar.delete();
	}
	int modCount = toInstall.size();
	int modsLoaded = 0;
	int errorCount = 0;
	// Queue each module according to its type; filenames are sanitized via cleanForFile.
	while(itMods.hasNext()) {
		GenericModule entry = itMods.next();
		parent.log("Mod: "+entry.getName());
		Collections.sort(entry.getPrioritizedUrls());
		String filename;
		switch (entry.getModType()) {
		case Jar:
			if (updateJar) {
				jarMods.add(new Downloadable(entry.getName(),String.valueOf(entry.getJarOrder()) + "-" + entry.getId() + ".jar",entry.getMD5(),100000,entry.getUrls()));
				keepMeta.put(String.valueOf(entry.getJarOrder()) + "-" + cleanForFile(entry.getId()) + ".jar", entry.getKeepMeta());
				instData.addJarMod(entry.getId(), entry.getMD5());
				jarModCount++;
			}
			break;
		case Coremod:
			filename = "coremods/" + cleanForFile(entry.getId()) + ".jar";
			generalFiles.add(new Downloadable(entry.getName(),filename,entry.getMD5(),100000,entry.getUrls()));
			instData.addMod(entry.getId(), entry.getMD5(), filename);
			break;
		case Library:
			filename = "lib/" + cleanForFile(entry.getId()) + ".jar";
			generalFiles.add(new Downloadable(entry.getName(),filename,entry.getMD5(),100000,entry.getUrls()));
			instData.addMod(entry.getId(), entry.getMD5(), filename);
			break;
		case Extract:
			generalFiles.add(new Downloadable(entry.getName(),cleanForFile(entry.getId()) + ".zip",entry.getMD5(),100000,entry.getUrls()));
			modExtract.put(cleanForFile(entry.getId()) + ".zip", entry.getInRoot());
			break;
		case Litemod:
			filename = entry.getPath().isEmpty() ? "mods/" + cleanForFile(entry.getId()) + ".litemod" : entry.getPath();
			generalFiles.add(new Downloadable(entry.getName(),filename,entry.getMD5(),100000,entry.getUrls()));
			instData.addMod(entry.getId(), entry.getMD5(), filename);
			break;
		case Regular:
			filename = entry.getPath().isEmpty() ? "mods/" + cleanForFile(entry.getId()) + ".jar" : entry.getPath();
			generalFiles.add(new Downloadable(entry.getName(),filename,entry.getMD5(),100000,entry.getUrls()));
			instData.addMod(entry.getId(), entry.getMD5(), filename);
			break;
		case Option:
			//TODO: Unimplemented
		}
		// 0
		modsLoaded++;
//		parent.setProgressBar((int)( (65 / modCount) * modsLoaded + 25));
		parent.log("  Done ("+modsLoaded+"/"+modCount+")");
	}
	// Queue config files; existing files flagged noOverwrite are left alone.
	for (ConfigFile cfEntry : configs) {
		final File confFile = instancePath.resolve(cfEntry.getPath()).toFile();
		if (confFile.exists() && cfEntry.isNoOverwrite()) {
			continue;
		}
		List<URL> configUrl = new ArrayList<>();
		try {
			configUrl.add(new URL(cfEntry.getUrl()));
		} catch (MalformedURLException e) {
			++errorCount;
			apiLogger.log(Level.SEVERE, "General Error", e);
		}
		generalFiles.add(new Downloadable(cfEntry.getPath(), cfEntry.getPath(), cfEntry.getMD5(), 10000, configUrl));
		//1
		// save in cache for future reference
//		if( MD5 != null ) {
//			final boolean cached = DownloadCache.cacheFile(confFile, MD5);
//			if( cached ) {
//				_debug(confFile.getName() + " saved in cache");
//			}
//		}
	}
	generalQueue = parent.submitNewQueue("Instance files", server.getServerId(), generalFiles, instancePath.toFile(), DownloadCache.getDir());
	jarQueue = parent.submitNewQueue("Jar build files", server.getServerId(), jarMods, tmpFolder, DownloadCache.getDir());
	// After libraries download: unpack native binaries into lib/natives.
	TaskableExecutor libExecutor = new TaskableExecutor(2, new Runnable(){
		@Override
		public void run() {
			for (String entry : libExtract){
				Archive.extractZip(instancePath.resolve("lib").resolve(entry).toFile(), instancePath.resolve("lib").resolve("natives").toFile(), false);
			}
		}});
	libraryQueue.processQueue(libExecutor);
	final File branding = new File(tmpFolder, "fmlbranding.properties");
	try {
		branding.createNewFile();
		Properties propBrand = new Properties();
		propBrand.setProperty("fmlbranding", "MCUpdater: " + server.getName() + " (rev " + server.getRevision() + ")");
		propBrand.store(new FileOutputStream(branding), "MCUpdater ServerPack branding");
	} catch (IOException e1) {
		apiLogger.log(Level.SEVERE, "I/O Error", e1);
	}
	final boolean doJarUpdate = updateJar;
	// After jar-mod downloads: either just re-brand the existing jar, or rebuild
	// it from the extracted layers (keepMeta controls META-INF retention per layer).
	TaskableExecutor jarExecutor = new TaskableExecutor(2, new Runnable() {
		@Override
		public void run() {
			if (!doJarUpdate) {
				try {
					Archive.updateArchive(productionJar.toFile(), new File[]{ branding });
				} catch (IOException e1) {
					apiLogger.log(Level.SEVERE, "I/O Error", e1);
				}
			} else {
				for (Map.Entry<String,Boolean> entry : keepMeta.entrySet()) {
					File entryFile = new File(tmpFolder,entry.getKey());
					Archive.extractZip(entryFile, tmpFolder, entry.getValue());
					entryFile.delete();
				}
				try {
					buildJar.createNewFile();
				} catch (IOException e) {
					apiLogger.log(Level.SEVERE, "I/O Error", e);
				}
				boolean doManifest = true;
				List<File> buildList = recurseFolder(tmpFolder,true);
				for (File entry : new ArrayList<>(buildList)) {
					if (entry.getPath().contains("META-INF")) {
						doManifest = false;
					}
				}
				parent.log("Packaging updated jar...");
				try {
					Archive.createJar(buildJar, buildList, tmpFolder.getPath() + sep, doManifest);
				} catch (IOException e1) {
					parent.log("Failed to create jar!");
					apiLogger.log(Level.SEVERE, "I/O Error", e1);
				}
				//Archive.patchJar(jar, buildJar, new ArrayList<File>(Arrays.asList(tmpFolder.listFiles())));
				//copyFile(buildJar, new File(MCFolder + sep + "bin" + sep + "minecraft.jar"));
				try {
					Files.createDirectories(productionJar.getParent());
					Files.copy(buildJar.toPath(), productionJar, StandardCopyOption.REPLACE_EXISTING);
				} catch (IOException e) {
					apiLogger.log(Level.SEVERE, "Failed to copy new jar to instance!", e);
				}
			}
			// Clean the temp build folder (reverse order: files before directories).
			List<File> tempFiles = recurseFolder(tmpFolder,true);
			ListIterator<File> li = tempFiles.listIterator(tempFiles.size());
			while(li.hasPrevious()) {
				File entry = li.previous();
				entry.delete();
			}
			if (server.isGenerateList()) { writeMCServerFile(server.getName(), server.getAddress(), server.getServerId()); }
			// Persist the final instance state alongside the instance.
			instData.setMCVersion(server.getVersion());
			instData.setRevision(server.getRevision());
			String jsonOut = gson.toJson(instData);
			try {
				BufferedWriter writer = Files.newBufferedWriter(getInstanceRoot().resolve(server.getServerId()).resolve("instance.json"), StandardCharsets.UTF_8);
				writer.append(jsonOut);
				writer.close();
			} catch (IOException e) {
				apiLogger.log(Level.SEVERE, "I/O error", e);
			}
		}
	});
	jarQueue.processQueue(jarExecutor);
	// After general downloads: unpack "Extract" mods to instance root or mods/.
	TaskableExecutor genExecutor = new TaskableExecutor(12, new Runnable(){
		@Override
		public void run() {
			for (Map.Entry<String,Boolean> entry : modExtract.entrySet()) {
				if (entry.getValue()) {
					Archive.extractZip(instancePath.resolve(entry.getKey()).toFile(), instancePath.toFile());
				} else {
					Archive.extractZip(instancePath.resolve(entry.getKey()).toFile(), instancePath.resolve("mods").toFile());
				}
				instancePath.resolve(entry.getKey()).toFile().delete();
			}
		}
	});
	generalQueue.processQueue(genExecutor);
	// After asset downloads: for "virtual" asset indexes, mirror objects into
	// assets/virtual via symlinks when supported, otherwise by copying.
	TaskableExecutor assetsExecutor = new TaskableExecutor(8, new Runnable(){
		@Override
		public void run() {
			//check virtual
			Gson gson = new Gson();
			String indexName = version.getAssets();
			if (indexName == null) {
				indexName = "legacy";
			}
			File indexesPath = archiveFolder.resolve("assets").resolve("indexes").toFile();
			File indexFile = new File(indexesPath, indexName + ".json");
			String json;
			try {
				json = FileUtils.readFileToString(indexFile);
				AssetIndex index = gson.fromJson(json, AssetIndex.class);
				parent.log("Assets virtual: " + index.isVirtual());
				if (index.isVirtual()) {
					//Test symlink support
					boolean doLinks = true;
					try {
						java.nio.file.Files.createSymbolicLink(archiveFolder.resolve("linktest"), archiveFolder.resolve("MCUpdater.log.0"));
						archiveFolder.resolve("linktest").toFile().delete();
					} catch (Exception e) {
						doLinks = false;
					}
					Path assetsPath = archiveFolder.resolve("assets");
					Path virtualPath = assetsPath.resolve("virtual");
					for (Map.Entry<String, Asset> entry : index.getObjects().entrySet()) {
						Path target = virtualPath.resolve(entry.getKey());
						Path original = assetsPath.resolve("objects").resolve(entry.getValue().getHash().substring(0,2)).resolve(entry.getValue().getHash());
						if (!Files.exists(target)) {
							Files.createDirectories(target.getParent());
							if (doLinks) {
								Files.createSymbolicLink(target, original);
							} else {
								Files.copy(original, target);
							}
						}
					}
				}
			} catch (IOException e) {
				parent.baseLogger.log(Level.SEVERE, "Assets exception! " + e.getMessage());
			}
		}
	});
	assetsQueue.processQueue(assetsExecutor);
	if( errorCount > 0 ) {
		parent.baseLogger.severe("Errors were detected with this update, please verify your files.  There may be a problem with the serverpack configuration or one of your download sites.");
		return false;
	}
	//copyFile(jar, buildJar);
	return true;
}
/**
 * Sanitizes a module id for use as a filename: every character outside
 * [a-zA-Z_0-9-.] is replaced with an underscore.
 */
private String cleanForFile(String id) {
	return id.replaceAll("[^a-zA-Z_0-9\\-.]", "_");
}
/**
 * Writes a one-entry servers.dat into the given instance so the server shows
 * up in Minecraft's multiplayer list, prefixed with a colored "[MCU]" tag.
 *
 * NOTE(review): the byte arrays appear to hand-encode Minecraft's NBT
 * servers.dat structure (compound "servers" list with hideAddress/name/ip
 * tags); the 0xC2 0xA7 pairs are UTF-8 for the section-sign color code, and
 * the name-length byte adds 12 for that prefix. Confirm against the NBT spec
 * before modifying — lengths are single bytes, so very long names/IPs would
 * overflow.
 *
 * @param name     display name for the server entry
 * @param ip       server address
 * @param instance instance folder (under instanceRoot) to write servers.dat into
 */
public void writeMCServerFile(String name, String ip, String instance) {
	byte[] header = new byte[]{
			0x0A,0x00,0x00,0x09,0x00,0x07,0x73,0x65,0x72,0x76,0x65,0x72,0x73,0x0A,
			0x00,0x00,0x00,0x01,0x01,0x00,0x0B,0x68,0x69,0x64,0x65,0x41,0x64,0x64,
			0x72,0x65,0x73,0x73,0x01,0x08,0x00,0x04,0x6E,0x61,0x6D,0x65,0x00,
			(byte) (name.length() + 12), (byte) 0xC2,(byte) 0xA7,0x41,0x5B,0x4D,0x43,0x55,0x5D,0x20,(byte) 0xC2,(byte) 0xA7,0x46
	};
	byte[] nameBytes = name.getBytes();
	byte[] ipBytes = ip.getBytes();
	byte[] middle = new byte[]{0x08,0x00,0x02,0x69,0x70,0x00,(byte) ip.length()};
	byte[] end = new byte[]{0x00,0x00};
	// Concatenate header + name + middle + ip + end into one buffer.
	int size = header.length + nameBytes.length + middle.length + ipBytes.length + end.length;
	byte[] full = new byte[size];
	int pos = 0;
	System.arraycopy(header, 0, full, pos, header.length);
	pos += header.length;
	System.arraycopy(nameBytes, 0, full, pos, nameBytes.length);
	pos += nameBytes.length;
	System.arraycopy(middle, 0, full, pos, middle.length);
	pos += middle.length;
	System.arraycopy(ipBytes, 0, full, pos, ipBytes.length);
	pos += ipBytes.length;
	System.arraycopy(end, 0, full, pos, end.length);
	File serverFile = instanceRoot.resolve(instance).resolve("servers.dat").toFile();
	try {
		serverFile.createNewFile();
		FileOutputStream fos = new FileOutputStream(serverFile);
		fos.write(full,0,full.length);
		fos.close();
	} catch (IOException e) {
		apiLogger.log(Level.SEVERE, "I/O Error", e);
	}
}
/**
 * Opens a URI in the system browser via reflective access to
 * java.awt.Desktop.browse — presumably to avoid a hard AWT dependency on
 * headless platforms (TODO confirm). Failures are logged, never thrown.
 */
public static void openLink(URI uri) {
	try {
		Object o = Class.forName("java.awt.Desktop").getMethod("getDesktop", new Class[0]).invoke(null);
		o.getClass().getMethod("browse", new Class[] { URI.class }).invoke(o, uri);
	} catch (Throwable e) {
		_log("Failed to open link " + uri.toString());
	}
}
/** Logs a message at INFO level to the API logger. */
private static void _log(String msg) {
	apiLogger.info(msg);
}
/** Logs a message at FINE (debug) level to the API logger. */
private static void _debug(String msg) {
	apiLogger.fine(msg);
}
/*
public boolean checkVersionCache(String version, ModSide side) {
File requestedJar;
switch (side) {
case CLIENT:
requestedJar = archiveFolder.resolve("mc-" + version + ".jar").toFile();
File newestJar = archiveFolder.resolve("mc-" + newestMC + ".jar").toFile();
if (requestedJar.exists()) return true;
if (newestJar.exists()) {
doPatch(requestedJar, newestJar, version);
return true;
} else {
if (this.getParent().requestLogin()) {
try {
parent.setStatus("Downloading Minecraft");
apiLogger.info("Downloading Minecraft (" + newestMC + ")");
FileUtils.copyURLToFile(new URL("http://assets.minecraft.net/" + newestMC.replace(".","_") + "/minecraft.jar"), newestJar);
} catch (MalformedURLException e) {
apiLogger.log(Level.SEVERE, "Bad URL", e);
return false;
} catch (IOException e) {
apiLogger.log(Level.SEVERE, "I/O Error", e);
return false;
}
if (!requestedJar.toString().equals(newestJar.toString())) {
doPatch(requestedJar, newestJar, version);
}
return true;
} else {
return false;
}
}
case SERVER:
requestedJar = archiveFolder.resolve("mc-server-" + version + ".jar").toFile();
if (requestedJar.exists()) return true;
try {
apiLogger.info("Downloading server jar (" + version + ")");
FileUtils.copyURLToFile(new URL("http://assets.minecraft.net/" + version.replace(".","_") + "/minecraft_server.jar"), requestedJar);
} catch (MalformedURLException e) {
apiLogger.log(Level.SEVERE, "Bad URL", e);
return false;
} catch (IOException e) {
apiLogger.log(Level.SEVERE, "I/O Error", e);
return false;
}
return true;
default:
break;
}
return false;
}
*/
/*
private void doPatch(File requestedJar, File newestJar, String version) {
try {
URL patchURL;
File patchFile = archiveFolder.resolve("temp.patch").toFile();
try {
patchURL = new URL("http://files.mcupdater.com/mcu_patches/" + newestMC.replace(".", "") + "to" + version.replace(".","") + ".patch");
patchURL.openConnection().connect();
} catch (IOException ioe) {
patchURL = new URL("https://dl.dropboxusercontent.com/u/75552727/mcu_patches/" + newestMC.replace(".", "") + "to" + version.replace(".","") + ".patch");
}
_debug(patchURL.toString());
parent.setStatus("Downloading downgrade patch");
apiLogger.info("Downloading downgrade patch (" + newestMC + " -> " + version + ")");
FileUtils.copyURLToFile(patchURL, patchFile, 2000, 5000);
parent.setStatus("Applying downgrade patch");
apiLogger.info("Applying downgrade patch");
Transmogrify.applyPatch(new Path(newestJar), new Path(requestedJar), new Path(patchFile));
patchFile.delete();
} catch (Exception e) {
apiLogger.log(Level.SEVERE, "General Error", e);
}
}
*/
/**
 * Builds a PBEWithMD5AndDES cipher for {@code encrypt}/{@code decrypt}.
 *
 * NOTE(review): the salt comes from a Random seeded with a hard-coded
 * constant, so it is identical on every run, and the iteration count is 5 —
 * this scheme is obfuscation, not strong encryption. It cannot be changed
 * without invalidating previously stored ciphertext; do not "fix" in place.
 *
 * @param mode     Cipher.ENCRYPT_MODE or Cipher.DECRYPT_MODE
 * @param password PBE passphrase
 * @return an initialized cipher
 * @throws Exception on any crypto-provider failure
 */
private Cipher getCipher(int mode, String password) throws Exception {
	Random random = new Random(92845025L);
	byte[] salt = new byte[8];
	random.nextBytes(salt);
	PBEParameterSpec pbeParamSpec = new PBEParameterSpec(salt, 5);
	SecretKey pbeKey = SecretKeyFactory.getInstance("PBEWithMD5AndDES").generateSecret(new PBEKeySpec(password.toCharArray()));
	Cipher cipher = Cipher.getInstance("PBEWithMD5AndDES");
	cipher.init(mode, pbeKey, pbeParamSpec);
	return cipher;
}
/**
 * Obfuscates a password with the fixed-key PBE cipher and returns it
 * Base64-encoded; returns null on any failure (logged).
 */
public String encrypt(String password) {
	try {
		Cipher cipher = getCipher(Cipher.ENCRYPT_MODE, "MCUpdater");
		byte[] utf8 = password.getBytes("UTF8");
		byte[] enc = cipher.doFinal(utf8);
		return Base64.encodeBase64String(enc);
	} catch (Exception e) {
		apiLogger.log(Level.SEVERE, "General error", e);
	}
	return null;
}
/**
 * Reverses {@code encrypt}: Base64-decodes and deciphers a stored value;
 * returns null on any failure (logged).
 */
public String decrypt(String property) {
	try {
		Cipher cipher = getCipher(Cipher.DECRYPT_MODE, "MCUpdater");
		byte[] dec = Base64.decodeBase64(property);
		byte[] utf8 = cipher.doFinal(dec);
		return new String(utf8, "UTF8");
	} catch (Exception e) {
		apiLogger.log(Level.SEVERE, "General error", e);
	}
	return null;
}
/** Sets the connect/read timeout (milliseconds) used for network requests. */
public void setTimeout(int timeout) {
	this.timeoutLength = timeout;
}
/** Returns the connect/read timeout (milliseconds) used for network requests. */
public int getTimeout() {
	return this.timeoutLength;
}
/**
 * Combines a set of hex digest strings into a single order-independent group
 * hash by XOR-folding them as big integers; invalid hex entries are reported
 * to stdout and skipped.
 *
 * @param digests hex-encoded digests to combine
 * @return hex encoding of the XOR of all valid digests
 */
public static String calculateGroupHash(Set<String> digests) {
	BigInteger combined = BigInteger.ZERO;
	for (String digestHex : digests) {
		try {
			BigInteger value = new BigInteger(Hex.decodeHex(digestHex.toCharArray()));
			combined = combined.xor(value);
		} catch (DecoderException e) {
			//e.printStackTrace();
			System.out.println("Entry '" + digestHex + "' is not a valid hexadecimal number");
		}
	}
	return Hex.encodeHexString(combined.toByteArray());
}
}
/* 0
//for (PrioritizedURL pUrl : entry.getUrls()) {
// _debug("Mod @ "+pUrl.getUrl());
// URL modURL = new URL(pUrl.getUrl());
//String modFilename = modURL.getFile().substring(modURL.getFile().lastIndexOf('/'));
File modPath;
if(entry.getInJar()) {
if (updateJar) {
//modPath = new File(tmpFolder.getPath() + sep + loadOrder + ".zip");
//loadOrder++;
//_log(modPath.getPath());
ModDownload jarMod;
try {
jarMod = new ModDownload(modURL, File.createTempFile(entry.getId(), ".jar"), entry.getMD5());
if( jarMod.cacheHit ) {
parent.log(" Adding to jar (cached).");
} else {
parent.log(" Adding to jar (downloaded).");
}
_debug(jarMod.url + " -> " + jarMod.getDestFile().getPath());
//FileUtils.copyURLToFile(modURL, modPath);
Archive.extractZip(jarMod.getDestFile(), tmpFolder, entry.getKeepMeta());
jarMod.getDestFile().delete();
instData.setProperty("mod:" + entry.getId(), entry.getMD5());
jarModCount++;
} catch (Exception e) {
++errorCount;
apiLogger.log(Level.SEVERE, "General Error", e); }
} else {
parent.log("Skipping jar mod: " + entry.getName());
}
} else if (entry.getExtract()) {
//modPath = new File(tmpFolder.getPath() + sep + modFilename);
//modPath.getParentFile().mkdirs();
//_log(modPath.getPath());
ModDownload extractMod;
try {
extractMod = new ModDownload(modURL, File.createTempFile(entry.getId(), ".jar") , entry.getMD5());
if( extractMod.cacheHit ) {
parent.log(" Extracting to filesystem (cached).");
} else {
parent.log(" Extracting to filesystem (downloaded).");
}
_debug(extractMod.url + " -> " + extractMod.getDestFile().getPath());
//FileUtils.copyURLToFile(modURL, modPath);
Path destPath = instancePath;
if(!entry.getInRoot()) destPath = instancePath.resolve("mods");
Archive.extractZip(extractMod.getDestFile(), destPath.toFile());
extractMod.getDestFile().delete();
} catch (Exception e) {
++errorCount;
apiLogger.log(Level.SEVERE, "General Error", e);
}
} else if (entry.getCoreMod()) {
modPath = instancePath.resolve("coremods").resolve(cleanForFile(entry.getId()) + ".jar").toFile();
modPath.getParentFile().mkdirs();
try {
ModDownload normalMod = new ModDownload(modURL, modPath, entry.getMD5());
if( normalMod.cacheHit ) {
parent.log(" Installing in /coremods (cached).");
} else {
parent.log(" Installing in /coremods (downloaded).");
}
_debug(normalMod.url + " -> " + normalMod.getDestFile().getPath());
} catch (Exception e) {
++errorCount;
apiLogger.log(Level.SEVERE, "General Error", e);
}
} else {
if (entry.getPath().equals("")){
modPath = instancePath.resolve("mods").resolve(cleanForFile(entry.getId()) + ".jar").toFile();
} else {
modPath = instancePath.resolve(entry.getPath()).toFile();
}
modPath.getParentFile().mkdirs();
//_log("~~~ " + modPath.getPath());
try {
ModDownload normalMod = new ModDownload(modURL, modPath, entry.getMD5());
if( normalMod.cacheHit ) {
parent.log(" Installing in /mods (cached).");
} else {
parent.log(" Installing in /mods (downloaded).");
}
_debug(normalMod.url + " -> " + normalMod.getDestFile().getPath());
} catch (Exception e) {
++errorCount;
apiLogger.log(Level.SEVERE, "General Error", e);
}
//FileUtils.copyURLToFile(modURL, modPath);
}
}*/
/* 1
final String MD5 = cfEntry.getMD5();
_debug(cfEntry.getUrl());
URL configURL = new URL(cfEntry.getUrl());
final File confFile = instancePath.resolve(cfEntry.getPath()).toFile();
confFile.getParentFile().mkdirs();
// if( MD5 != null ) {
// final File cacheFile = DownloadCache.getFile(MD5);
// if( cacheFile.exists() ) {
// parent.log(" Found config for "+cfEntry.getPath()+" (cached)");
// FileUtils.copyFile(cacheFile, confFile);
// continue;
// }
// }
//_debug(confFile.getPath());
if (cfEntry.isNoOverwrite() && confFile.exists()) {
parent.log(" Config for "+cfEntry.getPath()+" skipped - NoOverwrite is true");
} else {
//parent.log(" Found config for "+cfEntry.getPath()+", downloading...");
try {
ModDownload configDL = new ModDownload(configURL, confFile, MD5);
if( configDL.cacheHit ) {
parent.log(" Found config for "+cfEntry.getPath()+" (cached).");
} else {
parent.log(" Found config for "+cfEntry.getPath()+" (downloaded).");
}
String strPath = configDL.getDestFile() == null ? "???" : configDL.getDestFile().getPath();
_debug(configDL.url + " -> " + strPath);
} catch (Exception e) {
++errorCount;
apiLogger.log(Level.SEVERE, "General Error", e);
}
//FileUtils.copyURLToFile(configURL, confFile);
}
*/
| API refactor and cleanup
| src/main/java/org/mcupdater/util/MCUpdater.java | API refactor and cleanup |
|
Java | apache-2.0 | 3c3496f30b14eab35de42896e8bb852922253bae | 0 | andryr/Harmony-Music-Player,andryr/Harmony-Music-Player | package com.andryr.musicplayer;
import android.app.Service;
import android.appwidget.AppWidgetManager;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.ContentUris;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.net.Uri;
import android.os.Binder;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.PowerManager;
import android.preference.PreferenceManager;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
import com.andryr.musicplayer.audiofx.AudioEffectsReceiver;
import com.andryr.musicplayer.model.Song;
import com.andryr.musicplayer.utils.Notification;
import org.acra.ACRA;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
//TODO déplacer certaines méthodes dans d'autres classes (égaliseur, mediaplayer, etc.)
public class PlaybackService extends Service implements OnPreparedListener,
OnErrorListener, OnCompletionListener {
public static final String PREF_AUTO_PAUSE = "com.andryr.musicplayer.AUTO_PAUSE";//pause automatique quand on détecte un appel entrant
public static final String ACTION_PLAY = "com.andryr.musicplayer.ACTION_PLAY";
public static final String ACTION_PAUSE = "com.andryr.musicplayer.ACTION_PAUSE";
public static final String ACTION_RESUME = "com.andryr.musicplayer.ACTION_RESUME";
public static final String ACTION_TOGGLE = "com.andryr.musicplayer.ACTION_TOGGLE";
public static final String ACTION_NEXT = "com.andryr.musicplayer.ACTION_NEXT";
public static final String ACTION_PREVIOUS = "com.andryr.musicplayer.ACTION_PREVIOUS";
public static final String ACTION_STOP = "com.andryr.musicplayer.ACTION_STOP";
public static final String ACTION_CHOOSE_SONG = "com.andryr.musicplayer.ACTION_CHOOSE_SONG";
public static final String META_CHANGED = "com.andryr.musicplayer.META_CHANGED";
public static final String PLAYSTATE_CHANGED = "com.andryr.musicplayer.PLAYSTATE_CHANGED";
public static final String QUEUE_CHANGED = "com.andryr.musicplayer.QUEUE_CHANGED";
public static final String POSITION_CHANGED = "com.andryr.musicplayer.POSITION_CHANGED";
public static final String ITEM_ADDED = "com.andryr.musicplayer.ITEM_ADDED";
public static final String ORDER_CHANGED = "com.andryr.musicplayer.ORDER_CHANGED";
public static final String EXTRA_POSITION = "com.andryr.musicplayer.POSITION";
public static final int NO_REPEAT = 20;
public static final int REPEAT_ALL = 21;
public static final int REPEAT_CURRENT = 22;
private static final String TAG = "PlaybackService";
private static final int IDLE_DELAY = 60000;
private PlaybackBinder mBinder = new PlaybackBinder();
private MediaPlayer mMediaPlayer;
private List<Song> mOriginalSongList;
private List<Song> mPlayList = new ArrayList<>();
private Song mCurrentSong;
private boolean mIsPlaying = false;
private boolean mIsPaused = false;
private boolean mHasPlaylist = false;
private boolean mShuffle = false;
private int mStartId;
private int mRepeatMode = NO_REPEAT;
private int mCurrentPosition;
private boolean mBound = false;
private boolean mAutoPause = false;
//
private boolean mPlayImmediately = false;
private Handler mDelayedStopHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
if (isPlaying() || mBound) {
return;
}
stopSelf(mStartId);
}
};
private BroadcastReceiver mHeadsetStateReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(Intent.ACTION_HEADSET_PLUG)) {
boolean plugged = intent.getIntExtra("state", 0) == 1;
if (!plugged) {
pause();
}
}
}
};
private TelephonyManager mTelephonyManager;
private PhoneStateListener mPhoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String incomingNumber) {
super.onCallStateChanged(state, incomingNumber);
switch (state) {
case TelephonyManager.CALL_STATE_OFFHOOK:
case TelephonyManager.CALL_STATE_RINGING:
pause();
break;
}
}
};
@Override
public void onCreate() {
super.onCreate();
mMediaPlayer = new MediaPlayer();
mMediaPlayer.setOnCompletionListener(this);
mMediaPlayer.setOnErrorListener(this);
mMediaPlayer.setOnPreparedListener(this);
mMediaPlayer.setWakeMode(getApplicationContext(),
PowerManager.PARTIAL_WAKE_LOCK);
mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
Intent i = new Intent(this, AudioEffectsReceiver.class);
i.setAction(AudioEffectsReceiver.ACTION_OPEN_AUDIO_EFFECT_SESSION);
i.putExtra(AudioEffectsReceiver.EXTRA_AUDIO_SESSION_ID, mMediaPlayer.getAudioSessionId());
sendBroadcast(i);
IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
registerReceiver(mHeadsetStateReceiver, receiverFilter);
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
mAutoPause = prefs.getBoolean(PREF_AUTO_PAUSE, false);
initTelephony();
}
private void initTelephony() {
if (mAutoPause) {
mTelephonyManager = (TelephonyManager) getSystemService(TELEPHONY_SERVICE);
if (mTelephonyManager != null) {
mTelephonyManager.listen(mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
}
}
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
mStartId = startId;
if (intent != null) {
String action = intent.getAction();
if (action != null) {
if (mPlayList.size() == 0 || action.equals(ACTION_CHOOSE_SONG)) {
startMainActivity();
} else if (action.equals(ACTION_TOGGLE)) {
toggle();
} else if (action.equals(ACTION_STOP)) {
if (!mBound) {
stopSelf(mStartId);
}
} else if (action.equals(ACTION_NEXT)) {
playNext(true);
} else if (action.equals(ACTION_PREVIOUS)) {
playPrev(true);
}
}
}
return START_STICKY;
}
@Override
public void onDestroy() {
unregisterReceiver(mHeadsetStateReceiver);
if (mTelephonyManager != null) {
mTelephonyManager.listen(mPhoneStateListener, PhoneStateListener.LISTEN_NONE);
}
mMediaPlayer.stop();
Intent i = new Intent(this, AudioEffectsReceiver.class);
i.setAction(AudioEffectsReceiver.ACTION_CLOSE_AUDIO_EFFECT_SESSION);
sendBroadcast(i);
mMediaPlayer.release();
super.onDestroy();
}
@Override
public IBinder onBind(Intent intent) {
mBound = true;
return mBinder;
}
@Override
public boolean onUnbind(Intent intent) {
mBound = false;
if (mMediaPlayer.isPlaying()) {
return true;
}
if (mPlayList.size() > 0) {
Message msg = mDelayedStopHandler.obtainMessage();
mDelayedStopHandler.sendMessageDelayed(msg, IDLE_DELAY);
return true;
}
stopSelf(mStartId);
return true;
}
/**
 * Enables or disables automatic pausing when an incoming phone call is detected.
 * Only reacts when the requested state differs from the current one; enabling
 * registers the phone-state listener via initTelephony().
 */
public void setAutoPauseEnabled(boolean enable) {
    if (enable == mAutoPause) {
        return; // already in the requested state
    }
    mAutoPause = enable;
    if (enable) {
        initTelephony();
    }
    // No teardown needed when disabling (per the original note: "si !enable
    // on a rien à faire à priori") — initTelephony() only registers the
    // listener when mAutoPause is true.
}
public Song getCurrentSong() {
return mCurrentSong;
}
public long getSongId() {
if (mCurrentSong != null) {
return mCurrentSong.getId();
}
return -1;
}
public String getSongTitle() {
if (mCurrentSong != null) {
return mCurrentSong.getTitle();
}
return null;
}
public String getArtistName() {
if (mCurrentSong != null) {
return mCurrentSong.getArtist();
}
return null;
}
public String getAlbumName() {
if (mCurrentSong != null) {
return mCurrentSong.getAlbum();
}
return null;
}
public long getAlbumId() {
if (mCurrentSong != null) {
return mCurrentSong.getAlbumId();
}
return -1;
}
public List<Song> getPlayList() {
return mPlayList;
}
public void setPlayList(List<Song> songList, int position, boolean play) {
setPlayListInternal(songList);
setPosition(position, play);
if (mShuffle) {
shuffle();
}
}
private void setPlayListInternal(List<Song> songList) {
if (songList == null || songList.size() <= 0) {
return;
}
mOriginalSongList = songList;
mPlayList.clear();
mPlayList.addAll(mOriginalSongList);
mHasPlaylist = true;
}
public void setPlayListAndShuffle(List<Song> songList, boolean play) {
setPlayListInternal(songList);
mCurrentSong = null;
mShuffle = true;
shuffle();
setPosition(0, play);
}
public void addToQueue(Song song) {
if (mPlayList != null) {
mOriginalSongList.add(song);
mPlayList.add(song);
sendBroadcast(ITEM_ADDED);
}
}
private void sendBroadcast(String action) {
sendBroadcast(action, null);
}
private void sendBroadcast(String action, Bundle data) {
Log.d("action", action + "2");
Intent i = new Intent(action);
if (data != null) {
i.putExtras(data);
}
sendStickyBroadcast(i);
refreshAppWidgets();
}
private void refreshAppWidgets() {
AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(this);
int appWidgetIds[] = appWidgetManager.getAppWidgetIds(new ComponentName(this, PlaybackWidget.class));
PlaybackWidget.updateAppWidget(this, appWidgetIds);
}
public void setAsNextTrack(Song song) {
if (mPlayList != null) {
mOriginalSongList.add(song);
int currentPos = mCurrentPosition;
mPlayList.add(currentPos + 1, song);
sendBroadcast(ITEM_ADDED);
}
}
public void setPosition(int position, boolean play) {
mCurrentPosition = position;
Song song = mPlayList.get(position);
if (song != mCurrentSong) {
mCurrentSong = song;
if (play) {
openAndPlay();
} else {
open();
}
}
}
public boolean hasPlaylist() {
return mHasPlaylist;
}
public int getTrackDuration() {
return mMediaPlayer.getDuration();
}
public int getPlayerPosition() {
return mMediaPlayer.getCurrentPosition();
}
public void seekTo(int msec) {
mMediaPlayer.seekTo(msec);
}
/**
 * Computes the playlist index a "previous" request should jump to, without
 * mutating any state other than re-syncing mCurrentPosition.
 *
 * Rules, in order:
 *  - In REPEAT_CURRENT mode (unless the skip was explicitly forced), or when
 *    more than 1.5 seconds of the track have already played, "previous" means
 *    restart the current track, so the current index is returned.
 *  - Otherwise step back one; at the start of the list, wrap to the last track
 *    in REPEAT_ALL mode, or return -1 to signal "no previous track".
 *
 * @param force true when the user explicitly pressed "previous" (overrides
 *              REPEAT_CURRENT behavior)
 * @return the index to play, or -1 if there is no previous track
 */
private int getPreviousPosition(boolean force) {
// Re-sync mCurrentPosition with mCurrentSong first: shuffling or reordering
// may have moved the current song within mPlayList.
updateCurrentPosition();
int position = mCurrentPosition;
// Restart the current track when repeating it, or once playback is past the
// 1.5-second grace threshold.
if ((mRepeatMode == REPEAT_CURRENT && !force) || (isPlaying() && getPlayerPosition() >= 1500)) {
return position;
}
if (position - 1 < 0) {
if (mRepeatMode == REPEAT_ALL) {
// Wrap around to the end of the queue.
return mPlayList.size() - 1;
}
return -1;// NO_REPEAT;
}
return position - 1;
}
/**
 * Returns the repeat mode that follows the current one in the toggle cycle
 * NO_REPEAT -> REPEAT_ALL -> REPEAT_CURRENT -> NO_REPEAT. Query only: the
 * current mode is not modified.
 *
 * @return the next repeat mode constant
 */
public int getNextRepeatMode() {
    if (mRepeatMode == NO_REPEAT) {
        return REPEAT_ALL;
    }
    if (mRepeatMode == REPEAT_ALL) {
        return REPEAT_CURRENT;
    }
    // REPEAT_CURRENT — or any unexpected value, matching the original
    // switch's fall-through — wraps back to NO_REPEAT.
    return NO_REPEAT;
}
public void play() {
mMediaPlayer.start();
mIsPlaying = true;
mIsPaused = false;
sendBroadcast(PLAYSTATE_CHANGED);
Notification.updateNotification(this);
}
public void pause() {
mMediaPlayer.pause();
mIsPlaying = false;
mIsPaused = true;
sendBroadcast(PLAYSTATE_CHANGED);
Notification.updateNotification(this);
}
public void resume() {
play();
}
public void toggle() {
if (mMediaPlayer.isPlaying()) {
pause();
} else {
resume();
}
}
public boolean isPaused() {
return mIsPaused;
}
public void stop() {
mMediaPlayer.stop();
mIsPlaying = false;
sendBroadcast(PLAYSTATE_CHANGED);
}
public void playPrev(boolean force) {
int position = getPreviousPosition(force);
Log.e("pos", String.valueOf(position));
if (position >= 0 && position < mPlayList.size()) {
mCurrentPosition = position;
mCurrentSong = mPlayList.get(position);
openAndPlay();
}
}
public int getRepeatMode() {
return mRepeatMode;
}
public void setRepeatMode(int mode) {
mRepeatMode = mode;
}
public boolean isShuffleEnabled() {
return mShuffle;
}
public void setShuffleEnabled(boolean enable) {
if (mShuffle != enable) {
mShuffle = enable;
if (enable) {
shuffle();
} else {
mPlayList.clear();
mPlayList.addAll(mOriginalSongList);
}
//on met à jour la position
updateCurrentPosition();
sendBroadcast(ORDER_CHANGED);
}
}
/**
 * Randomizes the play queue while keeping the currently playing song first.
 * The current song is pulled out before shuffling and re-inserted at index 0,
 * so the playback position survives the reorder. mOriginalSongList is left
 * untouched, which lets setShuffleEnabled(false) restore the original order.
 */
public void shuffle() {
// remove() returns false when mCurrentSong is null or not in the list.
boolean b = mPlayList.remove(mCurrentSong);
Collections.shuffle(mPlayList);
if (b) {
mPlayList.add(0, mCurrentSong);
}
}
private void updateCurrentPosition() {
int pos = mPlayList.indexOf(mCurrentSong);
if (pos != -1) {
mCurrentPosition = pos;
}
}
public boolean isPlaying() {
return mIsPlaying;
}
@Override
public void onCompletion(MediaPlayer mp) {
// mp.stop();
Log.d(TAG, "onCompletion");
playNext(false);
}
public void playNext(boolean force) {
int position = getNextPosition(force);
Log.e("pos", String.valueOf(position));
if (position >= 0 && position < mPlayList.size()) {
mCurrentPosition = position;
mCurrentSong = mPlayList.get(position);
openAndPlay();
}
}
private int getNextPosition(boolean force) {
updateCurrentPosition();
int position = mCurrentPosition;
if (mRepeatMode == REPEAT_CURRENT && !force) {
return position;
}
if (position + 1 >= mPlayList.size()) {
if (mRepeatMode == REPEAT_ALL) {
return 0;
}
return -1;// NO_REPEAT;
}
return position + 1;
}
private void openAndPlay() {
mPlayImmediately = true;
open();
}
/**
 * Loads mCurrentSong into the MediaPlayer and starts asynchronous preparation.
 *
 * Broadcasts POSITION_CHANGED (carrying the song's index in the play list)
 * before resetting the player, then points the player at the song's MediaStore
 * content URI. Playback itself starts in onPrepared() when mPlayImmediately is
 * set (see openAndPlay()). Failures are reported to ACRA and logged rather
 * than rethrown.
 */
private void open() {
// Intent i = new Intent(META_CHANGED);
// sendStickyBroadcast(i);
Bundle extras = new Bundle();
extras.putInt(EXTRA_POSITION, getPositionWithinPlayList());
sendBroadcast(POSITION_CHANGED, extras);
// reset() must precede setDataSource(): the player may hold a previous track.
mMediaPlayer.reset();
Uri songUri = ContentUris.withAppendedId(
android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,
mCurrentSong.getId());
try {
mMediaPlayer.setDataSource(getApplicationContext(), songUri);
// Asynchronous: completion is delivered to onPrepared().
mMediaPlayer.prepareAsync();
} catch (IllegalArgumentException | SecurityException
| IllegalStateException | IOException e) {
ACRA.getErrorReporter().handleException(e);
Log.e("ee", "ee", e);
}
}
public int getPositionWithinPlayList() {
if (mPlayList != null) {
return mPlayList.indexOf(mCurrentSong);
}
return -1;
}
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.d(TAG,
"onError " + String.valueOf(what) + " " + String.valueOf(extra));
return false;
}
@Override
public void onPrepared(MediaPlayer mp) {
sendBroadcast(META_CHANGED);
if (mPlayImmediately) {
play();
mPlayImmediately = false;
}
}
private void startMainActivity() {
Intent dialogIntent = new Intent(this, MainActivity.class);
dialogIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(dialogIntent);
}
public class PlaybackBinder extends Binder {
public PlaybackService getService() {
return PlaybackService.this;
}
}
}
| app/src/main/java/com/andryr/musicplayer/PlaybackService.java | package com.andryr.musicplayer;
import android.app.Service;
import android.appwidget.AppWidgetManager;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.ContentUris;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.net.Uri;
import android.os.Binder;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.PowerManager;
import android.preference.PreferenceManager;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
import com.andryr.musicplayer.audiofx.AudioEffectsReceiver;
import com.andryr.musicplayer.model.Song;
import com.andryr.musicplayer.utils.Notification;
import org.acra.ACRA;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
//TODO déplacer certaines méthodes dans d'autres classes (égaliseur, mediaplayer, etc.)
public class PlaybackService extends Service implements OnPreparedListener,
OnErrorListener, OnCompletionListener {
public static final String PREF_AUTO_PAUSE = "com.andryr.musicplayer.AUTO_PAUSE";//pause automatique quand on détecte un appel entrant
public static final String ACTION_PLAY = "com.andryr.musicplayer.ACTION_PLAY";
public static final String ACTION_PAUSE = "com.andryr.musicplayer.ACTION_PAUSE";
public static final String ACTION_RESUME = "com.andryr.musicplayer.ACTION_RESUME";
public static final String ACTION_TOGGLE = "com.andryr.musicplayer.ACTION_TOGGLE";
public static final String ACTION_NEXT = "com.andryr.musicplayer.ACTION_NEXT";
public static final String ACTION_PREVIOUS = "com.andryr.musicplayer.ACTION_PREVIOUS";
public static final String ACTION_STOP = "com.andryr.musicplayer.ACTION_STOP";
public static final String ACTION_CHOOSE_SONG = "com.andryr.musicplayer.ACTION_CHOOSE_SONG";
public static final String META_CHANGED = "com.andryr.musicplayer.META_CHANGED";
public static final String PLAYSTATE_CHANGED = "com.andryr.musicplayer.PLAYSTATE_CHANGED";
public static final String QUEUE_CHANGED = "com.andryr.musicplayer.QUEUE_CHANGED";
public static final String POSITION_CHANGED = "com.andryr.musicplayer.POSITION_CHANGED";
public static final String ITEM_ADDED = "com.andryr.musicplayer.ITEM_ADDED";
public static final String ORDER_CHANGED = "com.andryr.musicplayer.ORDER_CHANGED";
public static final String EXTRA_POSITION = "com.andryr.musicplayer.POSITION";
public static final int NO_REPEAT = 20;
public static final int REPEAT_ALL = 21;
public static final int REPEAT_CURRENT = 22;
private static final String TAG = "PlaybackService";
private static final int IDLE_DELAY = 60000;
private PlaybackBinder mBinder = new PlaybackBinder();
private MediaPlayer mMediaPlayer;
private List<Song> mOriginalSongList;
private List<Song> mPlayList = new ArrayList<>();
private Song mCurrentSong;
private boolean mIsPlaying = false;
private boolean mIsPaused = false;
private boolean mHasPlaylist = false;
private boolean mShuffle = false;
private int mStartId;
private int mRepeatMode = NO_REPEAT;
private int mCurrentPosition;
private boolean mBound = false;
private boolean mAutoPause = false;
//
private boolean mPlayImmediately = false;
private Handler mDelayedStopHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
if (isPlaying() || mBound) {
return;
}
stopSelf(mStartId);
}
};
private BroadcastReceiver mHeadsetStateReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(Intent.ACTION_HEADSET_PLUG)) {
boolean plugged = intent.getIntExtra("state", 0) == 1;
if (!plugged) {
pause();
}
}
}
};
private TelephonyManager mTelephonyManager;
private PhoneStateListener mPhoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String incomingNumber) {
super.onCallStateChanged(state, incomingNumber);
switch (state) {
case TelephonyManager.CALL_STATE_OFFHOOK:
case TelephonyManager.CALL_STATE_RINGING:
pause();
break;
}
}
};
@Override
public void onCreate() {
super.onCreate();
mMediaPlayer = new MediaPlayer();
mMediaPlayer.setOnCompletionListener(this);
mMediaPlayer.setOnErrorListener(this);
mMediaPlayer.setOnPreparedListener(this);
mMediaPlayer.setWakeMode(getApplicationContext(),
PowerManager.PARTIAL_WAKE_LOCK);
mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
Intent i = new Intent(this, AudioEffectsReceiver.class);
i.setAction(AudioEffectsReceiver.ACTION_OPEN_AUDIO_EFFECT_SESSION);
i.putExtra(AudioEffectsReceiver.EXTRA_AUDIO_SESSION_ID, mMediaPlayer.getAudioSessionId());
sendBroadcast(i);
IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
registerReceiver(mHeadsetStateReceiver, receiverFilter);
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
mAutoPause = prefs.getBoolean(PREF_AUTO_PAUSE, false);
initTelephony();
}
private void initTelephony() {
if (mAutoPause) {
mTelephonyManager = (TelephonyManager) getSystemService(TELEPHONY_SERVICE);
if (mTelephonyManager != null) {
mTelephonyManager.listen(mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
}
}
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
mStartId = startId;
if (intent != null) {
String action = intent.getAction();
if (action != null) {
if (mPlayList.size() == 0 || action.equals(ACTION_CHOOSE_SONG)) {
startMainActivity();
} else if (action.equals(ACTION_TOGGLE)) {
toggle();
} else if (action.equals(ACTION_STOP)) {
if (!mBound) {
stopSelf(mStartId);
}
} else if (action.equals(ACTION_NEXT)) {
playNext(true);
} else if (action.equals(ACTION_PREVIOUS)) {
playPrev(true);
}
}
}
return START_STICKY;
}
@Override
public void onDestroy() {
unregisterReceiver(mHeadsetStateReceiver);
if (mTelephonyManager != null) {
mTelephonyManager.listen(mPhoneStateListener, PhoneStateListener.LISTEN_NONE);
}
mMediaPlayer.stop();
Intent i = new Intent(this, AudioEffectsReceiver.class);
i.setAction(AudioEffectsReceiver.ACTION_CLOSE_AUDIO_EFFECT_SESSION);
sendBroadcast(i);
mMediaPlayer.release();
super.onDestroy();
}
@Override
public IBinder onBind(Intent intent) {
mBound = true;
return mBinder;
}
@Override
public boolean onUnbind(Intent intent) {
mBound = false;
if (mMediaPlayer.isPlaying()) {
return true;
}
if (mPlayList.size() > 0) {
Message msg = mDelayedStopHandler.obtainMessage();
mDelayedStopHandler.sendMessageDelayed(msg, IDLE_DELAY);
return true;
}
stopSelf(mStartId);
return true;
}
public void setAutoPauseEnabled(boolean enable) {
if (enable == !mAutoPause) {
mAutoPause = enable;
if (enable) {
initTelephony();
}
//si !enable on a rien à faire à priori
}
}
public Song getCurrentSong() {
return mCurrentSong;
}
public long getSongId() {
if (mCurrentSong != null) {
return mCurrentSong.getId();
}
return -1;
}
public String getSongTitle() {
if (mCurrentSong != null) {
return mCurrentSong.getTitle();
}
return null;
}
public String getArtistName() {
if (mCurrentSong != null) {
return mCurrentSong.getArtist();
}
return null;
}
public String getAlbumName() {
if (mCurrentSong != null) {
return mCurrentSong.getAlbum();
}
return null;
}
public long getAlbumId() {
if (mCurrentSong != null) {
return mCurrentSong.getAlbumId();
}
return -1;
}
public List<Song> getPlayList() {
return mPlayList;
}
public void setPlayList(List<Song> songList, int position, boolean play) {
setPlayListInternal(songList);
setPosition(position, play);
if (mShuffle) {
shuffle();
}
}
private void setPlayListInternal(List<Song> songList) {
if (songList == null || songList.size() <= 0) {
return;
}
mOriginalSongList = songList;
mPlayList.clear();
mPlayList.addAll(mOriginalSongList);
mHasPlaylist = true;
}
public void setPlayListAndShuffle(List<Song> songList, boolean play) {
setPlayListInternal(songList);
mCurrentSong = null;
mShuffle = true;
shuffle();
setPosition(0, play);
}
public void addToQueue(Song song) {
if (mPlayList != null) {
mOriginalSongList.add(song);
mPlayList.add(song);
sendBroadcast(ITEM_ADDED);
}
}
private void sendBroadcast(String action) {
sendBroadcast(action, null);
}
private void sendBroadcast(String action, Bundle data) {
Log.d("action", action + "2");
Intent i = new Intent(action);
if (data != null) {
i.putExtras(data);
}
sendStickyBroadcast(i);
refreshAppWidgets();
}
private void refreshAppWidgets() {
AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(this);
int appWidgetIds[] = appWidgetManager.getAppWidgetIds(new ComponentName(this, PlaybackWidget.class));
PlaybackWidget.updateAppWidget(this, appWidgetIds);
}
public void setAsNextTrack(Song song) {
if (mPlayList != null) {
mOriginalSongList.add(song);
int currentPos = mCurrentPosition;
mPlayList.add(currentPos + 1, song);
sendBroadcast(ITEM_ADDED);
}
}
public void setPosition(int position, boolean play) {
mCurrentPosition = position;
Song song = mPlayList.get(position);
if (song != mCurrentSong) {
mCurrentSong = song;
if (play) {
openAndPlay();
} else {
open();
}
}
}
public boolean hasPlaylist() {
return mHasPlaylist;
}
public int getTrackDuration() {
return mMediaPlayer.getDuration();
}
public int getPlayerPosition() {
return mMediaPlayer.getCurrentPosition();
}
public void seekTo(int msec) {
mMediaPlayer.seekTo(msec);
}
private int getPreviousPosition(boolean force) {
updateCurrentPosition();
int position = mCurrentPosition;
if ((mRepeatMode == REPEAT_CURRENT && !force) || (isPlaying() && getPlayerPosition() >= 1500)) {
return position;
}
if (position - 1 < 0) {
if (mRepeatMode == REPEAT_ALL) {
return mPlayList.size() - 1;
}
return -1;// NO_REPEAT;
}
return position - 1;
}
/**
 * Returns the repeat mode that would follow the current one in the cycle
 * NO_REPEAT -> REPEAT_ALL -> REPEAT_CURRENT -> NO_REPEAT. Query only: the
 * current mode is not modified.
 *
 * Replaces the former {@code 20 + (mRepeatMode + 1) % 20 % 3} arithmetic,
 * which only worked because the constants happen to be the literals 20-22
 * and would have broken silently had they ever been renumbered.
 *
 * @return the next repeat mode constant; unknown values fall back to NO_REPEAT
 */
public int getNextRepeatMode() {
    switch (mRepeatMode) {
        case NO_REPEAT:
            return REPEAT_ALL;
        case REPEAT_ALL:
            return REPEAT_CURRENT;
        case REPEAT_CURRENT:
        default:
            return NO_REPEAT;
    }
}
public void play() {
mMediaPlayer.start();
mIsPlaying = true;
mIsPaused = false;
sendBroadcast(PLAYSTATE_CHANGED);
Notification.updateNotification(this);
}
public void pause() {
mMediaPlayer.pause();
mIsPlaying = false;
mIsPaused = true;
sendBroadcast(PLAYSTATE_CHANGED);
Notification.updateNotification(this);
}
public void resume() {
play();
}
public void toggle() {
if (mMediaPlayer.isPlaying()) {
pause();
} else {
resume();
}
}
public boolean isPaused() {
return mIsPaused;
}
public void stop() {
mMediaPlayer.stop();
mIsPlaying = false;
sendBroadcast(PLAYSTATE_CHANGED);
}
public void playPrev(boolean force) {
int position = getPreviousPosition(force);
Log.e("pos", String.valueOf(position));
if (position >= 0 && position < mPlayList.size()) {
mCurrentPosition = position;
mCurrentSong = mPlayList.get(position);
openAndPlay();
}
}
public int getRepeatMode() {
return mRepeatMode;
}
public void setRepeatMode(int mode) {
mRepeatMode = mode;
}
public boolean isShuffleEnabled() {
return mShuffle;
}
public void setShuffleEnabled(boolean enable) {
if (mShuffle != enable) {
mShuffle = enable;
if (enable) {
shuffle();
} else {
mPlayList.clear();
mPlayList.addAll(mOriginalSongList);
}
//on met à jour la position
updateCurrentPosition();
sendBroadcast(ORDER_CHANGED);
}
}
public void shuffle() {
boolean b = mPlayList.remove(mCurrentSong);
Collections.shuffle(mPlayList);
if (b) {
mPlayList.add(0, mCurrentSong);
}
}
private void updateCurrentPosition() {
int pos = mPlayList.indexOf(mCurrentSong);
if (pos != -1) {
mCurrentPosition = pos;
}
}
public boolean isPlaying() {
return mIsPlaying;
}
@Override
public void onCompletion(MediaPlayer mp) {
// mp.stop();
Log.d(TAG, "onCompletion");
playNext(false);
}
public void playNext(boolean force) {
int position = getNextPosition(force);
Log.e("pos", String.valueOf(position));
if (position >= 0 && position < mPlayList.size()) {
mCurrentPosition = position;
mCurrentSong = mPlayList.get(position);
openAndPlay();
}
}
private int getNextPosition(boolean force) {
updateCurrentPosition();
int position = mCurrentPosition;
if (mRepeatMode == REPEAT_CURRENT && !force) {
return position;
}
if (position + 1 >= mPlayList.size()) {
if (mRepeatMode == REPEAT_ALL) {
return 0;
}
return -1;// NO_REPEAT;
}
return position + 1;
}
private void openAndPlay() {
mPlayImmediately = true;
open();
}
private void open() {
// Intent i = new Intent(META_CHANGED);
// sendStickyBroadcast(i);
Bundle extras = new Bundle();
extras.putInt(EXTRA_POSITION, getPositionWithinPlayList());
sendBroadcast(POSITION_CHANGED, extras);
mMediaPlayer.reset();
Uri songUri = ContentUris.withAppendedId(
android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,
mCurrentSong.getId());
try {
mMediaPlayer.setDataSource(getApplicationContext(), songUri);
mMediaPlayer.prepareAsync();
} catch (IllegalArgumentException | SecurityException
| IllegalStateException | IOException e) {
ACRA.getErrorReporter().handleException(e);
Log.e("ee", "ee", e);
}
}
public int getPositionWithinPlayList() {
if (mPlayList != null) {
return mPlayList.indexOf(mCurrentSong);
}
return -1;
}
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.d(TAG,
"onError " + String.valueOf(what) + " " + String.valueOf(extra));
return false;
}
@Override
public void onPrepared(MediaPlayer mp) {
sendBroadcast(META_CHANGED);
if (mPlayImmediately) {
play();
mPlayImmediately = false;
}
}
private void startMainActivity() {
Intent dialogIntent = new Intent(this, MainActivity.class);
dialogIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(dialogIntent);
}
public class PlaybackBinder extends Binder {
public PlaybackService getService() {
return PlaybackService.this;
}
}
}
| code plus lisible
| app/src/main/java/com/andryr/musicplayer/PlaybackService.java | code plus lisible |
|
Java | apache-2.0 | c39f9f16fd0387884acd3bc8fd7a8dffe4606bf7 | 0 | ayltai/Newspaper | package com.github.ayltai.newspaper;
import android.app.Application;
import com.facebook.common.logging.FLog;
import com.facebook.stetho.Stetho;
import com.github.ayltai.newspaper.util.TestUtils;
import com.squareup.leakcanary.LeakCanary;
/**
 * Debug-build application base: wires up LeakCanary, Facebook FLog verbosity
 * and Stetho, but only for normal runs — all of this tooling is skipped while
 * unit or instrumentation tests are executing.
 */
public abstract class BaseApplication extends Application {
    @Override
    public void onCreate() {
        super.onCreate();
        boolean underTest = TestUtils.isRunningUnitTest() || TestUtils.isRunningInstrumentalTest();
        if (underTest) {
            return; // debug tooling interferes with test runs
        }
        if (!LeakCanary.isInAnalyzerProcess(this)) {
            // Skip installation inside LeakCanary's own heap-analysis process.
            LeakCanary.install(this);
        }
        FLog.setMinimumLoggingLevel(FLog.WARN);
        Stetho.initializeWithDefaults(this);
    }
}
| app/src/debug/java/com/github/ayltai/newspaper/BaseApplication.java | package com.github.ayltai.newspaper;
import android.app.Application;
import com.facebook.common.logging.FLog;
import com.facebook.stetho.Stetho;
import com.github.ayltai.newspaper.util.TestUtils;
import com.squareup.leakcanary.LeakCanary;
/**
 * Debug-build application base: installs LeakCanary, raises the Facebook FLog
 * threshold to WARN and initializes Stetho, except during instrumentation tests.
 */
public abstract class BaseApplication extends Application {
@Override
public void onCreate() {
super.onCreate();
// NOTE(review): only instrumentation tests are excluded here; plain unit
// tests would still initialize LeakCanary/Stetho — confirm that is intended.
if (!TestUtils.isRunningInstrumentalTest()) {
// Skip installation inside LeakCanary's own heap-analysis process.
if (!LeakCanary.isInAnalyzerProcess(this)) LeakCanary.install(this);
FLog.setMinimumLoggingLevel(FLog.WARN);
Stetho.initializeWithDefaults(this);
}
}
}
| Disable Facebook Stetho in tests
| app/src/debug/java/com/github/ayltai/newspaper/BaseApplication.java | Disable Facebook Stetho in tests |
|
Java | apache-2.0 | 5a9da320d4c1bd54dfc95bcd8559c36fccf28e3a | 0 | zengfeng1993/ZFAndroidFramework | package com.zero.android.constact;
import com.zero.android.common.BasePresent;
import com.zero.android.common.BaseView;
import com.zero.android.entity.User;
/**
* Created by zengfeng on 16/7/10.
*/
/**
 * MVP contract for the login screen.
 *
 * Note: nested members of an interface are implicitly {@code public}, so the
 * redundant modifiers have been dropped (no change for implementors/callers).
 */
public interface LoginConstact {

    /** View contract: rendering callbacks driven by the presenter. */
    interface View extends BaseView<BasePresent> {

        /** Shows a login-failure indication to the user. */
        void showLoginError();

        /** Shows the in-progress indicator while the login task runs. */
        void showLoading();

        /**
         * Shows the logged-in state.
         *
         * @param userName display name of the user that signed in
         */
        void showLoginSuccess(String userName);

        /** Hides the in-progress indicator. */
        void dismissLoading();
    }

    /** Presenter contract: actions the view can trigger. */
    interface Presenter extends BasePresent {

        /**
         * Runs the login with the given credentials; results are reported
         * back through the {@link View} callbacks.
         *
         * @param username account name
         * @param password account password
         */
        void loginTask(String username, String password);
    }
}
| app/src/main/java/com/zero/android/constact/LoginConstact.java | package com.zero.android.constact;
import com.zero.android.common.BasePresent;
import com.zero.android.common.BaseView;
/**
* Created by zengfeng on 16/7/10.
*/
// Container class grouping the MVP contract interfaces for the login screen.
public class LoginConstact {
    // View contract: rendering callbacks driven by the presenter.
    public interface View extends BaseView<BasePresent>{
        // Shows a login-failure indication to the user.
        void showLoginError();
        // Shows the in-progress indicator while the login task runs.
        void showLoading();
        // Shows the logged-in state.
        void showLoginSuccess();
    }
    // Presenter contract: actions the view can trigger.
    public interface Presenter extends BasePresent{
        // Runs the login with the given credentials; results are reported
        // back through the View callbacks.
        void loginTask(String username,String password);
    }
}
| mvp constact
| app/src/main/java/com/zero/android/constact/LoginConstact.java | mvp constact |
|
Java | apache-2.0 | d5cdaa72783a93cac8965987050696a6bd12e690 | 0 | linkedin/WhereHows,mars-lan/WhereHows,camelliazhang/WhereHows,theseyi/WhereHows,camelliazhang/WhereHows,camelliazhang/WhereHows,alyiwang/WhereHows,linkedin/WhereHows,theseyi/WhereHows,linkedin/WhereHows,theseyi/WhereHows,theseyi/WhereHows,mars-lan/WhereHows,linkedin/WhereHows,camelliazhang/WhereHows,theseyi/WhereHows,camelliazhang/WhereHows,linkedin/WhereHows,linkedin/WhereHows,alyiwang/WhereHows,mars-lan/WhereHows,camelliazhang/WhereHows,mars-lan/WhereHows,alyiwang/WhereHows,theseyi/WhereHows,alyiwang/WhereHows,alyiwang/WhereHows,mars-lan/WhereHows,mars-lan/WhereHows,alyiwang/WhereHows | /**
* Copyright 2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package controllers.api.v2;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import controllers.Application;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import play.Logger;
import play.libs.F.Promise;
import play.libs.Json;
import play.mvc.Controller;
import play.mvc.Result;
import play.mvc.Results;
import wherehows.dao.table.AclDao;
import wherehows.dao.table.DatasetComplianceDao;
import wherehows.dao.table.DatasetOwnerDao;
import wherehows.dao.table.DictDatasetDao;
import wherehows.dao.view.DataTypesViewDao;
import wherehows.dao.view.DatasetViewDao;
import wherehows.dao.view.OwnerViewDao;
import wherehows.models.table.AccessControlEntry;
import wherehows.models.view.DatasetCompliance;
import wherehows.models.view.DatasetOwner;
import wherehows.models.view.DatasetOwnership;
import wherehows.models.view.DatasetSchema;
import wherehows.models.view.DatasetView;
import wherehows.models.view.DsComplianceSuggestion;
import static controllers.api.v1.Dataset.*;
import static utils.Dataset.*;
/**
 * v2 REST controller for dataset resources: listing/browsing, schema,
 * ownership, compliance (including suggestions and feedback) and JIT ACLs.
 *
 * Error-mapping convention used throughout: DAO exceptions whose string form
 * contains "Response status 404" become HTTP 404; all other exceptions become
 * HTTP 500 with a {"msg": ...} JSON body; mutating endpoints require a
 * session "user" and return 401 when it is absent.
 */
public class Dataset extends Controller {

  private static final DataTypesViewDao DATA_TYPES_DAO = Application.DAO_FACTORY.getDataTypesViewDao();

  private static final DictDatasetDao DICT_DATASET_DAO = Application.DAO_FACTORY.getDictDatasetDao();

  private static final DatasetViewDao DATASET_VIEW_DAO = Application.DAO_FACTORY.getDatasetViewDao();

  private static final OwnerViewDao OWNER_VIEW_DAO = Application.DAO_FACTORY.getOwnerViewDao();

  // NOTE(review): "getDatasteOwnerDao" is a typo in DaoFactory's method name;
  // kept as-is since it must match the factory.
  private static final DatasetOwnerDao OWNER_DAO = Application.DAO_FACTORY.getDatasteOwnerDao();

  private static final DatasetComplianceDao COMPLIANCE_DAO = Application.DAO_FACTORY.getDatasetComplianceDao();

  // May be null when initialization fails (see initAclDao); the ACL handlers
  // would then throw an NPE inside their try blocks, which is caught and
  // surfaced as a 500 response.
  private static final AclDao ACL_DAO = initAclDao();

  private static final int _DEFAULT_PAGE_SIZE = 20;

  private static final long _DEFAULT_JIT_ACL_PERIOD = 48 * 3600; // 48 hour in seconds

  private static final JsonNode _EMPTY_RESPONSE = Json.newObject();

  // Static controller; never instantiated.
  private Dataset() {
  }

  // Builds the ACL DAO, tolerating initialization failure: logs the error and
  // returns null instead of failing class initialization of the controller.
  private static AclDao initAclDao() {
    try {
      return Application.DAO_FACTORY.getAclDao();
    } catch (Exception e) {
      Logger.error("ACL DAO init error", e);
    }
    return null;
  }

  /**
   * Lists dataset name segments under the given platform and prefix.
   * With a blank platform, returns all platforms formatted as "[platform=...]".
   */
  public static Promise<Result> listSegments(@Nullable String platform, @Nonnull String prefix) {
    try {
      if (StringUtils.isBlank(platform)) {
        return Promise.promise(() -> ok(Json.toJson(DATA_TYPES_DAO.getAllPlatforms()
            .stream()
            .map(s -> String.format("[platform=%s]", s.get("name")))
            .collect(Collectors.toList()))));
      }

      List<String> names = DATASET_VIEW_DAO.listSegments(platform, "PROD", getPlatformPrefix(platform, prefix));
      // if prefix is a dataset name, then return empty list
      if (names.size() == 1 && names.get(0).equalsIgnoreCase(prefix)) {
        return Promise.promise(() -> ok(Json.newArray()));
      }
      return Promise.promise(() -> ok(Json.toJson(names)));
    } catch (Exception e) {
      Logger.error("Fail to list dataset names/sections", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }
  }

  /**
   * Lists datasets under platform/prefix with paging; accepts either
   * start/count query params or a page index.
   */
  public static Promise<Result> listDatasets(@Nullable String platform, @Nonnull String prefix) {
    try {
      int start = NumberUtils.toInt(request().getQueryString("start"), 0);
      int count = NumberUtils.toInt(request().getQueryString("count"), _DEFAULT_PAGE_SIZE);
      int page = NumberUtils.toInt(request().getQueryString("page"), 0);
      // 'start' takes precedence over 'page'
      int startIndex = (request().getQueryString("start") == null && page > 0) ? page * _DEFAULT_PAGE_SIZE : start;

      return Promise.promise(
          () -> ok(Json.toJson(DATASET_VIEW_DAO.listDatasets(platform, "PROD", prefix, startIndex, count))));
    } catch (Exception e) {
      Logger.error("Fail to list datasets", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }
  }

  /**
   * Returns the total dataset count for platform/prefix as a plain-text body.
   * Fetches a single-element page just to read its total.
   */
  public static Promise<Result> countDatasets(@Nullable String platform, @Nonnull String prefix) {
    try {
      return Promise.promise(
          () -> ok(String.valueOf(DATASET_VIEW_DAO.listDatasets(platform, "PROD", prefix, 0, 1).getTotal())));
    } catch (Exception e) {
      Logger.error("Fail to count total datasets", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }
  }

  /** Returns all compliance data types as {"complianceDataTypes": [...]}. */
  public static Promise<Result> getComplianceDataTypes() {
    try {
      return Promise.promise(() -> ok(
          Json.newObject().set("complianceDataTypes", Json.toJson(DATA_TYPES_DAO.getAllComplianceDataTypes()))));
    } catch (Exception e) {
      Logger.error("Fail to get compliance data types", e);
      return Promise.promise(() -> notFound(errorResponse(e)));
    }
  }

  /** Returns all data platforms as {"platforms": [...]}. */
  public static Promise<Result> getDataPlatforms() {
    try {
      return Promise.promise(
          () -> ok(Json.newObject().set("platforms", Json.toJson(DATA_TYPES_DAO.getAllPlatforms()))));
    } catch (Exception e) {
      Logger.error("Fail to get data platforms", e);
      return Promise.promise(() -> notFound(errorResponse(e)));
    }
  }

  /**
   * Resolves a numeric dataset id to its WhereHows URN, returned via the
   * "whUrn" response header (empty 200 body); 404 when unknown.
   */
  public static Promise<Result> getWhUrnById(int id) {
    String whUrn = getDatasetUrnByIdOrCache(id);

    if (whUrn != null) {
      response().setHeader("whUrn", whUrn);
      return Promise.promise(Results::ok);
    } else {
      return Promise.promise(Results::notFound);
    }
  }

  /** Fetches the dataset view for a URN as {"dataset": {...}}. */
  public static Promise<Result> getDataset(@Nonnull String datasetUrn) {
    final DatasetView view;
    try {
      view = DATASET_VIEW_DAO.getDatasetView(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Failed to get dataset view", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(Json.newObject().set("dataset", Json.toJson(view))));
  }

  /**
   * Updates a dataset's deprecation status from the JSON body
   * (deprecated, optional deprecationNote, optional decommissionTime).
   * Requires a logged-in session user.
   */
  public static Promise<Result> updateDatasetDeprecation(String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    try {
      JsonNode record = request().body().asJson();

      boolean deprecated = record.get("deprecated").asBoolean();
      String deprecationNote = record.hasNonNull("deprecationNote") ? record.get("deprecationNote").asText() : "";
      Long decommissionTime = record.hasNonNull("decommissionTime") ? record.get("decommissionTime").asLong() : null;

      DICT_DATASET_DAO.setDatasetDeprecation(datasetUrn, deprecated, deprecationNote, decommissionTime, username);
    } catch (Exception e) {
      Logger.error("Update dataset deprecation fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  /** Fetches the dataset schema as {"schema": {...}}; 404 when absent. */
  public static Promise<Result> getDatasetSchema(String datasetUrn) {
    final DatasetSchema schema;
    try {
      schema = DATASET_VIEW_DAO.getDatasetSchema(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch schema fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    if (schema == null) {
      return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
    }

    return Promise.promise(() -> ok(Json.newObject().set("schema", Json.toJson(schema))));
  }

  /** Fetches the confirmed ownership information for a dataset URN. */
  public static Promise<Result> getDatasetOwners(String datasetUrn) {
    final DatasetOwnership ownership;
    try {
      ownership = OWNER_VIEW_DAO.getDatasetOwners(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch owners fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(Json.toJson(ownership)));
  }

  /** Fetches system-suggested owners for a dataset URN. */
  public static Promise<Result> getDatasetSuggestedOwners(String datasetUrn) {
    final DatasetOwnership ownership;
    try {
      ownership = OWNER_VIEW_DAO.getDatasetSuggestedOwners(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch owners fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(Json.toJson(ownership)));
  }

  /**
   * Replaces the dataset's owner list from the JSON body ({"owners": [...]}).
   * Rejects the update unless at least two UI-confirmed USER owners of type
   * "DataOwner" remain, so a dataset can never drop below two human owners.
   */
  public static Promise<Result> updateDatasetOwners(String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    final JsonNode content = request().body().asJson();
    // content should contain arraynode 'owners': []
    if (content == null || !content.has("owners") || !content.get("owners").isArray()) {
      return Promise.promise(() -> badRequest(errorResponse("Update dataset owners fail: missing owners field")));
    }

    try {
      final List<DatasetOwner> owners = Json.mapper().readerFor(new TypeReference<List<DatasetOwner>>() {
      }).readValue(content.get("owners"));

      long confirmedOwnerUserCount = owners.stream()
          .filter(s -> "DataOwner".equalsIgnoreCase(s.getType()) && "user".equalsIgnoreCase(s.getIdType())
              && "UI".equalsIgnoreCase(s.getSource()))
          .count();

      // enforce at least two UI (confirmed) USER DataOwner for a dataset before making any changes
      if (confirmedOwnerUserCount < 2) {
        return Promise.promise(() -> badRequest(errorResponse("Less than 2 UI USER owners")));
      }

      OWNER_DAO.updateDatasetOwners(datasetUrn, owners, username);
    } catch (Exception e) {
      Logger.error("Update Dataset owners fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  /** Fetches the compliance record as {"complianceInfo": {...}}. */
  public static Promise<Result> getDatasetCompliance(@Nonnull String datasetUrn) {
    final DatasetCompliance record;
    try {
      record = COMPLIANCE_DAO.getDatasetComplianceByUrn(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch compliance fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(Json.newObject().set("complianceInfo", Json.toJson(record))));
  }

  /**
   * Updates the compliance record from the JSON body; the body's datasetUrn
   * must match the path URN. Requires a logged-in session user.
   */
  public static Promise<Result> updateDatasetCompliance(@Nonnull String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    try {
      DatasetCompliance record = Json.mapper().convertValue(request().body().asJson(), DatasetCompliance.class);
      if (record.getDatasetUrn() == null || !record.getDatasetUrn().equals(datasetUrn)) {
        throw new IllegalArgumentException("Dataset Urn not exist or doesn't match.");
      }

      COMPLIANCE_DAO.updateDatasetComplianceByUrn(record, username);
    } catch (Exception e) {
      Logger.error("Update Compliance Info fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  /** Fetches the compliance suggestion as {"complianceSuggestion": {...}}. */
  public static Promise<Result> getDatasetSuggestedCompliance(@Nonnull String datasetUrn) {
    final DsComplianceSuggestion record;
    try {
      record = COMPLIANCE_DAO.getComplianceSuggestion(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch compliance suggestion fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    if (record == null) {
      return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
    }

    return Promise.promise(() -> ok(Json.newObject().set("complianceSuggestion", Json.toJson(record))));
  }

  /**
   * Records ACCEPT/REJECT feedback on a compliance suggestion; any other
   * feedback value is rejected with 400.
   */
  public static Promise<Result> sendDatasetSuggestedComplianceFeedback(@Nonnull String datasetUrn) {
    try {
      JsonNode record = request().body().asJson();

      String feedback = record.hasNonNull("feedback") ? record.get("feedback").asText().toUpperCase() : null;
      String uid = record.hasNonNull("uid") ? record.get("uid").asText() : "";

      if (!"ACCEPT".equals(feedback) && !"REJECT".equals(feedback)) {
        return Promise.promise(() -> badRequest(_EMPTY_RESPONSE));
      }

      COMPLIANCE_DAO.sendSuggestedComplianceFeedback(datasetUrn, uid, feedback);
    } catch (Exception e) {
      Logger.error("Send compliance suggestion feedback fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  /** Fetches the dataset's access-control entries; 404 when none exist. */
  public static Promise<Result> getDatasetAcls(@Nonnull String datasetUrn) {
    final List<AccessControlEntry> acls;
    try {
      acls = ACL_DAO.getDatasetAcls(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch ACLs error", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    if (acls == null) {
      return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
    }

    return Promise.promise(() -> ok(Json.toJson(acls)));
  }

  /**
   * Grants the session user JIT access to the dataset. Body fields: optional
   * accessType (default "r"), optional expiresAt (default now + 48h, epoch
   * seconds), required businessJustification.
   */
  public static Promise<Result> addUserToDatasetAcl(@Nonnull String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    JsonNode record = request().body().asJson();

    String accessType = record.hasNonNull("accessType") ? record.get("accessType").asText() : "r"; // default read

    Long expiresAt = record.hasNonNull("expiresAt") ? record.get("expiresAt").asLong()
        : System.currentTimeMillis() / 1000 + _DEFAULT_JIT_ACL_PERIOD; // default now + 48h, in seconds

    if (!record.hasNonNull("businessJustification")) {
      return Promise.promise(() -> badRequest(errorResponse("Missing business justification")));
    }
    String businessJustification = record.get("businessJustification").asText();

    try {
      ACL_DAO.addUserToDatasetAcl(datasetUrn, username, accessType, businessJustification, expiresAt);
    } catch (Exception e) {
      Logger.error("Add user to ACL error", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  /** Revokes the session user's ACL entry on the dataset. */
  public static Promise<Result> removeUserFromDatasetAcl(@Nonnull String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    try {
      ACL_DAO.removeUserFromDatasetAcl(datasetUrn, username);
    } catch (Exception e) {
      Logger.error("Remove User from ACL error", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  // Wraps a throwable into the standard {"msg": ...} error body.
  private static <E extends Throwable> JsonNode errorResponse(E e) {
    return errorResponse(e.toString());
  }

  // Builds the standard {"msg": ...} error body.
  private static JsonNode errorResponse(String msg) {
    return Json.newObject().put("msg", msg);
  }
}
| wherehows-frontend/app/controllers/api/v2/Dataset.java | /**
* Copyright 2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package controllers.api.v2;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import controllers.Application;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import play.Logger;
import play.libs.F.Promise;
import play.libs.Json;
import play.mvc.Controller;
import play.mvc.Result;
import play.mvc.Results;
import wherehows.dao.table.AclDao;
import wherehows.dao.table.DatasetComplianceDao;
import wherehows.dao.table.DatasetOwnerDao;
import wherehows.dao.table.DictDatasetDao;
import wherehows.dao.view.DataTypesViewDao;
import wherehows.dao.view.DatasetViewDao;
import wherehows.dao.view.OwnerViewDao;
import wherehows.models.table.AccessControlEntry;
import wherehows.models.view.DatasetCompliance;
import wherehows.models.view.DatasetOwner;
import wherehows.models.view.DatasetOwnership;
import wherehows.models.view.DatasetSchema;
import wherehows.models.view.DatasetView;
import wherehows.models.view.DsComplianceSuggestion;
import static controllers.api.v1.Dataset.*;
import static utils.Dataset.*;
/**
 * v2 dataset REST controller (pre-fix revision): listing/browsing, schema,
 * ownership, compliance and JIT ACL endpoints. DAO errors containing
 * "Response status 404" map to 404; other exceptions map to 500 with a
 * {"msg": ...} body; mutating endpoints return 401 without a session "user".
 */
public class Dataset extends Controller {

  private static final DataTypesViewDao DATA_TYPES_DAO = Application.DAO_FACTORY.getDataTypesViewDao();

  private static final DictDatasetDao DICT_DATASET_DAO = Application.DAO_FACTORY.getDictDatasetDao();

  private static final DatasetViewDao DATASET_VIEW_DAO = Application.DAO_FACTORY.getDatasetViewDao();

  private static final OwnerViewDao OWNER_VIEW_DAO = Application.DAO_FACTORY.getOwnerViewDao();

  private static final DatasetOwnerDao OWNER_DAO = Application.DAO_FACTORY.getDatasteOwnerDao();

  private static final DatasetComplianceDao COMPLIANCE_DAO = Application.DAO_FACTORY.getDatasetComplianceDao();

  // Null when initialization fails; ACL handlers then fail inside their try
  // blocks and respond 500.
  private static final AclDao ACL_DAO = initAclDao();

  private static final int _DEFAULT_PAGE_SIZE = 20;

  private static final long _DEFAULT_JIT_ACL_PERIOD = 48 * 3600; // 48 hour in seconds

  private static final JsonNode _EMPTY_RESPONSE = Json.newObject();

  // Static controller; never instantiated.
  private Dataset() {
  }

  // Builds the ACL DAO, logging and returning null on failure.
  private static AclDao initAclDao() {
    try {
      return Application.DAO_FACTORY.getAclDao();
    } catch (Exception e) {
      Logger.error("ACL DAO init error", e);
    }
    return null;
  }

  // Lists name segments for a platform/prefix; blank platform returns all
  // platforms formatted as "[platform=...]".
  public static Promise<Result> listSegments(@Nullable String platform, @Nonnull String prefix) {
    try {
      if (StringUtils.isBlank(platform)) {
        return Promise.promise(() -> ok(Json.toJson(DATA_TYPES_DAO.getAllPlatforms()
            .stream()
            .map(s -> String.format("[platform=%s]", s.get("name")))
            .collect(Collectors.toList()))));
      }

      List<String> names = DATASET_VIEW_DAO.listSegments(platform, "PROD", getPlatformPrefix(platform, prefix));
      // if prefix is a dataset name, then return empty list
      if (names.size() == 1 && names.get(0).equalsIgnoreCase(prefix)) {
        return Promise.promise(() -> ok(Json.newArray()));
      }
      return Promise.promise(() -> ok(Json.toJson(names)));
    } catch (Exception e) {
      Logger.error("Fail to list dataset names/sections", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }
  }

  // Lists datasets with paging via start/count query params or a page index.
  public static Promise<Result> listDatasets(@Nullable String platform, @Nonnull String prefix) {
    try {
      int start = NumberUtils.toInt(request().getQueryString("start"), 0);
      int count = NumberUtils.toInt(request().getQueryString("count"), _DEFAULT_PAGE_SIZE);
      int page = NumberUtils.toInt(request().getQueryString("page"), 0);
      // 'start' takes precedence over 'page'
      int startIndex = (request().getQueryString("start") == null && page > 0) ? page * _DEFAULT_PAGE_SIZE : start;

      return Promise.promise(
          () -> ok(Json.toJson(DATASET_VIEW_DAO.listDatasets(platform, "PROD", prefix, startIndex, count))));
    } catch (Exception e) {
      Logger.error("Fail to list datasets", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }
  }

  // Returns the dataset total for platform/prefix by fetching a 1-item page.
  public static Promise<Result> countDatasets(@Nullable String platform, @Nonnull String prefix) {
    try {
      return Promise.promise(
          () -> ok(String.valueOf(DATASET_VIEW_DAO.listDatasets(platform, "PROD", prefix, 0, 1).getTotal())));
    } catch (Exception e) {
      Logger.error("Fail to count total datasets", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }
  }

  // Returns all compliance data types.
  public static Promise<Result> getComplianceDataTypes() {
    try {
      return Promise.promise(() -> ok(
          Json.newObject().set("complianceDataTypes", Json.toJson(DATA_TYPES_DAO.getAllComplianceDataTypes()))));
    } catch (Exception e) {
      Logger.error("Fail to get compliance data types", e);
      return Promise.promise(() -> notFound(errorResponse(e)));
    }
  }

  // Returns all data platforms.
  public static Promise<Result> getDataPlatforms() {
    try {
      return Promise.promise(
          () -> ok(Json.newObject().set("platforms", Json.toJson(DATA_TYPES_DAO.getAllPlatforms()))));
    } catch (Exception e) {
      Logger.error("Fail to get data platforms", e);
      return Promise.promise(() -> notFound(errorResponse(e)));
    }
  }

  // Resolves a numeric id to its URN, returned via the "whUrn" header.
  public static Promise<Result> getWhUrnById(int id) {
    String whUrn = getDatasetUrnByIdOrCache(id);

    if (whUrn != null) {
      response().setHeader("whUrn", whUrn);
      return Promise.promise(Results::ok);
    } else {
      return Promise.promise(Results::notFound);
    }
  }

  // Fetches the dataset view for a URN.
  public static Promise<Result> getDataset(@Nonnull String datasetUrn) {
    final DatasetView view;
    try {
      view = DATASET_VIEW_DAO.getDatasetView(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Failed to get dataset view", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(Json.newObject().set("dataset", Json.toJson(view))))
;
  }

  // Updates deprecation status from the JSON body; requires a session user.
  public static Promise<Result> updateDatasetDeprecation(String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    try {
      JsonNode record = request().body().asJson();

      boolean deprecated = record.get("deprecated").asBoolean();
      String deprecationNote = record.hasNonNull("deprecationNote") ? record.get("deprecationNote").asText() : "";
      Long decommissionTime = record.hasNonNull("decommissionTime") ? record.get("decommissionTime").asLong() : null;

      DICT_DATASET_DAO.setDatasetDeprecation(datasetUrn, deprecated, deprecationNote, decommissionTime, username);
    } catch (Exception e) {
      Logger.error("Update dataset deprecation fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  // Fetches the schema; 404 when absent.
  public static Promise<Result> getDatasetSchema(String datasetUrn) {
    final DatasetSchema schema;
    try {
      schema = DATASET_VIEW_DAO.getDatasetSchema(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch schema fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    if (schema == null) {
      return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
    }

    return Promise.promise(() -> ok(Json.newObject().set("schema", Json.toJson(schema))));
  }

  // Fetches confirmed ownership information.
  public static Promise<Result> getDatasetOwners(String datasetUrn) {
    final DatasetOwnership ownership;
    try {
      ownership = OWNER_VIEW_DAO.getDatasetOwners(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch owners fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(Json.toJson(ownership)));
  }

  // Fetches system-suggested owners.
  public static Promise<Result> getDatasetSuggestedOwners(String datasetUrn) {
    final DatasetOwnership ownership;
    try {
      ownership = OWNER_VIEW_DAO.getDatasetSuggestedOwners(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch owners fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(Json.toJson(ownership)));
  }

  // Replaces the owner list; enforces >= 2 UI-confirmed USER owners.
  public static Promise<Result> updateDatasetOwners(String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    final JsonNode content = request().body().asJson();
    // content should contain arraynode 'owners': []
    if (content == null || !content.has("owners") || !content.get("owners").isArray()) {
      return Promise.promise(() -> badRequest(errorResponse("Update dataset owners fail: missing owners field")));
    }

    try {
      final List<DatasetOwner> owners = Json.mapper().readerFor(new TypeReference<List<DatasetOwner>>() {
      }).readValue(content.get("owners"));

      // NOTE(review): the type check matches "owner"; confirm the stored owner
      // type literal — if owners carry type "DataOwner", this count is always 0
      // and every update is rejected.
      long confirmedOwnerUserCount = owners.stream()
          .filter(s -> "owner".equalsIgnoreCase(s.getType()) && "user".equalsIgnoreCase(s.getIdType())
              && "UI".equalsIgnoreCase(s.getSource()))
          .count();

      // enforce at least two UI (confirmed) USER DataOwner for a dataset before making any changes
      if (confirmedOwnerUserCount < 2) {
        return Promise.promise(() -> badRequest(errorResponse("Less than 2 UI USER owners")));
      }

      OWNER_DAO.updateDatasetOwners(datasetUrn, owners, username);
    } catch (Exception e) {
      Logger.error("Update Dataset owners fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  // Fetches the compliance record.
  public static Promise<Result> getDatasetCompliance(@Nonnull String datasetUrn) {
    final DatasetCompliance record;
    try {
      record = COMPLIANCE_DAO.getDatasetComplianceByUrn(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch compliance fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(Json.newObject().set("complianceInfo", Json.toJson(record))));
  }

  // Updates the compliance record; body URN must match the path URN.
  public static Promise<Result> updateDatasetCompliance(@Nonnull String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    try {
      DatasetCompliance record = Json.mapper().convertValue(request().body().asJson(), DatasetCompliance.class);
      if (record.getDatasetUrn() == null || !record.getDatasetUrn().equals(datasetUrn)) {
        throw new IllegalArgumentException("Dataset Urn not exist or doesn't match.");
      }

      COMPLIANCE_DAO.updateDatasetComplianceByUrn(record, username);
    } catch (Exception e) {
      Logger.error("Update Compliance Info fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  // Fetches the compliance suggestion; 404 when absent.
  public static Promise<Result> getDatasetSuggestedCompliance(@Nonnull String datasetUrn) {
    final DsComplianceSuggestion record;
    try {
      record = COMPLIANCE_DAO.getComplianceSuggestion(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch compliance suggestion fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    if (record == null) {
      return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
    }

    return Promise.promise(() -> ok(Json.newObject().set("complianceSuggestion", Json.toJson(record))));
  }

  // Records ACCEPT/REJECT feedback on a compliance suggestion.
  public static Promise<Result> sendDatasetSuggestedComplianceFeedback(@Nonnull String datasetUrn) {
    try {
      JsonNode record = request().body().asJson();

      String feedback = record.hasNonNull("feedback") ? record.get("feedback").asText().toUpperCase() : null;
      String uid = record.hasNonNull("uid") ? record.get("uid").asText() : "";

      if (!"ACCEPT".equals(feedback) && !"REJECT".equals(feedback)) {
        return Promise.promise(() -> badRequest(_EMPTY_RESPONSE));
      }

      COMPLIANCE_DAO.sendSuggestedComplianceFeedback(datasetUrn, uid, feedback);
    } catch (Exception e) {
      Logger.error("Send compliance suggestion feedback fail", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  // Fetches the dataset's ACL entries; 404 when none exist.
  public static Promise<Result> getDatasetAcls(@Nonnull String datasetUrn) {
    final List<AccessControlEntry> acls;
    try {
      acls = ACL_DAO.getDatasetAcls(datasetUrn);
    } catch (Exception e) {
      if (e.toString().contains("Response status 404")) {
        return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
      }

      Logger.error("Fetch ACLs error", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    if (acls == null) {
      return Promise.promise(() -> notFound(_EMPTY_RESPONSE));
    }

    return Promise.promise(() -> ok(Json.toJson(acls)));
  }

  // Grants the session user JIT access (default "r", default 48h expiry);
  // requires a businessJustification in the body.
  public static Promise<Result> addUserToDatasetAcl(@Nonnull String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    JsonNode record = request().body().asJson();

    String accessType = record.hasNonNull("accessType") ? record.get("accessType").asText() : "r"; // default read

    Long expiresAt = record.hasNonNull("expiresAt") ? record.get("expiresAt").asLong()
        : System.currentTimeMillis() / 1000 + _DEFAULT_JIT_ACL_PERIOD; // default now + 48h, in seconds

    if (!record.hasNonNull("businessJustification")) {
      return Promise.promise(() -> badRequest(errorResponse("Missing business justification")));
    }
    String businessJustification = record.get("businessJustification").asText();

    try {
      ACL_DAO.addUserToDatasetAcl(datasetUrn, username, accessType, businessJustification, expiresAt);
    } catch (Exception e) {
      Logger.error("Add user to ACL error", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  // Revokes the session user's ACL entry.
  public static Promise<Result> removeUserFromDatasetAcl(@Nonnull String datasetUrn) {
    final String username = session("user");
    if (StringUtils.isBlank(username)) {
      return Promise.promise(() -> unauthorized(_EMPTY_RESPONSE));
    }

    try {
      ACL_DAO.removeUserFromDatasetAcl(datasetUrn, username);
    } catch (Exception e) {
      Logger.error("Remove User from ACL error", e);
      return Promise.promise(() -> internalServerError(errorResponse(e)));
    }

    return Promise.promise(() -> ok(_EMPTY_RESPONSE));
  }

  // Wraps a throwable into the standard {"msg": ...} error body.
  private static <E extends Throwable> JsonNode errorResponse(E e) {
    return errorResponse(e.toString());
  }

  // Builds the standard {"msg": ...} error body.
  private static JsonNode errorResponse(String msg) {
    return Json.newObject().put("msg", msg);
  }
}
| Fix DatasetOwenr number check (#1094)
| wherehows-frontend/app/controllers/api/v2/Dataset.java | Fix DatasetOwenr number check (#1094) |
|
Java | apache-2.0 | 086d41100994de361803a56747c0bbdf5550efbe | 0 | gotmyjobs/couchbase-lite-android,0359xiaodong/couchbase-lite-android,vladoatanasov/couchbase-lite-android,cesine/couchbase-lite-android,vladoatanasov/couchbase-lite-android,0359xiaodong/couchbase-lite-android,cesine/couchbase-lite-android,msdgwzhy6/couchbase-lite-android,Spotme/couchbase-lite-android,netsense-sas/couchbase-lite-android,netsense-sas/couchbase-lite-android,msdgwzhy6/couchbase-lite-android,Spotme/couchbase-lite-android,netsense-sas/couchbase-lite-android,vladoatanasov/couchbase-lite-android,cesine/couchbase-lite-android,Spotme/couchbase-lite-android,msdgwzhy6/couchbase-lite-android,msdgwzhy6/couchbase-lite-android,netsense-sas/couchbase-lite-android,0359xiaodong/couchbase-lite-android,couchbase/couchbase-lite-android,cesine/couchbase-lite-android,couchbase/couchbase-lite-android,gotmyjobs/couchbase-lite-android,0359xiaodong/couchbase-lite-android,vladoatanasov/couchbase-lite-android,Spotme/couchbase-lite-android | package com.couchbase.cblite.testapp.tests;
import android.test.InstrumentationTestCase;
import com.couchbase.cblite.support.CBLMultipartReader;
import com.couchbase.cblite.support.CBLMultipartReaderDelegate;
import junit.framework.Assert;
import org.apache.http.util.ByteArrayBuffer;
import java.io.ByteArrayInputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Unit tests for {@link CBLMultipartReader}: boundary parsing from
 * Content-Type headers, and incremental MIME-body parsing at every
 * possible chunk size.
 */
public class MultipartReader extends InstrumentationTestCase {

    /**
     * Test delegate that records, for each part, its headers and the bytes
     * appended to it, so the tests below can inspect the reader's output.
     */
    class TestMultipartReaderDelegate implements CBLMultipartReaderDelegate {

        private ByteArrayBuffer currentPartData;
        private List<Map<String, String>> headersList;
        private List<ByteArrayBuffer> partList;

        public void startedPart(Map<String, String> headers) {
            // A previous part must have been finished before a new one starts.
            Assert.assertNull(currentPartData);
            if (partList == null) {
                partList = new ArrayList<ByteArrayBuffer>();
            }
            currentPartData = new ByteArrayBuffer(1024);
            partList.add(currentPartData);
            if (headersList == null) {
                headersList = new ArrayList<Map<String, String>>();
            }
            headersList.add(headers);
        }

        public void appendToPart(byte[] data) {
            Assert.assertNotNull(currentPartData);
            currentPartData.append(data, 0, data.length);
        }

        public void finishedPart() {
            Assert.assertNotNull(currentPartData);
            currentPartData = null;
        }
    }

    /**
     * Verifies boundary extraction for several Content-Type variants, and
     * that a malformed header (unterminated quoted boundary) is rejected.
     */
    public void testParseContentType() {
        Charset utf8 = Charset.forName("UTF-8");
        HashMap<String, byte[]> contentTypes = new HashMap<String, byte[]>();
        contentTypes.put("multipart/related; boundary=\"BOUNDARY\"", new String("\r\n--BOUNDARY").getBytes(utf8));
        contentTypes.put("multipart/related; boundary=BOUNDARY", new String("\r\n--BOUNDARY").getBytes(utf8));
        contentTypes.put("multipart/related;boundary=X", new String("\r\n--X").getBytes(utf8));
        for (String contentType : contentTypes.keySet()) {
            CBLMultipartReaderDelegate delegate = null;
            // BUG FIX: a hard-coded content type was parsed here before, so the
            // loop never exercised the other header variants in the map.
            CBLMultipartReader reader = new CBLMultipartReader(contentType, delegate);
            byte[] expectedBoundary = contentTypes.get(contentType);
            byte[] boundary = reader.getBoundary();
            // BUG FIX: assertEquals on byte[] compares references, not contents.
            Assert.assertTrue(Arrays.equals(expectedBoundary, boundary));
        }
        try {
            CBLMultipartReaderDelegate delegate = null;
            CBLMultipartReader reader = new CBLMultipartReader("multipart/related; boundary=\"BOUNDARY", delegate);
            Assert.assertTrue("Should not have gotten here, above lines should have thrown exception", false);
        } catch (Exception e) {
            // expected exception: unterminated quoted boundary
        }
    }

    /**
     * Feeds a fixed MIME body to the reader in every chunk size from one byte
     * up to the whole message, and checks that the same two parts with the
     * same headers are produced each time.
     */
    public void testReaderOperation() {
        Charset utf8 = Charset.forName("UTF-8");
        byte[] mime = new String("--BOUNDARY\r\nFoo: Bar\r\n Header : Val ue \r\n\r\npart the first\r\n--BOUNDARY \r\n\r\n2nd part\r\n--BOUNDARY--").getBytes(utf8);
        for (int chunkSize = 1; chunkSize <= mime.length; ++chunkSize) {
            TestMultipartReaderDelegate delegate = new TestMultipartReaderDelegate();
            String contentType = "multipart/related; boundary=\"BOUNDARY\"";
            CBLMultipartReader reader = new CBLMultipartReader(contentType, delegate);
            Assert.assertFalse(reader.finished());
            int location = 0;
            do {
                Assert.assertTrue("Parser didn't stop at end", location < mime.length);
                int length = Math.min(chunkSize, mime.length - location);
                // BUG FIX: the old code read from a single InputStream shared by all
                // chunk sizes (exhausted after the first pass) and passed 'location'
                // as the *destination* offset into a length-sized buffer (overrun).
                // Slice the source array directly instead.
                byte[] bytesRead = Arrays.copyOfRange(mime, location, location + length);
                reader.appendData(bytesRead);
                location += length;
            } while (!reader.finished());
            Assert.assertEquals(2, delegate.partList.size());
            Assert.assertEquals(2, delegate.headersList.size());
            byte[] part1Expected = new String("part the first").getBytes(utf8);
            byte[] part2Expected = new String("2nd part").getBytes(utf8);
            ByteArrayBuffer part1 = delegate.partList.get(0);
            ByteArrayBuffer part2 = delegate.partList.get(1);
            Assert.assertTrue(Arrays.equals(part1Expected, part1.toByteArray()));
            Assert.assertTrue(Arrays.equals(part2Expected, part2.toByteArray()));
            Map<String, String> headers1 = delegate.headersList.get(0);
            Assert.assertTrue(headers1.containsKey("Foo"));
            Assert.assertEquals("Bar", headers1.get("Foo"));
            Map<String, String> headers2 = delegate.headersList.get(1);
            Assert.assertTrue(headers2.containsKey("Header"));
            Assert.assertEquals("Val ue", headers2.get("Header"));
        }
    }
}
| java/com/couchbase/cblite/testapp/tests/MultipartReader.java | package com.couchbase.cblite.testapp.tests;
import android.test.InstrumentationTestCase;
import com.couchbase.cblite.support.CBLMultipartReader;
import com.couchbase.cblite.support.CBLMultipartReaderDelegate;
import junit.framework.Assert;
import org.apache.http.util.ByteArrayBuffer;
import java.io.ByteArrayInputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Unit tests for {@link CBLMultipartReader}: boundary parsing from
 * Content-Type headers, and incremental MIME-body parsing at every
 * possible chunk size.
 */
public class MultipartReader extends InstrumentationTestCase {

    /**
     * Test delegate that records, for each part, its headers and the bytes
     * appended to it, so the tests below can inspect the reader's output.
     */
    class TestMultipartReaderDelegate implements CBLMultipartReaderDelegate {

        private ByteArrayBuffer currentPartData;
        private List<Map<String, String>> headersList;
        private List<ByteArrayBuffer> partList;

        public void startedPart(Map<String, String> headers) {
            // A previous part must have been finished before a new one starts.
            Assert.assertNull(currentPartData);
            if (partList == null) {
                partList = new ArrayList<ByteArrayBuffer>();
            }
            // BUG FIX: currentPartData was never allocated, so a null was added
            // to partList and appendToPart's assertNotNull could never pass.
            currentPartData = new ByteArrayBuffer(1024);
            partList.add(currentPartData);
            if (headersList == null) {
                headersList = new ArrayList<Map<String, String>>();
            }
            headersList.add(headers);
        }

        public void appendToPart(byte[] data) {
            Assert.assertNotNull(currentPartData);
            currentPartData.append(data, 0, data.length);
        }

        public void finishedPart() {
            Assert.assertNotNull(currentPartData);
            currentPartData = null;
        }
    }

    /**
     * Verifies boundary extraction for several Content-Type variants, and
     * that a malformed header (unterminated quoted boundary) is rejected.
     */
    public void testParseContentType() {
        Charset utf8 = Charset.forName("UTF-8");
        HashMap<String, byte[]> contentTypes = new HashMap<String, byte[]>();
        contentTypes.put("multipart/related; boundary=\"BOUNDARY\"", new String("\r\n--BOUNDARY").getBytes(utf8));
        contentTypes.put("multipart/related; boundary=BOUNDARY", new String("\r\n--BOUNDARY").getBytes(utf8));
        contentTypes.put("multipart/related;boundary=X", new String("\r\n--X").getBytes(utf8));
        for (String contentType : contentTypes.keySet()) {
            CBLMultipartReaderDelegate delegate = null;
            // BUG FIX: a hard-coded content type was parsed here before, so the
            // loop never exercised the other header variants in the map.
            CBLMultipartReader reader = new CBLMultipartReader(contentType, delegate);
            byte[] expectedBoundary = contentTypes.get(contentType);
            byte[] boundary = reader.getBoundary();
            // BUG FIX: assertEquals on byte[] compares references, not contents.
            Assert.assertTrue(Arrays.equals(expectedBoundary, boundary));
        }
        try {
            CBLMultipartReaderDelegate delegate = null;
            CBLMultipartReader reader = new CBLMultipartReader("multipart/related; boundary=\"BOUNDARY", delegate);
            Assert.assertTrue("Should not have gotten here, above lines should have thrown exception", false);
        } catch (Exception e) {
            // expected exception: unterminated quoted boundary
        }
    }

    /**
     * Feeds a fixed MIME body to the reader in every chunk size from one byte
     * up to the whole message, and checks that the same two parts with the
     * same headers are produced each time.
     */
    public void testReaderOperation() {
        Charset utf8 = Charset.forName("UTF-8");
        byte[] mime = new String("--BOUNDARY\r\nFoo: Bar\r\n Header : Val ue \r\n\r\npart the first\r\n--BOUNDARY \r\n\r\n2nd part\r\n--BOUNDARY--").getBytes(utf8);
        for (int chunkSize = 1; chunkSize <= mime.length; ++chunkSize) {
            TestMultipartReaderDelegate delegate = new TestMultipartReaderDelegate();
            String contentType = "multipart/related; boundary=\"BOUNDARY\"";
            CBLMultipartReader reader = new CBLMultipartReader(contentType, delegate);
            Assert.assertFalse(reader.finished());
            int location = 0;
            do {
                Assert.assertTrue("Parser didn't stop at end", location < mime.length);
                int length = Math.min(chunkSize, mime.length - location);
                // BUG FIX: the old code read from a single InputStream shared by all
                // chunk sizes (exhausted after the first pass) and passed 'location'
                // as the *destination* offset into a length-sized buffer (overrun).
                // Slice the source array directly instead.
                byte[] bytesRead = Arrays.copyOfRange(mime, location, location + length);
                reader.appendData(bytesRead);
                location += length;
            } while (!reader.finished());
            Assert.assertEquals(2, delegate.partList.size());
            Assert.assertEquals(2, delegate.headersList.size());
            byte[] part1Expected = new String("part the first").getBytes(utf8);
            byte[] part2Expected = new String("2nd part").getBytes(utf8);
            ByteArrayBuffer part1 = delegate.partList.get(0);
            ByteArrayBuffer part2 = delegate.partList.get(1);
            Assert.assertTrue(Arrays.equals(part1Expected, part1.toByteArray()));
            Assert.assertTrue(Arrays.equals(part2Expected, part2.toByteArray()));
            Map<String, String> headers1 = delegate.headersList.get(0);
            Assert.assertTrue(headers1.containsKey("Foo"));
            Assert.assertEquals("Bar", headers1.get("Foo"));
            Map<String, String> headers2 = delegate.headersList.get(1);
            Assert.assertTrue(headers2.containsKey("Header"));
            Assert.assertEquals("Val ue", headers2.get("Header"));
        }
    }
}
| implement append(). issue #47
| java/com/couchbase/cblite/testapp/tests/MultipartReader.java | implement append(). issue #47 |
|
Java | apache-2.0 | 85ea177b76688590762b2f68bb61f5277eda6c7c | 0 | samaitra/jena,apache/jena,CesarPantoja/jena,samaitra/jena,atsolakid/jena,tr3vr/jena,apache/jena,samaitra/jena,adrapereira/jena,jianglili007/jena,atsolakid/jena,atsolakid/jena,tr3vr/jena,jianglili007/jena,adrapereira/jena,CesarPantoja/jena,kamir/jena,jianglili007/jena,kamir/jena,jianglili007/jena,kidaa/jena,tr3vr/jena,kamir/jena,jianglili007/jena,apache/jena,CesarPantoja/jena,atsolakid/jena,kidaa/jena,tr3vr/jena,kidaa/jena,samaitra/jena,apache/jena,kamir/jena,atsolakid/jena,CesarPantoja/jena,tr3vr/jena,kidaa/jena,tr3vr/jena,samaitra/jena,adrapereira/jena,adrapereira/jena,kidaa/jena,kidaa/jena,atsolakid/jena,kidaa/jena,kamir/jena,CesarPantoja/jena,tr3vr/jena,adrapereira/jena,kamir/jena,CesarPantoja/jena,apache/jena,apache/jena,jianglili007/jena,adrapereira/jena,jianglili007/jena,CesarPantoja/jena,kamir/jena,samaitra/jena,samaitra/jena,atsolakid/jena,apache/jena,apache/jena,adrapereira/jena | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hp.hpl.jena.rdf.model;
import com.hp.hpl.jena.datatypes.*;
import com.hp.hpl.jena.shared.*;
import java.io.*;
import java.util.*;
/**
An RDF Model.
<p>
An RDF model is a set of Statements. Methods are provided for creating
resources, properties and literals and the Statements which link them,
for adding statements to and removing them from a model, for
querying a model and set operations for combining models.
<p>
Models may create Resources [URI nodes and bnodes]. Creating a Resource does
<i>not</i> make the Resource visible to the model; Resources are only "in" Models
if Statements about them are added to the Model. Similarly the only way to "remove"
a Resource from a Model is to remove all the Statements that mention it.
<p>
When a Resource or Literal is created by a Model, the Model is free to re-use an
existing Resource or Literal object with the correct values, or it may create a fresh
one. [All Jena RDFNodes and Statements are immutable, so this is generally safe.]
<p>
This interface defines a set of primitive methods. A set of
convenience methods which extends this interface, e.g. performing
automatic type conversions and support for enhanced resources,
is defined in {@link ModelCon}.</P>
<h2>System Properties</h2>
<h3>Firewalls and Proxies</h3>
Some of the methods, e.g. the read methods, may have to traverse a
firewall. This can be accomplished using the standard java method
of setting system properties. To use a socks proxy, include on the
java command line:</p>
* <blockquote>
* -DsocksProxyHost=[your-proxy-domain-name-or-ip-address]
* </blockquote>
*
* <p>To use an http proxy, include on the command line:</p>
* <blockquote>
* -DproxySet=true -DproxyHost=[your-proxy] -DproxyPort=[your-proxy-port-number]
* </blockquote>
*
* <p>Alternatively, these properties can be set programmatically, e.g.</p>
*
* <code><pre>
* System.getProperties().put("proxySet","true");
* System.getProperties().put("proxyHost","proxy.hostname");
* System.getProperties().put("proxyPort",port_number);
* </pre></code>
*/
public interface Model
extends ModelCon, ModelGraphInterface,
RDFReaderF, RDFWriterF, PrefixMapping, Lock
{
/**
* size will return the number of statements in a concrete model,
* for a virtualized model such as one created by an inference engine,
* it will return an estimated lower bound for the number of statements
* in the model but it is possible for a subsequent listStatements on
* such a model to discover more statements than size() indicated.
* @return the number of statements in a concrete model or an estimated
* lower bound on the number of statements in an virtualized model
*/
long size() ;
/**
Answer true iff the model contains no explicit statements (i.e. its size is zero,
listStatements() would deliver the empty iterator).
@return true iff the model contains no explicit statements.
*/
boolean isEmpty();
/** List all resources which are subjects of statements.
*
* <p>Subsequent operations on those resource may modify this model.</p>
* @return an iterator over a set of resources which are subjects of statements
* in the model. .remove() is not implemented on this iterator.
*
*/
ResIterator listSubjects() ;
/**
(You probably don't want this method; more likely you want the
PrefixMapping methods that Model supports.) List the namespaces used
by predicates and types in the model. This method is really intended
for use by the RDF/XML writer, which needs to know these
namespaces to generate correct and vaguely pretty XML.
<p>
The namespaces returned are those of (a) every URI used as a property in the
model and (b) those of every URI that appears as the object of an rdf:type statement.
<p>
Note that the notion of "namespace" used here is not that of an XML
prefix-namespace, but just of the minimal legal left part of a URI
(see Util.splitNamespace for details). If you want the RDF/XML (or
N3) namespaces, treat the Model as a PrefixMapping.
@see com.hp.hpl.jena.shared.PrefixMapping
@return an iterator over every predicate and type namespace
*/
NsIterator listNameSpaces() ;
/**
Return a Resource instance with the given URI in this model. <i>This method
behaves identically to <code>createResource(String)</code></i> and exists as
legacy: createResource is now capable of, and allowed to, reuse existing objects.
<p>
Subsequent operations on the returned object may modify this model.
@return a resource instance
@param uri the URI of the resource
*/
Resource getResource(String uri) ;
/**
Return a Property instance with the given URI in this model. <i>This method
behaves identically to <code>createProperty(String,String)</code></i> and exists as
legacy: createProperty is now capable of, and allowed to, reuse existing objects.
<p>
Subsequent operations on the returned property may modify this model.
@return a property linked to this model
@param nameSpace the RDF namespace of the property
@param localName the localName of the property in its namespace
*/
Property getProperty(String nameSpace, String localName);
/**
Create a new anonymous resource whose model is this model. This bnode will
have a new AnonId distinct from any allocated by any other call of this method.
<p>
Subsequent operations on the returned resource may modify this model.
@return a new anonymous resource linked to this model.
*/
public Resource createResource() ;
/**
Create a blank node resource with a specified identifier. The resulting bnode
will be equal to any other bnode with the same AnonId (even if they are in
separate models - be warned). The intended use for this method is to allow
bnode round-tripping between Jena models and other representations.
<p>
This method may return an existing bnode with the correct AnonId and model, or it
may construct a fresh one, as it sees fit.
<p>
Operations on the result may modify this model
@param id the identifier to use for this blank node
@return a blank node with that identifier
*/
public Resource createResource( AnonId id );
/**
Create a new resource associated with this model. If the uri string is null, this creates
a bnode, as per <code>createResource()</code>. Otherwise it creates a URI node.
A URI resource is .equals() to any other URI Resource with the same URI (even in
a different model - be warned).
<p>
This method may return an existing Resource with the correct URI and model, or it
may construct a fresh one, as it sees fit.
<p>
Operations on the result Resource may change this model.
@param uri the URI of the resource to be created
@return a new resource linked to this model.
*/
public Resource createResource( String uri ) ;
/**
Create a property with a given URI composed from a namespace part and a
localname part by concatenating the strings.
<p>
This method may return an existing property with the correct URI and model, or it
may construct a fresh one, as it sees fit.
<p>
Subsequent operations on the returned property may modify this model.
@param nameSpace the nameSpace of the property
@param localName the name of the property within its namespace
@return a property instance
*/
public Property createProperty(String nameSpace, String localName);
/**
Create an untyped literal from a String value with a specified language.
@param v the lexical form of the literal
@param language the language associated with the literal
@return a new literal representing the value v with the given language
*/
public Literal createLiteral(String v, String language);
/**
Create a literal from a String value. An existing literal
of the right value may be returned, or a fresh one created.
The use of the wellFormed flag is to create typed literals of
type rdf:XMLLiteral, without error checking. This should
only be use when the lexical form is known to already be
in exclusive canonical XML.
@param v the lexical form of the literal
@param wellFormed true if the Literal is well formed XML, in the lexical space of rdf:XMLLiteral
@return a new literal
*/
public Literal createLiteral(String v, boolean wellFormed);
/**
Build a typed literal from its lexical form. The
lexical form will be parsed now and the value stored. If
the form is not legal this will throw an exception.
<p>
Note that in preview releases of Jena2 it was also possible to specify
a language type. Changes to the RDF specification mean that this is no longer
legal except for plain literals. To create a plain literal with a language tag
use {@link #createLiteral(String, String) createLiteral}.
@param lex the lexical form of the literal
@param dtype the type of the literal, null for old style "plain" literals
@throws DatatypeFormatException if lex is not a legal form of dtype
*/
public Literal createTypedLiteral(String lex, RDFDatatype dtype);
/**
* Build a typed literal from its value form.
* <p>
* Note that in preview releases of Jena2 it was also possible to specify
* a language type. Changes to the RDF specification mean that this is no longer
* legal except for plain literals. To create a plain literal with a language tag
* use {@link #createLiteral(String, String) createLiteral}.
* </p>
* @param value the value of the literal
* @param dtype the type of the literal, null for old style "plain" literals
*/
public Literal createTypedLiteral(Object value, RDFDatatype dtype);
/**
* Build a typed literal label from its value form using
* whatever datatype is currently registered as the the default
* representation for this java class. No language tag is supplied.
* @param value the literal value to encapsulate
*/
@Override
public Literal createTypedLiteral(Object value);
/**
Create a Statement instance. (Creating a statement does not add it to the set of
statements in the model; see Model::add). This method may return an existing
Statement with the correct components and model, or it may construct a fresh one,
as it sees fit.
<p>
Subsequent operations on the statement or any of its parts may modify this model.
@param s the subject of the statement
@param p the predicate of the statement
@param o the object of the statement
@return the new statement
*/
public Statement createStatement( Resource s, Property p, RDFNode o );
/**
Answer a new empty list. This is equivalent to a list consisting only
of <code>rdf:nil</code>.
@return An RDF-encoded list of no elements
*/
public RDFList createList();
/**
* <p>Answer a new list containing the resources from the given iterator, in order.</p>
* @param members An iterator, each value of which is expected to be an RDFNode
* @return An RDF-encoded list of the elements of the iterator
*/
public RDFList createList( Iterator<? extends RDFNode> members );
/**
* <p>Answer a new list containing the nodes from the given array, in order</p>
* @param members An array of RDF nodes that will be the members of the list
* @return An RDF-encoded list
*/
public RDFList createList( RDFNode[] members );
/** Add a statement to this model.
* @return This model.
* @param s The statement to be added.
*/
Model add(Statement s) ;
/**
Add all the statements to the Model, using through the bulk update interface.
@param statements the array of statements to add
@return this model, to allow cascading
*/
Model add( Statement [] statements );
/**
Remove all the statements from the Model, using the bulk update interface.
@param statements the array of statements to be added
@return this model, to allow cascading
*/
Model remove( Statement [] statements );
/**
add all the statements in the List to this Model, going through the bulk
update interface (which means turning them into triples in one form or
another).
@param statements a List of Statements
@return this model, to allow cascading
*/
Model add( List<Statement> statements );
/**
Remove all the statements in the list from this model, using the bulk
update interface.
@param statements a List of Statements to remove
@return this model, to allow cascading
*/
Model remove( List<Statement> statements );
/** Add all the statements returned by an iterator to this model.
* @return this model
* @param iter An iterator which returns the statements to be added.
*/
Model add(StmtIterator iter) ;
/** Add all the statements in another model to this model, including the
* reified statements.
* @return this model
* @param m The model whose statements are to be added.
*/
Model add(Model m) ;
/**
Add all the statements of the given model m to this model.
Optionally suppress the addition of reified statements.
@param m the model containing the statements to add
@param suppressReifications true to suppress adding reified statements
@return this model for cascading
@deprecated suppressReifications no longer has any effect.
*/
@Deprecated
Model add( Model m, boolean suppressReifications );
/** Add the RDF statements from a document.
* Uses content negotiation to request appropriate mime types.
* If the content type is not found, it may guess from the URL.
* <p>See {@link Model} for a description of how to traverse a firewall.</p>
* <p>
* See <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about determining the syntax.
* </p>
*
* @return this model
* @param url of the document containing the RDF statements.
*/
public Model read(String url) ;
/** Add statements from a document.
* This method assumes the concrete syntax is RDF/XML.
* See {@link #read(InputStream, String, String)} for explicitly setting the language.
* <p>
* See <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about concrete syntaxes.
* </p>
*
* @param in the input stream
@param base the base uri to be used when converting relative
URI's to absolute URI's. (Resolving relative URIs and fragment IDs is done
by prepending the base URI to the relative URI/fragment.) If there are no
relative URIs in the source, this argument may safely be <code>null</code>.
If the base is the empty string, then relative URIs <i>will be retained in
the model</i>. This is typically unwise and will usually generate errors
when writing the model back out.
* @return the current model
*/
public Model read(InputStream in, String base) ;
/** Add RDF statements represented in language <code>lang</code> to the model.
* <br />Predefined values for <code>lang</code> are "RDF/XML", "N-TRIPLE",
* "TURTLE" (or "TTL") and "N3".
* <code>null</code> represents the default language, "RDF/XML".
* "RDF/XML-ABBREV" is a synonym for "RDF/XML".
* <br />
*
* @return this model
@param base the base uri to be used when converting relative
URI's to absolute URI's. (Resolving relative URIs and fragment IDs is done
by prepending the base URI to the relative URI/fragment.) If there are no
relative URIs in the source, this argument may safely be <code>null</code>.
If the base is the empty string, then relative URIs <i>will be retained in
the model</i>. This is typically unwise and will usually generate errors
when writing the model back out.
* <p>
* See <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about concrete syntaxes.
* </p>
* @param lang the language of the serialization <code>null</code>
* selects the default
* @param in the source of the input serialization
*/
public Model read(InputStream in, String base, String lang);
/** Using this method is often a mistake.
* Add statements from an RDF/XML serialization.
* It is generally better to use an InputStream if possible,
* otherwise there is a danger of a
* mismatch between the character encoding of say the FileReader and the
* character encoding of the data in the file.
*
* It is better to explicitly set the serialization format.
* See {@link #read(InputStream, String, String)} for explicitly setting the serialization language.
*
* <p>
* See <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about concrete syntaxes.
* </p>
* @param reader
* @param base the base uri to be used when converting relative URI's to absolute URI's and to guess the RDF serialization syntax.
* @return the current model
*/
public Model read(Reader reader, String base) ;
/**
* Add statements from a serialization in language <code>lang</code> to the
* model.
* <br />Predefined values for <code>lang</code> are "RDF/XML", "N-TRIPLE",
* "TURTLE" (or "TTL") and "N3".
* <code>null</code> represents the default language, "RDF/XML".
* "RDF/XML-ABBREV" is a synonym for "RDF/XML".
* <br />
*
* <p>See {@link Model} for a description of how to traverse a firewall.</p>
* <p>
* See <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about concrete syntaxes.
* </p>
* @param url a string representation of the url to read from
* @param lang the language of the serialization
* @return this model
*/
public Model read(String url, String lang) ;
/** Using this method is often a mistake.
* Add RDF statements represented in language <code>lang</code> to the model.
* <br />Predefined values for <code>lang</code> are "RDF/XML", "N-TRIPLE",
* "TURTLE" (or "TTL") and "N3".
* <code>null</code> represents the default language, "RDF/XML".
*"RDF/XML-ABBREV" is a synonym for "RDF/XML".
* <br />
* It is generally better to use an InputStream if possible.
* {@link Model#read(InputStream,String)}, otherwise there is a danger of a
* mismatch between the character encoding of say the FileReader and the
* character encoding of the data in the file.
* @return this model
@param base the base uri to be used when converting relative
URI's to absolute URI's. (Resolving relative URIs and fragment IDs is done
by prepending the base URI to the relative URI/fragment.) If there are no
relative URIs in the source, this argument may safely be <code>null</code>.
If the base is the empty string, then relative URIs <i>will be retained in
the model</i>. This is typically unwise and will usually generate errors
when writing the model back out.
* @param lang the language of the serialization <code>null</code>
* selects the default
* @param reader the source of the input serialization
*/
public Model read(Reader reader, String base, String lang);
/**
Read into this model the RDF at <code>url</code>, using
<code>baseURI</code> as the base URI if it is non-null. The RDF is assumed
to be RDF/XML unless <code>lang</code> is non-null, in which case it names
the language to be used. Answer this model.
*/
Model read( String url, String base, String lang );
// output operations
/**
* <p>Write the model as an XML document.
* It is often better to use an OutputStream rather than a Writer, since this
* will avoid character encoding errors.
* </p>
*
* @param writer A writer to which the XML will be written
* @return this model
*/
public Model write( Writer writer ) ;
/**
* <p>Write a serialized representation of a model in a specified language.
* It is often better to use an OutputStream rather than a Writer, since this
* will avoid character encoding errors.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE", "TURTLE", (and "TTL") and "N3". The default value,
* represented by <code>null</code> is "RDF/XML".</p>
* @param writer The output writer
* @param lang The output language
* @return this model
*/
public Model write( Writer writer, String lang ) ;
/**
* <p>Write a serialized representation of a model in a specified language.
* It is often better to use an OutputStream rather than a Writer,
* since this will avoid character encoding errors.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE", "TURTLE", (and "TTL") and "N3". The default value,
* represented by <code>null</code>, is "RDF/XML".</p>
* @param writer The output writer
* @param base The base uri for relative URI calculations.
* <code>null</code> means use only absolute URI's.
* @param lang The language in which the RDF should be written
* @return this model
*/
public Model write( Writer writer, String lang, String base );
/**
* <p>Write a serialization of this model as an XML document.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE" and "N3". The default value is
* represented by <code>null</code> is "RDF/XML".</p>
* @param out The output stream to which the XML will be written
* @return This model
*/
public Model write(OutputStream out) ;
/**
* <p>Write a serialized representation of this model in a specified language.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE", "TURTLE", (and "TTL") and "N3". The default value,
* represented by <code>null</code>, is "RDF/XML".</p>
* @param out The output stream to which the RDF is written
* @param lang The output language
* @return This model
*/
public Model write( OutputStream out, String lang ) ;
/**
* <p>Write a serialized representation of a model in a specified language.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE", "TURTLE", (and "TTL") and "N3". The default value,
* represented by <code>null</code>, is "RDF/XML".</p>
* @param out The output stream to which the RDF is written
* @param base The base uri to use when writing relative URI's. <code>null</code>
* means use only absolute URI's. This is used for relative
* URIs that would be resolved against the document retrieval URL.
* For some values of <code>lang</code>, this value may be included in the output.
* @param lang The language in which the RDF should be written
* @return This model
*/
public Model write( OutputStream out, String lang, String base );
/** Removes a statement.
*
* <p> The statement with the same subject, predicate and object as
* that supplied will be removed from the model.</p>
* @return this model
* @param s The statement to be removed.
*/
Model remove(Statement s) ;
/** Return a statement with given subject and property.
 * <p>If more than one statement with the given subject and property
* exists in the model, it is undefined which will be returned. If none
* exist, an exception is thrown.
* @return A statement from the model with the given subject and property.
* @param s The subject of the statement to be returned.
* @param p The property of the statement to be returned.
* @throws PropertyNotFoundException
*/
Statement getRequiredProperty(Resource s, Property p) ;
/**
Answer a statement (s, p, ?O) from this model. If none exist, return null;
if several exist, pick one arbitrarily.
@param s the subject of the statement to return
@param p the predicate of the statement to return
@return some statement (s, p, ?O) or null if none can be found
*/
Statement getProperty( Resource s, Property p );
/**
An alias for <code>listResourcesWithProperty(Property)</code>,
retained for backward compatibility. It may be deprecated in later
releases.
*/
ResIterator listSubjectsWithProperty( Property p );
/**
Answer an iterator [with no duplicates] over all the resources in this
model that have property <code>p</code>. <code>remove()</code>
is not implemented on this iterator.
*/
ResIterator listResourcesWithProperty( Property p );
/**
An alias for <code>listResourcesWithProperty</code>, retained for
backward compatibility. It may be deprecated in later releases.
*/
ResIterator listSubjectsWithProperty( Property p, RDFNode o );
/**
Answer an iterator [with no duplicates] over all the resources in this
model that have property <code>p</code> with value <code>o</code>.
<code>remove()</code> is not implemented on this iterator.
*/
ResIterator listResourcesWithProperty( Property p, RDFNode o );
/** List all objects in a model.
* @return an iterator over the objects. .remove() is not implemented on this iterator.
*/
NodeIterator listObjects() ;
/** List all objects of a given property. .remove() is not implemented on this iterator.
* @return an iterator over the objects
* @param p The predicate sought
*/
NodeIterator listObjectsOfProperty(Property p) ;
/** List the values of a property of a resource.
* @return an iterator over the objects. .remove() is not implemented on this iterator.
* @param p The predicate sought
*/
NodeIterator listObjectsOfProperty(Resource s, Property p);
/** Determine whether this model contains any statements with a given subject
* and property.
* @return true if there exists within this model a statement with
* subject s and property p, false otherwise
* @param s The subject sought (null for any).
* @param p The predicate sought (null for any).
*/
boolean contains(Resource s, Property p) ;
/**
determine if the RDFNode r appears in any statement of this model.
(containsRDFNode is a horrible name, and in any case, even literals
will be resources one day)
@param r the RDFNode to be searched for
@return true iff r appears as some subject, predicate, or object
*/
boolean containsResource( RDFNode r );
/** Determine if an (S, P, O) pattern is present in this model, with null allowed
* to represent a wildcard match.
* @return true if the statement with subject s, property p and object o
* is in the model, false otherwise
 * @param s The subject of the statement tested (null as wildcard).
* @param p The predicate of the statement tested (null as wildcard).
* @param o The object of the statement tested (null as wildcard).
*/
boolean contains(Resource s, Property p, RDFNode o) ;
/** Determine if a statement is present in this model.
* @param s The statement tested.
* @return true if the statement s is in this model, false otherwise
*/
boolean contains(Statement s) ;
/** Determine if any of the statements returned by an iterator are
* contained in this model.
* @param iter an iterator of the statements to be tested
 * @return true if any of the statements returned by iter are contained
* in this model and false otherwise.
*/
boolean containsAny(StmtIterator iter) ;
/** Determine if all of the statements returned by an iterator are
* contained in this model.
* @param iter an iterator of the statements to be tested
 * @return true if all of the statements returned by iter are contained
* in this model and false otherwise.
*/
boolean containsAll(StmtIterator iter) ;
/** Determine if any of the statements in a model are also contained
* in this model.
* @param model the model containing the statements to be tested
* @return true if any of the statements in model are also contained
* in this model and false otherwise.
*/
boolean containsAny(Model model) ;
/** Determine if all of the statements in a model are also contained
* in this model.
* @param model the model containing the statements to be tested
* @return true if all of the statements in model are also contained
* in this model and false otherwise.
*/
boolean containsAll(Model model) ;
/**
Determine if this Statement has been reified in this Model.
@param s The statement tested.
@return true iff a ReifiedStatement(s) has been created in this model
*/
boolean isReified( Statement s );
/**
Find or create a {@link ReifiedStatement} corresponding to a Statement.
@param s Statement which may or may not already be reified
@return a Resource [ReifiedStatement] that reifies the specified Statement.
*/
Resource getAnyReifiedStatement( Statement s );
/**
Remove all reifications (ie implicit reification quads) of _s_.
*/
void removeAllReifications( Statement s );
/**
Remove a particular reificiation.
*/
void removeReification( ReifiedStatement rs );
/** List all statements.
*
* <p>Subsequent operations on those statements may modify this model.</p>
* @return an iterator over all statements in the model.
*/
StmtIterator listStatements() ;
/** List the statements matching a selector.
*
 * <p>A statement is considered to match if the <CODE>test</CODE> method
* of s returns true when called on s.</p>
* @return an iterator over the matching statements
* @param s A selector object.
.
*/
StmtIterator listStatements(Selector s) ;
/** Find all the statements matching a pattern.
* <p>Return an iterator over all the statements in a model
* that match a pattern. The statements selected are those
* whose subject matches the <code>subject</code> argument,
* whose predicate matches the <code>predicate</code> argument
* and whose object matches the <code>object</code> argument.
* If an argument is <code>null</code> it matches anything.</p>
* @return an iterator over the subjects
* @param s The subject sought
* @param p The predicate sought
* @param o The value sought
*/
StmtIterator listStatements( Resource s, Property p, RDFNode o );
/**
Answer a ReifiedStatement that encodes _s_ and belongs to this Model.
<br>
result.getModel() == this
<br>
result.getStatement() .equals ( s )
*/
ReifiedStatement createReifiedStatement( Statement s );
/**
answer a ReifiedStatement that encodes _s_, belongs to this Model,
and is a Resource with that _uri_.
*/
ReifiedStatement createReifiedStatement( String uri, Statement s );
/**
answer an iterator delivering all the reified statements "in" this model
*/
RSIterator listReifiedStatements();
/**
answer an iterator delivering all the reified statements "in" this model
that match the statement _st_.
*/
RSIterator listReifiedStatements( Statement st );
/**
Answer the reification style of the model.
@return the reification style
createMemModelMaker()
*/
@Deprecated
ReificationStyle getReificationStyle();
/** Create a new model containing the statements matching a query.
*
 * <p>A statement is considered to match if the <CODE>test</CODE> method
 * of s returns true when called on s.</p>
 * @return a new model containing the matching statements
* @param s A selector object.
.
*/
Model query(Selector s) ;
/**
Create a new, independent, model containing all the statements in this model
together with all of those in another given model. By <i>independent</i>
we mean that changes to the result model do not affect the operand
models, and <i>vice versa</i>.
<p>
The new model need not be of the same type as either this model or
the argument model: typically it will be a memory-based model, even
if this model is a database model.
@return A new model containing all the statements that are in either model
@param model The other model whose statements are to be included.
*/
Model union(Model model) ;
/**
Create a new, independent, model containing all the statements which are in both
this model and another. As models are sets of statements, a statement
contained in both models will only appear once in the resulting model.
The new model need not be of the same type as either this model or
the argument model: typically it will be a memory-based model.
@return A new model containing all the statements that are in both models.
@param model The other model.
*/
Model intersection(Model model) ;
/** Create a new, independent, model containing all the statements in this model which
* are not in another.
The new model need not be of the same type as either this model or
the argument model: typically it will be a memory-based model.
* @return a new model containing all the statements in this model that
* are not in the given model.
* @param model the other model whose statements are to be excluded.
*/
Model difference(Model model) ;
/**
* Test whether the given object <code>m</code>
* is a model that is equal to this model,
* which is true iff the underlying graphs are identical Java
* objects. This is not the same test as comparing whether two models
* have the same structure (i.e. contain the same set of statements).
 * To test for structural equivalence, see {@link #isIsomorphicWith}.
* @param m the model to be compared
* @return true if <code>m</code> shares a graph object with this model
* @see #isIsomorphicWith(Model)
*/
@Override
public boolean equals(Object m);
/** Begin a new transaction.
*
* <p> All changes made to a model within a transaction, will either
* be made, or none of them will be made.</p>
* @return this model to enable cascading.
*/
Model begin() ;
/** Abort the current transaction and abandon any changes in progress.
* @return this model to enable cascading.
*/
Model abort() ;
/** Commit the current transaction.
* @return this model to enable cascading.
*/
Model commit() ;
/**
Execute the command <code>cmd</code> inside a transaction. If it
completes, commit the transaction and return the result; if it fails
(by throwing an exception), abort the transaction and throw an
exception.
*/
Object executeInTransaction( Command cmd );
/** Determine whether this model is independent.
*
 * <p>For efficiency reasons, some implementations may create models
 * which are dependent on others, i.e. a change in one model may cause
* a change in another. If this is the case this method will return false,
* otherwise it will return true.</p>
*
 * @return true if this model is independent of others
*/
boolean independent();
/** Determine whether this model supports transactions.
* @return true if this model supports transactions.
*/
boolean supportsTransactions();
/** Determine whether this model supports set operations.
* @return true if this model supports set operations.
*/
boolean supportsSetOperations();
/**
* Compare this Model with another for equality ignoring the labels on
* bNodes.
* See
* <a href="http://www.w3.org/TR/rdf-concepts#section-Graph-syntax">RDF
* Concepts</a>.
* <p>Two models are isomorphic when each statement in one can be matched
* with a statement in the other. Statements which are identical match.</p>
*
* <p>Special treatment is given to anonymous nodes. A binding is a one to
* one mapping which maps each anonymous node in <code>this</code> model to
* an anonymous node in <code>model</code>. Two statements s1 and s2 match
 * under a binding if s1.subject is anonymous and s2.subject is anonymous
* and the binding maps s1.subject to s2.subject.</p>
*
* <p>Two models are isomorphic if there is a binding that allows all the
 * statements in one model to match a statement in the other.</p>
* @param g Compare against this.
* @return boolean True if the two RDF graphs are isomorphic.
*/
boolean isIsomorphicWith(Model g);
/** Close the Model and free up resources held.
*
* <p>Not all implementations of Model require this method to be called. But
* some do, so in general its best to call it when done with the object,
* rather than leave it to the finalizer.</p>
*/
public void close();
// /** Get the model lock for this model.
// * See also the convenience operations enterCriticalSection and leaveCriticalSection.
// *
// * @see ModelLock
// * @return The ModelLock object associated with this model
// * @deprecated Applications should use {@link #getLock()}
// */
// public ModelLock getModelLock() ;
/** Get the model lock for this model.
* See also the convenience operations enterCriticalSection and leaveCriticalSection.
*
* @see Lock
* @return The ModelLock object associated with this model
*/
public Lock getLock() ;
/**
Register a listener for model-changed events on this model. The methods on
the listener will be called when API add/remove calls on the model succeed
[in whole or in part].
<p>
The same listener may be registered many times; if so, its methods will
be called as many times as it's registered for each event.
@see ModelChangedListener
@return this model, for cascading
*/
public Model register( ModelChangedListener listener );
/**
Unregister a listener from model-changed events on this model. The
listener is detached from the model. The model is returned to permit
cascading. If the listener is not attached to the model, then nothing happens.
@see ModelChangedListener
@return this model, for cascading
*/
public Model unregister( ModelChangedListener listener );
/**
Notify any listeners that the event e has occurred.
@param e the event that has occurred
*/
public Model notifyEvent( Object e );
/**
Remove all the statements from this model.
*/
public Model removeAll();
/**
Remove all the statements matching (s, p, o) from this model.
*/
public Model removeAll( Resource s, Property p, RDFNode r );
/**
Answer true iff .close() has been called on this Model.
*/
public boolean isClosed();
}
| jena-core/src/main/java/com/hp/hpl/jena/rdf/model/Model.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hp.hpl.jena.rdf.model;
import com.hp.hpl.jena.datatypes.*;
import com.hp.hpl.jena.shared.*;
import java.io.*;
import java.util.*;
/**
An RDF Model.
<p>
An RDF model is a set of Statements. Methods are provided for creating
resources, properties and literals and the Statements which link them,
for adding statements to and removing them from a model, for
querying a model and set operations for combining models.
<p>
Models may create Resources [URI nodes and bnodes]. Creating a Resource does
<i>not</i> make the Resource visible to the model; Resources are only "in" Models
if Statements about them are added to the Model. Similarly the only way to "remove"
a Resource from a Model is to remove all the Statements that mention it.
<p>
When a Resource or Literal is created by a Model, the Model is free to re-use an
existing Resource or Literal object with the correct values, or it may create a fresh
one. [All Jena RDFNodes and Statements are immutable, so this is generally safe.]
<p>
This interface defines a set of primitive methods. A set of
convenience methods which extends this interface, e.g. performing
automatic type conversions and support for enhanced resources,
is defined in {@link ModelCon}.</P>
<h2>System Properties</h2>
<h3>Firewalls and Proxies</h3>
Some of the methods, e.g. the read methods, may have to traverse a
firewall. This can be accomplished using the standard java method
of setting system properties. To use a socks proxy, include on the
java command line:</p>
* <blockquote>
* -DsocksProxyHost=[your-proxy-domain-name-or-ip-address]
* </blockquote>
*
* <p>To use an http proxy, include on the command line:</p>
* <blockquote>
* -DproxySet=true -DproxyHost=[your-proxy] -DproxyPort=[your-proxy-port-number]
* </blockquote>
*
 * <p>Alternatively, these properties can be set programmatically, e.g.</p>
*
* <code><pre>
* System.getProperties().put("proxySet","true");
* System.getProperties().put("proxyHost","proxy.hostname");
* System.getProperties().put("proxyPort",port_number);
* </pre></code>
*/
public interface Model
extends ModelCon, ModelGraphInterface,
RDFReaderF, RDFWriterF, PrefixMapping, Lock
{
/**
* size will return the number of statements in a concrete model,
* for a virtualized model such as one created by an inference engine,
 * it will return an estimated lower bound for the number of statements
* in the model but it is possible for a subsequent listStatements on
* such a model to discover more statements than size() indicated.
* @return the number of statements in a concrete model or an estimated
 * lower bound on the number of statements in a virtualized model
*/
long size() ;
/**
Answer true iff the model contains no explicit statements (ie its size is zero,
listStatements() would deliver the empty iterator).
@return true iff the model contains no explicit statements.
*/
boolean isEmpty();
/** List all resources which are subjects of statements.
*
* <p>Subsequent operations on those resource may modify this model.</p>
* @return an iterator over a set of resources which are subjects of statements
* in the model. .remove() is not implemented on this iterator.
*
*/
ResIterator listSubjects() ;
/**
(You probably don't want this method; more likely you want the
PrefixMapping methods that Model supports.) List the namespaces used
by predicates and types in the model. This method is really intended
for use by the RDF/XML writer, which needs to know these
namespaces to generate correct and vaguely pretty XML.
<p>
The namespaces returned are those of (a) every URI used as a property in the
model and (b) those of every URI that appears as the object of an rdf:type statement.
<p>
Note that the notion of "namespace" used here is not that of an XML
prefix-namespace, but just of the minimal legal left part of a URI
(see Util.splitNamespace for details). If you want the RDF/XML (or
N3) namespaces, treat the Model as a PrefixMapping.
@see com.hp.hpl.jena.shared.PrefixMapping
@return an iterator over every predicate and type namespace
*/
NsIterator listNameSpaces() ;
/**
Return a Resource instance with the given URI in this model. <i>This method
behaves identically to <code>createResource(String)</code></i> and exists as
legacy: createResource is now capable of, and allowed to, reuse existing objects.
<p>
Subsequent operations on the returned object may modify this model.
@return a resource instance
@param uri the URI of the resource
*/
Resource getResource(String uri) ;
/**
Return a Property instance with the given URI in this model. <i>This method
behaves identically to <code>createProperty(String,String)</code></i> and exists as
legacy: createProperty is now capable of, and allowed to, reuse existing objects.
<p>
Subsequent operations on the returned property may modify this model.
@return a property linked to this model
@param nameSpace the RDF namespace of the property
@param localName the localName of the property in its namespace
*/
Property getProperty(String nameSpace, String localName);
/**
Create a new anonymous resource whose model is this model. This bnode will
have a new AnonId distinct from any allocated by any other call of this method.
<p>
Subsequent operations on the returned resource may modify this model.
@return a new anonymous resource linked to this model.
*/
public Resource createResource() ;
/**
Create a blank node resource with a specified identifier. The resulting bnode
will be equal to any other bnode with the same AnonId (even if they are in
separate models - be warned). The intended use for this method is to allow
bnode round-tripping between Jena models and other representations.
<p>
This method may return an existing bnode with the correct AnonId and model, or it
may construct a fresh one, as it sees fit.
<p>
Operations on the result may modify this model
@param id the identifier to use for this blank node
@return a blank node with that identifier
*/
public Resource createResource( AnonId id );
/**
Create a new resource associated with this model. If the uri string is null, this creates
a bnode, as per <code>createResource()</code>. Otherwise it creates a URI node.
A URI resource is .equals() to any other URI Resource with the same URI (even in
a different model - be warned).
<p>
This method may return an existing Resource with the correct URI and model, or it
may construct a fresh one, as it sees fit.
<p>
Operations on the result Resource may change this model.
@param uri the URI of the resource to be created
@return a new resource linked to this model.
*/
public Resource createResource( String uri ) ;
/**
Create a property with a given URI composed from a namespace part and a
localname part by concatenating the strings.
<p>
This method may return an existing property with the correct URI and model, or it
may construct a fresh one, as it sees fit.
<p>
Subsequent operations on the returned property may modify this model.
@param nameSpace the nameSpace of the property
@param localName the name of the property within its namespace
@return a property instance
*/
public Property createProperty(String nameSpace, String localName);
/**
Create an untyped literal from a String value with a specified language.
@param v the lexical form of the literal
@param language the language associated with the literal
@return a new literal representing the value v with the given language
*/
public Literal createLiteral(String v, String language);
/**
Create a literal from a String value. An existing literal
of the right value may be returned, or a fresh one created.
The use of the wellFormed flag is to create typed literals of
type rdf:XMLLiteral, without error checking. This should
only be used when the lexical form is known to already be
in exclusive canonical XML.
@param v the lexical form of the literal
@param wellFormed true if the Literal is well formed XML, in the lexical space of rdf:XMLLiteral
@return a new literal
*/
public Literal createLiteral(String v, boolean wellFormed);
/**
Build a typed literal from its lexical form. The
lexical form will be parsed now and the value stored. If
the form is not legal this will throw an exception.
<p>
Note that in preview releases of Jena2 it was also possible to specify
a language type. Changes to the RDF specification mean that this is no longer
legal except for plain literals. To create a plain literal with a language tag
use {@link #createLiteral(String, String) createLiteral}.
@param lex the lexical form of the literal
@param dtype the type of the literal, null for old style "plain" literals
@throws DatatypeFormatException if lex is not a legal form of dtype
*/
public Literal createTypedLiteral(String lex, RDFDatatype dtype);
/**
* Build a typed literal from its value form.
* <p>
* Note that in preview releases of Jena2 it was also possible to specify
* a language type. Changes to the RDF specification mean that this is no longer
* legal except for plain literals. To create a plain literal with a language tag
* use {@link #createLiteral(String, String) createLiteral}.
* </p>
* @param value the value of the literal
* @param dtype the type of the literal, null for old style "plain" literals
*/
public Literal createTypedLiteral(Object value, RDFDatatype dtype);
/**
* Build a typed literal label from its value form using
 * whatever datatype is currently registered as the default
* representation for this java class. No language tag is supplied.
* @param value the literal value to encapsulate
*/
@Override
public Literal createTypedLiteral(Object value);
/**
Create a Statement instance. (Creating a statement does not add it to the set of
statements in the model; see Model::add). This method may return an existing
Statement with the correct components and model, or it may construct a fresh one,
as it sees fit.
<p>
Subsequent operations on the statement or any of its parts may modify this model.
@param s the subject of the statement
@param p the predicate of the statement
@param o the object of the statement
@return the new statement
*/
public Statement createStatement( Resource s, Property p, RDFNode o );
/**
Answer a new empty list. This is equivalent to a list consisting only
of <code>rdf:nil</code>.
@return An RDF-encoded list of no elements
*/
public RDFList createList();
/**
* <p>Answer a new list containing the resources from the given iterator, in order.</p>
* @param members An iterator, each value of which is expected to be an RDFNode
* @return An RDF-encoded list of the elements of the iterator
*/
public RDFList createList( Iterator<? extends RDFNode> members );
/**
* <p>Answer a new list containing the nodes from the given array, in order</p>
* @param members An array of RDF nodes that will be the members of the list
* @return An RDF-encoded list
*/
public RDFList createList( RDFNode[] members );
/** Add a statement to this model.
* @return This model.
* @param s The statement to be added.
*/
Model add(Statement s) ;
/**
Add all the statements to the Model, using through the bulk update interface.
@param statements the array of statements to add
@return this model, to allow cascading
*/
Model add( Statement [] statements );
/**
Remove all the statements from the Model, using the bulk update interface.
@param statements the array of statements to be added
@return this model, to allow cascading
*/
Model remove( Statement [] statements );
/**
add all the statements in the List to this Model, going through the bulk
update interface (which means turning them into triples in one form or
another).
@param statements a List of Statements
@return this model, to allow cascading
*/
Model add( List<Statement> statements );
/**
Remove all the statements in the list from this model, using the bulk
update interface.
@param statements a List of Statements to remove
@return this model, to allow cascading
*/
Model remove( List<Statement> statements );
/** Add all the statements returned by an iterator to this model.
* @return this model
* @param iter An iterator which returns the statements to be added.
*/
Model add(StmtIterator iter) ;
/** Add all the statements in another model to this model, including the
* reified statements.
* @return this model
* @param m The model whose statements are to be added.
*/
Model add(Model m) ;
/**
Add all the statements of the given model m to this model.
Optionally suppress the addition of reified statements.
@param m the model containing the statements to add
@param suppressReifications true to suppress adding reified statements
@return this model for cascading
@deprecated suppressReifications no longer has any effect.
*/
@Deprecated
Model add( Model m, boolean suppressReifications );
/** Add the RDF statements from a document.
* Uses content negotiation to request appropriate mime types.
* If the content type is not found, it may guess from the URL.
* <p>See {@link Model} for a description of how to traverse a firewall.</p>
*
* @return this model
* @param url of the document containing the RDF statements.
*
* @see <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about determining the syntax.
*/
public Model read(String url) ;
/** Add statements from a document.
* This method assumes the concrete syntax is RDF/XML.
* See {@link #read(InputStream, String, String)} for explicitly setting the language.
*
* @param in the input stream
@param base the base uri to be used when converting relative
URI's to absolute URI's. (Resolving relative URIs and fragment IDs is done
by prepending the base URI to the relative URI/fragment.) If there are no
relative URIs in the source, this argument may safely be <code>null</code>.
If the base is the empty string, then relative URIs <i>will be retained in
the model</i>. This is typically unwise and will usually generate errors
when writing the model back out.
* @return the current model
* @see <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about concrete syntaxes.
*/
public Model read(InputStream in, String base) ;
/** Add RDF statements represented in language <code>lang</code> to the model.
* <br />Predefined values for <code>lang</code> are "RDF/XML", "N-TRIPLE",
* "TURTLE" (or "TTL") and "N3".
* <code>null</code> represents the default language, "RDF/XML".
* "RDF/XML-ABBREV" is a synonym for "RDF/XML".
* <br />
*
* @return this model
@param base the base uri to be used when converting relative
URI's to absolute URI's. (Resolving relative URIs and fragment IDs is done
by prepending the base URI to the relative URI/fragment.) If there are no
relative URIs in the source, this argument may safely be <code>null</code>.
If the base is the empty string, then relative URIs <i>will be retained in
the model</i>. This is typically unwise and will usually generate errors
when writing the model back out.
 * @param lang the language of the serialization <code>null</code>
* selects the default
* @param in the source of the input serialization
* @see <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about concrete syntaxes.
*/
public Model read(InputStream in, String base, String lang);
/** Using this method is often a mistake.
* Add statements from an RDF/XML serialization.
* It is generally better to use an InputStream if possible,
* otherwise there is a danger of a
* mismatch between the character encoding of say the FileReader and the
* character encoding of the data in the file.
*
* It is better to explicitly set the serialization format.
 * See {@link #read(InputStream, String, String)} for explicitly setting the serialization language.
*
* @param reader
* @param base the base uri to be used when converting relative URI's to absolute URI's and to guess the RDF serialization syntax.
* @return the current model
* @see <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about concrete syntaxes.
*/
public Model read(Reader reader, String base) ;
/**
 * Add statements from a serialization in language <code>lang</code> to the
* model.
* <br />Predefined values for <code>lang</code> are "RDF/XML", "N-TRIPLE",
* "TURTLE" (or "TTL") and "N3".
* <code>null</code> represents the default language, "RDF/XML".
* "RDF/XML-ABBREV" is a synonym for "RDF/XML".
* <br />
*
* <p>See {@link Model} for a description of how to traverse a firewall.</p>
* @param url a string representation of the url to read from
* @param lang the language of the serialization
* @return this model
* @see <a href="http://jena.apache.org/documentation/io/index.html">"Reading and Writing RDF in Apache Jena"</a>
* for more information about concrete syntaxes.
*/
public Model read(String url, String lang) ;
/** Using this method is often a mistake.
* Add RDF statements represented in language <code>lang</code> to the model.
* <br />Predefined values for <code>lang</code> are "RDF/XML", "N-TRIPLE",
* "TURTLE" (or "TTL") and "N3".
* <code>null</code> represents the default language, "RDF/XML".
*"RDF/XML-ABBREV" is a synonym for "RDF/XML".
* <br />
* It is generally better to use an InputStream if possible.
* {@link Model#read(InputStream,String)}, otherwise there is a danger of a
* mismatch between the character encoding of say the FileReader and the
* character encoding of the data in the file.
* @return this model
@param base the base uri to be used when converting relative
URI's to absolute URI's. (Resolving relative URIs and fragment IDs is done
by prepending the base URI to the relative URI/fragment.) If there are no
relative URIs in the source, this argument may safely be <code>null</code>.
If the base is the empty string, then relative URIs <i>will be retained in
the model</i>. This is typically unwise and will usually generate errors
when writing the model back out.
     * @param lang the language of the serialization <code>null</code>
* selects the default
* @param reader the source of the input serialization
*/
public Model read(Reader reader, String base, String lang);
/**
Read into this model the RDF at <code>url</code>, using
<code>baseURI</code> as the base URI if it is non-null. The RDF is assumed
to be RDF/XML unless <code>lang</code> is non-null, in which case it names
the language to be used. Answer this model.
*/
Model read( String url, String base, String lang );
// output operations
/**
* <p>Write the model as an XML document.
* It is often better to use an OutputStream rather than a Writer, since this
* will avoid character encoding errors.
* </p>
*
* @param writer A writer to which the XML will be written
* @return this model
*/
public Model write( Writer writer ) ;
/**
     * <p>Write a serialized representation of a model in a specified language.
* It is often better to use an OutputStream rather than a Writer, since this
* will avoid character encoding errors.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE", "TURTLE", (and "TTL") and "N3". The default value,
* represented by <code>null</code> is "RDF/XML".</p>
* @param writer The output writer
* @param lang The output language
* @return this model
*/
public Model write( Writer writer, String lang ) ;
/**
     * <p>Write a serialized representation of a model in a specified language.
* It is often better to use an OutputStream rather than a Writer,
* since this will avoid character encoding errors.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE", "TURTLE", (and "TTL") and "N3". The default value,
* represented by <code>null</code>, is "RDF/XML".</p>
     * @param writer The output writer
     * @param lang The language in which the RDF should be written
     * @param base The base uri for relative URI calculations.
     * <code>null</code> means use only absolute URI's.
* @return this model
*/
public Model write( Writer writer, String lang, String base );
/**
* <p>Write a serialization of this model as an XML document.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE" and "N3". The default value is
* represented by <code>null</code> is "RDF/XML".</p>
* @param out The output stream to which the XML will be written
* @return This model
*/
public Model write(OutputStream out) ;
/**
     * <p>Write a serialized representation of this model in a specified language.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE", "TURTLE", (and "TTL") and "N3". The default value,
* represented by <code>null</code>, is "RDF/XML".</p>
* @param out The output stream to which the RDF is written
     * @param lang The output language
* @return This model
*/
public Model write( OutputStream out, String lang ) ;
/**
     * <p>Write a serialized representation of a model in a specified language.
* </p>
* <p>The language in which to write the model is specified by the
* <code>lang</code> argument. Predefined values are "RDF/XML",
* "RDF/XML-ABBREV", "N-TRIPLE", "TURTLE", (and "TTL") and "N3". The default value,
* represented by <code>null</code>, is "RDF/XML".</p>
* @param out The output stream to which the RDF is written
     * @param lang The language in which the RDF should be written
     * @param base The base uri to use when writing relative URI's. <code>null</code>
     * means use only absolute URI's. This is used for relative
     * URIs that would be resolved against the document retrieval URL.
     * For some values of <code>lang</code>, this value may be included in the output.
* @return This model
*/
public Model write( OutputStream out, String lang, String base );
/** Removes a statement.
*
* <p> The statement with the same subject, predicate and object as
* that supplied will be removed from the model.</p>
* @return this model
* @param s The statement to be removed.
*/
Model remove(Statement s) ;
/** Return a statement with given subject and property.
     * <p>If more than one statement with the given subject and property
* exists in the model, it is undefined which will be returned. If none
* exist, an exception is thrown.
* @return A statement from the model with the given subject and property.
* @param s The subject of the statement to be returned.
* @param p The property of the statement to be returned.
* @throws PropertyNotFoundException
*/
Statement getRequiredProperty(Resource s, Property p) ;
/**
Answer a statement (s, p, ?O) from this model. If none exist, return null;
if several exist, pick one arbitrarily.
@param s the subject of the statement to return
@param p the predicate of the statement to return
@return some statement (s, p, ?O) or null if none can be found
*/
Statement getProperty( Resource s, Property p );
/**
        An alias for <code>listResourcesWithProperty(Property)</code>,
        retained for backward compatibility. It may be deprecated in later
releases.
*/
ResIterator listSubjectsWithProperty( Property p );
/**
Answer an iterator [with no duplicates] over all the resources in this
model that have property <code>p</code>. <code>remove()</code>
is not implemented on this iterator.
*/
ResIterator listResourcesWithProperty( Property p );
/**
An alias for <code>listResourcesWithProperty</code>, retained for
        backward compatibility. It may be deprecated in later releases.
*/
ResIterator listSubjectsWithProperty( Property p, RDFNode o );
/**
Answer an iterator [with no duplicates] over all the resources in this
model that have property <code>p</code> with value <code>o</code>.
<code>remove()</code> is not implemented on this iterator.
*/
ResIterator listResourcesWithProperty( Property p, RDFNode o );
/** List all objects in a model.
* @return an iterator over the objects. .remove() is not implemented on this iterator.
*/
NodeIterator listObjects() ;
/** List all objects of a given property. .remove() is not implemented on this iterator.
* @return an iterator over the objects
* @param p The predicate sought
*/
NodeIterator listObjectsOfProperty(Property p) ;
/** List the values of a property of a resource.
* @return an iterator over the objects. .remove() is not implemented on this iterator.
* @param p The predicate sought
*/
NodeIterator listObjectsOfProperty(Resource s, Property p);
/** Determine whether this model contains any statements with a given subject
* and property.
* @return true if there exists within this model a statement with
* subject s and property p, false otherwise
* @param s The subject sought (null for any).
* @param p The predicate sought (null for any).
*/
boolean contains(Resource s, Property p) ;
/**
determine if the RDFNode r appears in any statement of this model.
(containsRDFNode is a horrible name, and in any case, even literals
will be resources one day)
@param r the RDFNode to be searched for
@return true iff r appears as some subject, predicate, or object
*/
boolean containsResource( RDFNode r );
/** Determine if an (S, P, O) pattern is present in this model, with null allowed
* to represent a wildcard match.
* @return true if the statement with subject s, property p and object o
* is in the model, false otherwise
     * @param s The subject of the statement tested (null as wildcard).
* @param p The predicate of the statement tested (null as wildcard).
* @param o The object of the statement tested (null as wildcard).
*/
boolean contains(Resource s, Property p, RDFNode o) ;
/** Determine if a statement is present in this model.
* @param s The statement tested.
* @return true if the statement s is in this model, false otherwise
*/
boolean contains(Statement s) ;
/** Determine if any of the statements returned by an iterator are
* contained in this model.
* @param iter an iterator of the statements to be tested
* @return true if any of the statements returns by iter are contained
* in this model and false otherwise.
*/
boolean containsAny(StmtIterator iter) ;
/** Determine if all of the statements returned by an iterator are
* contained in this model.
* @param iter an iterator of the statements to be tested
* @return true if any of the statements returns by iter are contained
* in this model and false otherwise.
*/
boolean containsAll(StmtIterator iter) ;
/** Determine if any of the statements in a model are also contained
* in this model.
* @param model the model containing the statements to be tested
* @return true if any of the statements in model are also contained
* in this model and false otherwise.
*/
boolean containsAny(Model model) ;
/** Determine if all of the statements in a model are also contained
* in this model.
* @param model the model containing the statements to be tested
* @return true if all of the statements in model are also contained
* in this model and false otherwise.
*/
boolean containsAll(Model model) ;
/**
Determine if this Statement has been reified in this Model.
@param s The statement tested.
@return true iff a ReifiedStatement(s) has been created in this model
*/
boolean isReified( Statement s );
/**
Find or create a {@link ReifiedStatement} corresponding to a Statement.
@param s Statement which may or may not already be reified
@return a Resource [ReifiedStatement] that reifies the specified Statement.
*/
Resource getAnyReifiedStatement( Statement s );
/**
Remove all reifications (ie implicit reification quads) of _s_.
*/
void removeAllReifications( Statement s );
/**
        Remove a particular reification.
*/
void removeReification( ReifiedStatement rs );
/** List all statements.
*
* <p>Subsequent operations on those statements may modify this model.</p>
* @return an iterator over all statements in the model.
*/
StmtIterator listStatements() ;
/** List the statements matching a selector.
*
     * <p>A statement is considered to match if the <CODE>test</CODE> method
* of s returns true when called on s.</p>
* @return an iterator over the matching statements
* @param s A selector object.
.
*/
StmtIterator listStatements(Selector s) ;
/** Find all the statements matching a pattern.
* <p>Return an iterator over all the statements in a model
* that match a pattern. The statements selected are those
* whose subject matches the <code>subject</code> argument,
* whose predicate matches the <code>predicate</code> argument
* and whose object matches the <code>object</code> argument.
* If an argument is <code>null</code> it matches anything.</p>
* @return an iterator over the subjects
* @param s The subject sought
* @param p The predicate sought
* @param o The value sought
*/
StmtIterator listStatements( Resource s, Property p, RDFNode o );
/**
Answer a ReifiedStatement that encodes _s_ and belongs to this Model.
<br>
result.getModel() == this
<br>
result.getStatement() .equals ( s )
*/
ReifiedStatement createReifiedStatement( Statement s );
/**
answer a ReifiedStatement that encodes _s_, belongs to this Model,
and is a Resource with that _uri_.
*/
ReifiedStatement createReifiedStatement( String uri, Statement s );
/**
answer an iterator delivering all the reified statements "in" this model
*/
RSIterator listReifiedStatements();
/**
answer an iterator delivering all the reified statements "in" this model
that match the statement _st_.
*/
RSIterator listReifiedStatements( Statement st );
/**
Answer the reification style of the model.
@return the reification style
createMemModelMaker()
*/
@Deprecated
ReificationStyle getReificationStyle();
/** Create a new model containing the statements matching a query.
*
     * <p>A statement is considered to match if the <CODE>test</CODE> method
* of s returns true when called on s.</p>
* @return an iterator over the matching statements
* @param s A selector object.
.
*/
Model query(Selector s) ;
/**
        Create a new, independent, model containing all the statements in this model
        together with all of those in another given model. By <i>independent</i>
we mean that changes to the result model do not affect the operand
models, and <i>vice versa</i>.
<p>
The new model need not be of the same type as either this model or
the argument model: typically it will be a memory-based model, even
if this model is a database model.
@return A new model containing all the statements that are in either model
@param model The other model whose statements are to be included.
*/
Model union(Model model) ;
/**
        Create a new, independent, model containing all the statements which are in both
this model and another. As models are sets of statements, a statement
contained in both models will only appear once in the resulting model.
The new model need not be of the same type as either this model or
the argument model: typically it will be a memory-based model.
@return A new model containing all the statements that are in both models.
@param model The other model.
*/
Model intersection(Model model) ;
    /** Create a new, independent, model containing all the statements in this model which
* are not in another.
The new model need not be of the same type as either this model or
the argument model: typically it will be a memory-based model.
* @return a new model containing all the statements in this model that
* are not in the given model.
* @param model the other model whose statements are to be excluded.
*/
Model difference(Model model) ;
/**
* Test whether the given object <code>m</code>
* is a model that is equal to this model,
* which is true iff the underlying graphs are identical Java
* objects. This is not the same test as comparing whether two models
* have the same structure (i.e. contain the same set of statements).
     * To test for structural equivalence, see {@link #isIsomorphicWith}.
* @param m the model to be compared
* @return true if <code>m</code> shares a graph object with this model
* @see #isIsomorphicWith(Model)
*/
@Override
public boolean equals(Object m);
    /** Begin a new transaction.
*
* <p> All changes made to a model within a transaction, will either
* be made, or none of them will be made.</p>
* @return this model to enable cascading.
*/
Model begin() ;
/** Abort the current transaction and abandon any changes in progress.
* @return this model to enable cascading.
*/
Model abort() ;
/** Commit the current transaction.
* @return this model to enable cascading.
*/
Model commit() ;
/**
Execute the command <code>cmd</code> inside a transaction. If it
completes, commit the transaction and return the result; if it fails
(by throwing an exception), abort the transaction and throw an
exception.
*/
Object executeInTransaction( Command cmd );
/** Determine whether this model is independent.
*
* <p>For efficiency reasons, some implementations may create models which
     * are dependent on others, i.e. a change in one model may cause
* a change in another. If this is the case this method will return false,
* otherwise it will return true.</p>
*
     * @return true if this model is independent of others
*/
boolean independent();
/** Determine whether this model supports transactions.
* @return true if this model supports transactions.
*/
boolean supportsTransactions();
/** Determine whether this model supports set operations.
* @return true if this model supports set operations.
*/
boolean supportsSetOperations();
/**
* Compare this Model with another for equality ignoring the labels on
* bNodes.
* See
* <a href="http://www.w3.org/TR/rdf-concepts#section-Graph-syntax">RDF
* Concepts</a>.
* <p>Two models are isomorphic when each statement in one can be matched
* with a statement in the other. Statements which are identical match.</p>
*
* <p>Special treatment is given to anonymous nodes. A binding is a one to
* one mapping which maps each anonymous node in <code>this</code> model to
* an anonymous node in <code>model</code>. Two statements s1 and s2 match
     * under a binding if s1.subject is anonymous and s2.subject is anonymous
* and the binding maps s1.subject to s2.subject.</p>
*
* <p>Two models are isomorphic if there is a binding that allows all the
     * statements in one model to match a statement in the other.</p>
* @param g Compare against this.
* @return boolean True if the two RDF graphs are isomorphic.
*/
boolean isIsomorphicWith(Model g);
/** Close the Model and free up resources held.
*
* <p>Not all implementations of Model require this method to be called. But
* some do, so in general its best to call it when done with the object,
* rather than leave it to the finalizer.</p>
*/
public void close();
// /** Get the model lock for this model.
// * See also the convenience operations enterCriticalSection and leaveCriticalSection.
// *
// * @see ModelLock
// * @return The ModelLock object associated with this model
// * @deprecated Applications should use {@link #getLock()}
// */
// public ModelLock getModelLock() ;
/** Get the model lock for this model.
* See also the convenience operations enterCriticalSection and leaveCriticalSection.
*
* @see Lock
* @return The ModelLock object associated with this model
*/
public Lock getLock() ;
/**
Register a listener for model-changed events on this model. The methods on
the listener will be called when API add/remove calls on the model succeed
[in whole or in part].
<p>
The same listener may be registered many times; if so, it's methods will
be called as many times as it's registered for each event.
@see ModelChangedListener
@return this model, for cascading
*/
public Model register( ModelChangedListener listener );
/**
Unregister a listener from model-changed events on this model. The
        listener is detached from the model. The model is returned to permit
cascading. If the listener is not attached to the model, then nothing happens.
@see ModelChangedListener
@return this model, for cascading
*/
public Model unregister( ModelChangedListener listener );
/**
Notify any listeners that the event e has occurred.
@param e the event that has occurred
*/
public Model notifyEvent( Object e );
/**
Remove all the statements from this model.
*/
public Model removeAll();
/**
Remove all the statements matching (s, p, o) from this model.
*/
public Model removeAll( Resource s, Property p, RDFNode r );
/**
Answer true iff .close() has been called on this Model.
*/
public boolean isClosed();
}
| Fix javadoc
git-svn-id: bc509ec38c1227b3e85ea1246fda136342965d36@1522578 13f79535-47bb-0310-9956-ffa450edef68
| jena-core/src/main/java/com/hp/hpl/jena/rdf/model/Model.java | Fix javadoc |
|
Java | apache-2.0 | 464f05c2c3e5eac98e4f0f78419cc54938b2827a | 0 | dvamedveda/b.savelev,dvamedveda/b.savelev | package ru.job4j.condition;
/**
 * Класс Point, содержащий поля, конструктор и методы для работы с точками.
*
* @author - b.savelev (mailto: [email protected])
* @version - 1.0
* @since 0.1
*/
public class Point {
/**
* Поля класса, обозначающие его координаты.
*/
private int x, y;
    /**
     * Creates a point at the given coordinates.
     *
     * @param x the x coordinate.
     * @param y the y coordinate.
     */
    public Point(int x, int y) {
        this.x = x;
        this.y = y;
    }
    /**
     * Returns the X coordinate of this point.
     *
     * @return int x
     */
    public int getX() {
        return this.x;
    }
    /**
     * Returns the Y coordinate of this point.
     *
     * @return int y
     */
    public int getY() {
        return this.y;
    }
/**
* Метод, определящий, находится ли точка на заданной функции.
*
* @param a int один из параметров функции.
* @param b int второй из параметров функции.
* @return boolean нахождение точки на функции
*/
public boolean is(int a, int b) {
return (this.y == a * this.x + b);
}
} | chapter_001/src/main/java/ru/job4j/condition/Point.java | package ru.job4j.condition;
/**
 * Класс Point, содержащий поля, конструктор и методы для работы с точками.
*
* @author - b.savelev (mailto: [email protected])
* @version - 1.0
* @since 0.1
*/
public class Point {
/**
* Поля класса, обозначающие его координаты.
*/
private int x, y;
    /**
     * Creates a point at the given coordinates.
     *
     * @param x the x coordinate.
     * @param y the y coordinate.
     */
    public Point(int x, int y) {
        this.x = x;
        this.y = y;
    }
    /**
     * Returns the X coordinate of this point.
     *
     * @return int x
     */
    public int getX() {
        return this.x;
    }
    /**
     * Returns the Y coordinate of this point.
     *
     * @return int y
     */
    public int getY() {
        return this.y;
    }
/**
* Метод, определящий, находится ли точка на заданной функции.
*
* @param a int один из параметров функции.
* @param b int второй из параметров функции.
* @return boolean нахождение точки на функции
*/
public boolean is(int a, int b) {
return (this.getY() == a * this.getX() + b);
}
} | Метод Point.is() теперь не использует геттеры, а обращается к полям Point напрямую / 3.2. Положение точки [#28516]
| chapter_001/src/main/java/ru/job4j/condition/Point.java | Метод Point.is() теперь не использует геттеры, а обращается к полям Point напрямую / 3.2. Положение точки [#28516] |
|
Java | apache-2.0 | e9f32256c438d3560a986ae5695e25da748587be | 0 | bmaupin/android-sms-plus | /*
* Copyright (C) 2008 Esmertec AG.
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.mms.ui;
import android.content.AsyncQueryHandler;
import android.content.ContentUris;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Handler;
import android.provider.BaseColumns;
import android.provider.ContactsContract.CommonDataKinds.Email;
import android.provider.ContactsContract.CommonDataKinds.Photo;
import android.provider.ContactsContract.Contacts;
import android.provider.ContactsContract.Data;
import android.provider.ContactsContract.PhoneLookup;
import android.provider.ContactsContract.RawContacts;
import android.provider.Telephony.Mms;
import android.provider.Telephony.MmsSms;
import android.provider.Telephony.MmsSms.PendingMessages;
import android.provider.Telephony.Sms;
import android.provider.Telephony.Sms.Conversations;
import android.util.Config;
import android.util.Log;
import android.util.LruCache;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CursorAdapter;
import android.widget.ListView;
import com.android.mms.R;
import com.google.android.mms.MmsException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.regex.Pattern;
/**
* The back-end data adapter of a message list.
*/
public class MessageListAdapter extends CursorAdapter {
private static final String TAG = "MessageListAdapter";
private static final boolean DEBUG = false;
private static final boolean LOCAL_LOGV = Config.LOGV && DEBUG;
static final String[] PROJECTION = new String[] {
// TODO: should move this symbol into com.android.mms.telephony.Telephony.
MmsSms.TYPE_DISCRIMINATOR_COLUMN,
BaseColumns._ID,
Conversations.THREAD_ID,
// For SMS
Sms.ADDRESS,
Sms.BODY,
Sms.DATE,
Sms.READ,
Sms.TYPE,
Sms.STATUS,
Sms.LOCKED,
Sms.ERROR_CODE,
// For MMS
Mms.SUBJECT,
Mms.SUBJECT_CHARSET,
Mms.DATE,
Mms.READ,
Mms.MESSAGE_TYPE,
Mms.MESSAGE_BOX,
Mms.DELIVERY_REPORT,
Mms.READ_REPORT,
PendingMessages.ERROR_TYPE,
Mms.LOCKED
};
// The indexes of the default columns which must be consistent
// with above PROJECTION.
static final int COLUMN_MSG_TYPE = 0;
static final int COLUMN_ID = 1;
static final int COLUMN_THREAD_ID = 2;
static final int COLUMN_SMS_ADDRESS = 3;
static final int COLUMN_SMS_BODY = 4;
static final int COLUMN_SMS_DATE = 5;
static final int COLUMN_SMS_READ = 6;
static final int COLUMN_SMS_TYPE = 7;
static final int COLUMN_SMS_STATUS = 8;
static final int COLUMN_SMS_LOCKED = 9;
static final int COLUMN_SMS_ERROR_CODE = 10;
static final int COLUMN_MMS_SUBJECT = 11;
static final int COLUMN_MMS_SUBJECT_CHARSET = 12;
static final int COLUMN_MMS_DATE = 13;
static final int COLUMN_MMS_READ = 14;
static final int COLUMN_MMS_MESSAGE_TYPE = 15;
static final int COLUMN_MMS_MESSAGE_BOX = 16;
static final int COLUMN_MMS_DELIVERY_REPORT = 17;
static final int COLUMN_MMS_READ_REPORT = 18;
static final int COLUMN_MMS_ERROR_TYPE = 19;
static final int COLUMN_MMS_LOCKED = 20;
private static final int CACHE_SIZE = 50;
protected LayoutInflater mInflater;
private final ListView mListView;
private final LruCache<Long, MessageItem> mMessageItemCache;
private final ColumnsMap mColumnsMap;
private OnDataSetChangedListener mOnDataSetChangedListener;
private Handler mMsgListItemHandler;
private Pattern mHighlight;
private Context mContext;
private HashMap<String, HashSet<MessageListItem>> mAddressToMessageListItems
= new HashMap<String, HashSet<MessageListItem>>();
    /**
     * Builds the adapter for one conversation's message list.
     *
     * @param context used to obtain the LayoutInflater and to build MessageItems
     * @param c cursor over the messages to display
     * @param listView the list this adapter feeds; notifyDataSetChanged()
     *        scrolls it to its last row
     * @param useDefaultColumnsMap true to assume the cursor's columns match
     *        PROJECTION verbatim, false to resolve the indexes from the cursor
     * @param highlight pattern highlighted in message bodies, forwarded to
     *        MessageItem (nullability not enforced here — confirm at call sites)
     */
    public MessageListAdapter(
            Context context, Cursor c, ListView listView,
            boolean useDefaultColumnsMap, Pattern highlight) {
        super(context, c, FLAG_REGISTER_CONTENT_OBSERVER);
        mContext = context;
        mHighlight = highlight;
        mInflater = (LayoutInflater) context.getSystemService(
                Context.LAYOUT_INFLATER_SERVICE);
        mListView = listView;
        mMessageItemCache = new LruCache<Long, MessageItem>(CACHE_SIZE);
        if (useDefaultColumnsMap) {
            mColumnsMap = new ColumnsMap();
        } else {
            mColumnsMap = new ColumnsMap(c);
        }
        // NOTE(review): mAvatarCache appears to be a field declared outside this
        // excerpt (AvatarCache is presumably defined further down) — confirm.
        mAvatarCache = new AvatarCache();
    }
    /**
     * Binds one cursor row to a recycled list item view and keeps the
     * address-to-views index (mAddressToMessageListItems) up to date so that
     * notifyImageLoaded(String) can later re-bind the rows for an address.
     */
    @Override
    public void bindView(View view, Context context, Cursor cursor) {
        // Only MessageListItem views are bound; any other view type is ignored.
        if (view instanceof MessageListItem) {
            String type = cursor.getString(mColumnsMap.mColumnMsgType);
            long msgId = cursor.getLong(mColumnsMap.mColumnMsgId);
            // Fetch from the LRU cache, building and caching the item on a miss.
            MessageItem msgItem = getCachedMessageItem(type, msgId, cursor);
            if (msgItem != null) {
                MessageListItem mli = (MessageListItem) view;
                // Remove previous item from mapping
                MessageItem oldMessageItem = mli.getMessageItem();
                if (oldMessageItem != null) {
                    String oldAddress = oldMessageItem.mAddress;
                    if (oldAddress != null) {
                        HashSet<MessageListItem> set = mAddressToMessageListItems.get(oldAddress);
                        if (set != null) {
                            set.remove(mli);
                        }
                    }
                }
                mli.bind(mAvatarCache, msgItem);
                mli.setMsgListItemHandler(mMsgListItemHandler);
                // Add current item to mapping: incoming messages are indexed
                // under the sender's address, outgoing ones under our own number.
                String addr;
                if (!Sms.isOutgoingFolder(msgItem.mBoxId)) {
                    addr = msgItem.mAddress;
                } else {
                    addr = MessageUtils.getLocalNumber();
                }
                HashSet<MessageListItem> set = mAddressToMessageListItems.get(addr);
                if (set == null) {
                    set = new HashSet<MessageListItem>();
                    mAddressToMessageListItems.put(addr, set);
                }
                set.add(mli);
            }
        }
    }
    /**
     * Callback interface for observers of this adapter's data.
     */
    public interface OnDataSetChangedListener {
        /** Invoked after the adapter has refreshed itself in notifyDataSetChanged(). */
        void onDataSetChanged(MessageListAdapter adapter);
        /** Invoked from onContentChanged() when the underlying cursor's content changes. */
        void onContentChanged(MessageListAdapter adapter);
    }
    /**
     * Registers the single listener notified from notifyDataSetChanged() and
     * onContentChanged(). Passing null clears it.
     */
    public void setOnDataSetChangedListener(OnDataSetChangedListener l) {
        mOnDataSetChangedListener = l;
    }
    /**
     * Sets the Handler handed to every MessageListItem during bindView;
     * presumably the item views use it to post messages back to the owning
     * activity — confirm in MessageListItem.
     */
    public void setMsgListItemHandler(Handler handler) {
        mMsgListItemHandler = handler;
    }
public void notifyImageLoaded(String address) {
HashSet<MessageListItem> set = mAddressToMessageListItems.get(address);
if (set != null) {
for (MessageListItem mli : set) {
mli.bind(mAvatarCache, mli.getMessageItem());
}
}
}
    /**
     * Refreshes the list after a data change: scrolls to the newest (last)
     * row, clears the MessageItem cache, and notifies the registered
     * OnDataSetChangedListener, if any.
     */
    @Override
    public void notifyDataSetChanged() {
        super.notifyDataSetChanged();
        if (LOCAL_LOGV) {
            Log.v(TAG, "MessageListAdapter.notifyDataSetChanged().");
        }
        // Jump to the bottom so the most recent message is visible.
        mListView.setSelection(mListView.getCount());
        mMessageItemCache.evictAll();
        if (mOnDataSetChangedListener != null) {
            mOnDataSetChangedListener.onDataSetChanged(this);
        }
    }
@Override
protected void onContentChanged() {
if (getCursor() != null && !getCursor().isClosed()) {
if (mOnDataSetChangedListener != null) {
mOnDataSetChangedListener.onContentChanged(this);
}
}
}
    /**
     * Inflates an empty message_list_item row; bindView fills it in.
     */
    @Override
    public View newView(Context context, Cursor cursor, ViewGroup parent) {
        return mInflater.inflate(R.layout.message_list_item, parent, false);
    }
    /**
     * Returns the MessageItem for the given message, building and caching it
     * from the cursor on a cache miss. MMS ids are stored under their negated
     * value (see getKey) so SMS and MMS ids cannot collide in the cache.
     *
     * @param type message type discriminator read from the cursor ("mms" is
     *             treated specially by getKey)
     * @param msgId the message's row id
     * @param c cursor positioned at the message's row; may be null, in which
     *          case only the cache is consulted
     * @return the cached or newly created item, or null when it is neither
     *         cached nor creatable (invalid cursor, or MmsException — logged
     *         and swallowed)
     */
    public MessageItem getCachedMessageItem(String type, long msgId, Cursor c) {
        MessageItem item = mMessageItemCache.get(getKey(type, msgId));
        if (item == null && c != null && isCursorValid(c)) {
            try {
                item = new MessageItem(mContext, type, c, mColumnsMap, mHighlight);
                mMessageItemCache.put(getKey(item.mType, item.mMsgId), item);
            } catch (MmsException e) {
                Log.e(TAG, "getCachedMessageItem: ", e);
            }
        }
        return item;
    }
private boolean isCursorValid(Cursor cursor) {
// Check whether the cursor is valid or not.
if (cursor.isClosed() || cursor.isBeforeFirst() || cursor.isAfterLast()) {
return false;
}
return true;
}
private static long getKey(String type, long id) {
if (type.equals("mms")) {
return -id;
} else {
return id;
}
}
/**
 * Maps the logical message columns used by {@code MessageItem} to concrete
 * column indexes in the adapter's cursor.  The no-arg constructor assumes
 * the default {@code PROJECTION}; the cursor constructor resolves each
 * column by name for custom projections.
 */
public static class ColumnsMap {
    public int mColumnMsgType;
    public int mColumnMsgId;
    public int mColumnSmsAddress;
    public int mColumnSmsBody;
    public int mColumnSmsDate;
    public int mColumnSmsRead;
    public int mColumnSmsType;
    public int mColumnSmsStatus;
    public int mColumnSmsLocked;
    public int mColumnSmsErrorCode;
    public int mColumnMmsSubject;
    public int mColumnMmsSubjectCharset;
    public int mColumnMmsDate;
    public int mColumnMmsRead;
    public int mColumnMmsMessageType;
    public int mColumnMmsMessageBox;
    public int mColumnMmsDeliveryReport;
    public int mColumnMmsReadReport;
    public int mColumnMmsErrorType;
    public int mColumnMmsLocked;

    /** Indexes for cursors built from the default PROJECTION. */
    public ColumnsMap() {
        mColumnMsgType = COLUMN_MSG_TYPE;
        mColumnMsgId = COLUMN_ID;
        mColumnSmsAddress = COLUMN_SMS_ADDRESS;
        mColumnSmsBody = COLUMN_SMS_BODY;
        mColumnSmsDate = COLUMN_SMS_DATE;
        // FIX: the next three fields were never assigned here and silently
        // stayed 0, i.e. they pointed at COLUMN_MSG_TYPE.
        mColumnSmsRead = COLUMN_SMS_READ;
        mColumnSmsType = COLUMN_SMS_TYPE;
        mColumnSmsStatus = COLUMN_SMS_STATUS;
        mColumnSmsLocked = COLUMN_SMS_LOCKED;
        mColumnSmsErrorCode = COLUMN_SMS_ERROR_CODE;
        mColumnMmsSubject = COLUMN_MMS_SUBJECT;
        mColumnMmsSubjectCharset = COLUMN_MMS_SUBJECT_CHARSET;
        mColumnMmsDate = COLUMN_MMS_DATE;
        mColumnMmsRead = COLUMN_MMS_READ;
        mColumnMmsMessageType = COLUMN_MMS_MESSAGE_TYPE;
        mColumnMmsMessageBox = COLUMN_MMS_MESSAGE_BOX;
        mColumnMmsDeliveryReport = COLUMN_MMS_DELIVERY_REPORT;
        mColumnMmsReadReport = COLUMN_MMS_READ_REPORT;
        mColumnMmsErrorType = COLUMN_MMS_ERROR_TYPE;
        mColumnMmsLocked = COLUMN_MMS_LOCKED;
    }

    /**
     * Resolves indexes by name for a custom projection.  Columns the
     * cursor does not carry are logged and left at index 0, matching the
     * previous per-field try/catch behavior.  As before, Sms.READ,
     * Mms.DATE and Mms.READ are not looked up here — TODO(review):
     * confirm whether any caller reads those fields off this path.
     */
    public ColumnsMap(Cursor cursor) {
        mColumnMsgType = findColumn(cursor, MmsSms.TYPE_DISCRIMINATOR_COLUMN);
        mColumnMsgId = findColumn(cursor, BaseColumns._ID);
        mColumnSmsAddress = findColumn(cursor, Sms.ADDRESS);
        mColumnSmsBody = findColumn(cursor, Sms.BODY);
        mColumnSmsDate = findColumn(cursor, Sms.DATE);
        mColumnSmsType = findColumn(cursor, Sms.TYPE);
        mColumnSmsStatus = findColumn(cursor, Sms.STATUS);
        mColumnSmsLocked = findColumn(cursor, Sms.LOCKED);
        mColumnSmsErrorCode = findColumn(cursor, Sms.ERROR_CODE);
        mColumnMmsSubject = findColumn(cursor, Mms.SUBJECT);
        mColumnMmsSubjectCharset = findColumn(cursor, Mms.SUBJECT_CHARSET);
        mColumnMmsMessageType = findColumn(cursor, Mms.MESSAGE_TYPE);
        mColumnMmsMessageBox = findColumn(cursor, Mms.MESSAGE_BOX);
        mColumnMmsDeliveryReport = findColumn(cursor, Mms.DELIVERY_REPORT);
        mColumnMmsReadReport = findColumn(cursor, Mms.READ_REPORT);
        mColumnMmsErrorType = findColumn(cursor, PendingMessages.ERROR_TYPE);
        mColumnMmsLocked = findColumn(cursor, Mms.LOCKED);
    }

    /**
     * Looks up a column index, logging and returning 0 when the column is
     * absent — the same value an unassigned int field held in the old
     * copy-pasted try/catch blocks.
     */
    private static int findColumn(Cursor cursor, String columnName) {
        try {
            return cursor.getColumnIndexOrThrow(columnName);
        } catch (IllegalArgumentException e) {
            Log.w("colsMap", e.getMessage());
            return 0;
        }
    }
}
private AvatarCache mAvatarCache;
/*
* Track avatars for each of the members of in the group chat.
*/
/**
 * Tracks avatars for the members of a group chat.  Each address walks an
 * async chain on {@link #mQueryHandler}: phone/email lookup -> contact id
 * -> contact info (photo id) -> photo blob.  Any failed step falls back to
 * the shared default avatar.
 */
class AvatarCache {
    // Tokens route AsyncQueryHandler results back to the right step.
    private static final int TOKEN_PHONE_LOOKUP = 101;
    private static final int TOKEN_EMAIL_LOOKUP = 102;
    private static final int TOKEN_CONTACT_INFO = 201;
    private static final int TOKEN_PHOTO_DATA = 301;

    //Projection used for the summary info in the header.
    private final String[] COLUMNS = new String[] {
        Contacts._ID,
        Contacts.PHOTO_ID,
        // Other fields which we might want/need in the future (for example)
        // Contacts.LOOKUP_KEY,
        // Contacts.DISPLAY_NAME,
        // Contacts.STARRED,
        // Contacts.CONTACT_PRESENCE,
        // Contacts.CONTACT_STATUS,
        // Contacts.CONTACT_STATUS_TIMESTAMP,
        // Contacts.CONTACT_STATUS_RES_PACKAGE,
        // Contacts.CONTACT_STATUS_LABEL,
    };
    // Index of Contacts.PHOTO_ID within COLUMNS.
    private final int PHOTO_ID = 1;

    private final String[] PHONE_LOOKUP_PROJECTION = new String[] {
        PhoneLookup._ID,
        PhoneLookup.LOOKUP_KEY,
    };
    private static final int PHONE_LOOKUP_CONTACT_ID_COLUMN_INDEX = 0;
    private static final int PHONE_LOOKUP_CONTACT_LOOKUP_KEY_COLUMN_INDEX = 1;

    private final String[] EMAIL_LOOKUP_PROJECTION = new String[] {
        RawContacts.CONTACT_ID,
        Contacts.LOOKUP_KEY,
    };
    private static final int EMAIL_LOOKUP_CONTACT_ID_COLUMN_INDEX = 0;
    private static final int EMAIL_LOOKUP_CONTACT_LOOKUP_KEY_COLUMN_INDEX = 1;

    /*
     * Map from mAddress to a blob of data which contains the contact id
     * and the avatar.
     */
    HashMap<String, ContactData> mImageCache = new HashMap<String, ContactData>();

    /**
     * Per-address lookup state.  The on*Loaded callbacks consume one step's
     * cursor synchronously and kick off the next query.
     */
    public class ContactData {
        private String mAddress;
        private long mContactId;
        private Uri mContactUri;
        private Drawable mPhoto;

        ContactData(String address) {
            mAddress = address;
        }

        /** Current avatar; the default image until the real photo lands. */
        public Drawable getAvatar() {
            return mPhoto;
        }

        public Uri getContactUri() {
            return mContactUri;
        }

        /**
         * Starts the async lookup chain.  Returns false (and issues no
         * query) when the address is neither a phone number nor an email
         * address.
         */
        private boolean startInitialQuery() {
            if (Mms.isPhoneNumber(mAddress)) {
                mQueryHandler.startQuery(
                        TOKEN_PHONE_LOOKUP,
                        this,
                        Uri.withAppendedPath(PhoneLookup.CONTENT_FILTER_URI, Uri.encode(mAddress)),
                        PHONE_LOOKUP_PROJECTION,
                        null,
                        null,
                        null);
                return true;
            } else if (Mms.isEmailAddress(mAddress)) {
                mQueryHandler.startQuery(
                        TOKEN_EMAIL_LOOKUP,
                        this,
                        Uri.withAppendedPath(Email.CONTENT_LOOKUP_URI, Uri.encode(mAddress)),
                        EMAIL_LOOKUP_PROJECTION,
                        null,
                        null,
                        null);
                return true;
            } else {
                return false;
            }
        }

        /*
         * Once we have the photo data load it into a drawable.
         */
        private boolean onPhotoDataLoaded(Cursor c) {
            if (c == null || !c.moveToFirst()) return false;

            try {
                byte[] photoData = c.getBlob(0);
                Bitmap b = BitmapFactory.decodeByteArray(photoData, 0, photoData.length, null);
                mPhoto = new BitmapDrawable(mContext.getResources(), b);
                return true;
            } catch (Exception ex) {
                // Any decode failure falls back to the default avatar.
                return false;
            }
        }

        /*
         * Once we have the contact info loaded take the photo id and query
         * for the photo data.
         */
        private boolean onContactInfoLoaded(Cursor c) {
            if (c == null || !c.moveToFirst()) return false;

            long photoId = c.getLong(PHOTO_ID);
            Uri contactUri = ContentUris.withAppendedId(Data.CONTENT_URI, photoId);
            mQueryHandler.startQuery(
                    TOKEN_PHOTO_DATA,
                    this,
                    contactUri,
                    new String[] { Photo.PHOTO },
                    null,
                    null,
                    null);
            return true;
        }

        /*
         * Once we have the contact id loaded start the query for the
         * contact information (which will give us the photo id).
         */
        private boolean onContactIdLoaded(Cursor c, int contactIdColumn, int lookupKeyColumn) {
            if (c == null || !c.moveToFirst()) return false;

            mContactId = c.getLong(contactIdColumn);
            String lookupKey = c.getString(lookupKeyColumn);
            mContactUri = Contacts.getLookupUri(mContactId, lookupKey);
            mQueryHandler.startQuery(
                    TOKEN_CONTACT_INFO,
                    this,
                    mContactUri,
                    COLUMNS,
                    null,
                    null,
                    null);
            return true;
        }

        /*
         * If for whatever reason we can't get the photo load the
         * default avatar.  NOTE that fasttrack tries to get fancy
         * with various random images (upside down, etc.) we're not
         * doing that here.
         */
        private void loadDefaultAvatar() {
            try {
                if (mDefaultAvatarDrawable == null) {
                    Bitmap b = BitmapFactory.decodeResource(mContext.getResources(),
                            R.drawable.ic_contact_picture);
                    mDefaultAvatarDrawable = new BitmapDrawable(mContext.getResources(), b);
                }
                mPhoto = mDefaultAvatarDrawable;
            } catch (java.lang.OutOfMemoryError e) {
                // Even decoding the stock resource can OOM under pressure;
                // better to show no avatar than to crash.
                Log.e(TAG, "loadDefaultAvatar: out of memory: ", e);
            }
        }
    }

    Drawable mDefaultAvatarDrawable = null;

    AsyncQueryHandler mQueryHandler = new AsyncQueryHandler(mContext.getContentResolver()) {
        @Override
        protected void onQueryComplete(int token, Object cookieObject, Cursor cursor) {
            super.onQueryComplete(token, cookieObject, cursor);
            ContactData cookie = (ContactData) cookieObject;
            try {
                switch (token) {
                    case TOKEN_PHONE_LOOKUP: {
                        if (!cookie.onContactIdLoaded(
                                cursor,
                                PHONE_LOOKUP_CONTACT_ID_COLUMN_INDEX,
                                PHONE_LOOKUP_CONTACT_LOOKUP_KEY_COLUMN_INDEX)) {
                            cookie.loadDefaultAvatar();
                        }
                        break;
                    }
                    case TOKEN_EMAIL_LOOKUP: {
                        if (!cookie.onContactIdLoaded(
                                cursor,
                                EMAIL_LOOKUP_CONTACT_ID_COLUMN_INDEX,
                                EMAIL_LOOKUP_CONTACT_LOOKUP_KEY_COLUMN_INDEX)) {
                            cookie.loadDefaultAvatar();
                        }
                        break;
                    }
                    case TOKEN_CONTACT_INFO: {
                        if (!cookie.onContactInfoLoaded(cursor)) {
                            cookie.loadDefaultAvatar();
                        }
                        break;
                    }
                    case TOKEN_PHOTO_DATA: {
                        if (!cookie.onPhotoDataLoaded(cursor)) {
                            cookie.loadDefaultAvatar();
                        } else {
                            MessageListAdapter.this.notifyImageLoaded(cookie.mAddress);
                        }
                        break;
                    }
                    default:
                        break;
                }
            } finally {
                // FIX: this handler owns the delivered cursor, and the
                // on*Loaded callbacks consume it synchronously without
                // retaining it.  The original never closed it, leaking one
                // cursor per lookup step.
                if (cursor != null) {
                    cursor.close();
                }
            }
        }
    };

    /**
     * Returns the (possibly still-loading) ContactData for an address.
     * A miss inserts a default-avatar placeholder and starts the async
     * lookup, so repeated calls never issue duplicate queries.
     */
    public ContactData get(final String address) {
        if (mImageCache.containsKey(address)) {
            return mImageCache.get(address);
        } else {
            // Create the ContactData object and put it into the hashtable
            // so that any subsequent requests for this same avatar do not kick
            // off another query.
            ContactData cookie = new ContactData(address);
            mImageCache.put(address, cookie);
            cookie.startInitialQuery();
            cookie.loadDefaultAvatar();
            return cookie;
        }
    }

    public AvatarCache() {
    }
}
}
| src/com/android/mms/ui/MessageListAdapter.java | /*
* Copyright (C) 2008 Esmertec AG.
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.mms.ui;
import com.android.mms.R;
import com.google.android.mms.MmsException;
import android.content.AsyncQueryHandler;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Handler;
import android.provider.BaseColumns;
import android.provider.ContactsContract.Contacts;
import android.provider.ContactsContract.Data;
import android.provider.ContactsContract.PhoneLookup;
import android.provider.ContactsContract.RawContacts;
import android.provider.ContactsContract.StatusUpdates;
import android.provider.ContactsContract.CommonDataKinds.Email;
import android.provider.ContactsContract.CommonDataKinds.Photo;
import android.provider.Telephony.Mms;
import android.provider.Telephony.MmsSms;
import android.provider.Telephony.Sms;
import android.provider.Telephony.MmsSms.PendingMessages;
import android.provider.Telephony.Sms.Conversations;
import android.text.TextUtils;
import android.text.format.DateUtils;
import android.util.Config;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CursorAdapter;
import android.widget.ListView;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;
/**
* The back-end data adapter of a message list.
*/
public class MessageListAdapter extends CursorAdapter {
private static final String TAG = "MessageListAdapter";
private static final boolean DEBUG = false;
private static final boolean LOCAL_LOGV = Config.LOGV && DEBUG;
static final String[] PROJECTION = new String[] {
// TODO: should move this symbol into com.android.mms.telephony.Telephony.
MmsSms.TYPE_DISCRIMINATOR_COLUMN,
BaseColumns._ID,
Conversations.THREAD_ID,
// For SMS
Sms.ADDRESS,
Sms.BODY,
Sms.DATE,
Sms.READ,
Sms.TYPE,
Sms.STATUS,
Sms.LOCKED,
Sms.ERROR_CODE,
// For MMS
Mms.SUBJECT,
Mms.SUBJECT_CHARSET,
Mms.DATE,
Mms.READ,
Mms.MESSAGE_TYPE,
Mms.MESSAGE_BOX,
Mms.DELIVERY_REPORT,
Mms.READ_REPORT,
PendingMessages.ERROR_TYPE,
Mms.LOCKED
};
// The indexes of the default columns which must be consistent
// with above PROJECTION.
static final int COLUMN_MSG_TYPE = 0;
static final int COLUMN_ID = 1;
static final int COLUMN_THREAD_ID = 2;
static final int COLUMN_SMS_ADDRESS = 3;
static final int COLUMN_SMS_BODY = 4;
static final int COLUMN_SMS_DATE = 5;
static final int COLUMN_SMS_READ = 6;
static final int COLUMN_SMS_TYPE = 7;
static final int COLUMN_SMS_STATUS = 8;
static final int COLUMN_SMS_LOCKED = 9;
static final int COLUMN_SMS_ERROR_CODE = 10;
static final int COLUMN_MMS_SUBJECT = 11;
static final int COLUMN_MMS_SUBJECT_CHARSET = 12;
static final int COLUMN_MMS_DATE = 13;
static final int COLUMN_MMS_READ = 14;
static final int COLUMN_MMS_MESSAGE_TYPE = 15;
static final int COLUMN_MMS_MESSAGE_BOX = 16;
static final int COLUMN_MMS_DELIVERY_REPORT = 17;
static final int COLUMN_MMS_READ_REPORT = 18;
static final int COLUMN_MMS_ERROR_TYPE = 19;
static final int COLUMN_MMS_LOCKED = 20;
private static final int CACHE_SIZE = 50;
protected LayoutInflater mInflater;
private final ListView mListView;
private final LinkedHashMap<Long, MessageItem> mMessageItemCache;
private final ColumnsMap mColumnsMap;
private OnDataSetChangedListener mOnDataSetChangedListener;
private Handler mMsgListItemHandler;
private Pattern mHighlight;
private Context mContext;
private HashMap<String, HashSet<MessageListItem>> mAddressToMessageListItems
= new HashMap<String, HashSet<MessageListItem>>();
/**
 * @param context              hosting context
 * @param c                    message cursor (may use a custom projection)
 * @param listView             the list this adapter feeds; used for auto-scroll
 * @param useDefaultColumnsMap true when the cursor was built with {@code PROJECTION}
 * @param highlight            optional pattern highlighted in message bodies; may be null
 */
public MessageListAdapter(
        Context context, Cursor c, ListView listView,
        boolean useDefaultColumnsMap, Pattern highlight) {
    super(context, c, FLAG_REGISTER_CONTENT_OBSERVER);
    mContext = context;
    mHighlight = highlight;
    mInflater = (LayoutInflater) context.getSystemService(
            Context.LAYOUT_INFLATER_SERVICE);
    mListView = listView;
    // Access-ordered LinkedHashMap acting as a small LRU cache of built
    // MessageItems, capped at CACHE_SIZE entries.
    mMessageItemCache = new LinkedHashMap<Long, MessageItem>(
            10, 1.0f, true) {
        @Override
        protected boolean removeEldestEntry(Map.Entry eldest) {
            return size() > CACHE_SIZE;
        }
    };
    if (useDefaultColumnsMap) {
        mColumnsMap = new ColumnsMap();
    } else {
        // Custom projection: resolve column indexes by name.
        mColumnsMap = new ColumnsMap(c);
    }
    mAvatarCache = new AvatarCache();
}
/**
 * Binds one cursor row into a {@code MessageListItem} and maintains the
 * address -> visible-items map consumed by {@code notifyImageLoaded}.
 */
@Override
public void bindView(View view, Context context, Cursor cursor) {
    if (view instanceof MessageListItem) {
        String type = cursor.getString(mColumnsMap.mColumnMsgType);
        long msgId = cursor.getLong(mColumnsMap.mColumnMsgId);

        MessageItem msgItem = getCachedMessageItem(type, msgId, cursor);
        if (msgItem != null) {
            MessageListItem mli = (MessageListItem) view;

            // Remove previous item from mapping (the view is recycled and
            // may still be registered under its old address).
            MessageItem oldMessageItem = mli.getMessageItem();
            if (oldMessageItem != null) {
                String oldAddress = oldMessageItem.mAddress;
                if (oldAddress != null) {
                    HashSet<MessageListItem> set = mAddressToMessageListItems.get(oldAddress);
                    if (set != null) {
                        set.remove(mli);
                    }
                }
            }

            mli.bind(mAvatarCache, msgItem);
            mli.setMsgListItemHandler(mMsgListItemHandler);

            // Add current item to mapping; outgoing messages are keyed by
            // the local number rather than the recipient's address.
            String addr;
            if (!Sms.isOutgoingFolder(msgItem.mBoxId)) {
                addr = msgItem.mAddress;
            } else {
                addr = MessageUtils.getLocalNumber();
            }

            HashSet<MessageListItem> set = mAddressToMessageListItems.get(addr);
            if (set == null) {
                set = new HashSet<MessageListItem>();
                mAddressToMessageListItems.put(addr, set);
            }
            set.add(mli);
        }
    }
}
/**
 * Callbacks for consumers that need to know when the adapter's data set
 * or the underlying content has changed.
 */
public interface OnDataSetChangedListener {
    void onDataSetChanged(MessageListAdapter adapter);
    void onContentChanged(MessageListAdapter adapter);
}
/** Registers the single listener notified of data/content changes; may be null. */
public void setOnDataSetChangedListener(OnDataSetChangedListener l) {
    mOnDataSetChangedListener = l;
}
/** Handler forwarded to every bound MessageListItem for its UI messages. */
public void setMsgListItemHandler(Handler handler) {
    mMsgListItemHandler = handler;
}
/**
 * Re-binds every visible list item registered under the given address so
 * a freshly loaded avatar gets drawn.
 */
public void notifyImageLoaded(String address) {
    HashSet<MessageListItem> items = mAddressToMessageListItems.get(address);
    if (items == null) {
        return;
    }
    for (MessageListItem item : items) {
        item.bind(mAvatarCache, item.getMessageItem());
    }
}
/**
 * Scrolls to the newest message, drops cached items (the cursor contents
 * may have changed underneath the cache), and notifies the listener.
 */
@Override
public void notifyDataSetChanged() {
    super.notifyDataSetChanged();
    if (LOCAL_LOGV) {
        Log.v(TAG, "MessageListAdapter.notifyDataSetChanged().");
    }

    mListView.setSelection(mListView.getCount());
    mMessageItemCache.clear();

    if (mOnDataSetChangedListener != null) {
        mOnDataSetChangedListener.onDataSetChanged(this);
    }
}
/**
 * Forwards provider-change notifications to the listener, but only while
 * the adapter's cursor is still live.
 */
@Override
protected void onContentChanged() {
    Cursor cursor = getCursor();
    if (cursor == null || cursor.isClosed()) {
        return;
    }
    if (mOnDataSetChangedListener != null) {
        mOnDataSetChangedListener.onContentChanged(this);
    }
}
/** Inflates an unbound message row; {@code bindView} fills it in. */
@Override
public View newView(Context context, Cursor cursor, ViewGroup parent) {
    return mInflater.inflate(R.layout.message_list_item, parent, false);
}
/**
 * Returns the MessageItem for (type, msgId), building and caching it from
 * the cursor row on a miss.  Returns null when the item cannot be built
 * (invalid cursor or MmsException).
 */
public MessageItem getCachedMessageItem(String type, long msgId, Cursor c) {
    MessageItem item = mMessageItemCache.get(getKey(type, msgId));
    // Build only on a cache miss, and only from an open, positioned cursor.
    if (item == null && c != null && isCursorValid(c)) {
        try {
            item = new MessageItem(mContext, type, c, mColumnsMap, mHighlight);
            // Key on the item's own type/id as read from the cursor row.
            mMessageItemCache.put(getKey(item.mType, item.mMsgId), item);
        } catch (MmsException e) {
            Log.e(TAG, "getCachedMessageItem: ", e);
        }
    }
    return item;
}
/**
 * Reports whether the cursor can be read from right now: it must be open
 * and positioned on an actual row.
 */
private boolean isCursorValid(Cursor cursor) {
    // Short-circuit order matters: no position check runs on a closed cursor.
    return !(cursor.isClosed() || cursor.isBeforeFirst() || cursor.isAfterLast());
}
/**
 * Builds a cache key for a message.  SMS and MMS ids come from different
 * tables and can collide, so MMS ids are negated to keep the combined
 * keyspace disjoint.
 */
private static long getKey(String type, long id) {
    return type.equals("mms") ? -id : id;
}
public static class ColumnsMap {
public int mColumnMsgType;
public int mColumnMsgId;
public int mColumnSmsAddress;
public int mColumnSmsBody;
public int mColumnSmsDate;
public int mColumnSmsRead;
public int mColumnSmsType;
public int mColumnSmsStatus;
public int mColumnSmsLocked;
public int mColumnSmsErrorCode;
public int mColumnMmsSubject;
public int mColumnMmsSubjectCharset;
public int mColumnMmsDate;
public int mColumnMmsRead;
public int mColumnMmsMessageType;
public int mColumnMmsMessageBox;
public int mColumnMmsDeliveryReport;
public int mColumnMmsReadReport;
public int mColumnMmsErrorType;
public int mColumnMmsLocked;
public ColumnsMap() {
mColumnMsgType = COLUMN_MSG_TYPE;
mColumnMsgId = COLUMN_ID;
mColumnSmsAddress = COLUMN_SMS_ADDRESS;
mColumnSmsBody = COLUMN_SMS_BODY;
mColumnSmsDate = COLUMN_SMS_DATE;
mColumnSmsType = COLUMN_SMS_TYPE;
mColumnSmsStatus = COLUMN_SMS_STATUS;
mColumnSmsLocked = COLUMN_SMS_LOCKED;
mColumnSmsErrorCode = COLUMN_SMS_ERROR_CODE;
mColumnMmsSubject = COLUMN_MMS_SUBJECT;
mColumnMmsSubjectCharset = COLUMN_MMS_SUBJECT_CHARSET;
mColumnMmsMessageType = COLUMN_MMS_MESSAGE_TYPE;
mColumnMmsMessageBox = COLUMN_MMS_MESSAGE_BOX;
mColumnMmsDeliveryReport = COLUMN_MMS_DELIVERY_REPORT;
mColumnMmsReadReport = COLUMN_MMS_READ_REPORT;
mColumnMmsErrorType = COLUMN_MMS_ERROR_TYPE;
mColumnMmsLocked = COLUMN_MMS_LOCKED;
}
public ColumnsMap(Cursor cursor) {
// Ignore all 'not found' exceptions since the custom columns
// may be just a subset of the default columns.
try {
mColumnMsgType = cursor.getColumnIndexOrThrow(
MmsSms.TYPE_DISCRIMINATOR_COLUMN);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMsgId = cursor.getColumnIndexOrThrow(BaseColumns._ID);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnSmsAddress = cursor.getColumnIndexOrThrow(Sms.ADDRESS);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnSmsBody = cursor.getColumnIndexOrThrow(Sms.BODY);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnSmsDate = cursor.getColumnIndexOrThrow(Sms.DATE);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnSmsType = cursor.getColumnIndexOrThrow(Sms.TYPE);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnSmsStatus = cursor.getColumnIndexOrThrow(Sms.STATUS);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnSmsLocked = cursor.getColumnIndexOrThrow(Sms.LOCKED);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnSmsErrorCode = cursor.getColumnIndexOrThrow(Sms.ERROR_CODE);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMmsSubject = cursor.getColumnIndexOrThrow(Mms.SUBJECT);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMmsSubjectCharset = cursor.getColumnIndexOrThrow(Mms.SUBJECT_CHARSET);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMmsMessageType = cursor.getColumnIndexOrThrow(Mms.MESSAGE_TYPE);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMmsMessageBox = cursor.getColumnIndexOrThrow(Mms.MESSAGE_BOX);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMmsDeliveryReport = cursor.getColumnIndexOrThrow(Mms.DELIVERY_REPORT);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMmsReadReport = cursor.getColumnIndexOrThrow(Mms.READ_REPORT);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMmsErrorType = cursor.getColumnIndexOrThrow(PendingMessages.ERROR_TYPE);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
try {
mColumnMmsLocked = cursor.getColumnIndexOrThrow(Mms.LOCKED);
} catch (IllegalArgumentException e) {
Log.w("colsMap", e.getMessage());
}
}
}
private AvatarCache mAvatarCache;
/*
* Track avatars for each of the members of in the group chat.
*/
class AvatarCache {
private static final int TOKEN_PHONE_LOOKUP = 101;
private static final int TOKEN_EMAIL_LOOKUP = 102;
private static final int TOKEN_CONTACT_INFO = 201;
private static final int TOKEN_PHOTO_DATA = 301;
//Projection used for the summary info in the header.
private final String[] COLUMNS = new String[] {
Contacts._ID,
Contacts.PHOTO_ID,
// Other fields which we might want/need in the future (for example)
// Contacts.LOOKUP_KEY,
// Contacts.DISPLAY_NAME,
// Contacts.STARRED,
// Contacts.CONTACT_PRESENCE,
// Contacts.CONTACT_STATUS,
// Contacts.CONTACT_STATUS_TIMESTAMP,
// Contacts.CONTACT_STATUS_RES_PACKAGE,
// Contacts.CONTACT_STATUS_LABEL,
};
private final int PHOTO_ID = 1;
private final String[] PHONE_LOOKUP_PROJECTION = new String[] {
PhoneLookup._ID,
PhoneLookup.LOOKUP_KEY,
};
private static final int PHONE_LOOKUP_CONTACT_ID_COLUMN_INDEX = 0;
private static final int PHONE_LOOKUP_CONTACT_LOOKUP_KEY_COLUMN_INDEX = 1;
private final String[] EMAIL_LOOKUP_PROJECTION = new String[] {
RawContacts.CONTACT_ID,
Contacts.LOOKUP_KEY,
};
private static final int EMAIL_LOOKUP_CONTACT_ID_COLUMN_INDEX = 0;
private static final int EMAIL_LOOKUP_CONTACT_LOOKUP_KEY_COLUMN_INDEX = 1;
/*
* Map from mAddress to a blob of data which contains the contact id
* and the avatar.
*/
HashMap<String, ContactData> mImageCache = new HashMap<String, ContactData>();
public class ContactData {
private String mAddress;
private long mContactId;
private Uri mContactUri;
private Drawable mPhoto;
ContactData(String address) {
mAddress = address;
}
public Drawable getAvatar() {
return mPhoto;
}
public Uri getContactUri() {
return mContactUri;
}
private boolean startInitialQuery() {
if (Mms.isPhoneNumber(mAddress)) {
mQueryHandler.startQuery(
TOKEN_PHONE_LOOKUP,
this,
Uri.withAppendedPath(PhoneLookup.CONTENT_FILTER_URI, Uri.encode(mAddress)),
PHONE_LOOKUP_PROJECTION,
null,
null,
null);
return true;
} else if (Mms.isEmailAddress(mAddress)) {
mQueryHandler.startQuery(
TOKEN_EMAIL_LOOKUP,
this,
Uri.withAppendedPath(Email.CONTENT_LOOKUP_URI, Uri.encode(mAddress)),
EMAIL_LOOKUP_PROJECTION,
null,
null,
null);
return true;
} else {
return false;
}
}
/*
* Once we have the photo data load it into a drawable.
*/
private boolean onPhotoDataLoaded(Cursor c) {
if (c == null || !c.moveToFirst()) return false;
try {
byte[] photoData = c.getBlob(0);
Bitmap b = BitmapFactory.decodeByteArray(photoData, 0, photoData.length, null);
mPhoto = new BitmapDrawable(mContext.getResources(), b);
return true;
} catch (Exception ex) {
return false;
}
}
/*
* Once we have the contact info loaded take the photo id and query
* for the photo data.
*/
private boolean onContactInfoLoaded(Cursor c) {
if (c == null || !c.moveToFirst()) return false;
long photoId = c.getLong(PHOTO_ID);
Uri contactUri = ContentUris.withAppendedId(Data.CONTENT_URI, photoId);
mQueryHandler.startQuery(
TOKEN_PHOTO_DATA,
this,
contactUri,
new String[] { Photo.PHOTO },
null,
null,
null);
return true;
}
/*
* Once we have the contact id loaded start the query for the
* contact information (which will give us the photo id).
*/
private boolean onContactIdLoaded(Cursor c, int contactIdColumn, int lookupKeyColumn) {
if (c == null || !c.moveToFirst()) return false;
mContactId = c.getLong(contactIdColumn);
String lookupKey = c.getString(lookupKeyColumn);
mContactUri = Contacts.getLookupUri(mContactId, lookupKey);
mQueryHandler.startQuery(
TOKEN_CONTACT_INFO,
this,
mContactUri,
COLUMNS,
null,
null,
null);
return true;
}
/*
* If for whatever reason we can't get the photo load teh
* default avatar. NOTE that fasttrack tries to get fancy
* with various random images (upside down, etc.) we're not
* doing that here.
*/
private void loadDefaultAvatar() {
try {
if (mDefaultAvatarDrawable == null) {
Bitmap b = BitmapFactory.decodeResource(mContext.getResources(),
R.drawable.ic_contact_picture);
mDefaultAvatarDrawable = new BitmapDrawable(mContext.getResources(), b);
}
mPhoto = mDefaultAvatarDrawable;
} catch (java.lang.OutOfMemoryError e) {
Log.e(TAG, "loadDefaultAvatar: out of memory: ", e);
}
}
};
Drawable mDefaultAvatarDrawable = null;
AsyncQueryHandler mQueryHandler = new AsyncQueryHandler(mContext.getContentResolver()) {
@Override
protected void onQueryComplete(int token, Object cookieObject, Cursor cursor) {
super.onQueryComplete(token, cookieObject, cursor);
ContactData cookie = (ContactData) cookieObject;
switch (token) {
case TOKEN_PHONE_LOOKUP: {
if (!cookie.onContactIdLoaded(
cursor,
PHONE_LOOKUP_CONTACT_ID_COLUMN_INDEX,
PHONE_LOOKUP_CONTACT_LOOKUP_KEY_COLUMN_INDEX)) {
cookie.loadDefaultAvatar();
}
break;
}
case TOKEN_EMAIL_LOOKUP: {
if (!cookie.onContactIdLoaded(
cursor,
EMAIL_LOOKUP_CONTACT_ID_COLUMN_INDEX,
EMAIL_LOOKUP_CONTACT_LOOKUP_KEY_COLUMN_INDEX)) {
cookie.loadDefaultAvatar();
}
break;
}
case TOKEN_CONTACT_INFO: {
if (!cookie.onContactInfoLoaded(cursor)) {
cookie.loadDefaultAvatar();
}
break;
}
case TOKEN_PHOTO_DATA: {
if (!cookie.onPhotoDataLoaded(cursor)) {
cookie.loadDefaultAvatar();
} else {
MessageListAdapter.this.notifyImageLoaded(cookie.mAddress);
}
break;
}
default:
break;
}
}
};
public ContactData get(final String address) {
if (mImageCache.containsKey(address)) {
return mImageCache.get(address);
} else {
// Create the ContactData object and put it into the hashtable
// so that any subsequent requests for this same avatar do not kick
// off another query.
ContactData cookie = new ContactData(address);
mImageCache.put(address, cookie);
cookie.startInitialQuery();
cookie.loadDefaultAvatar();
return cookie;
}
}
public AvatarCache() {
}
};
}
| Adopt LruCache in MMS.
Change-Id: I644e0928f9e93d885713c037a5beef067cf5c2d3
http://b/3184897
| src/com/android/mms/ui/MessageListAdapter.java | Adopt LruCache in MMS. |
|
Java | apache-2.0 | 17537496e5b5c7fb6218648609d84760e1c3bade | 0 | babble/babble,babble/babble,babble/babble,babble/babble,babble/babble,babble/babble | // JxpServlet.java
package ed.appserver.jxp;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import ed.js.*;
import ed.util.*;
import ed.js.engine.*;
import ed.js.func.*;
import ed.appserver.*;
import ed.net.httpserver.*;
public class JxpServlet {
JxpServlet( AppContext context , JxpSource source , JSFunction func ){
_context = context;
_source = source;
_theFunction = func;
}
public void handle( HttpRequest request , HttpResponse response , AppRequest ar ){
final Scope scope = ar.getScope();
if ( scope.get( "request" ) == null )
scope.put( "request" , request , true );
if ( scope.get( "response" ) == null )
scope.put( "response" , response , true );
MyWriter writer = new MyWriter( response.getWriter() , getStaticPrefix( request , ar ) , ar.getContext() , ar);
scope.put( "print" , writer , true );
try {
_theFunction.call( scope );
if ( writer._writer.hasSpot() ){
writer._writer.backToSpot();
if ( ar.getContext() != null )
for ( Object foo : ar.getContext().getGlobalHead() ) {
writer.print( foo.toString() );
writer.print( "\n" );
}
if ( ar != null )
for ( Object foo : ar.getHeadToPrint() ) {
writer.print( foo.toString() );
writer.print( "\n" );
}
writer._writer.backToEnd();
}
else {
if ( ( ar.getContext() != null && ar.getContext().getGlobalHead().size() > 0 ) ||
( ar != null && ar.getHeadToPrint().size() > 0 ) ){
// this is interesting
// maybe i should do it only for html files
// so i have to know that
//throw new RuntimeException( "have head to print but no </head>" );
}
}
}
catch ( RuntimeException re ){
_source.fix( re );
_context.fix( re );
throw re;
}
}
String getStaticPrefix( HttpRequest request , AppRequest ar ){
String host = request.getHost();
if ( host == null )
return "";
if ( host.indexOf( "." ) < 0 )
return "";
if ( request.getPort() > 0 )
return "";
String prefix= "http://static";
if ( host.indexOf( "local." ) >= 0 )
prefix += "-local";
prefix += ".10gen.com/" + host;
return prefix;
}
public static class MyWriter extends JSFunctionCalls1 {
public MyWriter( JxpWriter writer , String cdnPrefix , AppContext context , AppRequest ar ){
_writer = writer;
_cdnPrefix = cdnPrefix;
_context = context;
_request = ar;
set( "setFormObject" , new JSFunctionCalls1(){
public Object call( Scope scope , Object o , Object extra[] ){
if ( o == null ){
_formInput = null;
return null;
}
if ( ! ( o instanceof JSObject ) )
throw new RuntimeException( "must be a JSObject" );
_formInput = (JSObject)o;
_formInputPrefix = null;
if ( extra != null && extra.length > 0 )
_formInputPrefix = extra[0].toString();
return o;
}
} );
}
public Object call( Scope scope , Object o , Object extra[] ){
if ( o == null )
print( "null" );
else
print( o.toString() );
return null;
}
public void print( String s ){
//System.out.println( "***\n" + s + "\n---" );
if ( _extra.length() > 0 ){
_extra.append( s );
s = _extra.toString();
_extra.setLength( 0 );
}
_matcher.reset( s );
if ( ! _matcher.find() ){
_writer.print( s );
return;
}
_writer.print( s.substring( 0 , _matcher.start() ) );
s = s.substring( _matcher.start() );
int end = endOfTag( s );
if ( end == -1 ){
_extra.append( s );
return;
}
String wholeTag = s.substring( 0 , end + 1 );
if ( ! printTag( _matcher.group(1) , wholeTag ) )
_writer.print( wholeTag );
print( s.substring( end + 1 ) );
}
/**
* @return true if i printed tag so you should not
*/
boolean printTag( String tag , String s ){
if ( tag.equalsIgnoreCase( "/head" ) && ! _writer.hasSpot() ){
_writer.saveSpot();
return false;
}
{ // CDN stuff
String srcName = null;
if ( tag.equalsIgnoreCase( "img" ) ||
tag.equalsIgnoreCase( "script" ) )
srcName = "src";
else if ( tag.equalsIgnoreCase( "link" ) )
srcName = "href";
if ( srcName != null ){
s = s.substring( 2 + tag.length() );
// TODO: cache pattern or something
Matcher m = Pattern.compile( srcName + " *= *['\"](.+?)['\"]" , Pattern.CASE_INSENSITIVE ).matcher( s );
if ( ! m.find() )
return false;
_writer.print( "<" );
_writer.print( tag );
_writer.print( " " );
_writer.print( s.substring( 0 , m.start(1) ) );
String src = m.group(1);
printSRC( src );
_writer.print( s.substring( m.end(1) ) );
return true;
}
}
if ( _formInput != null && tag.equalsIgnoreCase( "input" ) ){
Matcher m = Pattern.compile( "\\bname *= *['\"](.+?)[\"']" ).matcher( s );
if ( ! m.find() )
return false;
String name = m.group(1);
if ( name.length() == 0 )
return false;
if ( _formInputPrefix != null )
name = name.substring( _formInputPrefix.length() );
Object val = _formInput.get( name );
if ( val == null )
return false;
if ( s.toString().matches( "value *=" ) )
return false;
_writer.print( s.substring( 0 , s.length() - 1 ) );
_writer.print( " value=\"" );
_writer.print( val.toString() );
_writer.print( "\" >" );
return true;
}
return false;
}
void printSRC( String src ){
if ( src == null || src.length() == 0 )
return;
if ( src.startsWith( "NOCDN/" ) ){
_writer.print( src.substring( 5 ) );
return;
}
if ( src.startsWith( "CDN/" ) ){
_writer.print( _cdnPrefix );
_writer.print( src.substring( 3 ) );
return;
}
if ( ! src.startsWith( "/" ) ){
_writer.print( src );
return;
}
String uri = src;
int questionIndex = src.indexOf( "?" );
if ( questionIndex >= 0 )
uri = uri.substring( 0 , questionIndex );
if ( _context != null ){
String cdnTags = null;
if ( uri.equals( "/~f" ) ){
cdnTags = ""; // TODO: should i put a version or timestamp here?
}
else {
File f = _context.getFile( uri );
if ( f.exists() ){
cdnTags = "lm=" + f.lastModified();
}
}
if ( cdnTags != null )
_writer.print( _cdnPrefix );
_writer.print( src );
if ( cdnTags != null && cdnTags.length() > 0 ){
if ( questionIndex < 0 )
_writer.print( "?" );
else
_writer.print( "&" );
_writer.print( cdnTags );
}
return;
}
_writer.print( src );
}
/**
 * Returns the index of the '>' that terminates the tag starting at s[0],
 * skipping over quoted attribute values; -1 when the tag is not complete
 * within s (caller buffers and retries).
 */
int endOfTag( String s ){
    for ( int i=0; i<s.length(); i++ ){
        char c = s.charAt( i );
        if ( c == '>' )
            return i;
        if ( c == '"' || c == '\'' ){
            // BUG FIX: the scan for the closing quote must begin AFTER the
            // opening quote. The old loop started on the opening quote itself,
            // matched it immediately, and therefore never skipped anything —
            // a '>' inside a quoted attribute was treated as the tag end.
            for ( i++ ; i<s.length(); i++ )
                if ( c == s.charAt( i ) )
                    break;
        }
    }
    return -1;
}
// matches an opening or closing tag name at the start of markup: "<img ", "</head>", ...
static final Pattern _tagPattern = Pattern.compile( "<(/?\\w+)[ >]" , Pattern.CASE_INSENSITIVE );
// reusable matcher for _tagPattern; implies a MyWriter instance is single-threaded
final Matcher _matcher = _tagPattern.matcher("");
// holds a tag fragment that was split across print() calls until the rest arrives
final StringBuilder _extra = new StringBuilder();
// underlying response writer all output ultimately goes to
final JxpWriter _writer;
// host prefix prepended to CDN-served resource URLs (may be "")
final String _cdnPrefix;
// app context used to resolve static files for cache-busting; may be null
final AppContext _context;
// originating request; currently unused by the rewriting logic
final AppRequest _request;
// object bound via setFormObject(); when non-null, <input> values are filled from it
JSObject _formInput = null;
// prefix stripped from input names before lookup in _formInput; may be null
String _formInputPrefix = null;
}
final AppContext _context;
final JxpSource _source;
final JSFunction _theFunction;
}
| src/main/ed/appserver/jxp/JxpServlet.java | // JxpServlet.java
package ed.appserver.jxp;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import ed.js.*;
import ed.util.*;
import ed.js.engine.*;
import ed.js.func.*;
import ed.appserver.*;
import ed.net.httpserver.*;
/**
 * Servlet wrapper around one compiled JXP page function. Runs the function
 * against the request scope and post-processes its HTML output (CDN URL
 * rewriting, head injection, form value filling) through MyWriter.
 */
public class JxpServlet {

    JxpServlet( AppContext context , JxpSource source , JSFunction func ){
        _context = context;
        _source = source;
        _theFunction = func;
    }

    /**
     * Executes the page function. If the writer remembered a </head> position
     * while streaming, jumps back there afterwards and injects any queued head
     * content from the app context and the request. Runtime errors are routed
     * through source/context fix-up before being rethrown.
     */
    public void handle( HttpRequest request , HttpResponse response , AppRequest ar ){
        final Scope scope = ar.getScope();

        if ( scope.get( "request" ) == null )
            scope.put( "request" , request , true );
        if ( scope.get( "response" ) == null )
            scope.put( "response" , response , true );

        MyWriter writer = new MyWriter( response.getWriter() , getStaticPrefix( request , ar ) , ar.getContext() , ar );
        scope.put( "print" , writer , true );

        try {
            _theFunction.call( scope );

            if ( writer._writer.hasSpot() ){
                writer._writer.backToSpot();

                // ar was already dereferenced above, so the old "ar != null"
                // guards were dead code and have been dropped
                if ( ar.getContext() != null )
                    for ( Object foo : ar.getContext().getGlobalHead() )
                        writer.print( foo.toString() );
                for ( Object foo : ar.getHeadToPrint() )
                    writer.print( foo.toString() );

                writer._writer.backToEnd();
            }
            else {
                if ( ( ar.getContext() != null && ar.getContext().getGlobalHead().size() > 0 ) ||
                     ar.getHeadToPrint().size() > 0 ){
                    // this is interesting
                    // maybe i should do it only for html files
                    // so i have to know that
                    //throw new RuntimeException( "have head to print but no </head>" );
                }
            }
        }
        catch ( RuntimeException re ){
            _source.fix( re );
            _context.fix( re );
            throw re;
        }
    }

    /**
     * Host prefix for static resources, e.g. "http://static.10gen.com/<host>".
     * Returns "" (no CDN) for null/dotless hosts and non-default ports.
     */
    String getStaticPrefix( HttpRequest request , AppRequest ar ){
        String host = request.getHost();
        if ( host == null )
            return "";
        if ( host.indexOf( "." ) < 0 )
            return "";
        if ( request.getPort() > 0 )
            return "";

        String prefix = "http://static";
        if ( host.indexOf( "local." ) >= 0 )
            prefix += "-local";
        prefix += ".10gen.com/" + host;
        return prefix;
    }

    /**
     * Writer exposed to JXP as print(). Scans output for tags so it can
     * rewrite static resource URLs onto the CDN, remember the </head>
     * position for later head injection, and fill <input> values from a
     * bound form object. Not thread-safe (one instance per request).
     */
    public static class MyWriter extends JSFunctionCalls1 {

        public MyWriter( JxpWriter writer , String cdnPrefix , AppContext context , AppRequest ar ){
            _writer = writer;
            _cdnPrefix = cdnPrefix;
            _context = context;
            _request = ar;

            // JS hook: print.setFormObject( obj [, namePrefix] )
            set( "setFormObject" , new JSFunctionCalls1(){
                    public Object call( Scope scope , Object o , Object extra[] ){
                        if ( o == null ){
                            _formInput = null;
                            return null;
                        }

                        if ( ! ( o instanceof JSObject ) )
                            throw new RuntimeException( "must be a JSObject" );

                        _formInput = (JSObject)o;
                        _formInputPrefix = null;
                        if ( extra != null && extra.length > 0 )
                            _formInputPrefix = extra[0].toString();
                        return o;
                    }
                } );
        }

        public Object call( Scope scope , Object o , Object extra[] ){
            print( o == null ? "null" : o.toString() );
            return null;
        }

        /**
         * Streams s to the response, intercepting tags. A tag split across
         * print() calls is buffered in _extra until the remainder arrives.
         */
        public void print( String s ){
            if ( _extra.length() > 0 ){
                _extra.append( s );
                s = _extra.toString();
                _extra.setLength( 0 );
            }

            _matcher.reset( s );
            if ( ! _matcher.find() ){
                _writer.print( s );
                return;
            }

            _writer.print( s.substring( 0 , _matcher.start() ) );
            s = s.substring( _matcher.start() );

            int end = endOfTag( s );
            if ( end == -1 ){
                // incomplete tag; wait for the next print() call
                _extra.append( s );
                return;
            }

            String wholeTag = s.substring( 0 , end + 1 );
            if ( ! printTag( _matcher.group(1) , wholeTag ) )
                _writer.print( wholeTag );

            print( s.substring( end + 1 ) );
        }

        /**
         * @return true if i printed tag so you should not
         */
        boolean printTag( String tag , String s ){
            // remember where </head> is so handle() can inject head content later
            if ( tag.equalsIgnoreCase( "/head" ) && ! _writer.hasSpot() ){
                _writer.saveSpot();
                return false;
            }

            { // CDN stuff
                // patterns are now cached statics (resolves the old
                // "TODO: cache pattern or something")
                Pattern srcPattern = null;
                if ( tag.equalsIgnoreCase( "img" ) ||
                     tag.equalsIgnoreCase( "script" ) )
                    srcPattern = _srcAttribute;
                else if ( tag.equalsIgnoreCase( "link" ) )
                    srcPattern = _hrefAttribute;

                if ( srcPattern != null ){
                    s = s.substring( 2 + tag.length() ); // strip "<tag "
                    Matcher m = srcPattern.matcher( s );
                    if ( ! m.find() )
                        return false;

                    _writer.print( "<" );
                    _writer.print( tag );
                    _writer.print( " " );
                    _writer.print( s.substring( 0 , m.start(1) ) );
                    printSRC( m.group(1) );
                    _writer.print( s.substring( m.end(1) ) );
                    return true;
                }
            }

            if ( _formInput != null && tag.equalsIgnoreCase( "input" ) ){
                Matcher m = _nameAttribute.matcher( s );
                if ( ! m.find() )
                    return false;

                String name = m.group(1);
                if ( name.length() == 0 )
                    return false;
                if ( _formInputPrefix != null )
                    name = name.substring( _formInputPrefix.length() );

                Object val = _formInput.get( name );
                if ( val == null )
                    return false;

                // BUG FIX: was s.matches( "value *=" ), which requires the
                // ENTIRE tag to equal "value =" and so never detected an
                // existing value attribute — a second one was appended.
                if ( _valueAttribute.matcher( s ).find() )
                    return false;

                _writer.print( s.substring( 0 , s.length() - 1 ) ); // tag minus '>'
                _writer.print( " value=\"" );
                _writer.print( val.toString() );
                _writer.print( "\" >" );
                return true;
            }

            return false;
        }

        /**
         * Emits a resource URL, rewriting it for the CDN where appropriate.
         * "NOCDN/" opts out, "CDN/" opts in, relative paths pass through,
         * absolute paths get the prefix plus a last-modified cache-buster
         * when the backing file exists.
         */
        void printSRC( String src ){
            if ( src == null || src.length() == 0 )
                return;

            if ( src.startsWith( "NOCDN/" ) ){
                _writer.print( src.substring( 5 ) );
                return;
            }

            if ( src.startsWith( "CDN/" ) ){
                _writer.print( _cdnPrefix );
                _writer.print( src.substring( 3 ) );
                return;
            }

            if ( ! src.startsWith( "/" ) ){
                _writer.print( src );
                return;
            }

            String uri = src;
            int questionIndex = src.indexOf( "?" );
            if ( questionIndex >= 0 )
                uri = uri.substring( 0 , questionIndex );

            if ( _context != null ){
                String cdnTags = null;
                if ( uri.equals( "/~f" ) ){
                    cdnTags = ""; // TODO: should i put a version or timestamp here?
                }
                else {
                    File f = _context.getFile( uri );
                    if ( f.exists() ){
                        cdnTags = "lm=" + f.lastModified();
                    }
                }

                if ( cdnTags != null )
                    _writer.print( _cdnPrefix );
                _writer.print( src );

                if ( cdnTags != null && cdnTags.length() > 0 ){
                    _writer.print( questionIndex < 0 ? "?" : "&" );
                    _writer.print( cdnTags );
                }
                return;
            }

            _writer.print( src );
        }

        /**
         * Index of the '>' ending the tag that starts at s[0], skipping quoted
         * attribute values; -1 if the tag is not complete in s.
         */
        int endOfTag( String s ){
            for ( int i=0; i<s.length(); i++ ){
                char c = s.charAt( i );
                if ( c == '>' )
                    return i;
                if ( c == '"' || c == '\'' ){
                    // BUG FIX: start AFTER the opening quote — the old loop
                    // started on it, matched immediately, and skipped nothing,
                    // so a quoted '>' ended the tag prematurely.
                    for ( i++ ; i<s.length(); i++ )
                        if ( c == s.charAt( i ) )
                            break;
                }
            }
            return -1;
        }

        static final Pattern _tagPattern = Pattern.compile( "<(/?\\w+)[ >]" , Pattern.CASE_INSENSITIVE );
        // attribute patterns, hoisted out of printTag (old TODO: cache pattern)
        static final Pattern _srcAttribute   = Pattern.compile( "src *= *['\"](.+?)['\"]" , Pattern.CASE_INSENSITIVE );
        static final Pattern _hrefAttribute  = Pattern.compile( "href *= *['\"](.+?)['\"]" , Pattern.CASE_INSENSITIVE );
        static final Pattern _nameAttribute  = Pattern.compile( "\\bname *= *['\"](.+?)[\"']" );
        static final Pattern _valueAttribute = Pattern.compile( "\\bvalue *=" );

        final Matcher _matcher = _tagPattern.matcher("");
        final StringBuilder _extra = new StringBuilder();

        final JxpWriter _writer;
        final String _cdnPrefix;
        final AppContext _context;
        final AppRequest _request;

        JSObject _formInput = null;
        String _formInputPrefix = null;
    }

    final AppContext _context;
    final JxpSource _source;
    final JSFunction _theFunction;
}
| added new lines to head.push() jxp method
| src/main/ed/appserver/jxp/JxpServlet.java | added new lines to head.push() jxp method |
|
Java | apache-2.0 | 57b6254a1a113e889c93cb76ab408d6d9c8b83aa | 0 | StoryMaker/storypath,scalio/storypath,scalio/storypath,n8fr8/storypath,n8fr8/storypath,scalio/storypath,n8fr8/storypath,StoryMaker/storypath,StoryMaker/storypath | package scal.io.liger;
import android.content.Context;
import android.content.Context;
import android.content.res.AssetManager;
import android.os.Environment;
import android.util.Log;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Date;
import scal.io.liger.model.StoryPath;
import scal.io.liger.model.StoryPathLibrary;
* Created by mnbogner on 7/14/14.
*/
public class JsonHelper {
private static final String TAG = "JsonHelper";
private static final String LIGER_DIR = "Liger";
private static File selectedJSONFile = null;
private static String selectedJSONPath = null;
private static ArrayList<File> jsonFileList = null;
private static ArrayList<String> jsonPathList = null;
private static String sdLigerFilePath = null;
private static String language = null; // TEMP
public static String loadJSONFromPath(String jsonPath) {
String jsonString = "";
String sdCardState = Environment.getExternalStorageState();
String localizedFilePath = jsonPath;
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (jsonPath.lastIndexOf("-" + language + jsonPath.substring(jsonPath.lastIndexOf("."))) < 0) {
localizedFilePath = jsonPath.substring(0, jsonPath.lastIndexOf(".")) + "-" + language + jsonPath.substring(jsonPath.lastIndexOf("."));
}
Log.d("LANGUAGE", "loadJSONFromPath() - LOCALIZED PATH: " + localizedFilePath);
}
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
try {
File jsonFile = new File(jsonPath);
InputStream jsonStream = new FileInputStream(jsonFile);
File localizedFile = new File(localizedFilePath);
// if there is a file at the localized path, use that instead
if ((localizedFile.exists()) && (!jsonPath.equals(localizedFilePath))) {
Log.d("LANGUAGE", "loadJSONFromPath() - USING LOCALIZED FILE: " + localizedFilePath);
jsonStream = new FileInputStream(localizedFile);
}
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
jsonString = new String(buffer);
} catch (IOException e) {
Log.e(TAG, "READING JSON FILE FROM SD CARD FAILED: " + e.getMessage());
}
} else {
System.err.println("SD CARD NOT FOUND");
}
return jsonString;
}
public static String loadJSON() {
return loadJSON(selectedJSONFile);
}
public static String loadJSON(File file) {
if(null == file) {
return null;
}
String jsonString = "";
String sdCardState = Environment.getExternalStorageState();
String localizedFilePath = file.getPath();
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (file.getPath().lastIndexOf("-" + language + file.getPath().substring(file.getPath().lastIndexOf("."))) < 0) {
localizedFilePath = file.getPath().substring(0, file.getPath().lastIndexOf(".")) + "-" + language + file.getPath().substring(file.getPath().lastIndexOf("."));
}
Log.d("LANGUAGE", "loadJSON() - LOCALIZED PATH: " + localizedFilePath);
}
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
try {
InputStream jsonStream = new FileInputStream(file);
File localizedFile = new File(localizedFilePath);
// if there is a file at the localized path, use that instead
if ((localizedFile.exists()) && (!file.getPath().equals(localizedFilePath))) {
Log.d("LANGUAGE", "loadJSON() - USING LOCALIZED FILE: " + localizedFilePath);
jsonStream = new FileInputStream(localizedFile);
}
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
jsonString = new String(buffer);
} catch (IOException e) {
Log.e(TAG, "READING JSON FILE FRON SD CARD FAILED: " + e.getMessage());
}
} else {
Log.e(TAG, "SD CARD NOT FOUND");
}
return jsonString;
}
// NEW
public static String loadJSONFromZip(Context context) {
return loadJSONFromZip(selectedJSONPath, context);
}
public static String loadJSONFromZip(String jsonFilePath, Context context) {
Log.d(" *** TESTING *** ", "NEW METHOD loadJSONFromZip CALLED FOR " + jsonFilePath);
if(null == jsonFilePath) {
return null;
}
String jsonString = "";
String localizedFilePath = jsonFilePath;
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
}
Log.d("LANGUAGE", "loadJSONFromZip() - LOCALIZED PATH: " + localizedFilePath);
}
// removed sd card check as expansion file should not be located on sd card
try {
InputStream jsonStream = ZipHelper.getFileInputStream(localizedFilePath, context);
// if there is no result with the localized path, retry with default path
if ((jsonStream == null) && (!jsonFilePath.equals(localizedFilePath))) {
jsonStream = ZipHelper.getFileInputStream(jsonFilePath, context);
} else {
Log.d("LANGUAGE", "loadJSONFromZip() - USING LOCALIZED FILE: " + localizedFilePath);
}
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
jsonString = new String(buffer);
} catch (IOException ioe) {
Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: " + ioe.getMessage());
}
return jsonString;
}
public static String getSdLigerFilePath() {
return sdLigerFilePath;
}
private static void copyFilesToSdCard(Context context, String basePath) {
copyFileOrDir(context, basePath, ""); // copy all files in assets folder in my project
}
private static void copyFileOrDir(Context context, String basePath, String path) {
AssetManager assetManager = context.getAssets();
String assets[] = null;
try {
Log.i("tag", "copyFileOrDir() "+path);
assets = assetManager.list(path);
if (assets.length == 0) {
copyFile(context, basePath, path);
} else {
String fullPath = basePath + path;
Log.i("tag", "path="+fullPath);
File dir = new File(fullPath);
if (!dir.exists() && !path.startsWith("images") && !path.startsWith("sounds") && !path.startsWith("webkit"))
if (!dir.mkdirs())
Log.i("tag", "could not create dir "+fullPath);
for (int i = 0; i < assets.length; ++i) {
String p;
if (path.equals(""))
p = "";
else
p = path + "/";
if (!path.startsWith("images") && !path.startsWith("sounds") && !path.startsWith("webkit"))
copyFileOrDir(context, basePath, p + assets[i]);
}
}
} catch (IOException ex) {
Log.e("tag", "I/O Exception", ex);
}
}
private static void copyFile(Context context, String basePath, String filename) {
AssetManager assetManager = context.getAssets();
InputStream in = null;
OutputStream out = null;
String newFileName = null;
try {
Log.i("tag", "copyFile() "+filename);
in = assetManager.open(filename);
if (filename.endsWith(".jpg")) // extension was added to avoid compression on APK file
newFileName = basePath + filename.substring(0, filename.length()-4);
else
newFileName = basePath + filename;
out = new FileOutputStream(newFileName);
byte[] buffer = new byte[1024];
int read;
while ((read = in.read(buffer)) != -1) {
out.write(buffer, 0, read);
}
in.close();
in = null;
out.flush();
out.close();
out = null;
} catch (Exception e) {
Log.e("tag", "Exception in copyFile() of "+newFileName);
Log.e("tag", "Exception in copyFile() "+e.toString());
}
}
public static void setupFileStructure(Context context) {
String sdCardState = Environment.getExternalStorageState();
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
String sdCardFolderPath = Environment.getExternalStorageDirectory().getPath();
sdLigerFilePath = sdCardFolderPath + File.separator + LIGER_DIR + File.separator;
// based on http://stackoverflow.com/questions/4447477/android-how-to-copy-files-from-assets-folder-to-sdcard/8366081#8366081
copyFilesToSdCard(context, sdLigerFilePath);
} else {
Log.e(TAG, "SD CARD NOT FOUND");
}
}
public static String[] getJSONFileList() {
//ensure path has been set
if(null == sdLigerFilePath) {
return null;
}
ArrayList<String> jsonFileNamesList = new ArrayList<String>();
jsonFileList = new ArrayList<File>();
jsonPathList = new ArrayList<String>();
// HARD CODING LIST
File ligerFile_1 = new File(sdLigerFilePath + "/default/default_library/default_library.json");
File ligerFile_2 = new File(sdLigerFilePath + "/default/learning_guide_TEST.json");
File ligerFile_3 = new File(sdLigerFilePath + "/default/LIB_1/LIB_1_TEST.json");
File ligerFile_4 = new File(sdLigerFilePath + "/default/LIB_2/LIB_2_TEST.json");
File ligerFile_5 = new File(sdLigerFilePath + "/default/learning_guide_library.json");
File ligerFile_6 = new File(sdLigerFilePath + "/default/learning_guide_library_SAVE.json");
jsonFileNamesList.add(ligerFile_1.getName());
jsonFileNamesList.add(ligerFile_2.getName());
jsonFileNamesList.add(ligerFile_3.getName());
jsonFileNamesList.add(ligerFile_4.getName());
jsonFileNamesList.add(ligerFile_5.getName());
jsonFileNamesList.add(ligerFile_6.getName());
jsonFileList.add(ligerFile_1);
jsonFileList.add(ligerFile_2);
jsonFileList.add(ligerFile_3);
jsonFileList.add(ligerFile_4);
jsonFileList.add(ligerFile_5);
jsonFileList.add(ligerFile_6);
jsonPathList.add("default/default_library/default_library.json");
jsonPathList.add("default/learning_guide_TEST.json");
jsonPathList.add("default/LIB_1/LIB_1_TEST.json");
jsonPathList.add("default/LIB_2/LIB_2_TEST.json");
jsonPathList.add("default/learning_guide_library.json");
jsonPathList.add("default/learning_guide_library_SAVE.json");
/*
File ligerDir = new File(sdLigerFilePath);
if (ligerDir != null) {
for (File file : ligerDir.listFiles()) {
if (file.getName().endsWith(".json")) {
jsonFileNamesList.add(file.getName());
jsonFileList.add(file);
}
}
}
File defaultLigerDir = new File(sdLigerFilePath + "/default/");
if (defaultLigerDir != null) {
for (File file : defaultLigerDir.listFiles()) {
if (file.getName().endsWith(".json")) {
jsonFileNamesList.add(file.getName());
jsonFileList.add(file);
}
}
}
*/
return jsonFileNamesList.toArray(new String[jsonFileNamesList.size()]);
}
public static File setSelectedJSONFile(int index) {
selectedJSONFile = jsonFileList.get(index);
return selectedJSONFile;
}
public static String setSelectedJSONPath(int index) {
selectedJSONPath = jsonPathList.get(index);
return selectedJSONPath;
}
private static void addFileToSDCard(InputStream jsonInputStream, String filePath) {
OutputStream outputStream = null;
try {
// write the inputStream to a FileOutputStream
outputStream = new FileOutputStream(new File(sdLigerFilePath + filePath));
int read = 0;
byte[] bytes = new byte[1024];
while ((read = jsonInputStream.read(bytes)) != -1) {
outputStream.write(bytes, 0, read);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if (jsonInputStream != null) {
try {
jsonInputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
public static StoryPathLibrary loadStoryPathLibrary(String jsonFilePath, Context context) {
//Log.d(" *** TESTING *** ", "NEW METHOD loadStoryPathLibrary CALLED FOR " + jsonFilePath);
String storyPathLibraryJson = "";
String sdCardState = Environment.getExternalStorageState();
String localizedFilePath = jsonFilePath;
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
}
Log.d("LANGUAGE", "loadStoryPathLibrary() - LOCALIZED PATH: " + localizedFilePath);
}
File f = new File(localizedFilePath);
if ((!f.exists()) && (!localizedFilePath.equals(jsonFilePath))) {
f = new File(jsonFilePath);
} else {
Log.d("LANGUAGE", "loadStoryPathLibrary() - USING LOCALIZED FILE: " + localizedFilePath);
}
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
try {
InputStream jsonStream = new FileInputStream(f);
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
storyPathLibraryJson = new String(buffer);
} catch (IOException ioe) {
Log.e(TAG, "reading json file " + jsonFilePath + " from SD card failed: " + ioe.getMessage());
return null;
}
} else {
Log.e(TAG, "SD card not found");
return null;
}
return deserializeStoryPathLibrary(storyPathLibraryJson, f.getPath(), context);
}
// NEW
public static StoryPathLibrary loadStoryPathLibraryFromZip(String jsonFilePath, Context context) {
Log.d(" *** TESTING *** ", "NEW METHOD loadStoryPathLibraryFromZip CALLED FOR " + jsonFilePath);
String storyPathLibraryJson = "";
String localizedFilePath = jsonFilePath;
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
}
Log.d("LANGUAGE", "loadStoryPathLibraryFromZip() - LOCALIZED PATH: " + localizedFilePath);
}
// removed sd card check as expansion file should not be located on sd card
try {
InputStream jsonStream = ZipHelper.getFileInputStream(localizedFilePath, context);
// if there is no result with the localized path, retry with default path
if ((jsonStream == null) && (!localizedFilePath.equals(jsonFilePath))) {
jsonStream = ZipHelper.getFileInputStream(jsonFilePath, context);
} else {
Log.d("LANGUAGE", "loadStoryPathLibraryFromZip() - USING LOCALIZED FILE: " + localizedFilePath);
}
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
storyPathLibraryJson = new String(buffer);
} catch (IOException ioe) {
Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: " + ioe.getMessage());
return null;
}
return deserializeStoryPathLibrary(storyPathLibraryJson, jsonFilePath, context);
}
public static StoryPathLibrary deserializeStoryPathLibrary(String storyPathLibraryJson, String jsonFilePath, Context context) {
//Log.d(" *** TESTING *** ", "NEW METHOD deserializeStoryPathLibrary CALLED FOR " + jsonFilePath);
GsonBuilder gBuild = new GsonBuilder();
gBuild.registerTypeAdapter(StoryPathLibrary.class, new StoryPathLibraryDeserializer());
Gson gson = gBuild.excludeFieldsWithoutExposeAnnotation().create();
StoryPathLibrary storyPathLibrary = gson.fromJson(storyPathLibraryJson, StoryPathLibrary.class);
// a story path library model must have a file location to manage relative paths
// if it is loaded from a saved state, the location should already be set
if ((jsonFilePath == null) || (jsonFilePath.length() == 0)) {
if ((storyPathLibrary.getFileLocation() == null) || (storyPathLibrary.getFileLocation().length() == 0)) {
Log.e(TAG, "file location for story path library " + storyPathLibrary.getId() + " could not be determined");
return null;
}
} else {
storyPathLibrary.setFileLocation(jsonFilePath);
}
storyPathLibrary.setCardReferences();
storyPathLibrary.initializeObservers();
storyPathLibrary.setContext(context);
return storyPathLibrary;
}
public static String saveStoryPathLibrary(StoryPathLibrary storyPathLibrary) {
//Log.d(" *** TESTING *** ", "NEW METHOD saveStoryPathLibrary CALLED FOR " + storyPathLibrary.getId());
Date timeStamp = new Date();
String jsonFilePath = storyPathLibrary.buildPath(storyPathLibrary.getId() + "_" + timeStamp.getTime() + ".json");
String sdCardState = Environment.getExternalStorageState();
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
try {
File storyPathLibraryFile = new File(jsonFilePath);
FileOutputStream storyPathLibraryStream = new FileOutputStream(storyPathLibraryFile);
if (!storyPathLibraryFile.exists()) {
storyPathLibraryFile.createNewFile();
}
String storyPathLibraryJson = serializeStoryPathLibrary(storyPathLibrary);
byte storyPathLibraryData[] = storyPathLibraryJson.getBytes();
storyPathLibraryStream.write(storyPathLibraryData);
storyPathLibraryStream.flush();
storyPathLibraryStream.close();
} catch (IOException ioe) {
Log.e(TAG, "writing json file " + jsonFilePath + " to SD card failed: " + ioe.getMessage());
return null;
}
} else {
Log.e(TAG, "SD card not found");
return null;
}
// update file location
storyPathLibrary.setFileLocation(jsonFilePath);
return jsonFilePath;
}
public static String serializeStoryPathLibrary(StoryPathLibrary storyPathLibrary) {
//Log.d(" *** TESTING *** ", "NEW METHOD serializeStoryPathLibrary CALLED FOR " + storyPathLibrary.getId());
GsonBuilder gBuild = new GsonBuilder();
Gson gson = gBuild.excludeFieldsWithoutExposeAnnotation().create();
String storyPathLibraryJson = gson.toJson(storyPathLibrary);
return storyPathLibraryJson;
}
public static StoryPath loadStoryPath(String jsonFilePath, StoryPathLibrary storyPathLibrary, Context context) {
//Log.d(" *** TESTING *** ", "NEW METHOD loadStoryPath CALLED FOR " + jsonFilePath);
String storyPathJson = "";
String sdCardState = Environment.getExternalStorageState();
String localizedFilePath = jsonFilePath;
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
}
Log.d("LANGUAGE", "loadStoryPath() - LOCALIZED PATH: " + localizedFilePath);
}
File f = new File(localizedFilePath);
if ((!f.exists()) && (!localizedFilePath.equals(jsonFilePath))) {
f = new File(jsonFilePath);
} else {
Log.d("LANGUAGE", "loadStoryPath() - USING LOCALIZED FILE: " + localizedFilePath);
}
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
try {
InputStream jsonStream = new FileInputStream(f);
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
storyPathJson = new String(buffer);
} catch (IOException ioe) {
Log.e(TAG, "reading json file " + jsonFilePath + " from SD card failed: " + ioe.getMessage());
return null;
}
} else {
Log.e(TAG, "SD card not found");
return null;
}
return deserializeStoryPath(storyPathJson, f.getPath(), storyPathLibrary, context);
}
// NEW
public static StoryPath loadStoryPathFromZip(String jsonFilePath, StoryPathLibrary storyPathLibrary, Context context) {
Log.d(" *** TESTING *** ", "NEW METHOD loadStoryPathFromZip CALLED FOR " + jsonFilePath);
String storyPathJson = "";
String localizedFilePath = jsonFilePath;
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
}
Log.d("LANGUAGE", "loadStoryPathFromZip() - LOCALIZED PATH: " + localizedFilePath);
}
// removed sd card check as expansion file should not be located on sd card
try {
InputStream jsonStream = ZipHelper.getFileInputStream(localizedFilePath, context);
// if there is no result with the localized path, retry with default path
if ((jsonStream == null) && (!localizedFilePath.equals(jsonFilePath))) {
jsonStream = ZipHelper.getFileInputStream(jsonFilePath, context);
} else {
Log.d("LANGUAGE", "loadStoryPathFromZip() - USING LOCALIZED FILE: " + localizedFilePath);
}
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
storyPathJson = new String(buffer);
} catch (IOException ioe) {
Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: " + ioe.getMessage());
return null;
}
return deserializeStoryPath(storyPathJson, jsonFilePath, storyPathLibrary, context);
}
public static StoryPath deserializeStoryPath(String storyPathJson, String jsonFilePath, StoryPathLibrary storyPathLibrary, Context context) {
//Log.d(" *** TESTING *** ", "NEW METHOD deserializeStoryPath CALLED FOR " + jsonFilePath);
GsonBuilder gBuild = new GsonBuilder();
gBuild.registerTypeAdapter(StoryPath.class, new StoryPathDeserializer());
Gson gson = gBuild.excludeFieldsWithoutExposeAnnotation().create();
StoryPath storyPath = gson.fromJson(storyPathJson, StoryPath.class);
// a story path model must have a file location to manage relative paths
// if it is loaded from a saved state, the location should already be set
if ((jsonFilePath == null) || (jsonFilePath.length() == 0)) {
if ((storyPath.getFileLocation() == null) || (storyPath.getFileLocation().length() == 0)) {
Log.e(TAG, "file location for story path " + storyPath.getId() + " could not be determined");
}
} else {
storyPath.setFileLocation(jsonFilePath);
}
storyPath.setCardReferences();
storyPath.initializeObservers();
storyPath.setStoryPathLibrary(storyPathLibrary);
// THIS MAY HAVE UNINTENDED CONSEQUENCES...
if (storyPath.getStoryPathLibraryFile() == null) {
storyPath.setStoryPathLibraryFile(storyPathLibrary.getFileLocation());
}
storyPath.setContext(context);
return storyPath;
}
public static String saveStoryPath(StoryPath storyPath) {
//Log.d(" *** TESTING *** ", "NEW METHOD saveStoryPath CALLED FOR " + storyPath.getId());
Date timeStamp = new Date();
String jsonFilePath = storyPath.buildPath(storyPath.getId() + "_" + timeStamp.getTime() + ".json");
String sdCardState = Environment.getExternalStorageState();
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
try {
File storyPathFile = new File(jsonFilePath);
FileOutputStream storyPathStream = new FileOutputStream(storyPathFile);
if (!storyPathFile.exists()) {
storyPathFile.createNewFile();
}
String storyPathJson = serializeStoryPath(storyPath);
byte storyPathData[] = storyPathJson.getBytes();
storyPathStream.write(storyPathData);
storyPathStream.flush();
storyPathStream.close();
} catch (IOException ioe) {
Log.e(TAG, "writing json file " + jsonFilePath + " to SD card failed: " + ioe.getMessage());
return null;
}
} else {
Log.e(TAG, "SD card not found");
return null;
}
// update file location
storyPath.setFileLocation(jsonFilePath);
return jsonFilePath;
}
public static String serializeStoryPath(StoryPath storyPath) {
//Log.d(" *** TESTING *** ", "NEW METHOD serializeStoryPath CALLED FOR " + storyPath.getId());
GsonBuilder gBuild = new GsonBuilder();
Gson gson = gBuild.excludeFieldsWithoutExposeAnnotation().create();
// set aside references to prevent circular dependencies when serializing
Context tempContext = storyPath.getContext();
StoryPathLibrary tempStoryPathLibrary = storyPath.getStoryPathLibrary();
storyPath.setContext(null);
storyPath.setStoryPathLibrary(null);
storyPath.clearObservers();
storyPath.clearCardReferences();
String storyPathJson = gson.toJson(storyPath);
// restore references
storyPath.setCardReferences();
storyPath.initializeObservers();
storyPath.setStoryPathLibrary(tempStoryPathLibrary);
storyPath.setContext(tempContext);
return storyPathJson;
}
}
| lib/src/main/java/scal/io/liger/JsonHelper.java | package scal.io.liger;
import android.content.Context;
import android.content.res.AssetManager;
import android.os.Environment;
import android.util.Log;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Date;
import scal.io.liger.model.StoryPath;
import scal.io.liger.model.StoryPathLibrary;
/**
* Created by mnbogner on 7/14/14.
*/
public class JsonHelper {
// log tag for this class
private static final String TAG = "JsonHelper";
// name of the app's root directory on the SD card
private static final String LIGER_DIR = "Liger";
// file chosen via setSelectedJSONFile(), consumed by loadJSON()
private static File selectedJSONFile = null;
// ZIP-relative path chosen via setSelectedJSONPath(), consumed by loadJSONFromZip()
private static String selectedJSONPath = null;
// parallel lists built by getJSONFileList(); indexed by the setSelected* methods
private static ArrayList<File> jsonFileList = null;
private static ArrayList<String> jsonPathList = null;
// absolute SD-card path of the Liger directory; set by setupFileStructure()
private static String sdLigerFilePath = null;
// language code inserted before file extensions for localized lookups
// ("x.json" -> "x-de.json"); hard-coded to "de" here — marked TEMP by the author
private static String language = "de"; // TEMP
/**
 * Loads the contents of a JSON file from the SD card, preferring a localized
 * variant ("<name>-<language>.<ext>") when one exists next to the default file.
 *
 * @param jsonPath absolute path of the default JSON file
 * @return the file contents, or an empty string if the file could not be read
 */
public static String loadJSONFromPath(String jsonPath) {
    String jsonString = "";
    String sdCardState = Environment.getExternalStorageState();
    String localizedFilePath = jsonPath;
    // check language setting and insert country code if necessary
    if (language != null) {
        // just in case, check whether country code has already been inserted
        if (jsonPath.lastIndexOf("-" + language + jsonPath.substring(jsonPath.lastIndexOf("."))) < 0) {
            localizedFilePath = jsonPath.substring(0, jsonPath.lastIndexOf(".")) + "-" + language + jsonPath.substring(jsonPath.lastIndexOf("."));
        }
        Log.d("LANGUAGE", "loadJSONFromPath() - LOCALIZED PATH: " + localizedFilePath);
    }
    if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
        // select the file before opening any stream: the previous version always
        // opened the default file first, leaking that stream when a localized file
        // was present and failing entirely when only the localized file existed
        File jsonFile = new File(jsonPath);
        File localizedFile = new File(localizedFilePath);
        // if there is a file at the localized path, use that instead
        if ((localizedFile.exists()) && (!jsonPath.equals(localizedFilePath))) {
            Log.d("LANGUAGE", "loadJSONFromPath() - USING LOCALIZED FILE: " + localizedFilePath);
            jsonFile = localizedFile;
        }
        InputStream jsonStream = null;
        try {
            jsonStream = new FileInputStream(jsonFile);
            int size = jsonStream.available();
            byte[] buffer = new byte[size];
            jsonStream.read(buffer);
            jsonString = new String(buffer);
        } catch (IOException e) {
            Log.e(TAG, "READING JSON FILE FROM SD CARD FAILED: " + e.getMessage());
        } finally {
            // always release the stream, also on read errors (was leaked before)
            if (jsonStream != null) {
                try {
                    jsonStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    } else {
        // use Log.e like the rest of this class (was System.err before)
        Log.e(TAG, "SD CARD NOT FOUND");
    }
    return jsonString;
}
// Convenience overload: loads the JSON file most recently chosen via
// setSelectedJSONFile(). Returns null if no file has been selected yet.
public static String loadJSON() {
return loadJSON(selectedJSONFile);
}
/**
 * Loads the contents of the given JSON file, preferring a localized variant
 * ("<name>-<language>.<ext>") when one exists next to it.
 *
 * @param file the default JSON file, may be null
 * @return the contents, null if file is null, or an empty string if reading failed
 */
public static String loadJSON(File file) {
    if(null == file) {
        return null;
    }
    String jsonString = "";
    String sdCardState = Environment.getExternalStorageState();
    String localizedFilePath = file.getPath();
    // check language setting and insert country code if necessary
    if (language != null) {
        // just in case, check whether country code has already been inserted
        if (file.getPath().lastIndexOf("-" + language + file.getPath().substring(file.getPath().lastIndexOf("."))) < 0) {
            localizedFilePath = file.getPath().substring(0, file.getPath().lastIndexOf(".")) + "-" + language + file.getPath().substring(file.getPath().lastIndexOf("."));
        }
        Log.d("LANGUAGE", "loadJSON() - LOCALIZED PATH: " + localizedFilePath);
    }
    if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
        // select the file before opening a stream so that only one stream is ever
        // opened (the previous version leaked the first stream when a localized
        // file was present)
        File fileToRead = file;
        File localizedFile = new File(localizedFilePath);
        // if there is a file at the localized path, use that instead
        if ((localizedFile.exists()) && (!file.getPath().equals(localizedFilePath))) {
            Log.d("LANGUAGE", "loadJSON() - USING LOCALIZED FILE: " + localizedFilePath);
            fileToRead = localizedFile;
        }
        InputStream jsonStream = null;
        try {
            jsonStream = new FileInputStream(fileToRead);
            int size = jsonStream.available();
            byte[] buffer = new byte[size];
            jsonStream.read(buffer);
            jsonString = new String(buffer);
        } catch (IOException e) {
            // message typo fixed: "FRON" -> "FROM"
            Log.e(TAG, "READING JSON FILE FROM SD CARD FAILED: " + e.getMessage());
        } finally {
            // always release the stream, also on read errors (was leaked before)
            if (jsonStream != null) {
                try {
                    jsonStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    } else {
        Log.e(TAG, "SD CARD NOT FOUND");
    }
    return jsonString;
}
// NEW
// Convenience overload: loads the zip entry most recently chosen via
// setSelectedJSONPath(). Returns null if no path has been selected yet.
public static String loadJSONFromZip(Context context) {
return loadJSONFromZip(selectedJSONPath, context);
}
/**
 * Loads the contents of a JSON file from the expansion ZIP file, preferring a
 * localized variant ("<name>-<language>.<ext>") when the ZIP contains one.
 *
 * @param jsonFilePath zip-relative path of the default JSON file, may be null
 * @return the contents, null if jsonFilePath is null, or an empty string on failure
 */
public static String loadJSONFromZip(String jsonFilePath, Context context) {
    Log.d(" *** TESTING *** ", "NEW METHOD loadJSONFromZip CALLED FOR " + jsonFilePath);
    if(null == jsonFilePath) {
        return null;
    }
    String jsonString = "";
    String localizedFilePath = jsonFilePath;
    // check language setting and insert country code if necessary
    if (language != null) {
        // just in case, check whether country code has already been inserted
        if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
            localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
        }
        Log.d("LANGUAGE", "loadJSONFromZip() - LOCALIZED PATH: " + localizedFilePath);
    }
    // removed sd card check as expansion file should not be located on sd card
    try {
        InputStream jsonStream = ZipHelper.getFileInputStream(localizedFilePath, context);
        if (jsonStream != null) {
            // only log this when the localized entry was actually found
            Log.d("LANGUAGE", "loadJSONFromZip() - USING LOCALIZED FILE: " + localizedFilePath);
        } else if (!jsonFilePath.equals(localizedFilePath)) {
            // if there is no result with the localized path, retry with default path
            jsonStream = ZipHelper.getFileInputStream(jsonFilePath, context);
        }
        if (jsonStream == null) {
            // guard against a missing zip entry (previously caused a NullPointerException)
            Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: no zip entry found");
            return jsonString;
        }
        try {
            int size = jsonStream.available();
            byte[] buffer = new byte[size];
            jsonStream.read(buffer);
            jsonString = new String(buffer);
        } finally {
            jsonStream.close();
        }
    } catch (IOException ioe) {
        Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: " + ioe.getMessage());
    }
    return jsonString;
}
// Returns the absolute path of the "Liger" folder on the SD card, or null
// until setupFileStructure() has been called with a mounted SD card.
public static String getSdLigerFilePath() {
return sdLigerFilePath;
}
// Copies the app's entire assets folder into basePath on the SD card.
private static void copyFilesToSdCard(Context context, String basePath) {
copyFileOrDir(context, basePath, ""); // copy all files in assets folder in my project
}
/**
 * Recursively copies an asset file or directory into basePath. The "images",
 * "sounds" and "webkit" asset trees are skipped.
 */
private static void copyFileOrDir(Context context, String basePath, String path) {
    final AssetManager assetManager = context.getAssets();
    try {
        Log.i("tag", "copyFileOrDir() "+path);
        final String[] children = assetManager.list(path);
        if (children.length == 0) {
            // an asset path without children is a plain file
            copyFile(context, basePath, path);
        } else {
            final String fullPath = basePath + path;
            Log.i("tag", "path="+fullPath);
            final File dir = new File(fullPath);
            if (!dir.exists() && !path.startsWith("images") && !path.startsWith("sounds") && !path.startsWith("webkit")) {
                if (!dir.mkdirs()) {
                    Log.i("tag", "could not create dir "+fullPath);
                }
            }
            for (final String child : children) {
                final String prefix = path.equals("") ? "" : path + "/";
                if (!path.startsWith("images") && !path.startsWith("sounds") && !path.startsWith("webkit")) {
                    copyFileOrDir(context, basePath, prefix + child);
                }
            }
        }
    } catch (IOException ex) {
        Log.e("tag", "I/O Exception", ex);
    }
}
/**
 * Copies a single asset into basePath. A ".jpg" suffix (added only to avoid
 * APK compression of the asset) is stripped from the destination file name.
 */
private static void copyFile(Context context, String basePath, String filename) {
    AssetManager assetManager = context.getAssets();
    InputStream in = null;
    OutputStream out = null;
    String newFileName = null;
    try {
        Log.i("tag", "copyFile() "+filename);
        in = assetManager.open(filename);
        if (filename.endsWith(".jpg")) // extension was added to avoid compression on APK file
            newFileName = basePath + filename.substring(0, filename.length()-4);
        else
            newFileName = basePath + filename;
        out = new FileOutputStream(newFileName);
        byte[] buffer = new byte[1024];
        int read;
        while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
        out.flush();
    } catch (Exception e) {
        Log.e("tag", "Exception in copyFile() of "+newFileName);
        Log.e("tag", "Exception in copyFile() "+e.toString());
    } finally {
        // close both streams even when copying fails (they leaked before)
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
// Creates the liger file layout on the SD card and copies the bundled assets
// into it. Sets sdLigerFilePath as a side effect; logs an error and does
// nothing if no SD card is mounted.
public static void setupFileStructure(Context context) {
String sdCardState = Environment.getExternalStorageState();
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
String sdCardFolderPath = Environment.getExternalStorageDirectory().getPath();
// sdLigerFilePath always ends with a path separator
sdLigerFilePath = sdCardFolderPath + File.separator + LIGER_DIR + File.separator;
// based on http://stackoverflow.com/questions/4447477/android-how-to-copy-files-from-assets-folder-to-sdcard/8366081#8366081
copyFilesToSdCard(context, sdLigerFilePath);
} else {
Log.e(TAG, "SD CARD NOT FOUND");
}
}
/**
 * Builds the hard coded list of available JSON story files and caches the
 * corresponding File objects and zip-relative paths for later selection via
 * setSelectedJSONFile()/setSelectedJSONPath().
 *
 * @return the names of the available JSON files, or null if the liger
 *         directory path has not been initialized yet
 */
public static String[] getJSONFileList() {
    //ensure path has been set
    if(null == sdLigerFilePath) {
        return null;
    }
    ArrayList<String> jsonFileNamesList = new ArrayList<String>();
    jsonFileList = new ArrayList<File>();
    jsonPathList = new ArrayList<String>();
    // HARD CODING LIST
    final String[] relativePaths = {
            "default/default_library/default_library.json",
            "default/learning_guide_TEST.json",
            "default/LIB_1/LIB_1_TEST.json",
            "default/LIB_2/LIB_2_TEST.json",
            "default/learning_guide_library.json",
            "default/learning_guide_library_SAVE.json" };
    for (final String relativePath : relativePaths) {
        final File ligerFile = new File(sdLigerFilePath + "/" + relativePath);
        jsonFileNamesList.add(ligerFile.getName());
        jsonFileList.add(ligerFile);
        jsonPathList.add(relativePath);
    }
    // (scanning sdLigerFilePath and its "default" sub folder for *.json files
    // was disabled in favor of the hard coded list above)
    return jsonFileNamesList.toArray(new String[jsonFileNamesList.size()]);
}
// Selects (and returns) the JSON file at the given index of the list built by
// getJSONFileList().
// NOTE(review): assumes getJSONFileList() was called first, otherwise jsonFileList is null.
public static File setSelectedJSONFile(int index) {
selectedJSONFile = jsonFileList.get(index);
return selectedJSONFile;
}
// Selects (and returns) the zip-relative JSON path at the given index of the
// list built by getJSONFileList().
// NOTE(review): assumes getJSONFileList() was called first, otherwise jsonPathList is null.
public static String setSelectedJSONPath(int index) {
selectedJSONPath = jsonPathList.get(index);
return selectedJSONPath;
}
// Writes the given stream to sdLigerFilePath + filePath, always closing both
// streams. Errors are only printed; the caller gets no success indication.
private static void addFileToSDCard(InputStream jsonInputStream, String filePath) {
OutputStream outputStream = null;
try {
// write the inputStream to a FileOutputStream
outputStream = new FileOutputStream(new File(sdLigerFilePath + filePath));
int read = 0;
byte[] bytes = new byte[1024];
while ((read = jsonInputStream.read(bytes)) != -1) {
outputStream.write(bytes, 0, read);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
// close input and output independently so one failure does not leak the other
if (jsonInputStream != null) {
try {
jsonInputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
/**
* Loads a StoryPathLibrary JSON file from the SD card, preferring a localized
* file ("<name>-<language>.<ext>") when it exists, and deserializes it.
*
* @return the deserialized library, or null if the file could not be read
*/
public static StoryPathLibrary loadStoryPathLibrary(String jsonFilePath, Context context) {
//Log.d(" *** TESTING *** ", "NEW METHOD loadStoryPathLibrary CALLED FOR " + jsonFilePath);
String storyPathLibraryJson = "";
String sdCardState = Environment.getExternalStorageState();
String localizedFilePath = jsonFilePath;
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
}
Log.d("LANGUAGE", "loadStoryPathLibrary() - LOCALIZED PATH: " + localizedFilePath);
}
// fall back to the default file if the localized one does not exist
// NOTE(review): the "USING LOCALIZED FILE" log also fires when the localized
// file is missing but its path equals the default path
File f = new File(localizedFilePath);
if ((!f.exists()) && (!localizedFilePath.equals(jsonFilePath))) {
f = new File(jsonFilePath);
} else {
Log.d("LANGUAGE", "loadStoryPathLibrary() - USING LOCALIZED FILE: " + localizedFilePath);
}
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
try {
// read the whole file in one go (relies on available() returning the file size)
// NOTE(review): the stream is not closed when available()/read() throws
InputStream jsonStream = new FileInputStream(f);
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
storyPathLibraryJson = new String(buffer);
} catch (IOException ioe) {
Log.e(TAG, "reading json file " + jsonFilePath + " from SD card failed: " + ioe.getMessage());
return null;
}
} else {
Log.e(TAG, "SD card not found");
return null;
}
return deserializeStoryPathLibrary(storyPathLibraryJson, f.getPath(), context);
}
// NEW
/**
 * Loads a StoryPathLibrary JSON file from the expansion ZIP file, preferring a
 * localized variant when the ZIP contains one, and deserializes it.
 *
 * @return the deserialized library, or null if no entry could be read
 */
public static StoryPathLibrary loadStoryPathLibraryFromZip(String jsonFilePath, Context context) {
    Log.d(" *** TESTING *** ", "NEW METHOD loadStoryPathLibraryFromZip CALLED FOR " + jsonFilePath);
    String storyPathLibraryJson = "";
    String localizedFilePath = jsonFilePath;
    // check language setting and insert country code if necessary
    if (language != null) {
        // just in case, check whether country code has already been inserted
        if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
            localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
        }
        Log.d("LANGUAGE", "loadStoryPathLibraryFromZip() - LOCALIZED PATH: " + localizedFilePath);
    }
    // removed sd card check as expansion file should not be located on sd card
    try {
        InputStream jsonStream = ZipHelper.getFileInputStream(localizedFilePath, context);
        if (jsonStream != null) {
            // only log this when the localized entry was actually found
            Log.d("LANGUAGE", "loadStoryPathLibraryFromZip() - USING LOCALIZED FILE: " + localizedFilePath);
        } else if (!localizedFilePath.equals(jsonFilePath)) {
            // if there is no result with the localized path, retry with default path
            jsonStream = ZipHelper.getFileInputStream(jsonFilePath, context);
        }
        if (jsonStream == null) {
            // guard against a missing zip entry (previously caused a NullPointerException)
            Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: no zip entry found");
            return null;
        }
        try {
            int size = jsonStream.available();
            byte[] buffer = new byte[size];
            jsonStream.read(buffer);
            storyPathLibraryJson = new String(buffer);
        } finally {
            jsonStream.close();
        }
    } catch (IOException ioe) {
        Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: " + ioe.getMessage());
        return null;
    }
    return deserializeStoryPathLibrary(storyPathLibraryJson, jsonFilePath, context);
}
/**
* Deserializes a StoryPathLibrary from JSON and wires it up (file location,
* card references, observers, context).
*
* @param jsonFilePath where the json was loaded from; may be null/empty when
*            restoring a saved state whose location is embedded in the json
* @return the initialized library, or null if no file location could be determined
*/
public static StoryPathLibrary deserializeStoryPathLibrary(String storyPathLibraryJson, String jsonFilePath, Context context) {
//Log.d(" *** TESTING *** ", "NEW METHOD deserializeStoryPathLibrary CALLED FOR " + jsonFilePath);
GsonBuilder gBuild = new GsonBuilder();
gBuild.registerTypeAdapter(StoryPathLibrary.class, new StoryPathLibraryDeserializer());
Gson gson = gBuild.excludeFieldsWithoutExposeAnnotation().create();
StoryPathLibrary storyPathLibrary = gson.fromJson(storyPathLibraryJson, StoryPathLibrary.class);
// a story path library model must have a file location to manage relative paths
// if it is loaded from a saved state, the location should already be set
if ((jsonFilePath == null) || (jsonFilePath.length() == 0)) {
if ((storyPathLibrary.getFileLocation() == null) || (storyPathLibrary.getFileLocation().length() == 0)) {
Log.e(TAG, "file location for story path library " + storyPathLibrary.getId() + " could not be determined");
return null;
}
} else {
storyPathLibrary.setFileLocation(jsonFilePath);
}
storyPathLibrary.setCardReferences();
storyPathLibrary.initializeObservers();
storyPathLibrary.setContext(context);
return storyPathLibrary;
}
/**
 * Serializes the given StoryPathLibrary and writes it to a timestamped json
 * file next to the library's current location.
 *
 * @return the path of the written file (also set as the library's new file
 *         location), or null on failure
 */
public static String saveStoryPathLibrary(StoryPathLibrary storyPathLibrary) {
    //Log.d(" *** TESTING *** ", "NEW METHOD saveStoryPathLibrary CALLED FOR " + storyPathLibrary.getId())
    Date timeStamp = new Date();
    String jsonFilePath = storyPathLibrary.buildPath(storyPathLibrary.getId() + "_" + timeStamp.getTime() + ".json");
    String sdCardState = Environment.getExternalStorageState();
    if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
        FileOutputStream storyPathLibraryStream = null;
        try {
            File storyPathLibraryFile = new File(jsonFilePath);
            // opening the stream creates the file; the old exists()/createNewFile()
            // check ran after the stream was opened and was therefore dead code
            storyPathLibraryStream = new FileOutputStream(storyPathLibraryFile);
            String storyPathLibraryJson = serializeStoryPathLibrary(storyPathLibrary);
            byte storyPathLibraryData[] = storyPathLibraryJson.getBytes();
            storyPathLibraryStream.write(storyPathLibraryData);
            storyPathLibraryStream.flush();
        } catch (IOException ioe) {
            Log.e(TAG, "writing json file " + jsonFilePath + " to SD card failed: " + ioe.getMessage());
            return null;
        } finally {
            // always release the stream, also on write errors (was leaked before)
            if (storyPathLibraryStream != null) {
                try {
                    storyPathLibraryStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    } else {
        Log.e(TAG, "SD card not found");
        return null;
    }
    // update file location
    storyPathLibrary.setFileLocation(jsonFilePath);
    return jsonFilePath;
}
/**
 * Serializes the given StoryPathLibrary to JSON; only fields annotated with
 * @Expose are written.
 */
public static String serializeStoryPathLibrary(StoryPathLibrary storyPathLibrary) {
    final Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
    return gson.toJson(storyPathLibrary);
}
/**
* Loads a StoryPath JSON file from the SD card, preferring a localized file
* ("<name>-<language>.<ext>") when it exists, and deserializes it into the
* given library.
*
* @return the deserialized story path, or null if the file could not be read
*/
public static StoryPath loadStoryPath(String jsonFilePath, StoryPathLibrary storyPathLibrary, Context context) {
//Log.d(" *** TESTING *** ", "NEW METHOD loadStoryPath CALLED FOR " + jsonFilePath);
String storyPathJson = "";
String sdCardState = Environment.getExternalStorageState();
String localizedFilePath = jsonFilePath;
// check language setting and insert country code if necessary
if (language != null) {
// just in case, check whether country code has already been inserted
if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
}
Log.d("LANGUAGE", "loadStoryPath() - LOCALIZED PATH: " + localizedFilePath);
}
// fall back to the default file if the localized one does not exist
// NOTE(review): the "USING LOCALIZED FILE" log also fires when the localized
// file is missing but its path equals the default path
File f = new File(localizedFilePath);
if ((!f.exists()) && (!localizedFilePath.equals(jsonFilePath))) {
f = new File(jsonFilePath);
} else {
Log.d("LANGUAGE", "loadStoryPath() - USING LOCALIZED FILE: " + localizedFilePath);
}
if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
try {
// read the whole file in one go (relies on available() returning the file size)
// NOTE(review): the stream is not closed when available()/read() throws
InputStream jsonStream = new FileInputStream(f);
int size = jsonStream.available();
byte[] buffer = new byte[size];
jsonStream.read(buffer);
jsonStream.close();
storyPathJson = new String(buffer);
} catch (IOException ioe) {
Log.e(TAG, "reading json file " + jsonFilePath + " from SD card failed: " + ioe.getMessage());
return null;
}
} else {
Log.e(TAG, "SD card not found");
return null;
}
return deserializeStoryPath(storyPathJson, f.getPath(), storyPathLibrary, context);
}
// NEW
/**
 * Loads a StoryPath JSON file from the expansion ZIP file, preferring a
 * localized variant when the ZIP contains one, and deserializes it into the
 * given library.
 *
 * @return the deserialized story path, or null if no entry could be read
 */
public static StoryPath loadStoryPathFromZip(String jsonFilePath, StoryPathLibrary storyPathLibrary, Context context) {
    Log.d(" *** TESTING *** ", "NEW METHOD loadStoryPathFromZip CALLED FOR " + jsonFilePath);
    String storyPathJson = "";
    String localizedFilePath = jsonFilePath;
    // check language setting and insert country code if necessary
    if (language != null) {
        // just in case, check whether country code has already been inserted
        if (jsonFilePath.lastIndexOf("-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."))) < 0) {
            localizedFilePath = jsonFilePath.substring(0, jsonFilePath.lastIndexOf(".")) + "-" + language + jsonFilePath.substring(jsonFilePath.lastIndexOf("."));
        }
        Log.d("LANGUAGE", "loadStoryPathFromZip() - LOCALIZED PATH: " + localizedFilePath);
    }
    // removed sd card check as expansion file should not be located on sd card
    try {
        InputStream jsonStream = ZipHelper.getFileInputStream(localizedFilePath, context);
        if (jsonStream != null) {
            // only log this when the localized entry was actually found
            Log.d("LANGUAGE", "loadStoryPathFromZip() - USING LOCALIZED FILE: " + localizedFilePath);
        } else if (!localizedFilePath.equals(jsonFilePath)) {
            // if there is no result with the localized path, retry with default path
            jsonStream = ZipHelper.getFileInputStream(jsonFilePath, context);
        }
        if (jsonStream == null) {
            // guard against a missing zip entry (previously caused a NullPointerException)
            Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: no zip entry found");
            return null;
        }
        try {
            int size = jsonStream.available();
            byte[] buffer = new byte[size];
            jsonStream.read(buffer);
            storyPathJson = new String(buffer);
        } finally {
            jsonStream.close();
        }
    } catch (IOException ioe) {
        Log.e(TAG, "reading json file " + jsonFilePath + " from ZIP file failed: " + ioe.getMessage());
        return null;
    }
    return deserializeStoryPath(storyPathJson, jsonFilePath, storyPathLibrary, context);
}
/**
* Deserializes a StoryPath from JSON and wires it up (file location, card
* references, observers, owning library, context).
* Unlike deserializeStoryPathLibrary(), a missing file location is only
* logged here and does not abort the deserialization.
*/
public static StoryPath deserializeStoryPath(String storyPathJson, String jsonFilePath, StoryPathLibrary storyPathLibrary, Context context) {
//Log.d(" *** TESTING *** ", "NEW METHOD deserializeStoryPath CALLED FOR " + jsonFilePath);
GsonBuilder gBuild = new GsonBuilder();
gBuild.registerTypeAdapter(StoryPath.class, new StoryPathDeserializer());
Gson gson = gBuild.excludeFieldsWithoutExposeAnnotation().create();
StoryPath storyPath = gson.fromJson(storyPathJson, StoryPath.class);
// a story path model must have a file location to manage relative paths
// if it is loaded from a saved state, the location should already be set
if ((jsonFilePath == null) || (jsonFilePath.length() == 0)) {
if ((storyPath.getFileLocation() == null) || (storyPath.getFileLocation().length() == 0)) {
Log.e(TAG, "file location for story path " + storyPath.getId() + " could not be determined");
}
} else {
storyPath.setFileLocation(jsonFilePath);
}
storyPath.setCardReferences();
storyPath.initializeObservers();
storyPath.setStoryPathLibrary(storyPathLibrary);
// THIS MAY HAVE UNINTENDED CONSEQUENCES...
if (storyPath.getStoryPathLibraryFile() == null) {
storyPath.setStoryPathLibraryFile(storyPathLibrary.getFileLocation());
}
storyPath.setContext(context);
return storyPath;
}
/**
 * Serializes the given StoryPath and writes it to a timestamped json file
 * next to the story path's current location.
 *
 * @return the path of the written file (also set as the story path's new file
 *         location), or null on failure
 */
public static String saveStoryPath(StoryPath storyPath) {
    //Log.d(" *** TESTING *** ", "NEW METHOD saveStoryPath CALLED FOR " + storyPath.getId())
    Date timeStamp = new Date();
    String jsonFilePath = storyPath.buildPath(storyPath.getId() + "_" + timeStamp.getTime() + ".json");
    String sdCardState = Environment.getExternalStorageState();
    if (sdCardState.equals(Environment.MEDIA_MOUNTED)) {
        FileOutputStream storyPathStream = null;
        try {
            File storyPathFile = new File(jsonFilePath);
            // opening the stream creates the file; the old exists()/createNewFile()
            // check ran after the stream was opened and was therefore dead code
            storyPathStream = new FileOutputStream(storyPathFile);
            String storyPathJson = serializeStoryPath(storyPath);
            byte storyPathData[] = storyPathJson.getBytes();
            storyPathStream.write(storyPathData);
            storyPathStream.flush();
        } catch (IOException ioe) {
            Log.e(TAG, "writing json file " + jsonFilePath + " to SD card failed: " + ioe.getMessage());
            return null;
        } finally {
            // always release the stream, also on write errors (was leaked before)
            if (storyPathStream != null) {
                try {
                    storyPathStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    } else {
        Log.e(TAG, "SD card not found");
        return null;
    }
    // update file location
    storyPath.setFileLocation(jsonFilePath);
    return jsonFilePath;
}
/**
* Serializes the given StoryPath to JSON (only @Expose-annotated fields are written).
* Context, library, observer and card references are temporarily detached to avoid
* circular references during serialization and are restored before returning.
* NOTE(review): not thread safe - the story path is mutated while serializing.
*/
public static String serializeStoryPath(StoryPath storyPath) {
//Log.d(" *** TESTING *** ", "NEW METHOD serializeStoryPath CALLED FOR " + storyPath.getId());
GsonBuilder gBuild = new GsonBuilder();
Gson gson = gBuild.excludeFieldsWithoutExposeAnnotation().create();
// set aside references to prevent circular dependencies when serializing
Context tempContext = storyPath.getContext();
StoryPathLibrary tempStoryPathLibrary = storyPath.getStoryPathLibrary();
storyPath.setContext(null);
storyPath.setStoryPathLibrary(null);
storyPath.clearObservers();
storyPath.clearCardReferences();
String storyPathJson = gson.toJson(storyPath);
// restore references
storyPath.setCardReferences();
storyPath.initializeObservers();
storyPath.setStoryPathLibrary(tempStoryPathLibrary);
storyPath.setContext(tempContext);
return storyPathJson;
}
}
| set hard coded language to null before pushing
| lib/src/main/java/scal/io/liger/JsonHelper.java | set hard coded language to null before pushing |
|
Java | apache-2.0 | 393dae0db89c43325ccf54800c56d30c3c1b9b65 | 0 | S-Bartfast/cgeo,madankb/cgeo,kumy/cgeo,matej116/cgeo,madankb/cgeo,superspindel/cgeo,auricgoldfinger/cgeo,mucek4/cgeo,brok85/cgeo,cgeo/cgeo,cgeo/cgeo,schwabe/cgeo,pstorch/cgeo,cgeo/cgeo,Huertix/cgeo,tobiasge/cgeo,rsudev/c-geo-opensource,xiaoyanit/cgeo,ThibaultR/cgeo,cgeo/cgeo,SammysHP/cgeo,samueltardieu/cgeo,yummy222/cgeo,schwabe/cgeo,marco-dev/c-geo-opensource,rsudev/c-geo-opensource,Bananeweizen/cgeo,KublaikhanGeek/cgeo,mucek4/cgeo,xiaoyanit/cgeo,superspindel/cgeo,yummy222/cgeo,Bananeweizen/cgeo,samueltardieu/cgeo,brok85/cgeo,pstorch/cgeo,rsudev/c-geo-opensource,S-Bartfast/cgeo,lewurm/cgeo,lewurm/cgeo,tobiasge/cgeo,tobiasge/cgeo,schwabe/cgeo,ThibaultR/cgeo,superspindel/cgeo,S-Bartfast/cgeo,kumy/cgeo,Huertix/cgeo,matej116/cgeo,kumy/cgeo,brok85/cgeo,vishwakulkarni/cgeo,SammysHP/cgeo,matej116/cgeo,SammysHP/cgeo,Huertix/cgeo,marco-dev/c-geo-opensource,vishwakulkarni/cgeo,auricgoldfinger/cgeo,KublaikhanGeek/cgeo,KublaikhanGeek/cgeo,pstorch/cgeo,lewurm/cgeo,ThibaultR/cgeo,yummy222/cgeo,schwabe/cgeo,auricgoldfinger/cgeo,samueltardieu/cgeo,mucek4/cgeo,xiaoyanit/cgeo,vishwakulkarni/cgeo,madankb/cgeo,marco-dev/c-geo-opensource,Bananeweizen/cgeo | package cgeo.geocaching.network;
import cgeo.geocaching.CgeoApplication;
import cgeo.geocaching.R;
import cgeo.geocaching.compatibility.Compatibility;
import cgeo.geocaching.connector.ConnectorFactory;
import cgeo.geocaching.files.LocalStorage;
import cgeo.geocaching.list.StoredList;
import cgeo.geocaching.utils.CancellableHandler;
import cgeo.geocaching.utils.FileUtils;
import cgeo.geocaching.utils.ImageUtils;
import cgeo.geocaching.utils.ImageUtils.ContainerDrawable;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.RxUtils;
import ch.boye.httpclientandroidlib.HttpResponse;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.Subscriber;
import rx.functions.Action0;
import rx.functions.Func0;
import rx.functions.Func1;
import rx.subjects.PublishSubject;
import rx.subscriptions.CompositeSubscription;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Point;
import android.graphics.drawable.BitmapDrawable;
import android.net.Uri;
import android.text.Html;
import android.widget.TextView;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.Date;
public class HtmlImage implements Html.ImageGetter {
// This class implements an all-purpose image getter that can also be used as a ImageGetter interface
// when displaying caches. An instance mainly has three possible use cases:
// - If onlySave is true, getDrawable() will return null immediately and will queue the image retrieval
// and saving in the loading subject. Downloads will start in parallel when the blocking
// waitForBackgroundLoading() method is called, and they can be cancelled through the given handler.
// - If onlySave is false and the instance is called through fetchDrawable(), then an observable for the
// given URL will be returned. This observable will emit the local copy of the image if it is present,
// regardless of its freshness, then if needed an updated fresher copy after retrieving it from the network.
// - If onlySave is false and the instance is used as an ImageGetter, only the final version of the
// image will be returned, unless a view has been provided. If it has, then a dummy drawable is returned
// and is updated when the image is available, possibly several times if we had a stale copy of the image
// and then got a new one from the network.
// URL fragments of widely used web counter images which are never fetched or shown
private static final String[] BLOCKED = new String[] {
"gccounter.de",
"gccounter.com",
"cachercounter/?",
"gccounter/imgcount.php",
"flagcounter.com",
"compteur-blog.net",
"counter.digits.com",
"andyhoppe",
"besucherzaehler-homepage.de",
"hitwebcounter.com",
"kostenloser-counter.eu",
"trendcounter.com",
"hit-counter-download.com",
"gcwetterau.de/counter"
};
// pseudo geocode used as the storage folder for images shared between caches (standard icons)
public static final String SHARED = "shared";
// geocode of the item the images belong to (storage folder)
final private String geocode;
/**
* on error: return large error image, if <code>true</code>, otherwise empty 1x1 image
*/
final private boolean returnErrorImage;
// list the cache belongs to; used to decide whether an older offline copy may be reused
final private int listId;
// if true, images are only fetched and stored, never returned to the caller
final private boolean onlySave;
// maximum dimensions (display size minus a margin) available for scaled images
final private int maxWidth;
final private int maxHeight;
final private Resources resources;
// view to redraw when an image arrives asynchronously; may be null
protected final TextView view;
// Background loading
// collects one observable per queued download (used when onlySave is true)
final private PublishSubject<Observable<String>> loading = PublishSubject.create();
// emits the URL of each finished download and completes when all queued downloads are done
final private Observable<String> waitForEnd = Observable.merge(loading).publish().refCount();
// subscriptions of all in-flight fetches; unsubscribing cancels background loading
final CompositeSubscription subscription = new CompositeSubscription(waitForEnd.subscribe());
/**
* Create a new HtmlImage object with different behaviours depending on <tt>onlySave</tt> and <tt>view</tt> values.
*
* @param geocode the geocode of the item for which we are requesting the image
* @param returnErrorImage set to <tt>true</tt> if an error image should be returned in case of a problem,
* <tt>false</tt> to get a transparent 1x1 image instead
* @param listId the list this cache belongs to, used to determine if an older image for the offline case can be used or not
* @param onlySave if set to <tt>true</tt>, {@link #getDrawable(String)} will only fetch and store the image, not return it
* @param view if non-null, {@link #getDrawable(String)} will return an initially empty drawable which will be redrawn when
* the image is ready through an invalidation of the given view
*/
public HtmlImage(final String geocode, final boolean returnErrorImage, final int listId, final boolean onlySave, final TextView view) {
this.geocode = geocode;
this.returnErrorImage = returnErrorImage;
this.listId = listId;
this.onlySave = onlySave;
this.view = view;
// cache the maximum image dimensions: the display size minus a small margin
final Point displaySize = Compatibility.getDisplaySize();
this.maxWidth = displaySize.x - 25;
this.maxHeight = displaySize.y - 25;
this.resources = CgeoApplication.getInstance().getResources();
}
/**
* Create a new HtmlImage object with different behaviours depending on <tt>onlySave</tt> value. No view object
* will be tied to this HtmlImage.
*
* For documentation, see {@link #HtmlImage(String, boolean, int, boolean, TextView)}.
*/
public HtmlImage(final String geocode, final boolean returnErrorImage, final int listId, final boolean onlySave) {
// without a view, getDrawable() either blocks for the image or only stores it
this(geocode, returnErrorImage, listId, onlySave, null);
}
/**
* Retrieve and optionally display an image.
* See {@link #HtmlImage(String, boolean, int, boolean, TextView)} for the various behaviours.
*
* @param url
* the URL to fetch from cache or network
* @return a drawable containing the image, or <tt>null</tt> if <tt>onlySave</tt> is <tt>true</tt>
*/
@Nullable
@Override
public BitmapDrawable getDrawable(final String url) {
final Observable<BitmapDrawable> drawable = fetchDrawable(url);
if (onlySave) {
// only queue the retrieval: map the result to its URL and hand it to the
// loading subject; work starts when waitForEndObservable() is subscribed
loading.onNext(drawable.map(new Func1<BitmapDrawable, String>() {
@Override
public String call(final BitmapDrawable bitmapDrawable) {
return url;
}
}));
return null;
}
if (view == null) {
// no view to refresh: block until the final (freshest) version is available
return drawable.toBlocking().lastOrDefault(null);
}
// wrap into a drawable that redraws the view each time a new version is emitted
return getContainerDrawable(drawable);
}
/**
 * Wraps the given image observable into a container drawable which redraws the
 * associated view whenever a new image version becomes available.
 */
protected BitmapDrawable getContainerDrawable(final Observable<BitmapDrawable> drawable) {
    final ContainerDrawable container = new ContainerDrawable(view, drawable);
    return container;
}
// Caches are loaded from disk on a computation scheduler to avoid using more threads than cores while decoding
// the image. Downloads happen on downloadScheduler, in parallel with image decoding.
/**
* Returns an observable for the image behind the given URL: a cached copy is emitted
* first (if present), followed by a fresh copy when the cached one was stale.
* Blank URLs and blocked counter images yield a transparent 1x1 drawable.
*/
public Observable<BitmapDrawable> fetchDrawable(final String url) {
if (StringUtils.isBlank(url) || ImageUtils.containsPattern(url, BLOCKED)) {
return Observable.just(ImageUtils.getTransparent1x1Drawable(resources))
}
// Explicit local file URLs are loaded from the filesystem regardless of their age. The IO part is short
// enough to make the whole operation on the computation scheduler.
if (FileUtils.isFileUrl(url)) {
return Observable.defer(new Func0<Observable<BitmapDrawable>>() {
@Override
public Observable<BitmapDrawable> call() {
final Bitmap bitmap = loadCachedImage(FileUtils.urlToFile(url), true).getLeft();
return bitmap != null ? Observable.just(ImageUtils.scaleBitmapToFitDisplay(bitmap)) : Observable.<BitmapDrawable>empty();
}
}).subscribeOn(RxUtils.computationScheduler);
}
// shared images (standard icons) are stored in a common folder instead of per geocode
final boolean shared = url.contains("/images/icons/icon_");
final String pseudoGeocode = shared ? SHARED : geocode;
// general case: emit the cached copy (if any) right away, then download in the background
return Observable.create(new OnSubscribe<BitmapDrawable>() {
@Override
public void call(final Subscriber<? super BitmapDrawable> subscriber) {
subscription.add(subscriber);
subscriber.add(RxUtils.computationScheduler.createWorker().schedule(new Action0() {
@Override
public void call() {
final Pair<BitmapDrawable, Boolean> loaded = loadFromDisk();
final BitmapDrawable bitmap = loaded.getLeft();
if (loaded.getRight()) {
// cached copy is fresh enough: emit it and finish without hitting the network
subscriber.onNext(bitmap);
subscriber.onCompleted();
return;
}
// emit the stale copy as a preview unless we are only storing images
if (bitmap != null && !onlySave) {
subscriber.onNext(bitmap);
}
// fetch the fresh copy on the network scheduler, in parallel with other decoding
RxUtils.networkScheduler.createWorker().schedule(new Action0() {
@Override public void call() {
downloadAndSave(subscriber);
}
});
}
}));
}
// load and scale the cached copy; the right element tells whether it is fresh enough
private Pair<BitmapDrawable, Boolean> loadFromDisk() {
final Pair<Bitmap, Boolean> loadResult = loadImageFromStorage(url, pseudoGeocode, shared);
return scaleImage(loadResult);
}
// fetch the image (inline base64 data or network), store it, then emit the final
// version (or an error/transparent placeholder) unless onlySave is set
private void downloadAndSave(final Subscriber<? super BitmapDrawable> subscriber) {
final File file = LocalStorage.getStorageFile(pseudoGeocode, url, true, true);
if (url.startsWith("data:image/")) {
if (url.contains(";base64,")) {
ImageUtils.decodeBase64ToFile(StringUtils.substringAfter(url, ";base64,"), file);
} else {
Log.e("HtmlImage.getDrawable: unable to decode non-base64 inline image");
subscriber.onCompleted();
return;
}
} else if (subscriber.isUnsubscribed() || downloadOrRefreshCopy(url, file)) {
// The existing copy was fresh enough or we were unsubscribed earlier.
subscriber.onCompleted();
return;
}
if (onlySave) {
subscriber.onCompleted();
} else {
// decode the downloaded file back on the computation scheduler
RxUtils.computationScheduler.createWorker().schedule(new Action0() {
@Override
public void call() {
final Pair<BitmapDrawable, Boolean> loaded = loadFromDisk();
final BitmapDrawable image = loaded.getLeft();
if (image != null) {
subscriber.onNext(image);
} else {
subscriber.onNext(returnErrorImage ?
new BitmapDrawable(resources, BitmapFactory.decodeResource(resources, R.drawable.image_not_loaded)) :
ImageUtils.getTransparent1x1Drawable(resources));
}
subscriber.onCompleted();
}
});
}
}
});
}
@SuppressWarnings("static-method")
protected Pair<BitmapDrawable, Boolean> scaleImage(final Pair<Bitmap, Boolean> loadResult) {
    // Scale the decoded bitmap (if any) to fit the display; the freshness flag in the
    // right component is passed through untouched.
    final Bitmap raw = loadResult.getLeft();
    final BitmapDrawable scaled;
    if (raw == null) {
        scaled = null;
    } else {
        scaled = ImageUtils.scaleBitmapToFitDisplay(raw);
    }
    return new ImmutablePair<>(scaled, loadResult.getRight());
}
/**
 * Stop accepting new background loads and return an observable that completes when all
 * queued fetches have finished. Each completed fetch emits its URL (see the mapping in
 * {@code getDrawable}). If a handler is given, cancelling it unsubscribes every pending fetch.
 */
public Observable<String> waitForEndObservable(@Nullable final CancellableHandler handler) {
if (handler != null) {
handler.unsubscribeIfCancelled(subscription);
}
// No further observables will be queued; merge() can now terminate once all complete.
loading.onCompleted();
return waitForEnd;
}
/**
 * Download or refresh the copy of <code>url</code> in <code>file</code>.
 *
 * A 200 response replaces the file content; a 304 only refreshes its timestamp so the
 * freshness check in {@code loadCachedImage} keeps accepting it. Any failure (no host,
 * network error, other status codes) is treated as "not up-to-date".
 *
 * @param url the url of the document
 * @param file the file to save the document in
 * @return <code>true</code> if the existing file was up-to-date, <code>false</code> otherwise
 */
private boolean downloadOrRefreshCopy(final String url, final File file) {
    final String absoluteURL = makeAbsoluteURL(url);
    if (absoluteURL == null) {
        return false;
    }
    try {
        final HttpResponse httpResponse = Network.getRequest(absoluteURL, null, file);
        if (httpResponse == null) {
            return false;
        }
        switch (httpResponse.getStatusLine().getStatusCode()) {
            case 200:
                // Fresh content: persist it.
                LocalStorage.saveEntityToFile(httpResponse, file);
                break;
            case 304:
                // Not modified: bump the timestamp so the copy counts as fresh again.
                if (!file.setLastModified(System.currentTimeMillis())) {
                    makeFreshCopy(file);
                }
                return true;
            default:
                break;
        }
    } catch (final Exception e) {
        Log.e("HtmlImage.downloadOrRefreshCopy", e);
    }
    return false;
}
/**
 * Make a fresh copy of the file to reset its timestamp. On some storage, it is impossible
 * to modify the modified time after the fact, in which case a brand new file must be
 * created if we want to be able to use the time as validity hint.
 *
 * See Android issue 1699.
 *
 * @param file the file to refresh
 */
private static void makeFreshCopy(final File file) {
    final File tempFile = new File(file.getParentFile(), file.getName() + "-temp");
    if (!file.renameTo(tempFile)) {
        Log.e("Could not reset timestamp of file " + file.getAbsolutePath());
        return;
    }
    // Copying the renamed original back creates a brand-new file with a current timestamp.
    LocalStorage.copy(tempFile, file);
    FileUtils.deleteIgnoringFailure(tempFile);
}
/**
 * Load an image from primary or secondary storage.
 *
 * Tries the primary storage file first; the secondary location is only consulted when the
 * primary copy is neither fresh nor decodable.
 *
 * @param url the image URL
 * @param pseudoGeocode the geocode or the shared name
 * @param forceKeep keep the image if it is there, without checking its freshness
 * @return a pair of the decoded bitmap (left, possibly <code>null</code>) and a boolean (right)
 *         which is <code>true</code> if the image was there and is fresh enough — see
 *         {@link #loadCachedImage(File, boolean)}; never <code>null</code>, a
 *         <code>(null, false)</code> pair is returned on any error
 */
@NonNull
private Pair<Bitmap, Boolean> loadImageFromStorage(final String url, final String pseudoGeocode, final boolean forceKeep) {
try {
final File file = LocalStorage.getStorageFile(pseudoGeocode, url, true, false);
final Pair<Bitmap, Boolean> image = loadCachedImage(file, forceKeep);
if (image.getRight() || image.getLeft() != null) {
return image;
}
final File fileSec = LocalStorage.getStorageSecFile(pseudoGeocode, url, true);
return loadCachedImage(fileSec, forceKeep);
} catch (final Exception e) {
Log.w("HtmlImage.loadImageFromStorage", e);
}
return new ImmutablePair<>(null, false);
}
/**
 * Resolve a possibly relative URL against the connector host for this geocode.
 *
 * @param url the original URL from the HTML
 * @return the absolute URL, or <code>null</code> when the URL is relative and no
 *         connector host is known
 */
@Nullable
private String makeAbsoluteURL(final String url) {
    // Check if uri is absolute or not, if not attach the connector hostname
    // FIXME: that should also include the scheme
    if (Uri.parse(url).isAbsolute()) {
        return url;
    }
    final String host = ConnectorFactory.getConnector(geocode).getHost();
    if (StringUtils.isEmpty(host)) {
        return null;
    }
    // FIXME: explain why the result URL would be valid if the path does not start with
    // a '/', or signal an error.
    final String separator = StringUtils.startsWith(url, "/") ? "" : "/";
    return "http://" + host + separator + url;
}
/**
 * Load a previously saved image.
 *
 * @param file the file on disk
 * @param forceKeep keep the image if it is there, without checking its freshness
 * @return a pair with the image on the left (possibly <code>null</code> if the freshness flag
 *         is <code>false</code> and the image could not be loaded, or if the flag is
 *         <code>true</code> and <code>onlySave</code> is also <code>true</code>), and on the
 *         right <code>true</code> if the image was there and is fresh enough or
 *         <code>false</code> otherwise
 */
@NonNull
private Pair<Bitmap, Boolean> loadCachedImage(final File file, final boolean forceKeep) {
if (file.exists()) {
// Fresh enough = stored in an offline list, or less than 24h old, or explicitly kept.
// NOTE(review): listId >= STANDARD_LIST_ID apparently marks caches stored offline — confirm.
final boolean freshEnough = listId >= StoredList.STANDARD_LIST_ID || file.lastModified() > (new Date().getTime() - (24 * 60 * 60 * 1000)) || forceKeep;
// Only skip decoding when the copy is actually fresh; a stale copy must still be
// refreshed in onlySave mode (fix for issue #4493).
if (freshEnough && onlySave) {
return new ImmutablePair<>(null, true);
}
final BitmapFactory.Options bfOptions = new BitmapFactory.Options();
bfOptions.inTempStorage = new byte[16 * 1024];
bfOptions.inPreferredConfig = Bitmap.Config.RGB_565;
// Downsample large images so the decoded bitmap fits the display dimensions.
setSampleSize(file, bfOptions);
final Bitmap image = BitmapFactory.decodeFile(file.getPath(), bfOptions);
if (image == null) {
Log.e("Cannot decode bitmap from " + file.getPath());
return new ImmutablePair<>(null, false);
}
return new ImmutablePair<>(image,
freshEnough);
}
return new ImmutablePair<>(null, false);
}
/**
 * Set {@code inSampleSize} on the decode options so the image is downsampled to roughly
 * fit within {@code maxWidth} x {@code maxHeight}.
 */
private void setSampleSize(final File file, final BitmapFactory.Options bfOptions) {
    // First pass: decode only the image bounds, not the pixels.
    final BitmapFactory.Options bounds = new BitmapFactory.Options();
    bounds.inJustDecodeBounds = true;
    BufferedInputStream stream = null;
    try {
        stream = new BufferedInputStream(new FileInputStream(file));
        BitmapFactory.decodeStream(stream, null, bounds);
    } catch (final FileNotFoundException e) {
        Log.e("HtmlImage.setSampleSize", e);
    } finally {
        IOUtils.closeQuietly(stream);
    }
    // Integer subsampling factor; 1 when the image already fits the display.
    final boolean oversized = bounds.outHeight > maxHeight || bounds.outWidth > maxWidth;
    bfOptions.inSampleSize = oversized
        ? Math.max(bounds.outHeight / maxHeight, bounds.outWidth / maxWidth)
        : 1;
}
}
| main/src/cgeo/geocaching/network/HtmlImage.java | package cgeo.geocaching.network;
import cgeo.geocaching.CgeoApplication;
import cgeo.geocaching.R;
import cgeo.geocaching.compatibility.Compatibility;
import cgeo.geocaching.connector.ConnectorFactory;
import cgeo.geocaching.files.LocalStorage;
import cgeo.geocaching.list.StoredList;
import cgeo.geocaching.utils.CancellableHandler;
import cgeo.geocaching.utils.FileUtils;
import cgeo.geocaching.utils.ImageUtils;
import cgeo.geocaching.utils.ImageUtils.ContainerDrawable;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.RxUtils;
import ch.boye.httpclientandroidlib.HttpResponse;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.Subscriber;
import rx.functions.Action0;
import rx.functions.Func0;
import rx.functions.Func1;
import rx.subjects.PublishSubject;
import rx.subscriptions.CompositeSubscription;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Point;
import android.graphics.drawable.BitmapDrawable;
import android.net.Uri;
import android.text.Html;
import android.widget.TextView;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.Date;
public class HtmlImage implements Html.ImageGetter {
// This class implements an all-purpose image getter that can also be used as a ImageGetter interface
// when displaying caches. An instance mainly has three possible use cases:
// - If onlySave is true, getDrawable() will return null immediately and will queue the image retrieval
// and saving in the loading subject. Downloads will start in parallel when the blocking
// waitForBackgroundLoading() method is called, and they can be cancelled through the given handler.
// - If onlySave is false and the instance is called through fetchDrawable(), then an observable for the
// given URL will be returned. This observable will emit the local copy of the image if it is present,
// regardless of its freshness, then if needed an updated fresher copy after retrieving it from the network.
// - If onlySave is false and the instance is used as an ImageGetter, only the final version of the
// image will be returned, unless a view has been provided. If it has, then a dummy drawable is returned
// and is updated when the image is available, possibly several times if we had a stale copy of the image
// and then got a new one from the network.
private static final String[] BLOCKED = new String[] {
"gccounter.de",
"gccounter.com",
"cachercounter/?",
"gccounter/imgcount.php",
"flagcounter.com",
"compteur-blog.net",
"counter.digits.com",
"andyhoppe",
"besucherzaehler-homepage.de",
"hitwebcounter.com",
"kostenloser-counter.eu",
"trendcounter.com",
"hit-counter-download.com",
"gcwetterau.de/counter"
};
public static final String SHARED = "shared";
final private String geocode;
/**
* on error: return large error image, if <code>true</code>, otherwise empty 1x1 image
*/
final private boolean returnErrorImage;
final private int listId;
final private boolean onlySave;
final private int maxWidth;
final private int maxHeight;
final private Resources resources;
protected final TextView view;
// Background loading
final private PublishSubject<Observable<String>> loading = PublishSubject.create();
final private Observable<String> waitForEnd = Observable.merge(loading).publish().refCount();
final CompositeSubscription subscription = new CompositeSubscription(waitForEnd.subscribe());
/**
* Create a new HtmlImage object with different behaviours depending on <tt>onlySave</tt> and <tt>view</tt> values.
*
* @param geocode the geocode of the item for which we are requesting the image
* @param returnErrorImage set to <tt>true</tt> if an error image should be returned in case of a problem,
* <tt>false</tt> to get a transparent 1x1 image instead
* @param listId the list this cache belongs to, used to determine if an older image for the offline case can be used or not
* @param onlySave if set to <tt>true</tt>, {@link #getDrawable(String)} will only fetch and store the image, not return it
* @param view if non-null, {@link #getDrawable(String)} will return an initially empty drawable which will be redrawn when
* the image is ready through an invalidation of the given view
*/
public HtmlImage(final String geocode, final boolean returnErrorImage, final int listId, final boolean onlySave, final TextView view) {
this.geocode = geocode;
this.returnErrorImage = returnErrorImage;
this.listId = listId;
this.onlySave = onlySave;
this.view = view;
final Point displaySize = Compatibility.getDisplaySize();
this.maxWidth = displaySize.x - 25;
this.maxHeight = displaySize.y - 25;
this.resources = CgeoApplication.getInstance().getResources();
}
/**
* Create a new HtmlImage object with different behaviours depending on <tt>onlySave</tt> value. No view object
* will be tied to this HtmlImage.
*
* For documentation, see {@link #HtmlImage(String, boolean, int, boolean, TextView)}.
*/
public HtmlImage(final String geocode, final boolean returnErrorImage, final int listId, final boolean onlySave) {
this(geocode, returnErrorImage, listId, onlySave, null);
}
/**
* Retrieve and optionally display an image.
* See {@link #HtmlImage(String, boolean, int, boolean, TextView)} for the various behaviours.
*
* @param url
* the URL to fetch from cache or network
* @return a drawable containing the image, or <tt>null</tt> if <tt>onlySave</tt> is <tt>true</tt>
*/
@Nullable
@Override
public BitmapDrawable getDrawable(final String url) {
final Observable<BitmapDrawable> drawable = fetchDrawable(url);
if (onlySave) {
loading.onNext(drawable.map(new Func1<BitmapDrawable, String>() {
@Override
public String call(final BitmapDrawable bitmapDrawable) {
return url;
}
}));
return null;
}
if (view == null) {
return drawable.toBlocking().lastOrDefault(null);
}
return getContainerDrawable(drawable);
}
protected BitmapDrawable getContainerDrawable(final Observable<BitmapDrawable> drawable) {
return new ContainerDrawable(view, drawable);
}
// Caches are loaded from disk on a computation scheduler to avoid using more threads than cores while decoding
// the image. Downloads happen on downloadScheduler, in parallel with image decoding.
public Observable<BitmapDrawable> fetchDrawable(final String url) {
if (StringUtils.isBlank(url) || ImageUtils.containsPattern(url, BLOCKED)) {
return Observable.just(ImageUtils.getTransparent1x1Drawable(resources));
}
// Explicit local file URLs are loaded from the filesystem regardless of their age. The IO part is short
// enough to make the whole operation on the computation scheduler.
if (FileUtils.isFileUrl(url)) {
return Observable.defer(new Func0<Observable<BitmapDrawable>>() {
@Override
public Observable<BitmapDrawable> call() {
final Bitmap bitmap = loadCachedImage(FileUtils.urlToFile(url), true).getLeft();
return bitmap != null ? Observable.just(ImageUtils.scaleBitmapToFitDisplay(bitmap)) : Observable.<BitmapDrawable>empty();
}
}).subscribeOn(RxUtils.computationScheduler);
}
final boolean shared = url.contains("/images/icons/icon_");
final String pseudoGeocode = shared ? SHARED : geocode;
return Observable.create(new OnSubscribe<BitmapDrawable>() {
@Override
public void call(final Subscriber<? super BitmapDrawable> subscriber) {
subscription.add(subscriber);
subscriber.add(RxUtils.computationScheduler.createWorker().schedule(new Action0() {
@Override
public void call() {
final Pair<BitmapDrawable, Boolean> loaded = loadFromDisk();
final BitmapDrawable bitmap = loaded.getLeft();
if (loaded.getRight()) {
subscriber.onNext(bitmap);
subscriber.onCompleted();
return;
}
if (bitmap != null && !onlySave) {
subscriber.onNext(bitmap);
}
RxUtils.networkScheduler.createWorker().schedule(new Action0() {
@Override public void call() {
downloadAndSave(subscriber);
}
});
}
}));
}
private Pair<BitmapDrawable, Boolean> loadFromDisk() {
final Pair<Bitmap, Boolean> loadResult = loadImageFromStorage(url, pseudoGeocode, shared);
return scaleImage(loadResult);
}
private void downloadAndSave(final Subscriber<? super BitmapDrawable> subscriber) {
final File file = LocalStorage.getStorageFile(pseudoGeocode, url, true, true);
if (url.startsWith("data:image/")) {
if (url.contains(";base64,")) {
ImageUtils.decodeBase64ToFile(StringUtils.substringAfter(url, ";base64,"), file);
} else {
Log.e("HtmlImage.getDrawable: unable to decode non-base64 inline image");
subscriber.onCompleted();
return;
}
} else if (subscriber.isUnsubscribed() || downloadOrRefreshCopy(url, file)) {
// The existing copy was fresh enough or we were unsubscribed earlier.
subscriber.onCompleted();
return;
}
if (onlySave) {
subscriber.onCompleted();
} else {
RxUtils.computationScheduler.createWorker().schedule(new Action0() {
@Override
public void call() {
final Pair<BitmapDrawable, Boolean> loaded = loadFromDisk();
final BitmapDrawable image = loaded.getLeft();
if (image != null) {
subscriber.onNext(image);
} else {
subscriber.onNext(returnErrorImage ?
new BitmapDrawable(resources, BitmapFactory.decodeResource(resources, R.drawable.image_not_loaded)) :
ImageUtils.getTransparent1x1Drawable(resources));
}
subscriber.onCompleted();
}
});
}
}
});
}
@SuppressWarnings("static-method")
protected Pair<BitmapDrawable, Boolean> scaleImage(final Pair<Bitmap, Boolean> loadResult) {
final Bitmap bitmap = loadResult.getLeft();
return new ImmutablePair<>(bitmap != null ?
ImageUtils.scaleBitmapToFitDisplay(bitmap) :
null,
loadResult.getRight());
}
public Observable<String> waitForEndObservable(@Nullable final CancellableHandler handler) {
if (handler != null) {
handler.unsubscribeIfCancelled(subscription);
}
loading.onCompleted();
return waitForEnd;
}
/**
* Download or refresh the copy of <code>url</code> in <code>file</code>.
*
* @param url the url of the document
* @param file the file to save the document in
* @return <code>true</code> if the existing file was up-to-date, <code>false</code> otherwise
*/
private boolean downloadOrRefreshCopy(final String url, final File file) {
final String absoluteURL = makeAbsoluteURL(url);
if (absoluteURL != null) {
try {
final HttpResponse httpResponse = Network.getRequest(absoluteURL, null, file);
if (httpResponse != null) {
final int statusCode = httpResponse.getStatusLine().getStatusCode();
if (statusCode == 200) {
LocalStorage.saveEntityToFile(httpResponse, file);
} else if (statusCode == 304) {
if (!file.setLastModified(System.currentTimeMillis())) {
makeFreshCopy(file);
}
return true;
}
}
} catch (final Exception e) {
Log.e("HtmlImage.downloadOrRefreshCopy", e);
}
}
return false;
}
/**
* Make a fresh copy of the file to reset its timestamp. On some storage, it is impossible
* to modify the modified time after the fact, in which case a brand new file must be
* created if we want to be able to use the time as validity hint.
*
* See Android issue 1699.
*
* @param file the file to refresh
*/
private static void makeFreshCopy(final File file) {
final File tempFile = new File(file.getParentFile(), file.getName() + "-temp");
if (file.renameTo(tempFile)) {
LocalStorage.copy(tempFile, file);
FileUtils.deleteIgnoringFailure(tempFile);
}
else {
Log.e("Could not reset timestamp of file " + file.getAbsolutePath());
}
}
/**
* Load an image from primary or secondary storage.
*
* @param url the image URL
* @param pseudoGeocode the geocode or the shared name
* @param forceKeep keep the image if it is there, without checking its freshness
* @return <code>true</code> if the image was there and is fresh enough, <code>false</code> otherwise
*/
@NonNull
private Pair<Bitmap, Boolean> loadImageFromStorage(final String url, final String pseudoGeocode, final boolean forceKeep) {
try {
final File file = LocalStorage.getStorageFile(pseudoGeocode, url, true, false);
final Pair<Bitmap, Boolean> image = loadCachedImage(file, forceKeep);
if (image.getRight() || image.getLeft() != null) {
return image;
}
final File fileSec = LocalStorage.getStorageSecFile(pseudoGeocode, url, true);
return loadCachedImage(fileSec, forceKeep);
} catch (final Exception e) {
Log.w("HtmlImage.loadImageFromStorage", e);
}
return new ImmutablePair<>(null, false);
}
@Nullable
private String makeAbsoluteURL(final String url) {
// Check if uri is absolute or not, if not attach the connector hostname
// FIXME: that should also include the scheme
if (Uri.parse(url).isAbsolute()) {
return url;
}
final String host = ConnectorFactory.getConnector(geocode).getHost();
if (StringUtils.isNotEmpty(host)) {
final StringBuilder builder = new StringBuilder("http://");
builder.append(host);
if (!StringUtils.startsWith(url, "/")) {
// FIXME: explain why the result URL would be valid if the path does not start with
// a '/', or signal an error.
builder.append('/');
}
builder.append(url);
return builder.toString();
}
return null;
}
/**
* Load a previously saved image.
*
* @param file the file on disk
* @param forceKeep keep the image if it is there, without checking its freshness
* @return a pair with <code>true</code> if the image was there and is fresh enough or <code>false</code> otherwise,
* and the image (possibly <code>null</code> if the first component is <code>false</code> and the image
* could not be loaded, or if the first component is <code>true</code> and <code>onlySave</code> is also
* <code>true</code>)
*/
@NonNull
private Pair<Bitmap, Boolean> loadCachedImage(final File file, final boolean forceKeep) {
if (file.exists()) {
final boolean freshEnough = listId >= StoredList.STANDARD_LIST_ID || file.lastModified() > (new Date().getTime() - (24 * 60 * 60 * 1000)) || forceKeep;
if (onlySave) {
return new ImmutablePair<>(null, true);
}
final BitmapFactory.Options bfOptions = new BitmapFactory.Options();
bfOptions.inTempStorage = new byte[16 * 1024];
bfOptions.inPreferredConfig = Bitmap.Config.RGB_565;
setSampleSize(file, bfOptions);
final Bitmap image = BitmapFactory.decodeFile(file.getPath(), bfOptions);
if (image == null) {
Log.e("Cannot decode bitmap from " + file.getPath());
return new ImmutablePair<>(null, false);
}
return new ImmutablePair<>(image,
freshEnough);
}
return new ImmutablePair<>(null, false);
}
private void setSampleSize(final File file, final BitmapFactory.Options bfOptions) {
//Decode image size only
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BufferedInputStream stream = null;
try {
stream = new BufferedInputStream(new FileInputStream(file));
BitmapFactory.decodeStream(stream, null, options);
} catch (final FileNotFoundException e) {
Log.e("HtmlImage.setSampleSize", e);
} finally {
IOUtils.closeQuietly(stream);
}
int scale = 1;
if (options.outHeight > maxHeight || options.outWidth > maxWidth) {
scale = Math.max(options.outHeight / maxHeight, options.outWidth / maxWidth);
}
bfOptions.inSampleSize = scale;
}
}
| fix #4493: mass-refreshing caches won't refresh stale images
| main/src/cgeo/geocaching/network/HtmlImage.java | fix #4493: mass-refreshing caches won't refresh stale images |
|
Java | apache-2.0 | 1246be5b9296788c771a167c41c6ce774d537872 | 0 | EvilMcJerkface/Aeron,mikeb01/Aeron,galderz/Aeron,galderz/Aeron,EvilMcJerkface/Aeron,galderz/Aeron,real-logic/Aeron,real-logic/Aeron,mikeb01/Aeron,mikeb01/Aeron,EvilMcJerkface/Aeron,real-logic/Aeron,mikeb01/Aeron,galderz/Aeron,EvilMcJerkface/Aeron,real-logic/Aeron | package io.aeron.archive;
import io.aeron.Counter;
import io.aeron.archive.codecs.RecordingDescriptorDecoder;
import io.aeron.archive.codecs.RecordingDescriptorEncoder;
import org.agrona.IoUtil;
import org.agrona.concurrent.EpochClock;
import org.agrona.concurrent.UnsafeBuffer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import java.io.File;
import java.io.IOException;
import java.nio.channels.FileChannel;
import static io.aeron.archive.Catalog.wrapDescriptorDecoder;
import static io.aeron.logbuffer.FrameDescriptor.FRAME_ALIGNMENT;
import static org.agrona.BufferUtil.allocateDirectAligned;
import static org.mockito.Mockito.*;
public class RecordingWriterTest
{
// Fixed recording parameters used by every test in this class.
private static final int RECORDING_ID = 1;
private static final int TERM_BUFFER_LENGTH = 16 * 1024;
private static final int MTU_LENGTH = 4 * 1024;
private static final int INITIAL_TERM_ID = 3;
private static final int START_POSITION = 32;
private static final int SESSION_ID = 1234;
private static final int STREAM_ID = 0;
// fileSyncLevel 2 means data and metadata are forced to disk (asserted in verifyFirstWrite).
private static final int SYNC_LEVEL = 2;
private static final String CHANNEL = "channel";
private static final String SOURCE = "source";
private static final long START_TIMESTAMP = 0L;
// Temporary archive directory, deleted again in after().
private File archiveDir = TestUtil.makeTempDir();
private EpochClock epochClock = Mockito.mock(EpochClock.class);
private final Archive.Context ctx = new Archive.Context();
private FileChannel mockArchiveDirFileChannel = Mockito.mock(FileChannel.class);
private FileChannel mockDataFileChannel = Mockito.mock(FileChannel.class);
private UnsafeBuffer mockTermBuffer = Mockito.mock(UnsafeBuffer.class);
// Mocked position counter backed by positionLong so reads see the last ordered write.
private final Counter position = mock(Counter.class);
private long positionLong;
// Wire the mocked Counter to the positionLong field and configure the archive context.
@Before
public void before()
{
when(position.getWeak()).then((invocation) -> positionLong);
when(position.get()).then((invocation) -> positionLong);
doAnswer(
(invocation) ->
{
positionLong = invocation.getArgument(0);
return null;
})
.when(position).setOrdered(anyLong());
ctx
.archiveDir(archiveDir)
.segmentFileLength(1024 * 1024)
.epochClock(epochClock)
.fileSyncLevel(SYNC_LEVEL);
}
// Remove the temporary archive directory created for the test.
@After
public void after()
{
IoUtil.delete(archiveDir, false);
}
// Verify that the first block written through RecordingWriter forces both the archive
// directory channel and the recording file channel when fileSyncLevel is 2.
@SuppressWarnings("ConstantConditions")
@Test
public void verifyFirstWrite() throws IOException
{
when(epochClock.time()).thenReturn(42L);
// Build a recording descriptor matching the parameters the writer is created with.
final UnsafeBuffer descriptorBuffer =
new UnsafeBuffer(allocateDirectAligned(Catalog.DEFAULT_RECORD_LENGTH, FRAME_ALIGNMENT));
final RecordingDescriptorEncoder descriptorEncoder = new RecordingDescriptorEncoder().wrap(
descriptorBuffer,
Catalog.DESCRIPTOR_HEADER_LENGTH);
final RecordingDescriptorDecoder descriptorDecoder = new RecordingDescriptorDecoder();
wrapDescriptorDecoder(descriptorDecoder, descriptorBuffer);
Catalog.initDescriptor(
descriptorEncoder,
RECORDING_ID,
START_TIMESTAMP,
START_POSITION,
INITIAL_TERM_ID,
ctx.segmentFileLength(),
TERM_BUFFER_LENGTH,
MTU_LENGTH,
SESSION_ID,
STREAM_ID,
CHANNEL,
CHANNEL,
SOURCE);
try (RecordingWriter writer = Mockito.spy(new RecordingWriter(
RECORDING_ID, START_POSITION, TERM_BUFFER_LENGTH, ctx, mockArchiveDirFileChannel, position)))
{
// Simulate a 256-byte transfer from the data file channel: advance the destination
// channel position and report 256 bytes moved.
when(mockDataFileChannel.transferTo(eq(0L), eq(256L), any(FileChannel.class))).then(
(invocation) ->
{
final FileChannel dataFileChannel = invocation.getArgument(2);
dataFileChannel.position(START_POSITION + 256);
return 256L;
});
writer.onBlock(
mockDataFileChannel, 0, mockTermBuffer, START_POSITION, 256, SESSION_ID, INITIAL_TERM_ID);
// Directory metadata must be forced before the data file, both with metadata sync
// (true when SYNC_LEVEL == 2).
final InOrder inOrder = Mockito.inOrder(writer);
inOrder.verify(writer).forceData(eq(mockArchiveDirFileChannel), eq(SYNC_LEVEL == 2));
inOrder.verify(writer).forceData(any(FileChannel.class), eq(SYNC_LEVEL == 2));
}
}
} | aeron-archive/src/test/java/io/aeron/archive/RecordingWriterTest.java | package io.aeron.archive;
import io.aeron.Counter;
import io.aeron.archive.codecs.RecordingDescriptorDecoder;
import io.aeron.archive.codecs.RecordingDescriptorEncoder;
import org.agrona.IoUtil;
import org.agrona.concurrent.EpochClock;
import org.agrona.concurrent.UnsafeBuffer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import java.io.File;
import java.io.IOException;
import java.nio.channels.FileChannel;
import static io.aeron.archive.Catalog.wrapDescriptorDecoder;
import static io.aeron.logbuffer.FrameDescriptor.FRAME_ALIGNMENT;
import static org.agrona.BufferUtil.allocateDirectAligned;
import static org.mockito.Mockito.*;
public class RecordingWriterTest
{
private static final int RECORDING_ID = 1;
private static final int TERM_BUFFER_LENGTH = 16 * 1024;
private static final int MTU_LENGTH = 4 * 1024;
private static final int INITIAL_TERM_ID = 3;
private static final int START_POSITION = 32;
private static final int SESSION_ID = 1234;
private static final int STREAM_ID = 0;
private static final int SYNC_LEVEL = 2;
private static final String CHANNEL = "channel";
private static final String SOURCE = "source";
private static final long START_TIMESTAMP = 0L;
private File archiveDir = TestUtil.makeTempDir();
private EpochClock epochClock = Mockito.mock(EpochClock.class);
private final Archive.Context ctx = new Archive.Context();
private FileChannel mockArchiveDirFileChannel = Mockito.mock(FileChannel.class);
private FileChannel mockDataFileChannel = Mockito.mock(FileChannel.class);
private UnsafeBuffer mockTermBuffer = Mockito.mock(UnsafeBuffer.class);
private final Counter position = mock(Counter.class);
private long positionLong;
@Before
public void before() throws Exception
{
when(position.getWeak()).then((invocation) -> positionLong);
when(position.get()).then((invocation) -> positionLong);
doAnswer(
(invocation) ->
{
positionLong = invocation.getArgument(0);
return null;
})
.when(position).setOrdered(anyLong());
ctx
.archiveDir(archiveDir)
.segmentFileLength(1024 * 1024)
.epochClock(epochClock)
.fileSyncLevel(SYNC_LEVEL);
}
@After
public void after()
{
IoUtil.delete(archiveDir, false);
}
@SuppressWarnings("ConstantConditions")
@Test
public void verifyFirstWrite() throws IOException
{
when(epochClock.time()).thenReturn(42L);
final UnsafeBuffer descriptorBuffer =
new UnsafeBuffer(allocateDirectAligned(Catalog.DEFAULT_RECORD_LENGTH, FRAME_ALIGNMENT));
final RecordingDescriptorEncoder descriptorEncoder = new RecordingDescriptorEncoder().wrap(
descriptorBuffer,
Catalog.DESCRIPTOR_HEADER_LENGTH);
final RecordingDescriptorDecoder descriptorDecoder = new RecordingDescriptorDecoder();
wrapDescriptorDecoder(descriptorDecoder, descriptorBuffer);
Catalog.initDescriptor(
descriptorEncoder,
RECORDING_ID,
START_TIMESTAMP,
START_POSITION,
INITIAL_TERM_ID,
ctx.segmentFileLength(),
TERM_BUFFER_LENGTH,
MTU_LENGTH,
SESSION_ID,
STREAM_ID,
CHANNEL,
CHANNEL,
SOURCE);
try (RecordingWriter writer = Mockito.spy(new RecordingWriter(
RECORDING_ID, START_POSITION, TERM_BUFFER_LENGTH, ctx, mockArchiveDirFileChannel, position)))
{
when(mockDataFileChannel.transferTo(eq(0L), eq(256L), any(FileChannel.class))).then(
(invocation) ->
{
final FileChannel dataFileChannel = invocation.getArgument(2);
dataFileChannel.position(START_POSITION + 256);
return 256L;
});
writer.onBlock(
mockDataFileChannel, 0, mockTermBuffer, START_POSITION, 256, SESSION_ID, INITIAL_TERM_ID);
final InOrder inOrder = Mockito.inOrder(writer);
inOrder.verify(writer).forceData(eq(mockArchiveDirFileChannel), eq(SYNC_LEVEL == 2));
inOrder.verify(writer).forceData(any(FileChannel.class), eq(SYNC_LEVEL == 2));
}
}
} | [Java] Remove unnecessary throws clause.
| aeron-archive/src/test/java/io/aeron/archive/RecordingWriterTest.java | [Java] Remove unnecessary throws clause. |
|
Java | bsd-2-clause | 11a68bde310bca5247d0f1f5f44291e4813f4d8f | 0 | insideo/randomcoder-proxy-server | package com.randomcoder.proxy.server;
import java.io.*;
import java.net.SocketException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.*;
import org.apache.log4j.Logger;
import org.springframework.validation.BindException;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.AbstractCommandController;
/**
* Controller which handles the receive event on a connection. This event
* flushes data periodically from the connected socket.
*
* <pre>
* Copyright (c) 2007, Craig Condit. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* </pre>
*/
public class ReceiveController extends AbstractCommandController
{
private static final Logger logger = Logger.getLogger(ReceiveController.class);
private EndpointTracker endpointTracker;
/**
 * Sets the endpoint tracker to use. Expected to be wired once by the
 * container before any request is handled; no defensive copy or null check
 * is performed.
 *
 * @param endpointTracker
 *            endpoint tracker used to look up and refresh active endpoints
 */
public void setEndpointTracker(EndpointTracker endpointTracker)
{
this.endpointTracker = endpointTracker;
}
/**
 * Processes the receive request: looks up the endpoint for the supplied id,
 * streams bytes from the endpoint's input stream to the HTTP response, and
 * refreshes the tracker after each chunk. A missing endpoint yields a 404
 * "ERROR Connection closed" body; a SocketException (typically a client
 * disconnect) is logged at debug level and ends the stream quietly.
 *
 * @param request
 *            HTTP request
 * @param response
 *            HTTP response
 * @param command
 *            {@link IdCommand} instance
 * @param errors
 *            unused
 * @throws IOException
 *             if an I/O error occurs
 */
@Override
protected ModelAndView handle(
		HttpServletRequest request, HttpServletResponse response,
		Object command, BindException errors)
		throws IOException
{
	IdCommand idCommand = (IdCommand) command;
	Endpoint endpoint = endpointTracker.getEndpoint(idCommand.getId());
	if (endpoint == null)
	{
		if (logger.isDebugEnabled())
			logger.debug("Receive [" + idCommand.getId() + "]: closed");
		sendError(response, "Connection closed");
		return null;
	}
	if (logger.isDebugEnabled())
		logger.debug("Receive [" + idCommand.getId() + "]: active");
	response.setStatus(HttpServletResponse.SC_OK);
	response.setContentType("application/octet-stream");
	ServletOutputStream out = null;
	try
	{
		out = response.getOutputStream();
		out.flush();
		InputStream source = endpoint.getInputStream();
		// must send something here so that server will actually flush the result
		out.write("SENDING\r\n".getBytes("UTF-8"));
		out.flush();
		byte[] buffer = new byte[32768];
		while (true)
		{
			int count = source.read(buffer, 0, 32768);
			if (count < 0)
				break;
			if (count > 0)
			{
				logger.debug("Wrote " + count + " bytes");
				out.write(buffer, 0, count);
				out.flush();
				// stop streaming once the endpoint is no longer tracked
				if (!endpointTracker.refresh(idCommand.getId()))
					break;
			}
		}
	}
	catch (SocketException e)
	{
		logger.debug("Receive [" + idCommand.getId() + "]: " + e.getMessage());
	}
	finally
	{
		try { if (out != null) out.close(); } catch (Throwable ignored) {}
	}
	return null;
}
/**
 * Writes a plain-text error body of the form {@code ERROR <message>} with
 * HTTP status 404, always closing the writer.
 *
 * @param response HTTP response to write to
 * @param error error message sent to the client
 * @throws IOException if the response writer cannot be obtained
 */
private void sendError(HttpServletResponse response, String error)
		throws IOException
{
	response.setStatus(HttpServletResponse.SC_NOT_FOUND);
	response.setContentType("text/plain");
	PrintWriter writer = null;
	try
	{
		writer = response.getWriter();
		writer.print("ERROR " + error + "\r\n");
	}
	finally
	{
		try { if (writer != null) writer.close(); } catch (Throwable ignored) {}
	}
}
} | src/main/java/com/randomcoder/proxy/server/ReceiveController.java | package com.randomcoder.proxy.server;
import java.io.*;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.*;
import org.apache.log4j.Logger;
import org.springframework.validation.BindException;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.AbstractCommandController;
/**
* Controller which handles the receive event on a connection. This event
* flushes data periodically from the connected socket.
*
* <pre>
* Copyright (c) 2007, Craig Condit. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* </pre>
*/
public class ReceiveController extends AbstractCommandController
{
private static final Logger logger = Logger.getLogger(ReceiveController.class);
private EndpointTracker endpointTracker;
/**
 * Sets the endpoint tracker to use. Expected to be wired once by the
 * container before any request is handled; no defensive copy or null check
 * is performed.
 *
 * @param endpointTracker
 *            endpoint tracker used to look up and refresh active endpoints
 */
public void setEndpointTracker(EndpointTracker endpointTracker)
{
this.endpointTracker = endpointTracker;
}
/**
 * Processes the receive request: looks up the endpoint for the supplied id,
 * streams bytes from the endpoint's input stream to the HTTP response, and
 * refreshes the tracker after each chunk. A missing endpoint yields a 404
 * "ERROR Connection closed" body.
 *
 * @param request
 *            HTTP request
 * @param response
 *            HTTP response
 * @param command
 *            {@link IdCommand} instance
 * @param errors
 *            unused
 * @throws IOException
 *             if an I/O error occurs
 */
@Override
protected ModelAndView handle(
		HttpServletRequest request, HttpServletResponse response,
		Object command, BindException errors)
		throws IOException
{
	IdCommand form = (IdCommand) command;
	Endpoint endpoint = endpointTracker.getEndpoint(form.getId());
	if (endpoint == null)
	{
		if (logger.isDebugEnabled())
			logger.debug("Receive [" + form.getId() + "]: closed");
		sendError(response, "Connection closed");
		return null;
	}
	if (logger.isDebugEnabled())
		logger.debug("Receive [" + form.getId() + "]: active");
	response.setStatus(HttpServletResponse.SC_OK);
	response.setContentType("application/octet-stream");
	ServletOutputStream out = null;
	try
	{
		out = response.getOutputStream();
		out.flush();
		InputStream endpointStream = endpoint.getInputStream();
		// must send something here so that server will actually flush the result
		out.write("SENDING\r\n".getBytes("UTF-8"));
		out.flush();
		byte[] buf = new byte[32768];
		int c;
		do
		{
			c = endpointStream.read(buf, 0, 32768);
			if (c > 0)
			{
				logger.debug("Wrote " + c + " bytes");
				out.write(buf, 0, c);
				out.flush();
				// stop streaming once the endpoint is no longer tracked
				if (!endpointTracker.refresh(form.getId()))
					break;
			}
		}
		while (c >= 0);
	}
	catch (java.net.SocketException e)
	{
		// A client disconnect during a long-running receive surfaces as a
		// SocketException on write/flush; log it quietly instead of letting
		// the container render an ugly stack trace. (Fully qualified so the
		// import block is untouched.)
		logger.debug("Receive [" + form.getId() + "]: " + e.getMessage());
	}
	finally
	{
		try { if (out != null) out.close(); } catch (Throwable ignored) {}
	}
	return null;
}
/**
 * Writes a plain-text error body of the form {@code ERROR <message>} with
 * HTTP status 404, always closing the writer.
 *
 * @param response HTTP response to write to
 * @param error error message sent to the client
 * @throws IOException if the response writer cannot be obtained
 */
private void sendError(HttpServletResponse response, String error)
throws IOException
{
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
response.setContentType("text/plain");
PrintWriter out = null;
try
{
out = response.getWriter();
out.print("ERROR " + error + "\r\n");
}
finally
{
// best-effort close; failures here must not mask the response already sent
try { if (out != null) out.close(); } catch (Throwable ignored) {}
}
}
} | Added SocketException check in receive controller to prevent ugly error message.
git-svn-id: 1be577d5ccfeb0e1edc890f9098b91442b983048@763 5bee6cb3-3d18-0410-8c93-a642edd49b48
| src/main/java/com/randomcoder/proxy/server/ReceiveController.java | Added SocketException check in receive controller to prevent ugly error message. |
|
Java | bsd-3-clause | d326129ff22330dd10a936a1e5d1313879f1eb64 | 0 | NCIP/catissue-advanced-query,NCIP/catissue-advanced-query |
package edu.wustl.common.query.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import edu.common.dynamicextensions.domaininterface.AssociationInterface;
import edu.common.dynamicextensions.domaininterface.AttributeInterface;
import edu.common.dynamicextensions.domaininterface.AttributeTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.BooleanTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.DateTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.DoubleTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.EntityInterface;
import edu.common.dynamicextensions.domaininterface.IntegerTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.LongTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.StringTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.databaseproperties.ConstraintKeyPropertiesInterface;
import edu.common.dynamicextensions.domaininterface.databaseproperties.ConstraintPropertiesInterface;
import edu.common.dynamicextensions.entitymanager.DataTypeFactory;
import edu.common.dynamicextensions.entitymanager.EntityManager;
import edu.common.dynamicextensions.entitymanager.EntityManagerConstantsInterface;
import edu.common.dynamicextensions.entitymanager.EntityManagerInterface;
import edu.common.dynamicextensions.exception.DataTypeFactoryInitializationException;
import edu.common.dynamicextensions.exception.DynamicExtensionsSystemException;
import edu.wustl.common.query.exeptions.SQLXMLException;
import edu.wustl.common.query.impl.predicate.PredicateGenerator;
import edu.wustl.common.query.queryobject.impl.OutputTreeDataNode;
import edu.wustl.common.query.queryobject.impl.metadata.QueryOutputTreeAttributeMetadata;
import edu.wustl.common.query.queryobject.util.QueryObjectProcessor;
import edu.wustl.common.querysuite.exceptions.MultipleRootsException;
import edu.wustl.common.querysuite.exceptions.SqlException;
import edu.wustl.common.querysuite.metadata.associations.IAssociation;
import edu.wustl.common.querysuite.metadata.associations.IIntraModelAssociation;
import edu.wustl.common.querysuite.queryobject.ICondition;
import edu.wustl.common.querysuite.queryobject.IExpression;
import edu.wustl.common.querysuite.queryobject.IOutputAttribute;
import edu.wustl.common.querysuite.queryobject.IParameter;
import edu.wustl.common.querysuite.queryobject.IQuery;
import edu.wustl.common.querysuite.queryobject.RelationalOperator;
import edu.wustl.common.querysuite.queryobject.impl.JoinGraph;
import edu.wustl.common.querysuite.queryobject.impl.ParameterizedQuery;
import edu.wustl.common.querysuite.utils.QueryUtility;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.metadata.util.DyExtnObjectCloner;
import edu.wustl.query.util.global.Constants;
import edu.wustl.query.util.global.Utility;
import edu.wustl.query.util.global.Variables;
import edu.wustl.query.util.querysuite.QueryCSMUtil;
import edu.wustl.query.xquerydatatypes.XQueryAttributeType;
import edu.wustl.query.xquerydatatypes.XQueryDataTypeInitializationException;
/**
*
* @author juberahamad_patel
*
* abstract class from which the xquery generator classes extend
*
*/
public abstract class AbstractXQueryGenerator extends QueryGenerator
{
/**
* the suffix used to generate sql column names on the fly, like column0, column1 etc.
*/
protected int suffix = 0;
/**
* the set of expressions whose entites have a separate XML file, where they are the root element
*/
private Set<IExpression> mainExpressions;
/**
* map of expressions for entities and the xpath used to reach them
* xpath could be a variable name or a path
*/
private Map<IExpression, String> entityPaths;
/**
* map of expressions which have one to many relationships with their parents
* expressions and their respective target roles
*/
private Map<IExpression, String> targetRoles;
/**
* the map of exprssions for which a variable is created in the for clause and the variables
*/
private Map<IExpression, String> forVariables;
/**
* the selected attributes (ie the ones going in SELECT part) and their aliases
*/
private Map<IOutputAttribute, String> attributeAliases;
/**
* map of IParameter and the expressions corresponding to their attributes
*/
private Map<IParameter<ICondition>, IExpression> parameters;
/*
* A List containing all the main Entitiees
*/
protected List<EntityInterface> allMainEntityList = new ArrayList<EntityInterface>();
//private static org.apache.log4j.Logger logger =Logger.getLogger(XQueryGenerator.class);
/**
 * Generates the database query (SQL wrapping an XQuery) for the given query
 * object. Step I ({@link #prepare}) builds the generator's working state
 * from a clone of the query; step II ({@link #formQuery}) renders the query
 * text. Every build-time failure is wrapped in a {@link SqlException}.
 *
 * @param query the query object to translate
 * @return the String representing the generated query
 * @throws MultipleRootsException if the constraint graph has more than one root
 * @throws SqlException if the query cannot be built
 * @see edu.wustl.common.querysuite.queryengine.ISqlGenerator#generateSQL(edu.wustl.common.querysuite.queryobject.IQuery)
 */
@Override
public String generateQuery(IQuery query) throws MultipleRootsException, SqlException
{
	String formedQuery = null;
	try
	{
		prepare(query);
		formedQuery = formQuery();
	}
	catch (SQLXMLException e)
	{
		throw new SqlException("problem while trying to build xquery", e);
	}
	catch (DynamicExtensionsSystemException e)
	{
		throw new SqlException("problem while trying to build xquery", e);
	}
	catch (XQueryDataTypeInitializationException e)
	{
		// Previously swallowed with printStackTrace(), which hid the failure
		// and let a null query escape; wrap it like the other build failures.
		throw new SqlException("problem while trying to build xquery", e);
	}
	if (!Variables.isExecutingTestCase)
	{
		log(formedQuery);
	}
	return formedQuery;
}
/**
 * Step I of query generation: clones the incoming query and populates all of
 * the generator's working state (selected/parameterized attributes,
 * constraints, join graph, alias map, main expressions, output tree and
 * entity paths).
 *
 * NOTE(review): the order of these calls matters -- setMainExpressions and
 * createEntityPaths read fields initialized by the earlier statements.
 *
 * @param query query to prepare for generation
 * @throws MultipleRootsException if the join graph has more than one root
 */
private void prepare(IQuery query) throws MultipleRootsException
{
// work on a deep copy (via DyExtnObjectCloner) rather than the caller's object
ParameterizedQuery queryClone = (ParameterizedQuery) new DyExtnObjectCloner().clone(query);
setSelectedAttributes(queryClone);
setParameterizedAttributes(queryClone);
// IQuery queryClone = query;
constraints = queryClone.getConstraints();
QueryObjectProcessor.replaceMultipleParents(constraints);
this.joinGraph = (JoinGraph) constraints.getJoinGraph();
aliasAppenderMap = new HashMap<IExpression, Integer>();
createAliasAppenderMap();
mainExpressions = new LinkedHashSet<IExpression>();
setMainExpressions(joinGraph.getRoot());
createTree();
createEntityPaths();
checkForEmptyExpression(joinGraph.getRoot().getExpressionId());
}
/**
 * Builds the map from each condition parameter of the query to the
 * expression containing the parameterized attribute.
 *
 * @param query query whose parameters are indexed
 */
private void setParameterizedAttributes(ParameterizedQuery query)
{
	parameters = new HashMap<IParameter<ICondition>, IExpression>();
	for (IParameter<?> rawParameter : query.getParameters())
	{
		// cast once and reuse; the query model stores condition parameters untyped
		IParameter<ICondition> conditionParameter = (IParameter<ICondition>) rawParameter;
		parameters.put(conditionParameter,
				QueryUtility.getExpression(conditionParameter, query));
	}
}
/**
 * Records, in selection order, every output attribute of the query together
 * with the alias generated for it (the SELECT part is built from this map).
 *
 * @param query query whose output attributes are indexed
 */
private void setSelectedAttributes(ParameterizedQuery query)
{
	attributeAliases = new LinkedHashMap<IOutputAttribute, String>();
	for (IOutputAttribute outputAttribute : query.getOutputAttributeList())
	{
		attributeAliases.put(outputAttribute, Utility.getAliasFor(
				outputAttribute.getAttribute(), outputAttribute.getExpression()));
	}
}
/**
 * Step II of query generation: renders the query text from the state built
 * in {@link #prepare}.
 *
 * @return the formed query
 * @throws SqlException
 * @throws MultipleRootsException
 * @throws SQLXMLException
 * @throws DynamicExtensionsSystemException
 * @throws XQueryDataTypeInitializationException
 */
private String formQuery() throws SqlException, MultipleRootsException, SQLXMLException,
		DynamicExtensionsSystemException, XQueryDataTypeInitializationException
{
	// SELECT must be rendered first: it assigns the column aliases and tree
	// node bindings that the FROM/XMLTABLE part depends on.
	String selectPart = buildSelectPart();
	String joinedWherePart =
			addJoiningTableCondition(buildWherePart(constraints.getRootExpression(), null));
	PredicateGenerator predicateGenerator =
			new PredicateGenerator(forVariables, joinedWherePart);
	return selectPart + buildFromPart(predicateGenerator);
}
/**
 * Appends the parent/child join predicates (foreign key = primary key) for
 * the main expressions reachable from the join graph root to the given
 * where clause, then strips the trailing AND.
 *
 * @param wherePart where clause built so far
 * @return where clause including the join conditions
 * @throws MultipleRootsException
 * @throws SqlException
 * @throws XQueryDataTypeInitializationException
 * @throws DynamicExtensionsSystemException
 */
private String addJoiningTableCondition(String wherePart) throws MultipleRootsException,
SqlException, XQueryDataTypeInitializationException, DynamicExtensionsSystemException
{
StringBuilder completeWherePart = new StringBuilder(wherePart);
Set<Integer> processedAlias = new HashSet<Integer>();
IExpression parentExpression = joinGraph.getRoot();
String leftAlias = getAliasName(parentExpression);
// an AND is appended unconditionally; Utility.removeLastAnd below trims the
// extra one when processChildExpressions contributes nothing
completeWherePart.append(Constants.QUERY_AND);
completeWherePart.append(processChildExpressions(leftAlias, processedAlias,
parentExpression));
return Utility.removeLastAnd(completeWherePart.toString());
}
/**
 * Recursively emits the XQuery join predicates between a main expression and
 * each of its main-expression children. For every constraint key pair the
 * foreign key column on one side is equated with the matching primary key on
 * the other; non-main children contribute nothing (they are reached by XPath
 * instead).
 *
 * NOTE(review): the leftAlias parameter is never read in this body, and
 * processedAlias is only queried, never added to -- the contains() check
 * below therefore always appears to be false. Confirm whether aliases were
 * meant to be registered here or in a caller.
 *
 * @param leftAlias left table alias in join.
 * @param processedAlias The list of precessed alias.
 * @param parentExpressionId The reference to expression whose children to
 *            be processed.
 * @return the join predicate fragment for the children expressions.
 * @throws SqlException when there is error in the passed IQuery object.
 * @throws XQueryDataTypeInitializationException
 * @throws DynamicExtensionsSystemException
 */
private String processChildExpressions(String leftAlias, Set<Integer> processedAlias,
IExpression parentExpression) throws SqlException, XQueryDataTypeInitializationException, DynamicExtensionsSystemException
{
StringBuffer buffer = new StringBuffer();
List<IExpression> children = joinGraph.getChildrenList(parentExpression);
if (!children.isEmpty())
{
// processing all outgoing edges/nodes from the current node in the
// joingraph.
for (IExpression childExpression : children)
{
// only main expressions (entities with their own document variable, per
// the mainExpressions field) need explicit join predicates
if (mainExpressions.contains(childExpression))
{
IAssociation association = joinGraph.getAssociation(parentExpression,
childExpression);
AssociationInterface actualEavAssociation = ((IIntraModelAssociation) association)
.getDynamicExtensionsAssociation();
AssociationInterface eavAssociation = actualEavAssociation;
EntityInterface rightEntity = eavAssociation.getTargetEntity();
String actualRightAlias = getAliasFor(childExpression, rightEntity);
if (!processedAlias.contains(aliasAppenderMap.get(childExpression)))
{
ConstraintPropertiesInterface constraintProperties = eavAssociation
.getConstraintProperties();
Collection<ConstraintKeyPropertiesInterface> srcCnstrKeyPropColl = constraintProperties
.getSrcEntityConstraintKeyPropertiesCollection();
Collection<ConstraintKeyPropertiesInterface> tgtCnstrKeyPropColl = constraintProperties
.getTgtEntityConstraintKeyPropertiesCollection();
String leftAttribute = null;
String rightAttribute = null;
//many sides
// source-side keys: the parent variable carries the foreign key column,
// the child path ends in the primary key plus its XQuery accessor
for (ConstraintKeyPropertiesInterface cnstrKeyProp : srcCnstrKeyPropColl)
{
AttributeInterface primaryKey = cnstrKeyProp.getSrcPrimaryKeyAttribute();
String xQueryDataType = getXQuerydataType(primaryKey);
leftAttribute = "$" + getAliasName(parentExpression) + "/"
+ cnstrKeyProp.getTgtForiegnKeyColumnProperties().getName();
EntityInterface entity = cnstrKeyProp.getSrcPrimaryKeyAttribute().getEntity();
String entityPath = getEntityPath(entity,childExpression);
String primaryKeyName = cnstrKeyProp.getSrcPrimaryKeyAttribute()
.getName();
rightAttribute = "$" + getAliasName(childExpression) + entityPath + "/"
+ primaryKeyName + "/" + xQueryDataType;
buffer.append(Constants.QUERY_OPENING_PARENTHESIS + rightAttribute + "=" + leftAttribute);
buffer.append(Constants.QUERY_CLOSING_PARENTHESIS);
buffer.append(Constants.QUERY_AND);
}
// One Side
// target-side keys: mirror image -- the child variable carries the
// foreign key column, the parent path ends in the primary key
for (ConstraintKeyPropertiesInterface cnstrKeyProp : tgtCnstrKeyPropColl)
{
AttributeInterface primaryKey = cnstrKeyProp.getSrcPrimaryKeyAttribute();
String xQueryDataType = getXQuerydataType(primaryKey);
EntityInterface entity = cnstrKeyProp.getSrcPrimaryKeyAttribute().getEntity();
String entityPath = getEntityPath(entity, parentExpression);
String primaryKeyName = cnstrKeyProp.getSrcPrimaryKeyAttribute()
.getName();
leftAttribute = "$" + getAliasName(parentExpression) + entityPath + "/"
+ primaryKeyName ;
rightAttribute = "$" + getAliasName(childExpression) + "/"
+ cnstrKeyProp.getTgtForiegnKeyColumnProperties().getName()+ "/" + xQueryDataType;
buffer.append(Constants.QUERY_OPENING_PARENTHESIS + rightAttribute + "=" + leftAttribute);
buffer.append(Constants.QUERY_CLOSING_PARENTHESIS);
buffer.append(Constants.QUERY_AND);
}
}
// append from part SQLXML for the next Expressions.
buffer.append(processChildExpressions(actualRightAlias, processedAlias,
childExpression));
}
else
{
// non-main child: nothing to join (note: this 'continue' is redundant,
// it is the last statement of the loop body)
continue;
}
}
}
return buffer.toString();
}
/**
 * Computes the relative XPath segment needed to reach the given entity from
 * the expression's own entity. Main entities need no segment (empty string);
 * otherwise every incoming association whose source entity matches the
 * expression's dynamic-extensions entity contributes a "/targetRole" step.
 *
 * @param entity entity whose path is being calculated
 * @param expression expression that anchors the path
 * @return the relative path, possibly empty
 * @throws DynamicExtensionsSystemException if association lookup fails
 */
private String getEntityPath(EntityInterface entity, IExpression expression)
		throws DynamicExtensionsSystemException
{
	StringBuffer path = new StringBuffer();
	if (allMainEntityList.contains(entity))
	{
		return path.toString();
	}
	EntityInterface expressionEntity = expression.getQueryEntity().getDynamicExtensionsEntity();
	for (AssociationInterface incoming : EntityManager.getInstance()
			.getIncomingAssociations(entity))
	{
		if (incoming.getEntity().equals(expressionEntity))
		{
			path.append("/").append(incoming.getTargetRole().getName());
		}
	}
	return path.toString();
}
/**
 * Maps the dynamic-extensions type of the given attribute to the XQuery
 * accessor configured in {@link XQueryAttributeType}. Returns null when the
 * attribute type is none of the known kinds.
 *
 * @param attributeType attribute whose XQuery accessor is required
 * @return the configured XQuery accessor, or null for unknown types
 * @throws XQueryDataTypeInitializationException if the type map cannot load
 */
private String getXQuerydataType(AttributeInterface attributeType)
		throws XQueryDataTypeInitializationException
{
	AttributeTypeInformationInterface typeInfo = attributeType.getAttributeTypeInformation();
	// resolve the DE type-info class to its constant key, then look the key up once
	String typeKey = null;
	if (typeInfo instanceof StringTypeInformationInterface)
	{
		typeKey = EntityManagerConstantsInterface.STRING_ATTRIBUTE_TYPE;
	}
	else if (typeInfo instanceof DateTypeInformationInterface)
	{
		typeKey = EntityManagerConstantsInterface.DATE_TIME_ATTRIBUTE_TYPE;
	}
	else if (typeInfo instanceof LongTypeInformationInterface)
	{
		typeKey = EntityManagerConstantsInterface.LONG_ATTRIBUTE_TYPE;
	}
	else if (typeInfo instanceof DoubleTypeInformationInterface)
	{
		typeKey = EntityManagerConstantsInterface.DOUBLE_ATTRIBUTE_TYPE;
	}
	else if (typeInfo instanceof IntegerTypeInformationInterface)
	{
		typeKey = EntityManagerConstantsInterface.INTEGER_ATTRIBUTE_TYPE;
	}
	else if (typeInfo instanceof BooleanTypeInformationInterface)
	{
		typeKey = EntityManagerConstantsInterface.BOOLEAN_ATTRIBUTE_TYPE;
	}
	return typeKey == null ? null : XQueryAttributeType.getInstance().getDataType(typeKey);
}
/**
 * Appends the generated query text to the SQL log via {@link SQLLogger};
 * logging failures are reported to the application log and never propagated.
 *
 * @param sql the generated query text
 */
private void log(String sql)
{
try
{
new SQLLogger().log(sql);
}
catch (IOException e)
{
Logger.out.error("Error while logging sql.\n" + e);
}
}
/**
 * Initializes entityPaths, forVariables and targetRoles, then walks the
 * expression tree from every main expression so each entity is reachable
 * either through its own XQuery variable or through a path relative to its
 * parent's variable.
 */
private void createEntityPaths()
{
	entityPaths = new LinkedHashMap<IExpression, String>();
	forVariables = new LinkedHashMap<IExpression, String>();
	targetRoles = new HashMap<IExpression, String>();
	for (IExpression mainExpression : mainExpressions)
	{
		// every main expression gets its own "$alias" for-variable
		String variable = Constants.QUERY_DOLLAR + getAliasName(mainExpression);
		entityPaths.put(mainExpression, variable);
		forVariables.put(mainExpression, variable);
		createEntityPaths(mainExpression, variable);
	}
}
/**
 * Recursive walk over the non-main children of an expression: a child
 * reached over a one-to-many association (max cardinality > 1) is given its
 * own XQuery variable, while a single-valued child is reached by extending
 * the parent's path with the association's target role name.
 *
 * @param expression parent expression whose children are processed
 * @param xpath the path (or variable) built so far to reach the parent
 */
private void createEntityPaths(IExpression expression, String xpath)
{
for (IExpression childExpression : getNonMainChildren(expression))
{
String newPath = null;
IAssociation association = joinGraph.getAssociation(expression, childExpression);
AssociationInterface eavAssociation = ((IIntraModelAssociation) association)
.getDynamicExtensionsAssociation();
int cardinality = eavAssociation.getTargetRole().getMaximumCardinality().getValue();
if (cardinality > 1)
{
// one-to-many: fresh "$alias" variable; remember the target role for
// predicate generation
newPath = new StringBuilder().append(Constants.QUERY_DOLLAR).append(
getAliasName(childExpression)).toString();
entityPaths.put(childExpression, newPath);
forVariables.put(childExpression, newPath);
targetRoles.put(childExpression, eavAssociation.getTargetRole().getName());
}
else
{
// single-valued: extend the parent's path by "/targetRole"
String childEntityName = eavAssociation.getTargetRole().getName();
newPath = new StringBuilder(xpath).append('/').append(childEntityName).toString();
entityPaths.put(childExpression, newPath);
}
createEntityPaths(childExpression, newPath);
}
}
/**
 * Renders the FROM part of the SQL/XML statement: an XMLTABLE call wrapping
 * the quoted XQuery, followed by the PASSING and COLUMNS clauses supplied by
 * the subclass.
 *
 * @param predicateGenerator generator for XQuery predicates
 * @return the From part of SQLXML
 * @throws SQLXMLException - Will be thrown when there is some SQLXML Exception
 * @throws DynamicExtensionsSystemException - Exception thrown by DynamicExtensions
 * @throws MultipleRootsException - thrown when there is more then one root element
 * @throws SqlException - Thrown when there is some SQL Exception
 */
private String buildFromPart(PredicateGenerator predicateGenerator) throws SQLXMLException,
		DynamicExtensionsSystemException, MultipleRootsException, SqlException
{
	StringBuilder fromPart = new StringBuilder();
	fromPart.append(Constants.QUERY_FROM_XMLTABLE)
			.append(Constants.QUERY_OPENING_PARENTHESIS)
			.append("'")
			.append(buildXQuery(predicateGenerator))
			.append("'")
			.append(buildPassingPart())
			.append(buildColumnsPart())
			.append(Constants.QUERY_CLOSING_PARENTHESIS);
	return fromPart.toString();
}
/**
 * Builds the outer SQL SELECT clause: one generated alias (columnN) per
 * selected output attribute, in selection order. Side effects: advances the
 * shared 'suffix' counter, fills attributeColumnNameMap and registers each
 * attribute with its output tree node.
 *
 * @return the SELECT portion of the SQLXML query
 * @throws SQLXMLException
 */
private String buildSelectPart() throws SQLXMLException
{
StringBuilder selectClause = new StringBuilder(256);
selectClause.append(Constants.SELECT);
for (Entry<IOutputAttribute, String> entry : attributeAliases.entrySet())
{
selectClause.append(entry.getValue());
String columnAliasName = Constants.QUERY_COLUMN_NAME + suffix;
selectClause.append(" " + columnAliasName + Constants.QUERY_COMMA);
addToTreeNode(entry, columnAliasName);
attributeColumnNameMap.put(entry.getKey().getAttribute(), columnAliasName);
suffix++;
}
// NOTE(review): the return value of removeLastComma is discarded --
// presumably it mutates the builder in place; confirm in Utility.
Utility.removeLastComma(selectClause);
return selectClause.toString();
}
/**
 * Registers the selected attribute (with its generated column alias and
 * display name) on the output tree node whose expression id matches the
 * attribute's expression.
 *
 * NOTE(review): if no matching node is found, treeNode stays null and the
 * addAttribute call below throws NullPointerException -- confirm that every
 * selected attribute's expression always has an output tree node.
 *
 * @param entry selected attribute and its alias
 * @param columnAliasName generated column alias (columnN)
 */
private void addToTreeNode(Entry<IOutputAttribute, String> entry, String columnAliasName)
{
// code to get displayname. & pass it to the Constructor along with
// treeNode.
OutputTreeDataNode treeNode = null;
//find the right tree node to add the attribute to
for (OutputTreeDataNode node : attributeOutputTreeNodeList)
{
if (node.getExpressionId() == entry.getKey().getExpression().getExpressionId())
{
treeNode = node;
break;
}
}
String displayNameForColumn = Utility
.getDisplayNameForColumn(entry.getKey().getAttribute());
treeNode.addAttribute(new QueryOutputTreeAttributeMetadata(entry.getKey().getAttribute(),
columnAliasName, treeNode, displayNameForColumn));
}
/**
 * Assembles the XQuery FLWOR expression from the subclass-supplied "for",
 * "let" and "return" clauses, concatenated in that order. (A "where" clause
 * is not emitted here; predicates are handled by the PredicateGenerator.)
 *
 * @param predicateGenerator generator for XQuery predicates
 * @return the XQuery formed from the IQuery object
 * @throws SQLXMLException
 * @throws DynamicExtensionsSystemException
 * @throws MultipleRootsException
 * @throws SqlException
 */
private String buildXQuery(PredicateGenerator predicateGenerator) throws SQLXMLException,
		DynamicExtensionsSystemException, MultipleRootsException, SqlException
{
	String forClause = buildXQueryForClause(predicateGenerator);
	String letClause = buildXQueryLetClause(predicateGenerator);
	String returnClause = buildXQueryReturnClause();
	return forClause + letClause + returnClause;
}
/**
*
* @param predicateGenerator
* @return the For Clause of XQuery
* @throws MultipleRootsException
* @throws DynamicExtensionsSystemException
*/
protected abstract String buildXQueryForClause(PredicateGenerator predicateGenerator)
throws MultipleRootsException, DynamicExtensionsSystemException;
/**
*
* @return the Return Clause of SQLXML
*/
protected abstract String buildXQueryLetClause(PredicateGenerator predicateGenerator);
/**
*
* @return the Return Clause of SQLXML
*/
protected abstract String buildXQueryReturnClause();
/**
* Adds an pseudo anded expression & all its child expressions to
* pAndExpressions set.
*
* @param expression pAnd expression
*/
/*private void addpAndExpression(IExpression expression)
{
List<IExpression> childList = joinGraph.getChildrenList(expression);
pAndExpressions.add(expression);
for (IExpression newExp : childList)
{
addpAndExpression(newExp);
}
}*/
/**
 * Returns the given name with its first letter lower-cased. A null or empty
 * name is returned unchanged (the previous implementation threw
 * StringIndexOutOfBoundsException on an empty string).
 *
 * @param name name to de-capitalize, may be null or empty
 * @return the de-capitalized name, or the input itself when null/empty
 */
protected String deCapitalize(String name)
{
	if (name == null || name.isEmpty())
	{
		return name;
	}
	// lower-case only the first character, preserving the original
	// locale-sensitive String.toLowerCase behavior for that character
	return name.substring(0, 1).toLowerCase() + name.substring(1);
}
/**
 * Converts a condition value into its XQuery literal form based on the
 * attribute type: strings are double-quoted, dates become xs:dateTime(...)
 * literals, and every other type passes through unchanged.
 *
 * Date values are parsed positionally -- assumed layout is
 * MM-dd-yyyy[ HH:mm:ss] with the optional time part starting at index 11.
 * TODO confirm this layout against the condition-value parser.
 *
 * @param value raw condition value
 * @param dataType attribute type information driving the conversion
 * @return the XQuery literal representation of the value
 */
@Override
protected String modifyValueForDataType(String value, AttributeTypeInformationInterface dataType)
{
	StringBuilder actualValue = new StringBuilder();
	if (dataType instanceof StringTypeInformationInterface)
	{
		actualValue.append("\"").append(value).append("\"");
	}
	else if (dataType instanceof DateTypeInformationInterface)
	{
		String actualYear = value.substring(6, 10);
		String actualMonth = value.substring(0, 2);
		String actualDate = value.substring(3, 5);
		// The time part exists only when the value extends past the date
		// portion; a plain length check replaces the previous try/catch that
		// used StringIndexOutOfBoundsException for control flow.
		String actualTime = value.length() >= 11 ? value.substring(11) : "";
		StringBuilder newValue = new StringBuilder(actualYear);
		newValue.append("-");
		newValue.append(actualMonth);
		newValue.append("-");
		newValue.append(actualDate);
		if (actualTime.equals(""))
		{
			actualValue.append("xs:dateTime(\"").append(newValue.toString()).append(
					"T00:00:00\")");
		}
		else
		{
			newValue.append("T" + actualTime);
			actualValue.append("xs:dateTime(\"").append(newValue.toString()).append("\")");
		}
	}
	else
	{
		actualValue.append(value);
	}
	return actualValue.toString();
}
/**
 * Walks the expression tree depth-first from the given expression and adds
 * to mainExpressions every expression whose dynamic-extensions entity is one
 * of its "main" entities as computed by QueryCSMUtil.getAllMainEntities.
 *
 * @param expression root of the subtree to scan
 */
private void setMainExpressions(IExpression expression)
{
List<EntityInterface> mainEntityList = new ArrayList<EntityInterface>();
EntityInterface entity = expression.getQueryEntity().getDynamicExtensionsEntity();
List<EntityInterface> mainEntities = QueryCSMUtil
.getAllMainEntities(entity, mainEntityList);
if (mainEntities.contains(expression.getQueryEntity().getDynamicExtensionsEntity()))
{
mainExpressions.add(expression);
}
for (IExpression child : joinGraph.getChildrenList(expression))
{
setMainExpressions(child);
}
}
/**
*
* @return the 'passing' part of SQLXML
* @throws DataTypeFactoryInitializationException
*/
protected abstract String buildPassingPart() throws DataTypeFactoryInitializationException;
/**
*
* @return Columns part of SQLXML
* @throws DataTypeFactoryInitializationException
*/
protected abstract String buildColumnsPart() throws DataTypeFactoryInitializationException;
/**
 * Returns the database column type (as configured in DataTypeFactory) for
 * the given attribute; string types additionally carry their declared size,
 * e.g. a parenthesized length. Unknown attribute types yield null.
 *
 * @param attribute attribute whose database type is required
 * @return the database data type string, or null for unknown types
 * @throws DataTypeFactoryInitializationException if the factory cannot load
 */
protected String getDataTypeInformation(AttributeInterface attribute)
		throws DataTypeFactoryInitializationException
{
	DataTypeFactory factory = DataTypeFactory.getInstance();
	AttributeTypeInformationInterface typeInfo = attribute.getAttributeTypeInformation();
	String databaseType = null;
	if (typeInfo instanceof StringTypeInformationInterface)
	{
		// strings carry their size: e.g. TYPE(size)
		databaseType = factory
				.getDatabaseDataType(EntityManagerConstantsInterface.STRING_ATTRIBUTE_TYPE)
				+ Constants.QUERY_OPENING_PARENTHESIS
				+ ((StringTypeInformationInterface) typeInfo).getSize()
				+ Constants.QUERY_CLOSING_PARENTHESIS;
	}
	else if (typeInfo instanceof DateTypeInformationInterface)
	{
		databaseType = factory
				.getDatabaseDataType(EntityManagerConstantsInterface.DATE_TIME_ATTRIBUTE_TYPE);
	}
	else if (typeInfo instanceof LongTypeInformationInterface)
	{
		databaseType = factory
				.getDatabaseDataType(EntityManagerConstantsInterface.LONG_ATTRIBUTE_TYPE);
	}
	else if (typeInfo instanceof DoubleTypeInformationInterface)
	{
		databaseType = factory
				.getDatabaseDataType(EntityManagerConstantsInterface.DOUBLE_ATTRIBUTE_TYPE);
	}
	else if (typeInfo instanceof IntegerTypeInformationInterface)
	{
		databaseType = factory
				.getDatabaseDataType(EntityManagerConstantsInterface.INTEGER_ATTRIBUTE_TYPE);
	}
	else if (typeInfo instanceof BooleanTypeInformationInterface)
	{
		databaseType = factory
				.getDatabaseDataType(EntityManagerConstantsInterface.BOOLEAN_ATTRIBUTE_TYPE);
	}
	return databaseType;
}
/**
* get the complete name for given attribute
*/
@Override
protected String getConditionAttributeName(AttributeInterface attribute, IExpression expression)
{
return entityPaths.get(expression) + '/' + attribute.getName();
}
	/**
	 * {@inheritDoc}
	 * <p>
	 * XQuery generation does not use a discriminator condition, so this
	 * implementation always returns {@code null}.
	 */
	@Override
	protected String getDescriminatorCondition(EntityInterface entity, String aliasFor)
	{
		//for the time being null is sufficient
		return null;
	}

	/**
	 * {@inheritDoc}
	 * <p>
	 * Delegates to the superclass; kept as an explicit override point for
	 * subclasses.
	 */
	@Override
	protected boolean shouldAddNodeFor(IExpression expression)
	{
		return super.shouldAddNodeFor(expression);
	}
@Override
protected boolean isContainedExpresion(int expressionId)
{
boolean isMainExpression = false;
for (IExpression exp : mainExpressions)
{
if (exp.getExpressionId() == expressionId)
{
isMainExpression = true;
break;
}
}
return !(isMainExpression);
}
/**
* create xquery fragment to represent "between" operator
*/
@Override
protected String processBetweenOperator(ICondition condition, String attributeName)
throws SqlException
{
StringBuilder builder = new StringBuilder();
List<String> values = condition.getValues();
if (values.size() != 2)
{
throw new SqlException("Incorrect number of operand for Between oparator in condition:"
+ condition);
}
AttributeTypeInformationInterface dataType = condition.getAttribute()
.getAttributeTypeInformation();
if (!(dataType instanceof DateTypeInformationInterface
|| dataType instanceof IntegerTypeInformationInterface
|| dataType instanceof LongTypeInformationInterface || dataType instanceof DoubleTypeInformationInterface))
{
throw new SqlException(
"Incorrect Data type of operand for Between oparator in condition:" + condition);
}
String firstValue = modifyValueForDataType(values.get(0), dataType);
String secondValue = modifyValueForDataType(values.get(1), dataType);
builder.append(attributeName).append(
RelationalOperator.getSQL(RelationalOperator.GreaterThanOrEquals)).append(
firstValue);
builder.append(Constants.QUERY_AND);
builder.append(attributeName).append(
RelationalOperator.getSQL(RelationalOperator.LessThanOrEquals)).append(secondValue);
return builder.toString();
}
/**
* create xquery fragment to represent "in" operator
*/
@Override
protected String processInOperator(ICondition condition, String attributeName)
throws SqlException
{
StringBuilder builder = new StringBuilder(attributeName).append(' ').append(" = ").append(
' ').append(Constants.QUERY_OPENING_PARENTHESIS);
for (String value : condition.getValues())
{
AttributeTypeInformationInterface dataType = condition.getAttribute()
.getAttributeTypeInformation();
if (dataType instanceof StringTypeInformationInterface)
{
builder.append("\"").append(value).append("\"").append(Constants.QUERY_COMMA);
}
else
{
builder.append(value).append(Constants.QUERY_COMMA);
}
}
Utility.removeLastComma(builder);
builder.append(Constants.QUERY_CLOSING_PARENTHESIS);
if (condition.getRelationalOperator().equals(RelationalOperator.NotIn))
{
builder.insert(0, Constants.QUERY_OPENING_PARENTHESIS).insert(0, "not").append(
Constants.QUERY_CLOSING_PARENTHESIS);
}
return builder.toString();
}
/**
* create xquery fragment to represent "exists" and "empty" operators
*/
@Override
protected String processNullCheckOperators(ICondition condition, String attributeName)
throws SqlException
{
RelationalOperator operator = condition.getRelationalOperator();
StringBuilder builder = new StringBuilder();
if (operator.equals(RelationalOperator.IsNotNull))
{
builder.append("exists");
}
else if (operator.equals(RelationalOperator.IsNull))
{
builder.append("empty");
}
builder.append(Constants.QUERY_OPENING_PARENTHESIS).append(attributeName).append(
Constants.QUERY_CLOSING_PARENTHESIS);
return builder.toString();
}
/**
* create xquery fragment to represent "contains", "starts-with" and
* "ends-with" operators
*/
@Override
protected String processLikeOperators(ICondition condition, String attributeName)
throws SqlException
{
RelationalOperator operator = condition.getRelationalOperator();
String newOperator = null;
String value = condition.getValue();
if (operator.equals(RelationalOperator.Contains))
{
newOperator = "contains(string(" + attributeName + "),\"" + value + "\")";
}
else if (operator.equals(RelationalOperator.StartsWith))
{
newOperator = "starts-with(string(" + attributeName + "),\"" + value + "\")";
}
else if (operator.equals(RelationalOperator.EndsWith))
{
newOperator = "ends-with(string(" + attributeName + "),\"" + value + "\")";
}
return newOperator;
}
/**
* get the list of children of given expression which are not main expressions
*
* @param expression
* @return
*/
protected List<IExpression> getNonMainChildren(IExpression expression)
{
List<IExpression> nonMainChildren = new ArrayList<IExpression>(joinGraph
.getChildrenList(expression));
nonMainChildren.removeAll(mainExpressions);
return nonMainChildren;
}
protected List<IExpression> getNonMainNonEmptyChildren(IExpression expression)
{
List<IExpression> children = getNonMainChildren(expression);
children.removeAll(emptyExpressions);
return children;
}
	/**
	 * @return the expressions whose entities are the root element of their own
	 *         XML document
	 */
	protected Set<IExpression> getMainExpressions()
	{
		return mainExpressions;
	}

	/**
	 * @return map from each expression to the xpath (variable name or path)
	 *         used to reach its entity
	 */
	protected Map<IExpression, String> getEntityPaths()
	{
		return entityPaths;
	}

	/**
	 * @return map from each one-to-many child expression to its target role name
	 */
	protected Map<IExpression, String> getTargetRoles()
	{
		return targetRoles;
	}

	/**
	 * @return map from each expression bound in the for clause to its variable
	 */
	protected Map<IExpression, String> getForVariables()
	{
		return forVariables;
	}

	/**
	 * @return the selected output attributes and their column aliases
	 */
	protected Map<IOutputAttribute, String> getAttributeAliases()
	{
		return attributeAliases;
	}

	/**
	 * @return map from each condition parameter to the expression that owns it
	 */
	protected Map<IParameter<ICondition>, IExpression> getParameters()
	{
		return parameters;
	}
/**
*
* @param operandquery
* @return Added a method so that the Parser can identify the temporal query and act accordingly
*/
protected String getTemporalCondition(String operandquery)
{
return "<" + Constants.QUERY_TEMPORAL_CONDITION + ">" + operandquery + "</" + Constants.QUERY_TEMPORAL_CONDITION + ">";
}
} | WEB-INF/src/edu/wustl/common/query/impl/AbstractXQueryGenerator.java |
package edu.wustl.common.query.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import edu.common.dynamicextensions.domaininterface.AssociationInterface;
import edu.common.dynamicextensions.domaininterface.AttributeInterface;
import edu.common.dynamicextensions.domaininterface.AttributeTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.BooleanTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.DateTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.DoubleTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.EntityInterface;
import edu.common.dynamicextensions.domaininterface.IntegerTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.LongTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.StringTypeInformationInterface;
import edu.common.dynamicextensions.domaininterface.databaseproperties.ConstraintKeyPropertiesInterface;
import edu.common.dynamicextensions.domaininterface.databaseproperties.ConstraintPropertiesInterface;
import edu.common.dynamicextensions.entitymanager.DataTypeFactory;
import edu.common.dynamicextensions.entitymanager.EntityManager;
import edu.common.dynamicextensions.entitymanager.EntityManagerConstantsInterface;
import edu.common.dynamicextensions.entitymanager.EntityManagerInterface;
import edu.common.dynamicextensions.exception.DataTypeFactoryInitializationException;
import edu.common.dynamicextensions.exception.DynamicExtensionsSystemException;
import edu.wustl.common.query.exeptions.SQLXMLException;
import edu.wustl.common.query.impl.predicate.PredicateGenerator;
import edu.wustl.common.query.queryobject.impl.OutputTreeDataNode;
import edu.wustl.common.query.queryobject.impl.metadata.QueryOutputTreeAttributeMetadata;
import edu.wustl.common.query.queryobject.util.QueryObjectProcessor;
import edu.wustl.common.querysuite.exceptions.MultipleRootsException;
import edu.wustl.common.querysuite.exceptions.SqlException;
import edu.wustl.common.querysuite.metadata.associations.IAssociation;
import edu.wustl.common.querysuite.metadata.associations.IIntraModelAssociation;
import edu.wustl.common.querysuite.queryobject.ICondition;
import edu.wustl.common.querysuite.queryobject.IExpression;
import edu.wustl.common.querysuite.queryobject.IOutputAttribute;
import edu.wustl.common.querysuite.queryobject.IParameter;
import edu.wustl.common.querysuite.queryobject.IQuery;
import edu.wustl.common.querysuite.queryobject.RelationalOperator;
import edu.wustl.common.querysuite.queryobject.impl.JoinGraph;
import edu.wustl.common.querysuite.queryobject.impl.ParameterizedQuery;
import edu.wustl.common.querysuite.utils.QueryUtility;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.metadata.util.DyExtnObjectCloner;
import edu.wustl.query.util.global.Constants;
import edu.wustl.query.util.global.Utility;
import edu.wustl.query.util.global.Variables;
import edu.wustl.query.util.querysuite.QueryCSMUtil;
import edu.wustl.query.xquerydatatypes.XQueryAttributeType;
import edu.wustl.query.xquerydatatypes.XQueryDataTypeInitializationException;
/**
 * Abstract base class that the XQuery generator classes extend.
 *
 * @author juberahamad_patel
 */
public abstract class AbstractXQueryGenerator extends QueryGenerator
{
	/**
	 * the suffix used to generate sql column names on the fly, like column0, column1 etc.
	 */
	protected int suffix = 0;

	/**
	 * the set of expressions whose entities have a separate XML file, where they are the root element
	 */
	private Set<IExpression> mainExpressions;

	/**
	 * map of expressions for entities and the xpath used to reach them;
	 * the xpath could be a variable name or a path
	 */
	private Map<IExpression, String> entityPaths;

	/**
	 * map of expressions which have one-to-many relationships with their parent
	 * expressions, and their respective target roles
	 */
	private Map<IExpression, String> targetRoles;

	/**
	 * the map of expressions for which a variable is created in the for clause, and those variables
	 */
	private Map<IExpression, String> forVariables;

	/**
	 * the selected attributes (ie the ones going in SELECT part) and their aliases
	 */
	private Map<IOutputAttribute, String> attributeAliases;

	/**
	 * map of IParameter and the expressions corresponding to their attributes
	 */
	private Map<IParameter<ICondition>, IExpression> parameters;

	/*
	 * a list containing all the main entities
	 */
	protected List<EntityInterface> allMainEntityList = new ArrayList<EntityInterface>();

	//private static org.apache.log4j.Logger logger =Logger.getLogger(XQueryGenerator.class);
/**
* Generates SQL for the given Query Object.
*
* @param query
* @return the String representing SQL for the given Query object.
* @throws MultipleRootsException
* @throws SqlException
* @throws XQueryDataTypeInitializationException
* @see edu.wustl.common.querysuite.queryengine.ISqlGenerator#generateSQL(edu.wustl.common.querysuite.queryobject.IQuery)
*/
/*
* The following function takes IQuery object as input for further processing
* @parameters : IQuery query= The query object
* @parameters : char QueryType = representing the kind of query whether aggregate or normal
*/
@Override
public String generateQuery(IQuery query) throws MultipleRootsException, SqlException
{
String formedQuery = null;
try
{
prepare(query);
formedQuery = formQuery();
}
catch (SQLXMLException e)
{
throw new SqlException("problem while trying to build xquery", e);
}
catch (DynamicExtensionsSystemException e)
{
throw new SqlException("problem while trying to build xquery", e);
}
catch (XQueryDataTypeInitializationException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
if (!Variables.isExecutingTestCase)
{
log(formedQuery);
}
return formedQuery;
}
	/**
	 * prepare the data structures and information required to generate xquery.
	 * This is step I of xquery generation.
	 * <p>
	 * Works on a clone of the incoming query so the caller's object is not
	 * mutated. The call order below matters: main expressions must be known
	 * before the tree and the entity paths are created.
	 *
	 * @param query the query to preprocess
	 * @throws MultipleRootsException when the join graph has more than one root
	 */
	private void prepare(IQuery query) throws MultipleRootsException
	{
		// clone so the modifications below do not leak into the caller's query
		ParameterizedQuery queryClone = (ParameterizedQuery) new DyExtnObjectCloner().clone(query);
		setSelectedAttributes(queryClone);
		setParameterizedAttributes(queryClone);
		//		IQuery queryClone = query;
		constraints = queryClone.getConstraints();
		QueryObjectProcessor.replaceMultipleParents(constraints);
		this.joinGraph = (JoinGraph) constraints.getJoinGraph();
		aliasAppenderMap = new HashMap<IExpression, Integer>();
		createAliasAppenderMap();
		mainExpressions = new LinkedHashSet<IExpression>();
		setMainExpressions(joinGraph.getRoot());
		createTree();
		createEntityPaths();
		checkForEmptyExpression(joinGraph.getRoot().getExpressionId());
	}
/**
* set the parameters and their expressions
* @param query
*/
private void setParameterizedAttributes(ParameterizedQuery query)
{
parameters = new HashMap<IParameter<ICondition>, IExpression>();
for (IParameter<?> parameter : query.getParameters())
{
IExpression expression = QueryUtility.getExpression((IParameter<ICondition>) parameter,
query);
parameters.put((IParameter<ICondition>) parameter, expression);
}
}
/**
* @param query
*/
private void setSelectedAttributes(ParameterizedQuery query)
{
//populate selected attributes and their aliases
attributeAliases = new LinkedHashMap<IOutputAttribute, String>();
for (IOutputAttribute selectedAttribute : query.getOutputAttributeList())
{
String attributeAlias = Utility.getAliasFor(selectedAttribute.getAttribute(),
selectedAttribute.getExpression());
attributeAliases.put(selectedAttribute, attributeAlias);
}
}
	/**
	 * form xquery using the data structures prepared in step I.
	 * This is step II of xquery generation.
	 * <p>
	 * Builds the SELECT part first, then the where fragment (conditions plus
	 * the table-join conditions), which is handed to the PredicateGenerator
	 * used while building the FROM part.
	 *
	 * @return the formed xquery
	 * @throws SqlException when the query object is inconsistent
	 * @throws MultipleRootsException when the join graph has more than one root
	 * @throws SQLXMLException when the SQLXML cannot be built
	 * @throws DynamicExtensionsSystemException on dynamic extensions failures
	 * @throws XQueryDataTypeInitializationException when xquery type mapping fails
	 */
	private String formQuery() throws SqlException, MultipleRootsException, SQLXMLException,
			DynamicExtensionsSystemException, XQueryDataTypeInitializationException
	{
		StringBuilder formedQuery = new StringBuilder();
		formedQuery.append(buildSelectPart());
		String wherePart = buildWherePart(constraints.getRootExpression(), null);
		wherePart = addJoiningTableCondition(wherePart);
		PredicateGenerator predicateGenerator = new PredicateGenerator(forVariables, wherePart);
		//isEmptyExpression(rootExpression.getExpressionId());
		formedQuery.append(buildFromPart(predicateGenerator));
		return formedQuery.toString();
	}
/**
* add the join condition for 2 tables to the "where" part
*
* @param wherePart
* @return "where" part that has the join condition in it
* @throws MultipleRootsException
* @throws SqlException
* @throws XQueryDataTypeInitializationException
* @throws DynamicExtensionsSystemException
*/
private String addJoiningTableCondition(String wherePart) throws MultipleRootsException,
SqlException, XQueryDataTypeInitializationException, DynamicExtensionsSystemException
{
StringBuilder completeWherePart = new StringBuilder(wherePart);
Set<Integer> processedAlias = new HashSet<Integer>();
IExpression parentExpression = joinGraph.getRoot();
String leftAlias = getAliasName(parentExpression);
completeWherePart.append(Constants.QUERY_AND);
completeWherePart.append(processChildExpressions(leftAlias, processedAlias,
parentExpression));
return Utility.removeLastAnd(completeWherePart.toString());
}
	/**
	 * Recursively emits the join predicates between each main expression and
	 * its main-expression children: for every constraint-key pair of the
	 * joining association an "(fk = pk)" comparison is emitted, each followed
	 * by AND. Children that are not main expressions are skipped here — they
	 * are reached via xpath, not via joins.
	 *
	 * @param leftAlias left table alias in join.
	 * @param processedAlias The list of processed alias.
	 * @param parentExpression The reference to expression whose children to
	 *            be processed.
	 * @return the join condition fragment for all descendant main expressions,
	 *         ending with a trailing AND (stripped later by the caller).
	 * @throws SqlException when there is error in the passed IQuery object.
	 * @throws XQueryDataTypeInitializationException when xquery type mapping fails
	 * @throws DynamicExtensionsSystemException on dynamic extensions failures
	 */
	private String processChildExpressions(String leftAlias, Set<Integer> processedAlias,
			IExpression parentExpression) throws SqlException, XQueryDataTypeInitializationException, DynamicExtensionsSystemException
	{
		StringBuffer buffer = new StringBuffer();
		List<IExpression> children = joinGraph.getChildrenList(parentExpression);
		if (!children.isEmpty())
		{
			// processing all outgoing edges/nodes from the current node in the
			// joingraph.
			for (IExpression childExpression : children)
			{
				if (mainExpressions.contains(childExpression))
				{
					IAssociation association = joinGraph.getAssociation(parentExpression,
							childExpression);
					AssociationInterface actualEavAssociation = ((IIntraModelAssociation) association)
							.getDynamicExtensionsAssociation();
					AssociationInterface eavAssociation = actualEavAssociation;
					EntityInterface rightEntity = eavAssociation.getTargetEntity();
					String actualRightAlias = getAliasFor(childExpression, rightEntity);
					if (!processedAlias.contains(aliasAppenderMap.get(childExpression)))
					{
						ConstraintPropertiesInterface constraintProperties = eavAssociation
								.getConstraintProperties();
						Collection<ConstraintKeyPropertiesInterface> srcCnstrKeyPropColl = constraintProperties
								.getSrcEntityConstraintKeyPropertiesCollection();
						Collection<ConstraintKeyPropertiesInterface> tgtCnstrKeyPropColl = constraintProperties
								.getTgtEntityConstraintKeyPropertiesCollection();
						String leftAttribute = null;
						String rightAttribute = null;
						// "many" side: foreign key column on the parent document,
						// primary key element inside the child document
						// (xQueryDataType looks like an extra xpath step applied
						// after the key element — TODO confirm)
						for (ConstraintKeyPropertiesInterface cnstrKeyProp : srcCnstrKeyPropColl)
						{
							AttributeInterface primaryKey = cnstrKeyProp.getSrcPrimaryKeyAttribute();
							String xQueryDataType = getXQuerydataType(primaryKey);
							leftAttribute = "$" + getAliasName(parentExpression) + "/"
									+ cnstrKeyProp.getTgtForiegnKeyColumnProperties().getName();
							EntityInterface entity = cnstrKeyProp.getSrcPrimaryKeyAttribute().getEntity();
							String entityPath = getEntityPath(entity,childExpression);
							String primaryKeyName = cnstrKeyProp.getSrcPrimaryKeyAttribute()
									.getName();
							rightAttribute = "$" + getAliasName(childExpression) + entityPath + "/"
									+ primaryKeyName + "/" + xQueryDataType;
							buffer.append(Constants.QUERY_OPENING_PARENTHESIS + rightAttribute + "=" + leftAttribute);
							buffer.append(Constants.QUERY_CLOSING_PARENTHESIS);
							buffer.append(Constants.QUERY_AND);
						}
						// "one" side: primary key element inside the parent
						// document, foreign key column on the child document
						for (ConstraintKeyPropertiesInterface cnstrKeyProp : tgtCnstrKeyPropColl)
						{
							AttributeInterface primaryKey = cnstrKeyProp.getSrcPrimaryKeyAttribute();
							String xQueryDataType = getXQuerydataType(primaryKey);
							EntityInterface entity = cnstrKeyProp.getSrcPrimaryKeyAttribute().getEntity();
							String entityPath = getEntityPath(entity, parentExpression);
							String primaryKeyName = cnstrKeyProp.getSrcPrimaryKeyAttribute()
									.getName();
							leftAttribute = "$" + getAliasName(parentExpression) + entityPath + "/"
									+ primaryKeyName ;
							rightAttribute = "$" + getAliasName(childExpression) + "/"
									+ cnstrKeyProp.getTgtForiegnKeyColumnProperties().getName()+ "/" + xQueryDataType;
							buffer.append(Constants.QUERY_OPENING_PARENTHESIS + rightAttribute + "=" + leftAttribute);
							buffer.append(Constants.QUERY_CLOSING_PARENTHESIS);
							buffer.append(Constants.QUERY_AND);
						}
					}
					// append from part SQLXML for the next Expressions.
					buffer.append(processChildExpressions(actualRightAlias, processedAlias,
							childExpression));
				}
				else
				{
					continue;
				}
			}
		}
		return buffer.toString();
	}
	/**
	 * Computes the relative xpath from the given expression's entity to the
	 * given entity. If the entity is a main entity the path is empty;
	 * otherwise the target-role step of every incoming association whose
	 * source entity matches the expression's entity is appended.
	 * <p>
	 * NOTE(review): when several incoming associations match, all their steps
	 * are appended in iteration order — confirm at most one match is expected.
	 *
	 * @param entity the entity whose path is being calculated
	 * @param expression the expression for which the path is to be calculated
	 * @return the relative xpath (possibly empty)
	 * @throws DynamicExtensionsSystemException on dynamic extensions failures
	 */
	private String getEntityPath(EntityInterface entity, IExpression expression) throws DynamicExtensionsSystemException
	{
		StringBuffer intermediatePath = new StringBuffer();
		if(!allMainEntityList.contains(entity))
		{
			EntityManagerInterface entityMgr = EntityManager.getInstance();
			Collection<AssociationInterface> associationList = entityMgr.getIncomingAssociations(entity);
			for(AssociationInterface association : associationList)
			{
				EntityInterface associatedEntity = association.getEntity();
				if (associatedEntity.equals(expression.getQueryEntity().getDynamicExtensionsEntity()))
				{
					intermediatePath.append("/").append(association.getTargetRole().getName());
				}
			}
		}
		return intermediatePath.toString();
	}
/**
* The method returns the XQuery function based on the data type
* @param attributeType - the data type of the attribute
* @return String representing XQuery function
* @throws XQueryDataTypeInitializationException
*/
private String getXQuerydataType(AttributeInterface attributeType)
throws XQueryDataTypeInitializationException
{
String returnValue = null;
AttributeTypeInformationInterface dataType = attributeType.getAttributeTypeInformation();
XQueryAttributeType xQueryAttributeType = XQueryAttributeType.getInstance();
if (dataType instanceof StringTypeInformationInterface)
{
returnValue = xQueryAttributeType
.getDataType(EntityManagerConstantsInterface.STRING_ATTRIBUTE_TYPE);
}
else if (dataType instanceof DateTypeInformationInterface)
{
returnValue = xQueryAttributeType
.getDataType(EntityManagerConstantsInterface.DATE_TIME_ATTRIBUTE_TYPE);
}
else if (dataType instanceof LongTypeInformationInterface)
{
returnValue = xQueryAttributeType
.getDataType(EntityManagerConstantsInterface.LONG_ATTRIBUTE_TYPE);
}
else if (dataType instanceof DoubleTypeInformationInterface)
{
returnValue = xQueryAttributeType
.getDataType(EntityManagerConstantsInterface.DOUBLE_ATTRIBUTE_TYPE);
}
else if (dataType instanceof IntegerTypeInformationInterface)
{
returnValue = xQueryAttributeType
.getDataType(EntityManagerConstantsInterface.INTEGER_ATTRIBUTE_TYPE);
}
else if (dataType instanceof BooleanTypeInformationInterface)
{
returnValue = xQueryAttributeType
.getDataType(EntityManagerConstantsInterface.BOOLEAN_ATTRIBUTE_TYPE);
}
return returnValue;
}
/**
* log the sql
* @param sql
*/
private void log(String sql)
{
try
{
new SQLLogger().log(sql);
}
catch (IOException e)
{
Logger.out.error("Error while logging sql.\n" + e);
}
}
/**
* populate entityPaths, forVariables and targetRoles
* by processing the expression hierarchy and deciding which relationships are one-many.
*/
private void createEntityPaths()
{
entityPaths = new LinkedHashMap<IExpression, String>();
forVariables = new LinkedHashMap<IExpression, String>();
targetRoles = new HashMap<IExpression, String>();
for (IExpression mainExpression : mainExpressions)
{
String mainVariable = new StringBuilder().append(Constants.QUERY_DOLLAR).append(
getAliasName(mainExpression)).toString();
entityPaths.put(mainExpression, mainVariable);
forVariables.put(mainExpression, mainVariable);
createEntityPaths(mainExpression, mainVariable);
}
}
/**
* the recursive method to traverse down the expression hierachy
* and decide paths to reach each entity
* @param expression
* @param xpath the path built so far to reach this point
*/
private void createEntityPaths(IExpression expression, String xpath)
{
for (IExpression childExpression : getNonMainChildren(expression))
{
String newPath = null;
IAssociation association = joinGraph.getAssociation(expression, childExpression);
AssociationInterface eavAssociation = ((IIntraModelAssociation) association)
.getDynamicExtensionsAssociation();
int cardinality = eavAssociation.getTargetRole().getMaximumCardinality().getValue();
if (cardinality > 1)
{
newPath = new StringBuilder().append(Constants.QUERY_DOLLAR).append(
getAliasName(childExpression)).toString();
entityPaths.put(childExpression, newPath);
forVariables.put(childExpression, newPath);
targetRoles.put(childExpression, eavAssociation.getTargetRole().getName());
}
else
{
String childEntityName = eavAssociation.getTargetRole().getName();
newPath = new StringBuilder(xpath).append('/').append(childEntityName).toString();
entityPaths.put(childExpression, newPath);
}
createEntityPaths(childExpression, newPath);
}
}
/**
*
* @param predicateGenerator
* @return the From part of SQLXML
* @throws SQLXMLException - Will be thrown when there is some SQLXML Exception
* @throws DynamicExtensionsSystemException - Exception thrown by DynamicExtensions
* @throws MultipleRootsException - thrown when there is more then one root element
* @throws SqlException - Thrown when there is some SQL Exception
*/
private String buildFromPart(PredicateGenerator predicateGenerator) throws SQLXMLException,
DynamicExtensionsSystemException, MultipleRootsException, SqlException
{
StringBuilder fromPart = new StringBuilder();
fromPart.append(Constants.QUERY_FROM_XMLTABLE + Constants.QUERY_OPENING_PARENTHESIS + "'");
fromPart.append(buildXQuery(predicateGenerator));
fromPart.append("'");
fromPart.append(buildPassingPart());
fromPart.append(buildColumnsPart());
fromPart.append(Constants.QUERY_CLOSING_PARENTHESIS);
return fromPart.toString();
}
	/**
	 * Builds the SELECT portion of the SQLXML query: every selected attribute
	 * is emitted as "&lt;alias&gt; columnN", with N taken from the running
	 * {@code suffix} counter. As a side effect, each column is registered with
	 * its output-tree node and recorded in {@code attributeColumnNameMap}.
	 *
	 * @return the select clause
	 * @throws SQLXMLException when the SQLXML cannot be built
	 */
	private String buildSelectPart() throws SQLXMLException
	{
		StringBuilder selectClause = new StringBuilder(256);
		selectClause.append(Constants.SELECT);
		for (Entry<IOutputAttribute, String> entry : attributeAliases.entrySet())
		{
			selectClause.append(entry.getValue());
			// column names are generated on the fly: column0, column1, ...
			String columnAliasName = Constants.QUERY_COLUMN_NAME + suffix;
			selectClause.append(" " + columnAliasName + Constants.QUERY_COMMA);
			addToTreeNode(entry, columnAliasName);
			attributeColumnNameMap.put(entry.getKey().getAttribute(), columnAliasName);
			suffix++;
		}
		Utility.removeLastComma(selectClause);
		return selectClause.toString();
	}
	/**
	 * Registers the selected attribute (and its generated column alias) with
	 * the output-tree node belonging to the attribute's expression.
	 * <p>
	 * NOTE(review): if no node in {@code attributeOutputTreeNodeList} matches
	 * the expression id, {@code treeNode} stays null and the final call throws
	 * a NullPointerException — confirm a matching node is always present.
	 *
	 * @param entry the selected attribute and its alias
	 * @param columnAliasName the generated sql column name (e.g. column3)
	 */
	private void addToTreeNode(Entry<IOutputAttribute, String> entry, String columnAliasName)
	{
		// code to get displayname. & pass it to the Constructor along with
		// treeNode.
		OutputTreeDataNode treeNode = null;
		//find the right tree node to add the attribute to
		for (OutputTreeDataNode node : attributeOutputTreeNodeList)
		{
			if (node.getExpressionId() == entry.getKey().getExpression().getExpressionId())
			{
				treeNode = node;
				break;
			}
		}
		String displayNameForColumn = Utility
				.getDisplayNameForColumn(entry.getKey().getAttribute());
		treeNode.addAttribute(new QueryOutputTreeAttributeMetadata(entry.getKey().getAttribute(),
				columnAliasName, treeNode, displayNameForColumn));
	}
/**
*
* @param predicateGenerator
* @return returns the XQuery formed from IQuery object
* @throws SQLXMLException
* @throws DynamicExtensionsSystemException
* @throws MultipleRootsException
* @throws SqlException
*/
private String buildXQuery(PredicateGenerator predicateGenerator) throws SQLXMLException,
DynamicExtensionsSystemException, MultipleRootsException, SqlException
{
StringBuffer xQuery = new StringBuffer(1024);
xQuery.append(buildXQueryForClause(predicateGenerator));
xQuery.append(buildXQueryLetClause(predicateGenerator));
//xQuery.append(buildXQueryWhereClause());
xQuery.append(buildXQueryReturnClause());
return xQuery.toString();
}
	/**
	 * @param predicateGenerator generator for the predicates placed in the clause
	 * @return the for clause of the XQuery
	 * @throws MultipleRootsException when the join graph has more than one root
	 * @throws DynamicExtensionsSystemException on dynamic extensions failures
	 */
	protected abstract String buildXQueryForClause(PredicateGenerator predicateGenerator)
			throws MultipleRootsException, DynamicExtensionsSystemException;

	/**
	 * @param predicateGenerator generator for the predicates placed in the clause
	 * @return the let clause of the XQuery
	 */
	protected abstract String buildXQueryLetClause(PredicateGenerator predicateGenerator);

	/**
	 * @return the return clause of the XQuery
	 */
	protected abstract String buildXQueryReturnClause();
/**
* Adds an pseudo anded expression & all its child expressions to
* pAndExpressions set.
*
* @param expression pAnd expression
*/
/*private void addpAndExpression(IExpression expression)
{
List<IExpression> childList = joinGraph.getChildrenList(expression);
pAndExpressions.add(expression);
for (IExpression newExp : childList)
{
addpAndExpression(newExp);
}
}*/
/**
* change the first letter of the Entity Name
* to lower case
*/
protected String deCapitalize(String name)
{
StringBuilder builder = new StringBuilder(name);
String firstLetter = name.substring(0, 1).toLowerCase();
builder.replace(0, 1, firstLetter);
return builder.toString();
}
/**
* @return Will modify the DataType depending on input
* according to the database
*/
@Override
protected String modifyValueForDataType(String value, AttributeTypeInformationInterface dataType)
{
StringBuilder actualValue = new StringBuilder();
if (dataType instanceof StringTypeInformationInterface)
{
actualValue.append("\"").append(value).append("\"");
}
else if (dataType instanceof DateTypeInformationInterface)
{
String actualYear = value.substring(6, 10);
String actualMonth = value.substring(0, 2);
String actualDate = value.substring(3, 5);
String actualTime = "";
try
{
actualTime = value.substring(11);
}
catch (Exception e)
{
actualTime = "";
}
if (actualTime.equals(""))
{
StringBuilder newValue = new StringBuilder(actualYear);
newValue.append("-");
newValue.append(actualMonth);
newValue.append("-");
newValue.append(actualDate);
actualValue.append("xs:dateTime(\"").append(newValue.toString()).append(
"T00:00:00\")");
}
else
{
StringBuilder newValue = new StringBuilder(actualYear);
newValue.append("-");
newValue.append(actualMonth);
newValue.append("-");
newValue.append(actualDate);
newValue.append("T" + actualTime);
actualValue.append("xs:dateTime(\"").append(newValue.toString()).append("\")");
}
}
else
{
actualValue.append(value);
}
return actualValue.toString();
}
/**
* populate the set of main expressions by
* traversing expression tree recursively
*/
private void setMainExpressions(IExpression expression)
{
List<EntityInterface> mainEntityList = new ArrayList<EntityInterface>();
EntityInterface entity = expression.getQueryEntity().getDynamicExtensionsEntity();
List<EntityInterface> mainEntities = QueryCSMUtil
.getAllMainEntities(entity, mainEntityList);
if (mainEntities.contains(expression.getQueryEntity().getDynamicExtensionsEntity()))
{
mainExpressions.add(expression);
}
for (IExpression child : joinGraph.getChildrenList(expression))
{
setMainExpressions(child);
}
}
	/**
	 * Builds the 'passing' clause of the SQLXML XMLTABLE expression, binding
	 * SQL values into the embedded XQuery.
	 *
	 * @return the 'passing' part of SQLXML
	 * @throws DataTypeFactoryInitializationException if the data type factory cannot be initialised
	 */
	protected abstract String buildPassingPart() throws DataTypeFactoryInitializationException;

	/**
	 * Builds the COLUMNS clause of the SQLXML XMLTABLE expression, mapping
	 * XQuery results onto SQL result columns.
	 *
	 * @return Columns part of SQLXML
	 * @throws DataTypeFactoryInitializationException if the data type factory cannot be initialised
	 */
	protected abstract String buildColumnsPart() throws DataTypeFactoryInitializationException;
/**
* get the database specific data type for given attribute
*
* @param attribute
* @return
* @throws DataTypeFactoryInitializationException
*/
protected String getDataTypeInformation(AttributeInterface attribute)
throws DataTypeFactoryInitializationException
{
String returnValue = null;
DataTypeFactory type = DataTypeFactory.getInstance();
AttributeTypeInformationInterface dataType = attribute.getAttributeTypeInformation();
if (dataType instanceof StringTypeInformationInterface)
{
returnValue = type
.getDatabaseDataType(EntityManagerConstantsInterface.STRING_ATTRIBUTE_TYPE)
+ Constants.QUERY_OPENING_PARENTHESIS
+ ((StringTypeInformationInterface) dataType).getSize()
+ Constants.QUERY_CLOSING_PARENTHESIS;
}
else if (dataType instanceof DateTypeInformationInterface)
{
returnValue = type
.getDatabaseDataType(EntityManagerConstantsInterface.DATE_TIME_ATTRIBUTE_TYPE);
}
else if (dataType instanceof LongTypeInformationInterface)
{
returnValue = type
.getDatabaseDataType(EntityManagerConstantsInterface.LONG_ATTRIBUTE_TYPE);
}
else if (dataType instanceof DoubleTypeInformationInterface)
{
returnValue = type
.getDatabaseDataType(EntityManagerConstantsInterface.DOUBLE_ATTRIBUTE_TYPE);
}
else if (dataType instanceof IntegerTypeInformationInterface)
{
returnValue = type
.getDatabaseDataType(EntityManagerConstantsInterface.INTEGER_ATTRIBUTE_TYPE);
}
else if (dataType instanceof BooleanTypeInformationInterface)
{
returnValue = type
.getDatabaseDataType(EntityManagerConstantsInterface.BOOLEAN_ATTRIBUTE_TYPE);
}
return returnValue;
}
/**
* get the complete name for given attribute
*/
@Override
protected String getConditionAttributeName(AttributeInterface attribute, IExpression expression)
{
return entityPaths.get(expression) + '/' + attribute.getName();
}
@Override
protected String getDescriminatorCondition(EntityInterface entity, String aliasFor)
{
//for the time being null is sufficient
return null;
}
	/**
	 * Delegates to the base implementation unchanged; overridden only as an
	 * explicit extension point for XQuery generation.
	 */
	@Override
	protected boolean shouldAddNodeFor(IExpression expression)
	{
		return super.shouldAddNodeFor(expression);
	}
@Override
protected boolean isContainedExpresion(int expressionId)
{
boolean isMainExpression = false;
for (IExpression exp : mainExpressions)
{
if (exp.getExpressionId() == expressionId)
{
isMainExpression = true;
break;
}
}
return !(isMainExpression);
}
/**
* create xquery fragment to represent "between" operator
*/
@Override
protected String processBetweenOperator(ICondition condition, String attributeName)
throws SqlException
{
StringBuilder builder = new StringBuilder();
List<String> values = condition.getValues();
if (values.size() != 2)
{
throw new SqlException("Incorrect number of operand for Between oparator in condition:"
+ condition);
}
AttributeTypeInformationInterface dataType = condition.getAttribute()
.getAttributeTypeInformation();
if (!(dataType instanceof DateTypeInformationInterface
|| dataType instanceof IntegerTypeInformationInterface
|| dataType instanceof LongTypeInformationInterface || dataType instanceof DoubleTypeInformationInterface))
{
throw new SqlException(
"Incorrect Data type of operand for Between oparator in condition:" + condition);
}
String firstValue = modifyValueForDataType(values.get(0), dataType);
String secondValue = modifyValueForDataType(values.get(1), dataType);
builder.append(attributeName).append(
RelationalOperator.getSQL(RelationalOperator.GreaterThanOrEquals)).append(
firstValue);
builder.append(Constants.QUERY_AND);
builder.append(attributeName).append(
RelationalOperator.getSQL(RelationalOperator.LessThanOrEquals)).append(secondValue);
return builder.toString();
}
/**
* create xquery fragment to represent "in" operator
*/
@Override
protected String processInOperator(ICondition condition, String attributeName)
throws SqlException
{
StringBuilder builder = new StringBuilder(attributeName).append(' ').append(" = ").append(
' ').append(Constants.QUERY_OPENING_PARENTHESIS);
for (String value : condition.getValues())
{
AttributeTypeInformationInterface dataType = condition.getAttribute()
.getAttributeTypeInformation();
if (dataType instanceof StringTypeInformationInterface)
{
builder.append("\"").append(value).append("\"").append(Constants.QUERY_COMMA);
}
else
{
builder.append(value).append(Constants.QUERY_COMMA);
}
}
Utility.removeLastComma(builder);
builder.append(Constants.QUERY_CLOSING_PARENTHESIS);
if (condition.getRelationalOperator().equals(RelationalOperator.NotIn))
{
builder.insert(0, Constants.QUERY_OPENING_PARENTHESIS).insert(0, "not").append(
Constants.QUERY_CLOSING_PARENTHESIS);
}
return builder.toString();
}
/**
* create xquery fragment to represent "exists" and "empty" operators
*/
@Override
protected String processNullCheckOperators(ICondition condition, String attributeName)
throws SqlException
{
RelationalOperator operator = condition.getRelationalOperator();
StringBuilder builder = new StringBuilder();
if (operator.equals(RelationalOperator.IsNotNull))
{
builder.append("exists");
}
else if (operator.equals(RelationalOperator.IsNull))
{
builder.append("empty");
}
builder.append(Constants.QUERY_OPENING_PARENTHESIS).append(attributeName).append(
Constants.QUERY_CLOSING_PARENTHESIS);
return builder.toString();
}
/**
* create xquery fragment to represent "contains", "starts-with" and
* "ends-with" operators
*/
@Override
protected String processLikeOperators(ICondition condition, String attributeName)
throws SqlException
{
RelationalOperator operator = condition.getRelationalOperator();
String newOperator = null;
String value = condition.getValue();
if (operator.equals(RelationalOperator.Contains))
{
newOperator = "contains(string(" + attributeName + "),\"" + value + "\")";
}
else if (operator.equals(RelationalOperator.StartsWith))
{
newOperator = "starts-with(string(" + attributeName + "),\"" + value + "\")";
}
else if (operator.equals(RelationalOperator.EndsWith))
{
newOperator = "ends-with(string(" + attributeName + "),\"" + value + "\")";
}
return newOperator;
}
/**
* get the list of children of given expression which are not main expressions
*
* @param expression
* @return
*/
protected List<IExpression> getNonMainChildren(IExpression expression)
{
List<IExpression> nonMainChildren = new ArrayList<IExpression>(joinGraph
.getChildrenList(expression));
nonMainChildren.removeAll(mainExpressions);
return nonMainChildren;
}
protected List<IExpression> getNonMainNonEmptyChildren(IExpression expression)
{
List<IExpression> children = getNonMainChildren(expression);
children.removeAll(emptyExpressions);
return children;
}
	/**
	 * @return the set of expressions mapped to "main" entities,
	 *         populated by {@code setMainExpressions}
	 */
	protected Set<IExpression> getMainExpressions()
	{
		return mainExpressions;
	}
	/**
	 * @return the map from expression to its entity path used when
	 *         qualifying attribute names
	 */
	protected Map<IExpression, String> getEntityPaths()
	{
		return entityPaths;
	}
	/**
	 * @return the map from expression to its target role name
	 */
	protected Map<IExpression, String> getTargetRoles()
	{
		return targetRoles;
	}
	/**
	 * @return the map from expression to the xquery "for" variable bound
	 *         to it
	 */
	protected Map<IExpression, String> getForVariables()
	{
		return forVariables;
	}
	/**
	 * @return the map from output attribute to its column alias
	 */
	protected Map<IOutputAttribute, String> getAttributeAliases()
	{
		return attributeAliases;
	}
	/**
	 * @return the map from condition parameter to its owning expression
	 */
	protected Map<IParameter<ICondition>, IExpression> getParameters()
	{
		return parameters;
	}
/**
*
* @param operandquery
* @return Added a method so that the Parser can identify the temporal query and act accordingly
*/
protected String getTemporalCondition(String operandquery)
{
return Constants.QUERY_TEMPORAL_CONDITION + "(" + operandquery + ")";
}
} | Changes for temporal Queries
SVN-Revision: 5139
| WEB-INF/src/edu/wustl/common/query/impl/AbstractXQueryGenerator.java | Changes for temporal Queries |
|
Java | bsd-3-clause | 17e6f702d78012b333495df6fa4779aaec5ba795 | 0 | koles/gooddata-agent | /*
* Copyright (c) 2009, GoodData Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and
* the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of the GoodData Corporation nor the names of its contributors may be used to endorse
* or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.gooddata.agent.api;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.sf.json.JSON;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.cookie.CookiePolicy;
import org.apache.commons.httpclient.methods.DeleteMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.InputStreamRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.log4j.Logger;
import com.gooddata.agent.api.model.Column;
import com.gooddata.agent.api.model.MetadataObject;
import com.gooddata.agent.api.model.Project;
import com.gooddata.agent.api.model.SLI;
import com.gooddata.agent.util.Constants;
import com.gooddata.agent.util.NetUtil;
/**
* The GoodData REST API Java wrapper. Stolen from the CL tool's code.
* Should be replaced by a call of an official GoodData Java library
* once it's available.
*
* @author Zdenek Svoboda <[email protected]>
* @version 1.0
*/
public class GdcRESTApiWrapper {
    private static Logger l = Logger.getLogger(GdcRESTApiWrapper.class);
    /**
     * GDC URIs
     */
    private static final String PLATFORM_URI = "/gdc/";
    private static final String MD_URI = "/gdc/md/";
    private static final String LOGIN_URI = "/gdc/account/login";
    private static final String DOMAIN_URI = "/gdc/account/domains";
    private static final String DOMAIN_USERS_SUFFIX = "/users";
    private static final String PROJECT_USERS_SUFFIX = "/users";
    private static final String PROJECT_ROLES_SUFFIX = "/roles";
    private static final String TOKEN_URI = "/gdc/account/token";
    private static final String DATA_INTERFACES_URI = "/ldm/singleloadinterface";
    private static final String PROJECTS_URI = "/gdc/projects";
    private static final String PULL_URI = "/etl/pull";
    private static final String IDENTIFIER_URI = "/identifiers";
    private static final String SLI_DESCRIPTOR_URI = "/descriptor";
    public static final String MAQL_EXEC_URI = "/ldm/manage";
    public static final String MAQL_ASYNC_EXEC_URI = "/ldm/manage2";
    public static final String DML_EXEC_URI = "/dml/manage";
    public static final String PROJECT_EXPORT_URI = "/maintenance/export";
    public static final String PROJECT_IMPORT_URI = "/maintenance/import";
    public static final String PROJECT_PARTIAL_EXPORT_URI = "/maintenance/partialmdexport";
    public static final String PROJECT_PARTIAL_IMPORT_URI = "/maintenance/partialmdimport";
    public static final String REPORT_QUERY = "/query/reports";
    public static final String ATTR_QUERY = "/query/attributes";
    public static final String EXECUTOR = "/gdc/xtab2/executor3";
    public static final String EXPORT_EXECUTOR = "/gdc/exporter/executor";
    public static final String INVITATION_URI = "/invitations";
    public static final String ETL_MODE_URI = "/etl/mode";
    public static final String OBJ_URI = "/obj";
    public static final String ROLES_URI = "/roles";
    public static final String USERS_URI = "/users";
    public static final String ETL_MODE_DLI = "DLI";
    public static final String ETL_MODE_VOID = "VOID";
    public static final String LINKS_UPLOADS_KEY = "uploads";
    public static final String DLI_MANIFEST_FILENAME = "upload_info.json";
    public static final String QUERY_PROJECTDASHBOARDS = "projectdashboards";
    public static final String QUERY_FOLDERS = "folders";
    public static final String QUERY_DATASETS = "datasets";
    public static final String QUERY_DIMENSIONS = "dimensions";
    public static final String QUERY_PREFIX = "/query/";
    // HTTP client carrying the session cookies; recreated on logout()
    protected HttpClient client;
    // GDC username/password credentials used by login()
    protected NamePasswordConfiguration config;
    // "userLogin" structure of the active session; null when logged out
    private JSONObject userLogin = null;
    // account profile fetched right after a successful login
    private JSONObject profile;
    private static HashMap<String, String> ROLES = new HashMap<String, String>();
    /* TODO This is fragile and may not work for all projects and/or future versions.
     * Use /gdc/projects/{projectId}/roles to retrieve roles for a particular project.
     */
    static {
        ROLES.put("ADMIN", "adminRole");
        ROLES.put("EDITOR", "editorRole");
        ROLES.put("DASHBOARD ONLY", "dashboardOnlyRole");
        ROLES.put("UNVERIFIED ADMIN", "unverifiedAdminRole");
        ROLES.put("READONLY", "readOnlyUserRole");
    }
/**
* Constructs the GoodData REST API Java wrapper
*
* @param config NamePasswordConfiguration object with the GDC name and password configuration
*/
public GdcRESTApiWrapper(NamePasswordConfiguration config) {
this.config = config;
client = new HttpClient();
NetUtil.configureHttpProxy(client);
}
    /**
     * GDC login - obtain GDC SSToken.
     * Posts the credentials to the login resource, refreshes the temporary
     * token cookie, and then fetches the account profile referenced by the
     * returned userLogin structure.
     *
     * @throws HttpMethodException
     */
    public void login() throws HttpMethodException {
        //logout();
        l.debug("Logging into GoodData.");
        JSONObject loginStructure = getLoginStructure();
        PostMethod loginPost = createPostMethod(getServerUrl() + LOGIN_URI);
        // NOTE(review): getBytes() uses the platform charset — confirm the
        // JSON payload is ASCII-safe (JSON-lib escapes non-ASCII by default).
        InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(loginStructure.toString().getBytes()));
        loginPost.setRequestEntity(request);
        try {
            String resp = executeMethodOk(loginPost, false); // do not re-login on SC_UNAUTHORIZED
            // enabling this prevents the following message:
            // WARN org.apache.commons.httpclient.HttpMethodDirector -
            // Unable to respond to any of these challenges:
            // {gooddata=GoodData realm="GoodData API" cookie=GDCAuthTT}
            // appearing always after those:
            // DEBUG com.gooddata.integration.rest.GdcRESTApiWrapper -
            // Logging into GoodData.
            // DEBUG com.gooddata.integration.rest.GdcRESTApiWrapper -
            // Successfully logged into GoodData.
            setTokenCookie();
            l.debug("Successfully logged into GoodData.");
            JSONObject rsp = JSONObject.fromObject(resp);
            userLogin = rsp.getJSONObject("userLogin");
            String profileUri = userLogin.getString("profile");
            if (profileUri != null && profileUri.length() > 0) {
                // fetch and cache the account profile for later use
                GetMethod gm = createGetMethod(getServerUrl() + profileUri);
                try {
                    resp = executeMethodOk(gm);
                    this.profile = JSONObject.fromObject(resp);
                }
                finally {
                    gm.releaseConnection();
                }
            } else {
                l.debug("Empty account profile.");
                throw new GdcRestApiException("Empty account profile.");
            }
        } finally {
            loginPost.releaseConnection();
        }
    }
/**
* Creates a new login JSON structure
*
* @return the login JSON structure
*/
private JSONObject getLoginStructure() {
JSONObject credentialsStructure = new JSONObject();
credentialsStructure.put("login", config.getUsername());
credentialsStructure.put("password", config.getPassword());
credentialsStructure.put("remember", 1);
JSONObject loginStructure = new JSONObject();
loginStructure.put("postUserLogin", credentialsStructure);
return loginStructure;
}
/**
* Sets the SS token
*
* @throws HttpMethodException
*/
private void setTokenCookie() throws HttpMethodException {
HttpMethod secutityTokenGet = createGetMethod(getServerUrl() + TOKEN_URI);
try {
executeMethodOk(secutityTokenGet);
} finally {
secutityTokenGet.releaseConnection();
}
}
/**
* GDC logout - remove active session, if any exists
*
* @throws HttpMethodException
*/
public void logout() throws HttpMethodException {
if (userLogin == null)
return;
l.debug("Logging out.");
DeleteMethod logoutDelete = createDeleteMethod(getServerUrl() + userLogin.getString("state"));
try {
String resp = executeMethodOk(logoutDelete, false); // do not re-login on SC_UNAUTHORIZED
userLogin = null;
profile = null;
l.debug("Successfully logged out.");
} finally {
logoutDelete.releaseConnection();
}
this.client = new HttpClient();
NetUtil.configureHttpProxy( client );
}
/**
* Retrieves the project info by the project's ID
*
* @param id the project id
* @return the GoodDataProjectInfo populated with the project's information
* @throws HttpMethodException
* @throws GdcProjectAccessException
*/
public Project getProjectById(String id) throws HttpMethodException, GdcProjectAccessException {
l.debug("Getting project by id=" + id);
HttpMethod req = createGetMethod(getServerUrl() + PROJECTS_URI + "/" + id);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if(parsedResp != null && !parsedResp.isEmpty() && !parsedResp.isNullObject()) {
JSONObject project = parsedResp.getJSONObject("project");
if(project != null && !project.isEmpty() && !project.isNullObject()) {
JSONObject meta = project.getJSONObject("meta");
String title = meta.getString("title");
if(title != null && title.length() > 0)
return new Project(MD_URI + "/" + id, id, title);
else
throw new IllegalArgumentException("getProjectById: The project structure doesn't contain the title key.");
}
else {
throw new IllegalArgumentException("getProjectById: The project structure doesn't contain the project key.");
}
} else {
throw new IllegalArgumentException("getProjectById: Invalid response.");
}
} catch (HttpMethodException e) {
l.debug("The project id=" + id + " doesn't exists.");
throw new GdcProjectAccessException("The project id=" + id + " doesn't exists.");
} finally {
req.releaseConnection();
}
}
/**
* Returns the global platform links
*
* @return accessible platform links
* @throws com.gooddata.exception.HttpMethodException
*
*/
@SuppressWarnings("unchecked")
private Iterator<JSONObject> getPlatformLinks() throws HttpMethodException {
l.debug("Getting project links.");
HttpMethod req = createGetMethod(getServerUrl() + PLATFORM_URI);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
JSONObject about = parsedResp.getJSONObject("about");
JSONArray links = about.getJSONArray("links");
l.debug("Got platform links " + links);
return links.iterator();
} finally {
req.releaseConnection();
}
}
/**
*
*
* @return the WebDav URL from the platform configuration
*/
public URL getWebDavURL() {
Iterator<JSONObject> links = getPlatformLinks();
while(links.hasNext()) {
JSONObject link = links.next();
if(link != null && !link.isEmpty() && !link.isNullObject()) {
String category = link.getString("category");
if(category != null && category.length() > 0 && category.equalsIgnoreCase(LINKS_UPLOADS_KEY)) {
try {
String uri = link.getString("link");
if(uri != null && uri.length()>0) {
if(uri.startsWith("/")) {
uri = getServerUrl() + uri;
}
return new URL(uri);
}
else {
throw new IllegalArgumentException("No uploads URL configured for the server: "+category);
}
}
catch (MalformedURLException e) {
throw new IllegalArgumentException("Invalid uploads URL configured for the server: "+category);
}
}
}
}
throw new IllegalArgumentException("No uploads platform link configured for the GoodData cluster.");
}
    /**
     * Returns a list of project's SLIs
     *
     * @param projectId project's ID
     * @return a list of project's SLIs
     * @throws HttpMethodException if there is a communication error
     * @throws GdcProjectAccessException if the SLI doesn't exist
     */
    public List<SLI> getSLIs(String projectId) throws HttpMethodException, GdcProjectAccessException {
        l.debug("Getting SLIs from project id=" + projectId);
        List<SLI> list = new ArrayList<SLI>();
        String ifcUri = getSLIsUri(projectId);
        HttpMethod interfacesGet = createGetMethod(ifcUri);
        try {
            String response = executeMethodOk(interfacesGet);
            JSONObject responseObject = JSONObject.fromObject(response);
            if (responseObject.isNullObject()) {
                l.debug("The project id=" + projectId + " doesn't exist!");
                throw new GdcProjectAccessException("The project id=" + projectId + " doesn't exist!");
            }
            // the SLI entries live under about/links in the response
            JSONObject interfaceQuery = responseObject.getJSONObject("about");
            if (interfaceQuery.isNullObject()) {
                l.debug("The project id=" + projectId + " doesn't exist!");
                throw new GdcProjectAccessException("The project id=" + projectId + " doesn't exist!");
            }
            JSONArray links = interfaceQuery.getJSONArray("links");
            if (links == null) {
                l.debug("The project id=" + projectId + " doesn't exist!");
                throw new GdcProjectAccessException("The project id=" + projectId + " doesn't exist!");
            }
            // each link element describes one SLI
            for (Object ol : links) {
                JSONObject link = (JSONObject) ol;
                SLI ii = new SLI(link);
                list.add(ii);
            }
            l.debug("Got SLIs " + list + " from project id=" + projectId);
        } finally {
            interfacesGet.releaseConnection();
        }
        return list;
    }
/**
* Retrieves the SLI columns
*
* @param uri the SLI uri
* @return list of SLI columns
* @throws GdcProjectAccessException if the SLI doesn't exist
* @throws HttpMethodException if there is a communication issue with the GDC platform
*/
public List<Column> getSLIColumns(String uri) throws GdcProjectAccessException, HttpMethodException {
l.debug("Retrieveing SLI columns for SLI uri=" + uri);
List<Column> list = new ArrayList<Column>();
HttpMethod sliGet = createGetMethod(getServerUrl() + uri + "/manifest");
try {
String response = executeMethodOk(sliGet);
JSONObject responseObject = JSONObject.fromObject(response);
if (responseObject.isNullObject()) {
l.debug("The SLI uri=" + uri + " doesn't exist!");
throw new GdcProjectAccessException("The SLI uri=" + uri + " doesn't exist!");
}
JSONObject dataSetSLIManifest = responseObject.getJSONObject("dataSetSLIManifest");
if (dataSetSLIManifest.isNullObject()) {
l.debug("The SLI uri=" + uri + " doesn't exist!");
throw new GdcProjectAccessException("The SLI uri=" + uri + " doesn't exist!");
}
JSONArray parts = dataSetSLIManifest.getJSONArray("parts");
for (Object oPart : parts) {
list.add(new Column((JSONObject) oPart));
}
} finally {
sliGet.releaseConnection();
}
return list;
}
    /**
     * Retrieves the SLI column data type
     *
     * @param projectId projectId
     * @param sliColumnIdentifier SLI column identifier (name in the SLI manifest)
     * @return the SLI column datatype
     */
    public String getSLIColumnDataType(String projectId, String sliColumnIdentifier) {
        l.debug("Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier);
        // resolve the column's metadata object, then read content/columnType
        MetadataObject o = getMetadataObject(projectId, sliColumnIdentifier);
        if (o != null) {
            JSONObject c = o.getContent();
            if (c != null) {
                String type = c.getString("columnType");
                if (type != null && type.length() > 0) {
                    return type;
                } else {
                    l.debug("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " No columnType key in the content.");
                    throw new GdcRestApiException("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " No columnType key in the content.");
                }
            } else {
                l.debug("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " No content structure.");
                throw new GdcRestApiException("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " No content structure.");
            }
        } else {
            l.debug("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " MD object doesn't exist.");
            throw new GdcRestApiException("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " MD object doesn't exist.");
        }
    }
/**
* Retrieves the SLI columns
*
* @param uri the SLI uri
* @return JSON manifest
* @throws GdcProjectAccessException if the SLI doesn't exist
* @throws HttpMethodException if there is a communication issue with the GDC platform
*/
public JSONObject getSLIManifest(String uri) throws GdcProjectAccessException, HttpMethodException {
l.debug("Retrieveing SLI columns for SLI uri=" + uri);
List<Column> list = new ArrayList<Column>();
HttpMethod sliGet = createGetMethod(getServerUrl() + uri + "/manifest");
try {
String response = executeMethodOk(sliGet);
JSONObject responseObject = JSONObject.fromObject(response);
if (responseObject.isNullObject()) {
l.debug("The SLI uri=" + uri + " doesn't exist!");
throw new GdcProjectAccessException("The SLI uri=" + uri + " doesn't exist!");
}
return responseObject;
} finally {
sliGet.releaseConnection();
}
}
/**
* Finds a project SLI by it's id
*
* @param id the SLI id
* @param projectId the project id
* @return the SLI
* @throws GdcProjectAccessException if the SLI doesn't exist
* @throws HttpMethodException if there is a communication issue with the GDC platform
*/
public SLI getSLIById(String id, String projectId) throws GdcProjectAccessException, HttpMethodException {
l.debug("Get SLI by id=" + id + " project id=" + projectId);
List<SLI> slis = getSLIs(projectId);
return getSLIById(id, slis, projectId);
}
/**
* Finds a project SLI in list of SLI
*
* @param id the SLI id
* @param slis of SLI (related to one project)
* @param projectId the project id
* @return the SLI
* @throws GdcProjectAccessException if the SLI doesn't exist
*/
public static SLI getSLIById(String id, List<SLI> slis, String projectId) throws GdcProjectAccessException {
l.debug("Get SLI by id=" + id + " project id=" + projectId);
for (SLI sli : slis) {
if (id.equals(sli.getId())) {
l.debug("Got SLI by id=" + id + " project id=" + projectId);
return sli;
}
}
l.debug("The SLI id=" + id + " doesn't exist in the project id=" + projectId);
throw new GdcProjectAccessException("The SLI id=" + id + " doesn't exist in the project id=" + projectId);
}
/**
* Enumerates all attributes in the project
*
* @param projectId project Id
* @return LIst of attr uris
*/
public List<String> enumerateAttributes(String projectId) {
l.debug("Enumerating attributes for project id=" + projectId);
List<String> list = new ArrayList<String>();
String qUri = getProjectMdUrl(projectId) + ATTR_QUERY;
HttpMethod qGet = createGetMethod(qUri);
try {
String qr = executeMethodOk(qGet);
JSONObject q = JSONObject.fromObject(qr);
if (q.isNullObject()) {
l.debug("Enumerating attributes for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating attributes for project id=" + projectId + " failed.");
}
JSONObject qry = q.getJSONObject("query");
if (qry.isNullObject()) {
l.debug("Enumerating attributes for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
JSONArray entries = qry.getJSONArray("entries");
if (entries == null) {
l.debug("Enumerating attributes for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
for (Object oentry : entries) {
JSONObject entry = (JSONObject) oentry;
list.add(entry.getString("link"));
}
} finally {
qGet.releaseConnection();
}
return list;
}
/**
* Gets attribute PK
*
* @param attrUri attribute URI
* @return list of attribute PKs (columns)
*/
public List<JSONObject> getAttributePk(String attrUri) {
List<JSONObject> ret = new ArrayList<JSONObject>();
JSONObject attr = getObjectByUri(attrUri);
JSONObject a = attr.getJSONObject("attribute");
if (a != null && !a.isEmpty() && !a.isEmpty()) {
JSONObject c = a.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
JSONArray pks = c.getJSONArray("pk");
if (pks != null && !pks.isEmpty()) {
Object[] p = pks.toArray();
for (Object pko : p) {
JSONObject pk = (JSONObject) pko;
String columnUri = pk.getString("data");
if (columnUri != null) {
ret.add(getObjectByUri(columnUri));
} else {
l.debug("Error getting attribute PK. No PK data.");
throw new GdcProjectAccessException("Error getting attribute PK. No PK data.");
}
}
}
} else {
l.debug("Error getting attribute PK. No content.");
throw new GdcProjectAccessException("Error getting attribute PK. No content.");
}
} else {
l.debug("Error getting attribute PK. No attribute.");
throw new GdcProjectAccessException("Error getting attribute PK. No attribute.");
}
return ret;
}
/**
* Gets attribute FK
*
* @param attrUri attribute URI
* @return list of attribute FKs (columns)
*/
public List<JSONObject> getAttributeFk(String attrUri) {
List<JSONObject> ret = new ArrayList<JSONObject>();
JSONObject attr = getObjectByUri(attrUri);
JSONObject a = attr.getJSONObject("attribute");
if (a != null && !a.isEmpty() && !a.isEmpty()) {
JSONObject c = a.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
if (c.containsKey("fk")) {
JSONArray pks = c.getJSONArray("fk");
if (pks != null && !pks.isEmpty()) {
Object[] p = pks.toArray();
for (Object pko : p) {
JSONObject pk = (JSONObject) pko;
String columnUri = pk.getString("data");
if (columnUri != null && columnUri.trim().length() > 0) {
ret.add(getObjectByUri(columnUri));
} else {
l.debug("Error getting attribute FK. No FK data.");
throw new GdcProjectAccessException("Error getting attribute FK. No FK data.");
}
}
}
}
} else {
l.debug("Error getting attribute FK. No content.");
throw new GdcProjectAccessException("Error getting attribute FK. No content.");
}
} else {
l.debug("Error getting attribute FK. No attribute.");
throw new GdcProjectAccessException("Error getting attribute FK. No attribute.");
}
return ret;
}
/**
* Gets column DB name
*
* @param column column object
* @return column DB name
*/
public String getColumnDbName(JSONObject column) {
JSONObject cl = column.getJSONObject("column");
if (cl != null && !cl.isEmpty() && !cl.isEmpty()) {
JSONObject c = cl.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
String cn = c.getString("columnDBName");
if (cn != null && cn.trim().length() > 0) {
return cn;
} else {
l.debug("Error getting column name. No columnDBName.");
throw new GdcProjectAccessException("Error getting column name. No columnDBName.");
}
} else {
l.debug("Error getting column name. No content.");
throw new GdcProjectAccessException("Error getting column name. No content.");
}
} else {
l.debug("Error getting column name. No column.");
throw new GdcProjectAccessException("Error getting column name. No column.");
}
}
/**
* Gets column table name
*
* @param column column object
* @return column table name
*/
public String getColumnTableName(JSONObject column) {
JSONObject cl = column.getJSONObject("column");
if (cl != null && !cl.isEmpty() && !cl.isEmpty()) {
JSONObject c = cl.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
String t = c.getString("table");
if (t != null && t.trim().length() > 0) {
JSONObject tbl = getObjectByUri(t);
JSONObject root = tbl.getJSONObject("table");
if (root != null && !root.isEmpty() && !root.isEmpty()) {
c = root.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
String dl = c.getString("activeDataLoad");
if (dl != null && dl.trim().length() > 0) {
JSONObject tdl = getObjectByUri(dl);
root = tdl.getJSONObject("tableDataLoad");
if (root != null && !root.isEmpty() && !root.isEmpty()) {
c = root.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
String tn = c.getString("dataSourceLocation");
if (tn != null && tn.trim().length() > 0) {
return tn;
} else {
l.debug("Error getting column name. No dataSourceLocation.");
throw new GdcProjectAccessException("Error getting column name. No dataSourceLocation.");
}
} else {
l.debug("Error getting column name. No active table data load content.");
throw new GdcProjectAccessException("Error getting column name. No active table data load content.");
}
} else {
l.debug("Error getting column name. No table data load root.");
throw new GdcProjectAccessException("Error getting column name. No table data load root.");
}
} else {
l.debug("Error getting column name. No active data load.");
throw new GdcProjectAccessException("Error getting column name. No active data load.");
}
} else {
l.debug("Error getting column name. No table content.");
throw new GdcProjectAccessException("Error getting column name. No table content.");
}
} else {
l.debug("Error getting column table. No table root.");
throw new GdcProjectAccessException("Error getting column table. No table root.");
}
} else {
l.debug("Error getting column name. No table.");
throw new GdcProjectAccessException("Error getting column name. No table.");
}
} else {
l.debug("Error getting column name. No content.");
throw new GdcProjectAccessException("Error getting column name. No content.");
}
} else {
l.debug("Error getting column name. No column.");
throw new GdcProjectAccessException("Error getting column name. No column.");
}
}
/**
* Enumerates all attributes in the project
*
* @param attrUri attribute URI
* @return attribute object
*/
public JSONObject getAttribute(String attrUri) {
l.debug("Getting attribute uri=" + attrUri);
String qUri = getServerUrl() + attrUri;
HttpMethod qGet = createGetMethod(qUri);
try {
String qr = executeMethodOk(qGet);
return JSONObject.fromObject(qr);
} finally {
qGet.releaseConnection();
}
}
/**
* Enumerates all reports on in a project
*
* @param projectId project Id
* @return LIst of report uris
*/
public List<String> enumerateReports(String projectId) {
l.debug("Enumerating reports for project id=" + projectId);
List<String> list = new ArrayList<String>();
String qUri = getProjectMdUrl(projectId) + REPORT_QUERY;
HttpMethod qGet = createGetMethod(qUri);
try {
String qr = executeMethodOk(qGet);
JSONObject q = JSONObject.fromObject(qr);
if (q.isNullObject()) {
l.debug("Enumerating reports for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
JSONObject qry = q.getJSONObject("query");
if (qry.isNullObject()) {
l.debug("Enumerating reports for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
JSONArray entries = qry.getJSONArray("entries");
if (entries == null) {
l.debug("Enumerating reports for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
for (Object oentry : entries) {
JSONObject entry = (JSONObject) oentry;
int deprecated = entry.getInt("deprecated");
if (deprecated == 0)
list.add(entry.getString("link"));
}
} finally {
qGet.releaseConnection();
}
return list;
}
private String getProjectIdFromObjectUri(String uri) {
Pattern regexp = Pattern.compile("gdc/md/.*?/");
Matcher m = regexp.matcher(uri);
if (m.find()) {
return m.group().split("/")[2];
} else {
l.debug("The passed string '" + uri + "' doesn't have the GoodData URI structure!");
throw new InvalidParameterException("The passed string '" + uri + "' doesn't have the GoodData URI structure!");
}
}
    /**
     * Computes the metric value.
     *
     * Builds a minimal one-cell report definition around the metric, stores it as a
     * metadata object, executes it and reads the single value from the XTAB result.
     * Polls up to Constants.MAX_RETRY times while the data result is still being produced.
     *
     * @param metricUri metric URI
     * @return the metric value
     * @throws InvalidParameterException when the XTAB result lacks the expected structure
     */
    public double computeMetric(String metricUri) {
        l.debug("Computing metric uri=" + metricUri);
        double retVal = 0;
        String projectId = getProjectIdFromObjectUri(metricUri);
        // build a throwaway grid report definition containing only this metric
        JSONObject reportDefinition = new JSONObject();
        JSONObject metric = new JSONObject();
        metric.put("alias", "");
        metric.put("uri", metricUri);
        JSONArray metrics = new JSONArray();
        metrics.add(metric);
        JSONArray columns = new JSONArray();
        columns.add("metricGroup");
        JSONObject grid = new JSONObject();
        grid.put("metrics", metrics);
        grid.put("columns", columns);
        grid.put("rows", new JSONArray());
        grid.put("columnWidths", new JSONArray());
        JSONObject sort = new JSONObject();
        sort.put("columns", new JSONArray());
        sort.put("rows", new JSONArray());
        grid.put("sort", sort);
        JSONObject content = new JSONObject();
        content.put("grid", grid);
        content.put("filters", new JSONArray());
        content.put("format", "grid");
        reportDefinition.put("content", content);
        JSONObject meta = new JSONObject();
        meta.put("category", "reportDefinition");
        meta.put("title", "N/A");
        reportDefinition.put("meta", meta);
        // persist the definition as a metadata object so it can be executed
        MetadataObject obj = new MetadataObject();
        obj.put("reportDefinition", reportDefinition);
        MetadataObject resp = new MetadataObject(createMetadataObject(projectId, obj));
        int retryCnt = Constants.MAX_RETRY;
        boolean hasFinished = false;
        // poll: the data result may not be ready immediately after execution
        while (retryCnt-- > 0 && !hasFinished) {
            try {
                String dataResultUri = executeReportDefinition(resp.getUri());
                JSONObject result = getObjectByUri(dataResultUri);
                hasFinished = true;
                if (result != null && !result.isEmpty() && !result.isNullObject()) {
                    JSONObject xtabData = result.getJSONObject("xtab_data");
                    if (xtabData != null && !xtabData.isEmpty() && !xtabData.isNullObject()) {
                        JSONArray data = xtabData.getJSONArray("data");
                        if (data != null && !data.isEmpty()) {
                            // single-cell grid: the metric value is at [0][0]
                            retVal = data.getJSONArray(0).getDouble(0);
                        } else {
                            l.debug("Can't compute the metric. No data structure in result.");
                            throw new InvalidParameterException("Can't compute the metric. No data structure in result.");
                        }
                    } else {
                        l.debug("Can't compute the metric. No xtab_data structure in result.");
                        throw new InvalidParameterException("Can't compute the metric. No xtab_data structure in result.");
                    }
                } else {
                    l.debug("Can't compute the metric. No result from XTAB.");
                    throw new InvalidParameterException("Can't compute the metric. No result from XTAB.");
                }
            } catch (HttpMethodNotFinishedYetException e) {
                // result not computed yet: wait and retry
                l.debug("computeMetric: Waiting for DataResult");
                try {
                    Thread.sleep(Constants.POLL_INTERVAL);
                } catch (InterruptedException ex) {
                    // do nothing
                }
            }
        }
        l.debug("Metric uri=" + metricUri + " computed. Result is " + retVal);
        return retVal;
    }
    /**
     * Computes a simple report and returns the report text.
     *
     * Executes the report, waits for the XTAB data result, then renders the rows
     * as a comma-separated "attribute : value" string. Polls up to
     * Constants.MAX_RETRY times while the result is being produced.
     *
     * @param reportUri report URI
     * @return the report rendered in text
     * @throws InvalidParameterException when the XTAB result lacks the expected structure
     */
    public String computeReport(String reportUri) {
        l.debug("Computing report uri=" + reportUri);
        String retVal = "";
        int retryCnt = Constants.MAX_RETRY;
        boolean hasFinished = false;
        // poll: the data result may not be ready immediately after execution
        while (retryCnt-- > 0 && !hasFinished) {
            try {
                String dataResultUri = executeReport(reportUri).getJSONObject("execResult").getString("dataResult");
                JSONObject result = getObjectByUri(dataResultUri);
                hasFinished = true;
                if (result != null && !result.isEmpty() && !result.isNullObject()) {
                    JSONObject xtabData = result.getJSONObject("xtab_data");
                    if (xtabData != null && !xtabData.isEmpty() && !xtabData.isNullObject()) {
                        JSONArray data = xtabData.getJSONArray("data");
                        if (data != null && !data.isEmpty()) {
                            // first column of each data row holds the numeric value
                            double[] values = new double[data.size()];
                            for (int i = 0; i < data.size(); i++) {
                                JSONArray vals = data.getJSONArray(i);
                                values[i] = vals.getDouble(0);
                            }
                            JSONObject rows = xtabData.getJSONObject("rows");
                            if (rows != null && !rows.isEmpty() && !rows.isNullObject()) {
                                JSONArray lookups = rows.getJSONArray("lookups");
                                if (lookups != null && !lookups.isEmpty()) {
                                    // lookups[0] maps row ids to attribute display labels
                                    Map<String, String> attributes = new HashMap<String, String>();
                                    JSONObject lkpData = lookups.getJSONObject(0);
                                    for (Object key : lkpData.keySet()) {
                                        Object value = lkpData.get(key);
                                        if (key != null && value != null)
                                            attributes.put(key.toString(), value.toString());
                                    }
                                    JSONObject tree = rows.getJSONObject("tree");
                                    if (tree != null && !tree.isEmpty() && !tree.isNullObject()) {
                                        // tree.index maps row ids to positions in the values array
                                        Map<String, Integer> indexes = new HashMap<String, Integer>();
                                        JSONObject index = tree.getJSONObject("index");
                                        if (index != null && !index.isEmpty()) {
                                            for (Object key : index.keySet()) {
                                                if (key != null) {
                                                    JSONArray valIdxs = index.getJSONArray(key.toString());
                                                    if (valIdxs != null && !valIdxs.isEmpty()) {
                                                        indexes.put(key.toString(), valIdxs.getInt(0));
                                                    }
                                                }
                                            }
                                            JSONArray children = tree.getJSONArray("children");
                                            if (children != null && !children.isEmpty()) {
                                                // render each row as "attribute : value", comma-separated
                                                for (int i = 0; i < children.size(); i++) {
                                                    JSONObject c = children.getJSONObject(i);
                                                    String id = c.getString("id");
                                                    if (id != null && id.length() > 0) {
                                                        String attribute = attributes.get(id);
                                                        int v = indexes.get(id);
                                                        double vl = values[v];
                                                        if (retVal.length() > 0) {
                                                            retVal += ", " + attribute + " : " + vl;
                                                        } else {
                                                            retVal += attribute + " : " + vl;
                                                        }
                                                    } else {
                                                        l.debug("Can't compute the report. No id in children.");
                                                        throw new InvalidParameterException("Can't compute the report. No id in children.");
                                                    }
                                                }
                                            } else {
                                                l.debug("Can't compute the report. No tree structure in result.");
                                                throw new InvalidParameterException("Can't compute the report. No tree structure in result.");
                                            }
                                        } else {
                                            l.debug("Can't compute the report. No index structure in result.");
                                            throw new InvalidParameterException("Can't compute the report. No index structure in result.");
                                        }
                                    } else {
                                        l.debug("Can't compute the report. No tree structure in result.");
                                        throw new InvalidParameterException("Can't compute the report. No tree structure in result.");
                                    }
                                } else {
                                    l.debug("Can't compute the report. No lookups structure in result.");
                                    throw new InvalidParameterException("Can't compute the report. No lookups structure in result.");
                                }
                            } else {
                                l.debug("Can't compute the report. No rows structure in result.");
                                throw new InvalidParameterException("Can't compute the report. No rows structure in result.");
                            }
                        } else {
                            l.debug("Can't compute the report. No data structure in result.");
                            throw new InvalidParameterException("Can't compute the report. No data structure in result.");
                        }
                    } else {
                        l.debug("Can't compute the report. No xtab_data structure in result.");
                        throw new InvalidParameterException("Can't compute the report. No xtab_data structure in result.");
                    }
                } else {
                    l.debug("Can't compute the report. No result from XTAB.");
                    // NOTE(review): message says "metric" — likely a copy-paste from computeMetric
                    throw new InvalidParameterException("Can't compute the metric. No result from XTAB.");
                }
            } catch (HttpMethodNotFinishedYetException e) {
                // result not computed yet: wait and retry
                l.debug("computeReport: Waiting for DataResult");
                try {
                    Thread.sleep(Constants.POLL_INTERVAL);
                } catch (InterruptedException ex) {
                    // do nothing
                }
            }
        }
        l.debug("Report uri=" + reportUri + " computed.");
        return retVal;
    }
/**
* Report definition to execute
*
* @param reportDefUri report definition to execute
*/
public String executeReportDefinition(String reportDefUri) {
l.debug("Executing report definition uri=" + reportDefUri);
PostMethod execPost = createPostMethod(getServerUrl() + EXECUTOR);
JSONObject execDef = new JSONObject();
execDef.put("reportDefinition", reportDefUri);
JSONObject exec = new JSONObject();
exec.put("report_req", execDef);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(exec.toString().getBytes()));
execPost.setRequestEntity(request);
try {
String task = executeMethodOk(execPost);
if (task != null && task.length() > 0) {
JSONObject tr = JSONObject.fromObject(task);
if (tr.isNullObject()) {
l.debug("Executing report definition uri=" + reportDefUri + " failed. Returned invalid result result=" + tr);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri + " failed. " +
"Returned invalid result result=" + tr);
}
JSONObject reportResult = tr.getJSONObject("execResult");
if (reportResult.isNullObject()) {
l.debug("Executing report definition uri=" + reportDefUri + " failed. Returned invalid result result=" + tr);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri + " failed. " +
"Returned invalid result result=" + tr);
}
String dataResult = reportResult.getString("dataResult");
if (dataResult == null || dataResult.length()<=0) {
l.debug("Executing report definition uri=" + reportDefUri + " failed. Returned invalid result result=" + tr);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri + " failed. " +
"Returned invalid result result=" + tr);
}
return dataResult;
} else {
l.debug("Executing report definition uri=" + reportDefUri + " failed. Returned invalid task link uri=" + task);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri +
" failed. Returned invalid task link uri=" + task);
}
} catch (HttpMethodException ex) {
l.debug("Executing report definition uri=" + reportDefUri + " failed.", ex);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri + " failed.");
} finally {
execPost.releaseConnection();
}
}
/**
* Report to execute.
*
* @return JSON representation of the report result (the "execResult" object including the "execResult" root key)
* @param reportUri report definition to execute
*/
public JSONObject executeReport(String reportUri) {
l.debug("Executing report uri=" + reportUri);
PostMethod execPost = createPostMethod(getServerUrl() + EXECUTOR);
JSONObject execDef = new JSONObject();
execDef.put("report", reportUri);
JSONObject exec = new JSONObject();
exec.put("report_req", execDef);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(exec.toString().getBytes()));
execPost.setRequestEntity(request);
String taskLink = null;
try {
String task = executeMethodOk(execPost);
if (task != null && task.length() > 0) {
JSONObject tr = JSONObject.fromObject(task);
if (tr.isNullObject()) {
l.debug("Executing report uri=" + reportUri + " failed. Returned invalid result=" + tr);
throw new GdcRestApiException("Executing report uri=" + reportUri + " failed. " +
"Returned invalid result result=" + tr);
}
JSONObject reportResult = tr.getJSONObject("execResult");
if (reportResult.isNullObject()) {
l.debug("Executing report uri=" + reportUri + " failed. Returned invalid result=" + tr);
throw new GdcRestApiException("Executing report uri=" + reportUri + " failed. " +
"Returned invalid result result=" + tr);
}
String dataResult = reportResult.getString("dataResult");
if (dataResult == null || dataResult.length()<=0) {
l.debug("Executing report uri=" + reportUri + " failed. Returned invalid dataResult=" + tr);
throw new GdcRestApiException("Executing report uri=" + reportUri + " failed. " +
"Returned invalid dataResult=" + tr);
}
return tr;
} else {
l.debug("Executing report uri=" + reportUri + " failed. Returned invalid task link uri=" + task);
throw new GdcRestApiException("Executing report uri=" + reportUri +
" failed. Returned invalid task link uri=" + task);
}
} catch (HttpMethodException ex) {
l.debug("Executing report uri=" + reportUri + " failed.", ex);
throw new GdcRestApiException("Executing report uri=" + reportUri + " failed.");
} finally {
execPost.releaseConnection();
}
}
/**
* Export a report result
*
* @param execResult object returned by the {@link #executeReport(String)} method
* @param format export format (pdf | xls | png | csv)
*/
public byte[] exportReportResult(JSONObject execResult, String format) {
String resultUri = execResult.getJSONObject("execResult").getString("dataResult");
l.debug("Exporting report result uri=" + resultUri);
PostMethod execPost = createPostMethod(getServerUrl() + EXPORT_EXECUTOR);
JSONObject execDef = new JSONObject();
execDef.put("result", execResult);
execDef.put("format", format);
JSONObject exec = new JSONObject();
exec.put("result_req", execDef);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(exec.toString().getBytes()));
execPost.setRequestEntity(request);
String taskLink = null;
try {
String task = executeMethodOk(execPost);
if (task != null && task.length() > 0) {
JSONObject tr = JSONObject.fromObject(task);
if (tr.isNullObject()) {
l.debug("Exporting report result uri=" + resultUri + " failed. Returned invalid result=" + tr);
throw new GdcRestApiException("Exporting report result uri=" + resultUri + " failed. " +
"Returned invalid result=" + tr);
}
String uri = tr.getString("uri");
if (uri != null && uri.length() > 0) {
return getReportResult(uri);
} else {
l.debug("Exporting report result uri=" + resultUri + " failed. Returned invalid result=" + tr);
throw new GdcRestApiException("Exporting report result uri=" + resultUri + " failed. " +
"Returned invalid result=" + tr);
}
} else {
l.debug("Exporting report result uri=" + resultUri + " failed. Returned invalid task link uri=" + task);
throw new GdcRestApiException("Exporting report result uri=" + resultUri +
" failed. Returned invalid task link uri=" + task);
}
} catch (HttpMethodException ex) {
l.debug("Exporting report result uri=" + resultUri + " failed.", ex);
throw new GdcRestApiException("Exporting report result uri=" + resultUri + " failed.");
} finally {
execPost.releaseConnection();
}
}
/**
* Retrieves the report export result
*
* @param uri the export result
* @return attribute object
*/
public byte[] getReportResult(String uri) {
l.debug("Retrieving export result uri=" + uri);
byte[] buf = null;
String qUri = getServerUrl() + uri;
boolean finished = false;
do {
HttpMethod qGet = createGetMethod(qUri);
try {
executeMethodOkOnly(qGet);
finished = true;
buf = qGet.getResponseBody();
} catch (HttpMethodNotFinishedYetException e) {
l.debug("Waiting for exporter to finish.");
try {
Thread.currentThread().sleep(Constants.POLL_INTERVAL);
} catch (InterruptedException ex) {
// do nothing
}
} catch (IOException e) {
l.debug("Network error during the report result export.", e);
throw new GdcRestApiException("Network error during the report result export.", e);
} finally {
qGet.releaseConnection();
}
} while (!finished);
return buf;
}
/**
* Kicks the GDC platform to inform it that the FTP transfer is finished.
*
* @param projectId the project's ID
* @param remoteDir the remote (FTP) directory that contains the data
* @return the link that is used for polling the loading progress
* @throws GdcRestApiException
*/
public String startLoading(String projectId, String remoteDir) throws GdcRestApiException {
l.debug("Initiating data load project id=" + projectId + " remoteDir=" + remoteDir);
PostMethod pullPost = createPostMethod(getProjectMdUrl(projectId) + PULL_URI);
JSONObject pullStructure = getPullStructure(remoteDir);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(pullStructure.toString().getBytes()));
pullPost.setRequestEntity(request);
String taskLink = null;
try {
String response = executeMethodOk(pullPost);
JSONObject responseObject = JSONObject.fromObject(response);
taskLink = responseObject.getJSONObject("pullTask").getString("uri");
} catch (HttpMethodException ex) {
throw new GdcRestApiException("Loading fails: " + ex.getMessage());
} finally {
pullPost.releaseConnection();
}
l.debug("Data load project id=" + projectId + " remoteDir=" + remoteDir + " initiated. Status is on uri=" + taskLink);
return taskLink;
}
/**
* Returns the pull API JSON structure
*
* @param directory the remote directory
* @return the pull API JSON structure
*/
private JSONObject getPullStructure(String directory) {
JSONObject pullStructure = new JSONObject();
pullStructure.put("pullIntegration", directory);
return pullStructure;
}
/**
* Checks if the loading is finished
*
* @param link the link returned from the start loading
* @return the loading status
*/
public String getLoadingStatus(String link) throws HttpMethodException {
l.debug("Getting data loading status uri=" + link);
HttpMethod ptm = createGetMethod(getServerUrl() + link);
try {
String response = executeMethodOk(ptm);
JSONObject task = JSONObject.fromObject(response);
String status = task.getString("taskStatus");
l.debug("Loading status=" + status);
return status;
} finally {
ptm.releaseConnection();
}
}
/**
* Create a new GoodData project
*
* @param name project name
* @param desc project description
* @param templateUri project template uri
* @param driver underlying database driver
* @param accessToken access token
* @return the project Id
* @throws GdcRestApiException
*/
public String createProject(String name, String desc, String templateUri, String driver, String accessToken) throws GdcRestApiException {
l.debug("Creating project name=" + name);
PostMethod createProjectPost = createPostMethod(getServerUrl() + PROJECTS_URI);
JSONObject createProjectStructure = getCreateProject(name, desc, templateUri, driver, accessToken);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
createProjectStructure.toString().getBytes()));
createProjectPost.setRequestEntity(request);
String uri = null;
try {
String response = executeMethodOk(createProjectPost);
JSONObject responseObject = JSONObject.fromObject(response);
uri = responseObject.getString("uri");
} catch (HttpMethodException ex) {
l.debug("Creating project fails: ", ex);
throw new GdcRestApiException("Creating project fails: ", ex);
} finally {
createProjectPost.releaseConnection();
}
if (uri != null && uri.length() > 0) {
String id = getProjectId(uri);
l.debug("Created project id=" + id);
return id;
}
l.debug("Error creating project.");
throw new GdcRestApiException("Error creating project.");
}
/**
* Returns the create project JSON structure
*
* @param name project name
* @param desc project description
* @param templateUri project template uri
* @param driver underlying database driver
* @param accessToken access token
* @return the create project JSON structure
*/
private JSONObject getCreateProject(String name, String desc, String templateUri, String driver, String accessToken) {
JSONObject meta = new JSONObject();
meta.put("title", name);
meta.put("summary", desc);
if (templateUri != null && templateUri.length() > 0) {
meta.put("projectTemplate", templateUri);
}
JSONObject content = new JSONObject();
//content.put("state", "ENABLED");
content.put("guidedNavigation", "1");
if(driver != null && driver.length()>0) {
content.put("driver", driver);
}
if(accessToken != null && accessToken.length()>0) {
content.put("authorizationToken", accessToken);
}
JSONObject project = new JSONObject();
project.put("meta", meta);
project.put("content", content);
JSONObject createStructure = new JSONObject();
createStructure.put("project", project);
return createStructure;
}
/**
* Returns the project status
*
* @param id project ID
* @return current project status
*/
public String getProjectStatus(String id) {
l.debug("Getting project status for project " + id);
HttpMethod req = createGetMethod(getServerUrl() + PROJECTS_URI + "/" + id);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
JSONObject project = parsedResp.getJSONObject("project");
JSONObject content = project.getJSONObject("content");
String state = content.getString("state");
return state;
} catch (HttpMethodException e) {
l.debug("The project id=" + id + " doesn't exists.");
throw new GdcProjectAccessException("The project id=" + id + " doesn't exists.");
} finally {
req.releaseConnection();
}
}
/**
* Drops a GoodData project
*
* @param projectId project id
* @throws GdcRestApiException
*/
public void dropProject(String projectId) throws GdcRestApiException {
l.debug("Dropping project id=" + projectId);
DeleteMethod dropProjectDelete = createDeleteMethod(getServerUrl() + PROJECTS_URI + "/"+projectId);
try {
executeMethodOk(dropProjectDelete);
} catch (HttpMethodException ex) {
l.debug("Dropping project id=" + projectId + " failed.", ex);
throw new GdcRestApiException("Dropping project id=" + projectId + " failed.", ex);
} finally {
dropProjectDelete.releaseConnection();
}
l.debug("Dropped project id=" + projectId);
}
/**
* Retrieves the project id from the URI returned by the create project
*
* @param uri the create project URI
* @return project id
* @throws GdcRestApiException in case the project doesn't exist
*/
protected String getProjectId(String uri) throws GdcRestApiException {
l.debug("Getting project id by uri=" + uri);
if (uri != null && uri.length() > 0) {
String[] cs = uri.split("/");
if (cs != null && cs.length > 0) {
l.debug("Got project id=" + cs[cs.length - 1] + " by uri=" + uri);
return cs[cs.length - 1];
}
}
l.debug("Can't get project from " + uri);
throw new GdcRestApiException("Can't get project from " + uri);
}
/**
* Executes the MAQL and creates/modifies the project's LDM
*
* @param projectId the project's ID
* @param maql String with the MAQL statements
* @return result String
* @throws GdcRestApiException
*/
public String[] executeMAQL(String projectId, String maql) throws GdcRestApiException {
l.debug("Executing MAQL projectId=" + projectId + " MAQL:\n" + maql);
PostMethod maqlPost = createPostMethod(getProjectMdUrl(projectId) + MAQL_EXEC_URI);
JSONObject maqlStructure = getMAQLExecStructure(maql);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
maqlStructure.toString().getBytes()));
maqlPost.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(maqlPost);
JSONObject responseObject = JSONObject.fromObject(response);
JSONArray uris = responseObject.getJSONArray("uris");
return (String[]) uris.toArray(new String[]{""});
} catch (HttpMethodException ex) {
l.debug("MAQL execution: ", ex);
throw new GdcRestApiException("MAQL execution: " + ex.getMessage(), ex);
} finally {
maqlPost.releaseConnection();
}
}
/**
* Executes the MAQL and creates/modifies the project's LDM asynchronously
*
* @param projectId the project's ID
* @param maql String with the MAQL statements
* @return result String
* @throws GdcRestApiException
*/
public void executeMAQLAsync(String projectId, String maql) throws GdcRestApiException {
l.debug("Executing async MAQL projectId=" + projectId + " MAQL:\n" + maql);
PostMethod maqlPost = createPostMethod(getProjectMdUrl(projectId) + MAQL_ASYNC_EXEC_URI);
JSONObject maqlStructure = getMAQLExecStructure(maql);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
maqlStructure.toString().getBytes()));
maqlPost.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(maqlPost);
JSONObject responseObject = JSONObject.fromObject(response);
JSONArray uris = responseObject.getJSONArray("entries");
String taskmanUri = "";
for(Object ouri : uris) {
JSONObject uri = (JSONObject)ouri;
String category = uri.getString("category");
if(category.equals("tasks-status")) {
taskmanUri = uri.getString("link");
}
}
if(taskmanUri != null && taskmanUri.length()>0) {
l.debug("Checking async MAQL DDL execution status.");
TaskmanStatus status = new TaskmanStatus("",new String[]{});
while (!"OK".equalsIgnoreCase(status.getStatus()) && !"ERROR".equalsIgnoreCase(status.getStatus()) &&
!"WARNING".equalsIgnoreCase(status.getStatus())) {
status = getDetailedTaskManStatus(taskmanUri);
l.debug("Async MAQL DDL status = " + status.getStatus());
Thread.sleep(Constants.POLL_INTERVAL);
}
l.info("Async MAQL DDL finished with status " + status.getStatus());
if (!("OK".equalsIgnoreCase(status.getStatus()) || "WARNING".equalsIgnoreCase(status.getStatus()))) {
String[] messages = status.getMessage();
String message = "";
for(String msg : messages) {
if(message.length()>0) message += "\n";
message += msg;
}
throw new GdcRestApiException("Async MAQL execution failed with status "+status.getStatus() +
". Errors: "+message);
}
}
} catch (HttpMethodException ex) {
l.debug("MAQL execution: ", ex);
throw new GdcRestApiException("MAQL execution: " + ex.getMessage(), ex);
} catch (InterruptedException e) {
throw new InternalErrorException(e);
} finally {
maqlPost.releaseConnection();
}
}
    /**
     * Holder for the result of a project export: the URI of the asynchronous export
     * task (used for status polling) and the export token returned by the platform.
     */
    public static class ProjectExportResult {
        // URI of the asynchronous export task; poll it to track export progress
        private String taskUri;
        // export token returned by the platform together with the task
        private String exportToken;
        public String getTaskUri() {
            return taskUri;
        }
        public void setTaskUri(String taskUri) {
            this.taskUri = taskUri;
        }
        public String getExportToken() {
            return exportToken;
        }
        public void setExportToken(String exportToken) {
            this.exportToken = exportToken;
        }
    }
/**
* Exports the project
*
* @param projectId the project's ID
* @param exportUsers flag
* @param exportData flag
* @param authorizedUsers list of authorized users
* @return result the taskUri and the export token
* @throws GdcRestApiException
*/
public ProjectExportResult exportProject(String projectId, boolean exportUsers, boolean exportData, String[] authorizedUsers)
throws GdcRestApiException {
l.debug("Exporting project projectId=" + projectId + " users:" + exportUsers + " data:" + exportData + " authorized users:" +
authorizedUsers);
PostMethod req = createPostMethod(getProjectMdUrl(projectId) + PROJECT_EXPORT_URI);
JSONObject param = getProjectExportStructure(exportUsers, exportData, authorizedUsers);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
param.toString().getBytes()));
req.setRequestEntity(request);
ProjectExportResult result = null;
try {
String response = executeMethodOk(req);
result = new ProjectExportResult();
JSONObject responseObject = JSONObject.fromObject(response);
JSONObject exportArtifact = responseObject.getJSONObject("exportArtifact");
JSONObject status = exportArtifact.getJSONObject("status");
result.setTaskUri(status.getString("uri"));
result.setExportToken(exportArtifact.getString("token"));
return result;
} catch (HttpMethodException ex) {
l.debug("Error exporting project", ex);
throw new GdcRestApiException("Error exporting project", ex);
} finally {
req.releaseConnection();
}
}
private JSONObject getProjectExportStructure(boolean exportUsers, boolean exportData, String[] authorizedUsers) {
JSONObject param = new JSONObject();
JSONObject exportProject = new JSONObject();
exportProject.put("exportUsers", (exportUsers) ? (1) : (0));
exportProject.put("exportData", (exportData) ? (1) : (0));
if (authorizedUsers != null && authorizedUsers.length > 0) {
JSONArray aUsers = new JSONArray();
aUsers.addAll(Arrays.asList(authorizedUsers));
exportProject.put("authorizedUsers", aUsers);
}
param.put("exportProject", exportProject);
return param;
}
private GdcRole getRoleFromUri(String roleUri) {
l.debug("Getting role from uri: " + roleUri);
HttpMethod req = createGetMethod( getServerUrl() + roleUri);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
l.debug("Can't getRoleFromUri for uri " + roleUri + ". Invalid response.");
throw new GdcRestApiException("Can't getRoleFromUri for uri " + roleUri + ". Invalid response.");
}
return new GdcRole(parsedResp);
} catch (HttpMethodException ex) {
l.debug("Error getRoleFromUri.", ex);
throw new GdcRestApiException("Error getRoleFromUri", ex);
} finally {
req.releaseConnection();
}
}
/**
 * Fetches a single user account from its REST URI.
 *
 * @param userUri user URI relative to the server root
 * @return the parsed user
 * @throws GdcRestApiException on an invalid or failed response
 */
private GdcUser getUserFromUri(String userUri) {
    l.debug("Getting user from uri: " + userUri);
    HttpMethod req = createGetMethod(getServerUrl() + userUri);
    try {
        JSONObject parsedResp = JSONObject.fromObject(executeMethodOk(req));
        boolean invalid = parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty();
        if (invalid) {
            String msg = "Can't getUserFromUri for uri " + userUri + ". Invalid response.";
            l.debug(msg);
            throw new GdcRestApiException(msg);
        }
        return new GdcUser(parsedResp);
    } catch (HttpMethodException ex) {
        l.debug("Error getUserFromUri.", ex);
        throw new GdcRestApiException("Error getUserFromUri", ex);
    } finally {
        req.releaseConnection();
    }
}
/**
 * A project role as returned by the GoodData REST API.
 * Parsed from the "projectRole" JSON structure (meta/title, meta/identifier, links/roleUsers).
 */
public static class GdcRole{
    private String name;        // human-readable role title (meta/title)
    private String identifier;  // stable role identifier (meta/identifier)
    private String uri;         // role URI, derived from links/roleUsers with the users suffix stripped

    public GdcRole() {
    }

    /**
     * Builds the role from the API JSON.
     *
     * @param role JSON document containing a "projectRole" object
     * @throws GdcRestApiException when the document or any mandatory key is missing
     */
    public GdcRole(JSONObject role) {
        if (role == null || role.isEmpty() || role.isNullObject()) {
            throw new GdcRestApiException("Can't extract role from JSON. The JSON is empty.");
        }
        JSONObject pr = role.getJSONObject("projectRole");
        if (pr == null || pr.isEmpty() || pr.isNullObject()) {
            throw new GdcRestApiException("Can't extract role from JSON. No projectRole key in the JSON.");
        }
        JSONObject m = pr.getJSONObject("meta");
        if (m == null || m.isEmpty() || m.isNullObject()) {
            throw new GdcRestApiException("Can't extract role from JSON. No meta key in the JSON.");
        }
        JSONObject l = pr.getJSONObject("links");
        if (l == null || l.isEmpty() || l.isNullObject()) {
            throw new GdcRestApiException("Can't extract role from JSON. No links key in the JSON.");
        }
        String title = m.getString("title");
        if (title == null || title.trim().length() <= 0) {
            // Fix: message used to wrongly complain about a missing "email" key (copy-paste from GdcUser).
            throw new GdcRestApiException("Can't extract role from JSON. No title key in the JSON.");
        }
        this.setName(title);
        String u = l.getString("roleUsers");
        if (u == null || u.trim().length() <= 0) {
            throw new GdcRestApiException("Can't extract role from JSON. No roleUsers key in the JSON.");
        }
        this.setUri(u.replace(USERS_URI,""));
        String i = m.getString("identifier");
        if (i == null || i.trim().length() <= 0) {
            // Fix: message used to wrongly complain about a missing "email" key (copy-paste from GdcUser).
            throw new GdcRestApiException("Can't extract role from JSON. No identifier key in the JSON.");
        }
        this.setIdentifier(i);
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getIdentifier() {
        return identifier;
    }

    public void setIdentifier(String identifier) {
        this.identifier = identifier;
    }

    public String getUri() {
        return uri;
    }

    public void setUri(String uri) {
        this.uri = uri;
    }

    /**
     * @return true when name, identifier and uri are all populated
     */
    public boolean validate() {
        // Fix: guard identifier against null before dereferencing — the original
        // called getIdentifier().length() and threw NPE when identifier was unset.
        if (getName() != null && getIdentifier() != null && getIdentifier().length() > 0 && getUri() != null)
            return true;
        return false;
    }
}
/**
 * A GoodData user account. Populated either manually (for {@code createUser})
 * or parsed from the "user" JSON structure returned by the API.
 */
public static class GdcUser {
    private String login;
    private String email;
    private String licence;
    private String firstName;
    private String lastName;
    private String companyName;
    private String position;
    private String timezone;
    private String country;
    private String phoneNumber;
    private String password;
    private String verifyPassword;
    private String ssoProvider;
    private String status;
    private String uri;

    public GdcUser() {
    }

    /**
     * Builds the user from the API JSON.
     *
     * @param user JSON document containing a "user" object
     * @throws GdcRestApiException when the document or a mandatory key (email, self link) is missing
     */
    public GdcUser(JSONObject user) {
        if (user == null || user.isEmpty() || user.isNullObject()) {
            throw new GdcRestApiException("Can't extract user from JSON. The JSON is empty.");
        }
        JSONObject u = user.getJSONObject("user");
        if (u == null || u.isEmpty() || u.isNullObject()) {
            throw new GdcRestApiException("Can't extract user from JSON. No user key in the JSON.");
        }
        JSONObject c = u.getJSONObject("content");
        if (c == null || c.isEmpty() || c.isNullObject()) {
            throw new GdcRestApiException("Can't extract user from JSON. No content key in the JSON.");
        }
        // The "email" content key is mandatory and doubles as the login.
        String v = c.getString("email");
        if (v == null || v.trim().length() <= 0) {
            throw new GdcRestApiException("Can't extract user from JSON. No email key in the JSON.");
        }
        this.setLogin(v);
        // The remaining content keys are optional; blank values are skipped.
        v = c.getString("firstname");
        if (v != null && v.trim().length() > 0) {
            this.setFirstName(v);
        }
        v = c.getString("lastname");
        if (v != null && v.trim().length() > 0) {
            this.setLastName(v);
        }
        v = c.getString("email");
        if (v != null && v.trim().length() > 0) {
            this.setEmail(v);
        }
        v = c.getString("phonenumber");
        if (v != null && v.trim().length() > 0) {
            this.setPhoneNumber(v);
        }
        v = c.getString("status");
        if (v != null && v.trim().length() > 0) {
            this.setStatus(v);
        }
        JSONObject l = u.getJSONObject("links");
        if (l == null || l.isEmpty() || l.isNullObject()) {
            throw new GdcRestApiException("Can't extract user from JSON. No links key in the JSON.");
        }
        v = l.getString("self");
        if (v == null || v.trim().length() <= 0) {
            throw new GdcRestApiException("Can't extract user from JSON. No self key in the JSON.");
        }
        this.setUri(v);
    }

    /**
     * @return true when login, password, verifyPassword, firstName and lastName
     *         are all populated (email is not mandatory)
     */
    public boolean validate() {
        if (getLogin() != null && getLogin().length() > 0 && getPassword() != null
                && getPassword().length() > 0 && getVerifyPassword() != null
                && getVerifyPassword().length() > 0 && getFirstName() != null
                && getFirstName().length() > 0 && getLastName() != null
                && getLastName().length() > 0) // email is not mandatory
            return true;
        return false;
    }

    public String getLogin() {
        return login;
    }

    public void setLogin(String login) {
        this.login = login;
    }

    public String getUri() {
        return uri;
    }

    public void setUri(String u) {
        this.uri = u;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String s) {
        this.status = s;
    }

    public String getLicence() {
        return licence;
    }

    public void setLicence(String licence) {
        this.licence = licence;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    public String getCompanyName() {
        return companyName;
    }

    public void setCompanyName(String companyName) {
        this.companyName = companyName;
    }

    public String getPosition() {
        return position;
    }

    public void setPosition(String position) {
        this.position = position;
    }

    public String getTimezone() {
        return timezone;
    }

    public void setTimezone(String timezone) {
        this.timezone = timezone;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    public String getPhoneNumber() {
        return phoneNumber;
    }

    public void setPhoneNumber(String phoneNumber) {
        this.phoneNumber = phoneNumber;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getVerifyPassword() {
        return verifyPassword;
    }

    public void setVerifyPassword(String verifyPassword) {
        this.verifyPassword = verifyPassword;
    }

    public String getSsoProvider() {
        return ssoProvider;
    }

    public void setSsoProvider(String ssoProvider) {
        this.ssoProvider = ssoProvider;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    /** Masks a secret so it never appears in logs; null stays null to signal "unset". */
    private static String maskSecret(String secret) {
        return (secret == null) ? null : "*****";
    }

    @Override
    public String toString() {
        // Security fix: password and verifyPassword used to be printed in clear text;
        // toString output routinely ends up in debug logs, so secrets are masked now.
        return "DWGdcUser [getLogin()=" + getLogin() + ", getUri()=" + getUri() + ", getStatus()=" + getStatus()
                + ", getLicence()=" + getLicence() + ", getFirstName()=" + getFirstName() + ", getLastName()="
                + getLastName() + ", getCompanyName()=" + getCompanyName() + ", getPosition()=" + getPosition()
                + ", getTimezone()=" + getTimezone() + ", getCountry()=" + getCountry() + ", getPhoneNumber()="
                + getPhoneNumber() + ", getPassword()=" + maskSecret(getPassword()) + ", getVerifyPassword()="
                + maskSecret(getVerifyPassword()) + ", getEmail()=" + getEmail() + "," + " getSsoProvider()=" + getSsoProvider() + "]";
    }
}
/**
 * Creates a new user in a domain.
 *
 * @param domain the domain where the user is going to be created
 * @param user new user data (login, password, verifyPassword, firstName, lastName required)
 * @return the new user's URI
 * @throws GdcRestApiException on API errors
 * @throws InvalidParameterException when the user fails validation
 */
public String createUser(String domain, GdcUser user)
        throws GdcRestApiException {
    // Reject invalid input up front, before any HTTP resources are allocated.
    if (user == null || !user.validate()) {
        throw new InvalidParameterException("The new user must contain valid login, firstName, lastName, and password fields.");
    }
    l.debug("Creating new user " + user.getLogin() + " in domain " + domain);
    PostMethod req = createPostMethod(getServerUrl() + DOMAIN_URI + "/" + domain + DOMAIN_USERS_SUFFIX);
    JSONObject param = getCreateUserStructure(user);
    req.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(param.toString().getBytes())));
    try {
        JSONObject responseObject = JSONObject.fromObject(executeMethodOk(req));
        return responseObject.getString("uri");
    } catch (HttpMethodException ex) {
        l.debug("Error creating user ", ex);
        throw new GdcRestApiException("Error creating user ", ex);
    } finally {
        req.releaseConnection();
    }
}
/**
 * Builds the "accountSetting" JSON payload for user creation.
 * Mandatory fields are always serialized; optional fields only when non-empty.
 * Note: timezone is explicitly serialized as null when unset.
 */
private JSONObject getCreateUserStructure(GdcUser user) {
    JSONObject accountSetting = new JSONObject();
    accountSetting.put("login", user.getLogin());
    accountSetting.put("password", user.getPassword());
    accountSetting.put("verifyPassword", user.getVerifyPassword());
    accountSetting.put("firstName", user.getFirstName());
    accountSetting.put("lastName", user.getLastName());
    String companyName = user.getCompanyName();
    if (companyName != null && companyName.length() > 0) {
        accountSetting.put("companyName", companyName);
    }
    String position = user.getPosition();
    if (position != null && position.length() > 0) {
        accountSetting.put("position", position);
    }
    String country = user.getCountry();
    if (country != null && country.length() > 0) {
        accountSetting.put("country", country);
    }
    String timezone = user.getTimezone();
    if (timezone != null && timezone.length() > 0) {
        accountSetting.put("timezone", timezone);
    } else {
        accountSetting.put("timezone", null);
    }
    String phoneNumber = user.getPhoneNumber();
    if (phoneNumber != null && phoneNumber.length() > 0) {
        accountSetting.put("phoneNumber", phoneNumber);
    }
    String ssoProvider = user.getSsoProvider();
    if (ssoProvider != null && ssoProvider.length() > 0) {
        accountSetting.put("ssoProvider", ssoProvider);
    }
    String email = user.getEmail();
    if (email != null && email.length() > 0) {
        accountSetting.put("email", email);
    }
    JSONObject param = new JSONObject();
    param.put("accountSetting", accountSetting);
    return param;
}
/**
 * Resolves a role name or identifier to its URI within a project.
 * Legacy role names are first mapped through the ROLES table.
 *
 * @param projectId project ID
 * @param role role name (legacy) or identifier
 * @return the role URI of the last matching role, or null when no role matches
 */
private String getRoleUri(String projectId, String role) {
    // for backward compatibility: translate legacy role names to identifiers
    String effectiveRole = role;
    if (ROLES.containsKey(effectiveRole.toUpperCase())) {
        effectiveRole = ROLES.get(effectiveRole.toUpperCase());
    }
    String roleUri = null;
    for (GdcRole candidate : getProjectRoles(projectId)) {
        if (candidate.getIdentifier().equalsIgnoreCase(effectiveRole)) {
            roleUri = candidate.getUri();
        }
    }
    return roleUri;
}
/**
 * Adds existing users to a project in the given role.
 * (Javadoc fixed: this does not create users, it assigns them to a project.)
 *
 * @param projectId project ID
 * @param uris user URIs
 * @param role user's role name or identifier (resolved via getRoleUri)
 * @throws GdcRestApiException when the role can't be resolved or the API call fails
 */
public void addUsersToProject(String projectId, List<String> uris, String role)
        throws GdcRestApiException {
    l.debug("Adding users " + uris + " to project " + projectId + " in role "+ role);
    // Fix: removed unused local projectsUrl — the project URL is computed by the delegate.
    String roleUri = getRoleUri(projectId, role);
    addUsersToProjectWithRoleUri(projectId, uris, roleUri);
}
/**
 * Adds existing users to a project, assigning them the role given by URI.
 *
 * @param projectId project ID
 * @param uris user URIs to add
 * @param roleUri role URI; may be null/blank to add without an explicit role
 * @throws GdcRestApiException when the API call fails or some users can't be added
 */
public void addUsersToProjectWithRoleUri(String projectId, List<String> uris, String roleUri)
        throws GdcRestApiException {
    l.debug("Adding users " + uris + " to project " + projectId + " with roleUri "+ roleUri);
    String projectsUrl = getProjectUrl(projectId);
    PostMethod req = createPostMethod(projectsUrl + PROJECT_USERS_SUFFIX);
    JSONObject param = getAddUsersToProjectStructure(uris, roleUri);
    req.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(
            param.toString().getBytes())));
    try {
        String response = executeMethodOk(req);
        JSONObject responseObject = JSONObject.fromObject(response);
        JSONObject projectUsersUpdateResult = responseObject.getJSONObject("projectUsersUpdateResult");
        JSONArray failed = projectUsersUpdateResult.getJSONArray("failed");
        if (!failed.isEmpty()) {
            StringBuilder errMsg = new StringBuilder("Following users can't be added to the project:");
            // Fix: the original appended the entire input list (uris) once per failure;
            // report each individual failed entry instead.
            for (Object uri : failed.toArray()) {
                errMsg.append(" ").append(uri);
            }
            l.debug(errMsg.toString());
            throw new GdcRestApiException(errMsg.toString());
        }
        //JSONArray successful = projectUsersUpdateResult.getJSONArray("successful");
    } catch (HttpMethodException ex) {
        l.debug("Error adding users " + uris + " to project", ex);
        throw new GdcRestApiException("Error adding users " + uris + " to project ", ex);
    } finally {
        req.releaseConnection();
    }
}
/**
 * Builds the "users" JSON payload for adding users to a project.
 * Each user entry carries status ENABLED, its self link, and (when given) the role list.
 */
private JSONObject getAddUsersToProjectStructure(List<String> uris, String roleUri) {
    boolean hasRole = roleUri != null && roleUri.trim().length() > 0;
    JSONArray roles = null;
    if (hasRole) {
        roles = new JSONArray();
        roles.add(roleUri);
    }
    JSONArray users = new JSONArray();
    for (String uri : uris) {
        JSONObject content = new JSONObject();
        if (roles != null) {
            content.put("userRoles", roles);
        }
        content.put("status", "ENABLED");
        JSONObject links = new JSONObject();
        links.put("self", uri);
        JSONObject user = new JSONObject();
        user.put("content", content);
        user.put("links", links);
        JSONObject item = new JSONObject();
        item.put("user", user);
        users.add(item);
    }
    JSONObject param = new JSONObject();
    param.put("users", users);
    return param;
}
/**
 * Disables users in a project.
 *
 * @param projectId project ID
 * @param uris user URIs to disable
 * @throws GdcRestApiException when the API call fails or some users can't be disabled
 */
public void disableUsersInProject(String projectId, List<String> uris)
        throws GdcRestApiException {
    l.debug("Disabling users " + uris + " in project " + projectId);
    String projectsUrl = getProjectUrl(projectId);
    PostMethod req = createPostMethod(projectsUrl + PROJECT_USERS_SUFFIX);
    JSONObject param = getDisableUsersInProjectStructure(uris);
    req.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(
            param.toString().getBytes())));
    try {
        String response = executeMethodOk(req);
        JSONObject responseObject = JSONObject.fromObject(response);
        JSONObject projectUsersUpdateResult = responseObject.getJSONObject("projectUsersUpdateResult");
        JSONArray failed = projectUsersUpdateResult.getJSONArray("failed");
        if (!failed.isEmpty()) {
            StringBuilder errMsg = new StringBuilder("Following users can't be disabled in the project:");
            // Fix: the original appended the entire input list (uris) once per failure;
            // report each individual failed entry instead.
            for (Object uri : failed.toArray()) {
                errMsg.append(" ").append(uri);
            }
            l.debug(errMsg.toString());
            throw new GdcRestApiException(errMsg.toString());
        }
        //JSONArray successful = projectUsersUpdateResult.getJSONArray("successful");
    } catch (HttpMethodException ex) {
        l.debug("Error disabling users " + uris + " in project", ex);
        throw new GdcRestApiException("Error disabling users " + uris + " in project ", ex);
    } finally {
        req.releaseConnection();
    }
}
/**
 * Builds the "users" JSON payload marking each listed user as DISABLED.
 */
private JSONObject getDisableUsersInProjectStructure(List<String> uris) {
    JSONArray users = new JSONArray();
    for (String uri : uris) {
        JSONObject content = new JSONObject();
        content.put("status", "DISABLED");
        JSONObject links = new JSONObject();
        links.put("self", uri);
        JSONObject user = new JSONObject();
        user.put("content", content);
        user.put("links", links);
        JSONObject item = new JSONObject();
        item.put("user", user);
        users.add(item);
    }
    JSONObject param = new JSONObject();
    param.put("users", users);
    return param;
}
/**
 * Returns the selected project's roles. Each role URI from the project's role
 * listing is fetched individually via getRoleFromUri.
 *
 * @param pid project ID
 * @return list of the project's roles
 * @throws GdcRestApiException on an invalid or failed response
 */
public ArrayList<GdcRole> getProjectRoles(String pid) {
    l.debug("Executing getProjectRoles for project id=" + pid);
    HttpMethod req = createGetMethod(getProjectUrl(pid) + ROLES_URI);
    try {
        JSONObject parsedResp = JSONObject.fromObject(executeMethodOk(req));
        if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
            l.debug("Can't getProjectRoles for project id=" + pid + ". Invalid response.");
            throw new GdcRestApiException("Can't getProjectRoles for project id=" + pid + ". Invalid response.");
        }
        JSONObject projectRoles = parsedResp.getJSONObject("projectRoles");
        if (projectRoles == null || projectRoles.isNullObject() || projectRoles.isEmpty()) {
            l.debug("Can't getProjectRoles for project id=" + pid + ". No projectRoles key in the response.");
            throw new GdcRestApiException("Can't getProjectRoles for project id=" + pid + ". No projectRoles key in the response.");
        }
        JSONArray roles = projectRoles.getJSONArray("roles");
        if (roles == null) {
            l.debug("Can't getRoleUsers. No getProjectRoles key in the response.");
            throw new GdcRestApiException("Can't getProjectRoles. No roles key in the response.");
        }
        ArrayList<GdcRole> ret = new ArrayList<GdcRole>();
        for (Object o : roles) {
            // each entry is a role URI string; resolve it to a full role definition
            ret.add(getRoleFromUri((String) o));
        }
        return ret;
    } finally {
        req.releaseConnection();
    }
}
/**
 * Returns the URIs of the users associated with the given role.
 *
 * @param role the role whose users are listed; must have name, identifier and uri set
 * @param activeUsersOnly NOTE(review): accepted but never applied here — all associated
 *                        users are returned regardless; confirm whether filtering is intended
 * @return list of user URIs
 * @throws GdcRestApiException on an invalid role or response
 */
public ArrayList<String> getRoleUsers(GdcRole role, boolean activeUsersOnly) {
    boolean invalidRole = role == null || role.getIdentifier() == null || role.getIdentifier().length() == 0
            || role.getUri() == null || role.getUri().length() == 0
            || role.getName() == null || role.getName().length() == 0;
    if (invalidRole) {
        l.debug("Can't getRoleUsers . Invalid role object passed.");
        throw new GdcRestApiException("Can't getRoleUsers. Invalid role object passed.");
    }
    l.debug("Executing getRoleUsers for role "+role.getIdentifier());
    HttpMethod req = createGetMethod(getServerUrl() + role.getUri() + USERS_URI);
    try {
        JSONObject parsedResp = JSONObject.fromObject(executeMethodOk(req));
        if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
            l.debug("Can't getRoleUsers. Invalid response.");
            throw new GdcRestApiException("Can't getRoleUsers. Invalid response.");
        }
        JSONObject associatedUsers = parsedResp.getJSONObject("associatedUsers");
        if (associatedUsers == null || associatedUsers.isNullObject() || associatedUsers.isEmpty()) {
            l.debug("Can't getRoleUsers. Invalid response. No associatedUsers key.");
            throw new GdcRestApiException("Can't getRoleUsers. Invalid response. No associatedUsers key.");
        }
        JSONArray users = associatedUsers.getJSONArray("users");
        if (users == null) {
            l.debug("Can't getRoleUsers. No users key in the response.");
            throw new GdcRestApiException("Can't getRoleUsers. No users key in the response.");
        }
        ArrayList<String> ret = new ArrayList<String>();
        for (Object o : users) {
            ret.add((String) o);
        }
        return ret;
    } finally {
        req.releaseConnection();
    }
}
/**
 * Returns the selected project's users.
 *
 * @param pid project ID
 * @param activeUsersOnly when true, only users with status ENABLED are returned
 * @return list of the project's users
 * @throws GdcRestApiException on an invalid or failed response
 */
public ArrayList<GdcUser> getProjectUsers(String pid, boolean activeUsersOnly) {
    l.debug("Executing getProjectUsers for project id=" + pid);
    HttpMethod req = createGetMethod(getProjectUrl(pid) + PROJECT_USERS_SUFFIX);
    try {
        JSONObject parsedResp = JSONObject.fromObject(executeMethodOk(req));
        if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
            l.debug("Can't getProjectUsers for project id=" + pid + ". Invalid response.");
            throw new GdcRestApiException("Can't getProjectUsers for project id=" + pid + ". Invalid response.");
        }
        JSONArray users = parsedResp.getJSONArray("users");
        if (users == null) {
            l.debug("Can't getProjectUsers for project id=" + pid + ". No users key in the response.");
            throw new GdcRestApiException("Can't getProjectUsers for project id=" + pid + ". No users key in the response.");
        }
        ArrayList<GdcUser> ret = new ArrayList<GdcUser>();
        for (Object o : users) {
            GdcUser candidate = new GdcUser((JSONObject) o);
            boolean include = !activeUsersOnly || "ENABLED".equalsIgnoreCase(candidate.getStatus());
            if (include) {
                ret.add(candidate);
            }
        }
        return ret;
    } finally {
        req.releaseConnection();
    }
}
/**
 * Imports a previously exported package into the project.
 *
 * @param projectId the target project's ID
 * @param token export token identifying the exported package
 * @return the import task URI
 * @throws GdcRestApiException on API errors
 */
public String importProject(String projectId, String token)
        throws GdcRestApiException {
    l.debug("Importing project projectId=" + projectId + " token:" + token);
    PostMethod req = createPostMethod(getProjectMdUrl(projectId) + PROJECT_IMPORT_URI);
    JSONObject param = getImportProjectStructure(token);
    req.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(param.toString().getBytes())));
    try {
        JSONObject responseObject = JSONObject.fromObject(executeMethodOk(req));
        return responseObject.getString("uri");
    } catch (HttpMethodException ex) {
        l.debug("Error importing project", ex);
        throw new GdcRestApiException("Error importing project", ex);
    } finally {
        req.releaseConnection();
    }
}
/** Builds the "importProject" JSON payload carrying the export token. */
private JSONObject getImportProjectStructure(String token) {
    JSONObject importProject = new JSONObject();
    importProject.put("token", token);
    JSONObject payload = new JSONObject();
    payload.put("importProject", importProject);
    return payload;
}
/**
 * Imports metadata objects into the project from a partial export package.
 *
 * @param projectId the project's ID
 * @param token export token
 * @param overwrite overwrite existing objects
 * @param updateLDM update LDM names, descriptions and tags
 * @return the import task URI
 * @throws GdcRestApiException on API errors
 */
public String importMD(String projectId, String token, boolean overwrite, boolean updateLDM)
        throws GdcRestApiException {
    l.debug("Importing metadata objects for projectId=" + projectId + " token:" + token);
    PostMethod req = createPostMethod(getProjectMdUrl(projectId) + PROJECT_PARTIAL_IMPORT_URI);
    JSONObject param = getMDImportStructure(token, overwrite, updateLDM);
    req.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(param.toString().getBytes())));
    try {
        JSONObject responseObject = JSONObject.fromObject(executeMethodOk(req));
        return responseObject.getString("uri");
    } catch (HttpMethodException ex) {
        l.debug("Error importing metadata objects for projectId=" + projectId + " token:" + token, ex);
        throw new GdcRestApiException("Error importing metadata objects for projectId=" + projectId + " token:" + token, ex);
    } finally {
        req.releaseConnection();
    }
}
/** Builds the "partialMDImport" JSON payload; boolean flags are serialized as 1/0. */
private JSONObject getMDImportStructure(String token, boolean overwrite, boolean updateLDM) {
    JSONObject importMD = new JSONObject();
    importMD.put("token", token);
    importMD.put("overwriteNewer", overwrite ? 1 : 0);
    importMD.put("updateLDMObjects", updateLDM ? 1 : 0);
    JSONObject payload = new JSONObject();
    payload.put("partialMDImport", importMD);
    return payload;
}
/**
 * Exports selected MD objects (with dependencies) from the project.
 *
 * @param projectId the project's ID
 * @param ids list of the exported MD object IDs
 * @return the task URI and the export token
 * @throws GdcRestApiException on API errors
 */
public ProjectExportResult exportMD(String projectId, List<Integer> ids)
        throws GdcRestApiException {
    l.debug("Exporting metadata objects with IDs " + ids + " from project " + projectId);
    PostMethod req = createPostMethod(getProjectMdUrl(projectId) + PROJECT_PARTIAL_EXPORT_URI);
    JSONObject param = getMDExportStructure(projectId, ids);
    req.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(param.toString().getBytes())));
    try {
        JSONObject responseObject = JSONObject.fromObject(executeMethodOk(req));
        JSONObject exportArtifact = responseObject.getJSONObject("partialMDArtifact");
        JSONObject status = exportArtifact.getJSONObject("status");
        ProjectExportResult result = new ProjectExportResult();
        result.setTaskUri(status.getString("uri"));
        result.setExportToken(exportArtifact.getString("token"));
        return result;
    } catch (HttpMethodException ex) {
        l.debug("Error exporting metadata objects with IDs " + ids + " from project " + projectId, ex);
        throw new GdcRestApiException("Error exporting metadata objects with IDs " + ids + " from project " + projectId, ex);
    } finally {
        req.releaseConnection();
    }
}
/**
 * Builds the "partialMDExport" payload listing /gdc/md/{project}/obj/{id} URIs.
 */
private JSONObject getMDExportStructure(String projectId, List<Integer> ids) {
    String projectMdUri = "/gdc/md/" + projectId;
    JSONArray uris = new JSONArray();
    for (Integer id : ids) {
        uris.add(projectMdUri + "/obj/" + id);
    }
    JSONObject partialMDExport = new JSONObject();
    partialMDExport.put("uris", uris);
    JSONObject payload = new JSONObject();
    payload.put("partialMDExport", partialMDExport);
    return payload;
}
/**
 * Checks the status of a project migration task.
 *
 * @param link the task link returned when the migration was started
 * @return the migration status string
 * @throws HttpMethodException on transport errors
 * @throws GdcRestApiException when the response lacks a taskState structure
 */
public String getMigrationStatus(String link) throws HttpMethodException {
    l.debug("Getting project migration status uri=" + link);
    HttpMethod ptm = createGetMethod(getServerUrl() + link);
    try {
        JSONObject task = JSONObject.fromObject(executeMethodOk(ptm));
        JSONObject state = task.getJSONObject("taskState");
        boolean missing = state == null || state.isNullObject() || state.isEmpty();
        if (missing) {
            l.debug("No taskState structure in the migration status!");
            throw new GdcRestApiException("No taskState structure in the migration status!");
        }
        String status = state.getString("status");
        l.debug("Migration status=" + status);
        return status;
    } finally {
        ptm.releaseConnection();
    }
}
/**
 * Starts an ETL graph execution and waits for its completion.
 * (Javadoc fixed: parameters documented to match the actual signature.)
 *
 * @param processUri URI of the deployed process
 * @param graph name of the graph to execute
 * @param params graph execution parameters
 * @return result {@link GraphExecutionResult}
 * @throws GdcRestApiException when the execution finishes with a non-OK status
 */
public GraphExecutionResult executeGraph(String processUri, String graph, Map<String,String> params) throws GdcRestApiException {
    l.debug("Executing Graph processUri=" + processUri + " graph = " + graph);
    PostMethod execPost = createPostMethod(processUri + "/executions");
    JSONObject execStructure = getGraphExecStructure(graph, params);
    execPost.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(execStructure.toString().getBytes())));
    try {
        JSONObject responseObject = JSONObject.fromObject(executeMethodOk(execPost));
        String detailUri = responseObject.getJSONObject("executionTask").getJSONObject("links").getString("detail");
        GraphExecutionResult execResult = getGraphExecutionResult(detailUri);
        boolean failed = !GraphExecutionResult.OK.equals(execResult.getStatus());
        if (failed) {
            throw new GdcRestApiException("ETL error, see log file at " + execResult.logUrl);
        }
        return execResult;
    } finally {
        execPost.releaseConnection();
    }
}
/**
 * Polls a graph execution until it leaves the RUNNING state.
 *
 * @param link the execution-detail link returned when the graph was started
 * @return the final execution status and log URL
 * @throws HttpMethodException on transport errors
 * @throws GdcRestApiException when the response lacks an executionDetail structure
 */
public GraphExecutionResult getGraphExecutionResult(String link) throws HttpMethodException {
    l.debug("Getting Graph execution status uri=" + link);
    HttpMethod ptm = createGetMethod(getServerUrl() + link);
    try {
        while (true) {
            String response = executeMethodOk(ptm);
            JSONObject task = JSONObject.fromObject(response);
            JSONObject state = task.getJSONObject("executionDetail");
            if (state == null || state.isNullObject() || state.isEmpty()) {
                l.debug("No executionDetail structure in the execution status!");
                throw new GdcRestApiException("No execution structure in the execution status!");
            }
            String status = state.getString("status");
            l.debug("TaskMan status=" + status);
            if (!"RUNNING".equals(status)) {
                String logUrl = state.getString("logFileName");
                return new GraphExecutionResult(status, logUrl);
            }
            // Fix: the original looped with no delay, hammering the API with back-to-back
            // status requests. Pause between polls; on interrupt, restore the flag and bail out.
            try {
                Thread.sleep(Constants.RETRY_INTERVAL);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new GdcRestApiException("Interrupted while waiting for graph execution to finish.");
            }
        }
    } finally {
        ptm.releaseConnection();
    }
}
/**
 * Executes MAQL DML statements against the project.
 *
 * @param projectId the project's ID
 * @param maql String with the MAQL DML statements
 * @return the URI of the resulting task
 * @throws GdcRestApiException on API errors
 */
public String executeDML(String projectId, String maql) throws GdcRestApiException {
    l.debug("Executing MAQL DML projectId=" + projectId + " MAQL DML:\n" + maql);
    PostMethod maqlPost = createPostMethod(getProjectMdUrl(projectId) + DML_EXEC_URI);
    JSONObject maqlStructure = getMAQLExecStructure(maql);
    maqlPost.setRequestEntity(new InputStreamRequestEntity(new ByteArrayInputStream(
            maqlStructure.toString().getBytes())));
    // Fix: removed the unused `String result = null;` local.
    try {
        String response = executeMethodOk(maqlPost);
        JSONObject responseObject = JSONObject.fromObject(response);
        return responseObject.getString("uri");
    } catch (HttpMethodException ex) {
        l.debug("MAQL DML execution: ", ex);
        throw new GdcRestApiException("MAQL DML execution: ", ex);
    } finally {
        maqlPost.releaseConnection();
    }
}
/**
 * Returns the MAQL execution JSON structure.
 *
 * @param maql String with the MAQL statements
 * @return the MAQL API JSON structure ({"manage": {"maql": ...}})
 */
private JSONObject getMAQLExecStructure(String maql) {
    JSONObject manage = new JSONObject();
    manage.put("maql", maql);
    JSONObject payload = new JSONObject();
    payload.put("manage", manage);
    return payload;
}
/**
 * Builds the "execution" JSON payload for a graph run: graph name plus a
 * flat map of execution parameters.
 */
private JSONObject getGraphExecStructure(String graph, Map<String, String> params) {
    JSONObject paramsStructure = new JSONObject();
    for (Map.Entry<String, String> entry : params.entrySet()) {
        paramsStructure.put(entry.getKey(), entry.getValue());
    }
    JSONObject execution = new JSONObject();
    execution.put("graph", graph);
    execution.put("params", paramsStructure);
    JSONObject payload = new JSONObject();
    payload.put("execution", execution);
    return payload;
}
/** Executes the method and returns the response body; re-login on 401 is enabled. */
protected String executeMethodOk(HttpMethod method) throws HttpMethodException {
    return executeMethodOk(method, true);
}
/** Executes the method with the default budget of 16 retries for 503 responses. */
protected String executeMethodOk(HttpMethod method, boolean reloginOn401) throws HttpMethodException {
    return executeMethodOk(method, reloginOn401, 16);
}
/**
 * Executes an HttpMethod, verifies the response denotes success, and returns the body.
 *
 * @param method the HTTP method
 * @param reloginOn401 when true, refresh the token cookie and retry once on 401
 * @param retries remaining retries for 503 responses
 * @return response body as String
 * @throws HttpMethodException on API failures or transport errors
 */
private String executeMethodOk(HttpMethod method, boolean reloginOn401, int retries) throws HttpMethodException {
    try {
        executeMethodOkOnly(method, reloginOn401, retries);
        String body = method.getResponseBodyAsString();
        return body;
    } catch (IOException e) {
        l.debug("Error invoking GoodData REST API.", e);
        throw new HttpMethodException("Error invoking GoodData REST API.", e);
    }
}
/** Executes the method, discarding the response body; re-login on 401 is enabled. */
private void executeMethodOkOnly(HttpMethod method) throws HttpMethodException {
    executeMethodOkOnly(method, true);
}
// Delegates to executeMethodOk (which reads and discards the response body) rather than
// the three-arg executeMethodOkOnly — presumably to avoid propagating its checked
// IOException; TODO confirm this indirection is intentional.
private void executeMethodOkOnly(HttpMethod method, boolean reloginOn401) throws HttpMethodException {
    executeMethodOk(method, reloginOn401, 16);
}
/**
 * Executes an HttpMethod and verifies the response class denotes success.
 * 202 raises HttpMethodNotFinishedYetException (caller should poll); 401 triggers a
 * one-shot token refresh and retry; 503 with a Retry-After header is retried after
 * sleeping; 410 maps to GdcProjectAccessException; other 4xx/5xx raise HttpMethodException.
 *
 * @param method the HTTP method to execute
 * @param reloginOn401 when true, refresh the token cookie and retry once on 401
 * @param retries remaining retries for 503 responses
 * @throws HttpMethodException on protocol errors or non-success statuses
 * @throws IOException on transport failures while reading the response
 */
private void executeMethodOkOnly(HttpMethod method, boolean reloginOn401, int retries) throws HttpMethodException, IOException {
    try {
        client.executeMethod(method);
        /* HttpClient is rather unsupportive when it comes to robust interpreting
         * of response classes; which is mandated by RFC and extensively used in
         * GoodData API. Let us grok the classes ourselves. */
        /* 2xx success class */
        if (method.getStatusCode() == HttpStatus.SC_CREATED) {
            return;
        } else if (method.getStatusCode() == HttpStatus.SC_ACCEPTED) {
            // 202: the server accepted the request but is still processing it
            throw new HttpMethodNotFinishedYetException(method.getResponseBodyAsString());
        } else if (method.getStatusCode() == HttpStatus.SC_NO_CONTENT) {
            return;
        } else if (method.getStatusCode() >= HttpStatus.SC_OK
                && method.getStatusCode() < HttpStatus.SC_BAD_REQUEST) {
            return;
            /* 4xx user errors and
             * 5xx backend trouble */
        } else if (method.getStatusCode() == HttpStatus.SC_UNAUTHORIZED && reloginOn401) {
            // refresh the temporary token
            // NOTE(review): the recursive call hard-codes reloginOn401=false, so the
            // token refresh is attempted at most once per request — confirm intended.
            setTokenCookie();
            executeMethodOkOnly(method, false, retries);
            return;
        } else if (method.getStatusCode() == HttpStatus.SC_SERVICE_UNAVAILABLE && retries-- > 0
                && method.getResponseHeader("Retry-After") != null) {
            /* This is recommended by RFC 2616 and should probably be dealt with by the
             * client library. May god have mercy with it. */
            // Retry-After carries seconds; the sleep multiplies it by RETRY_INTERVAL (ms).
            int timeout = Integer.parseInt(method.getResponseHeader("Retry-After").getValue());
            l.debug("Remote asked us to retry after " + timeout + " seconds, sleeping.");
            l.debug(retries + " more retries");
            try {
                // NOTE(review): static Thread.sleep invoked via an instance reference;
                // harmless but unidiomatic.
                Thread.currentThread().sleep(Constants.RETRY_INTERVAL * timeout);
            } catch (java.lang.InterruptedException e) {
                // NOTE(review): interrupt is swallowed without re-interrupting the
                // thread (Thread.currentThread().interrupt()) — TODO confirm intent.
            }
            executeMethodOkOnly(method, false, retries);
            return;
        } else if (method.getStatusCode() == HttpStatus.SC_GONE) {
            // 410: the project no longer exists on the server
            throw new GdcProjectAccessException("Invalid project.");
        } else if (method.getStatusCode() >= HttpStatus.SC_BAD_REQUEST
                && method.getStatusCode() < 600) {
            throw new HttpMethodException(method);
            /* 1xx informational responses class and
             * 3xx redirects should not get past the client library internals. */
        } else {
            throw new HttpMethodException("Unsupported HTTP status received from remote: " +
                    method.getStatusCode());
        }
    } catch (HttpException e) {
        l.debug("Error invoking GoodData REST API.", e);
        throw new HttpMethodException("Error invoking GoodData REST API.", e);
    }
}
/**
 * Builds the URI of the project's SLI (data interfaces) collection.
 *
 * @param projectId project ID
 * @return SLI collection URI
 */
public String getSLIsUri(String projectId) {
    final String mdUrl = getProjectMdUrl(projectId);
    return mdUrl + DATA_INTERFACES_URI;
}
/**
 * Builds the descriptor URI of a single SLI within a project.
 *
 * @param sliId SLI ID
 * @param projectId project ID
 * @return the SLI descriptor URI
 */
public String getSLIUri(String sliId, String projectId) {
    final StringBuilder uri = new StringBuilder(getProjectMdUrl(projectId));
    uri.append(DATA_INTERFACES_URI).append("/").append(sliId).append(SLI_DESCRIPTOR_URI);
    return uri.toString();
}
/**
 * Returns the GoodData server base URL taken from the connection
 * configuration.
 *
 * @return the server base URL
 */
protected String getServerUrl() {
    return config.getUrl();
}
/**
 * Constructs the project's metadata URI.
 *
 * @param projectId project ID
 * @return the project metadata URL
 */
protected String getProjectMdUrl(String projectId) {
    final String serverUrl = getServerUrl();
    return serverUrl + MD_URI + projectId;
}
/**
 * Constructs the project's REST resource URL.
 *
 * @param projectId project ID
 * @return the project resource URL
 */
protected String getProjectUrl(String projectId) {
    final String serverUrl = getServerUrl();
    return serverUrl + PROJECTS_URI + "/" + projectId;
}
/**
 * Gets the project ID (the last path segment) from the project URI.
 *
 * @param projectUri project URI, e.g. {@code /gdc/projects/<id>}
 * @return the project id
 * @throws GdcRestApiException if the URI yields no path segments
 */
public String getProjectIdFromUri(String projectUri) {
    // Fix: String#split never returns null, so the null check was dead code;
    // only the segment count needs validating.
    String[] cmpnts = projectUri.split("/");
    if (cmpnts.length > 0) {
        return cmpnts[cmpnts.length - 1];
    }
    throw new GdcRestApiException("Invalid project uri structure uri=" + projectUri);
}
/**
 * Builds the server-relative project resource URI used for deletion (and,
 * elsewhere in this class, as the base project path).
 *
 * @param projectId project ID
 * @return the project delete URI
 */
public String getProjectDeleteUri(String projectId) {
    return String.format("%s/%s", PROJECTS_URI, projectId);
}
/**
 * Profile getter
 *
 * @return the profile of the currently logged user
 *         (presumably populated during login; may be null before
 *         authentication — TODO confirm against the login code)
 */
protected JSONObject getProfile() {
    return profile;
}
/**
 * Invites a new user to a project
 * <p>
 * Convenience overload that sends the invitation without assigning a role.
 *
 * @param projectId project ID
 * @param eMail invited user e-mail
 * @param message invitation message
 */
public void inviteUser(String projectId, String eMail, String message) {
    this.inviteUser(projectId, eMail, message, null);
}
/**
 * Invites a new user to a project
 *
 * @param projectId project ID
 * @param eMail invited user e-mail
 * @param message invitation message
 * @param role optional role name; when non-empty it is resolved to a role
 *             URI and attached to the invitation
 * @throws GdcRestApiException when the invitation POST fails
 */
public void inviteUser(String projectId, String eMail, String message, String role) {
    l.debug("Executing inviteUser projectId=" + projectId + " e-mail=" + eMail + " message=" + message);
    // NOTE(review): getProjectDeleteUri is reused here only to build the
    // /projects/<id> path segment — nothing is deleted.
    PostMethod invitePost = createPostMethod(getServerUrl() + getProjectDeleteUri(projectId) + INVITATION_URI);
    JSONObject inviteStructure = getInviteStructure(projectId, eMail, message, role);
    InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
            inviteStructure.toString().getBytes()));
    invitePost.setRequestEntity(request);
    try {
        executeMethodOk(invitePost);
    } catch (HttpMethodException ex) {
        l.debug("Failed executing inviteUser projectId=" + projectId + " e-mail=" + eMail + " message=" + message);
        throw new GdcRestApiException("Failed executing inviteUser projectId=" + projectId + " e-mail=" + eMail + " message=" + message, ex);
    } finally {
        invitePost.releaseConnection();
    }
}
/**
 * Builds the JSON payload for a project invitation.
 *
 * @param pid project id
 * @param eMail invited user's e-mail address
 * @param msg invitation message
 * @param role optional role name; resolved to a role URI when non-empty
 * @return the invitation JSON structure
 * @throws InvalidParameterException when the role name cannot be resolved
 */
private JSONObject getInviteStructure(String pid, String eMail, String msg, String role) {
    final JSONObject content = new JSONObject();
    content.put("firstname", "");
    content.put("lastname", "");
    content.put("email", eMail);
    if (role != null && role.length() > 0) {
        final String roleUri = getRoleUri(pid, role);
        if (roleUri == null) {
            throw new InvalidParameterException("The role '" + role + "' is not recognized by the GoodData platform.");
        }
        content.put("role", roleUri);
    }
    final JSONObject action = new JSONObject();
    action.put("setMessage", msg);
    content.put("action", action);
    final JSONObject invitation = new JSONObject();
    invitation.put("content", content);
    final JSONObject envelope = new JSONObject();
    envelope.put("invitation", invitation);
    final JSONArray invitationList = new JSONArray();
    invitationList.add(envelope);
    final JSONObject invitations = new JSONObject();
    invitations.put("invitations", invitationList);
    return invitations;
}
/**
 * Converts MD identifiers to uris
 *
 * @param projectId project ID
 * @param identifiers MD object identifiers
 * @return map identifier:uri (only identifiers the server resolved are
 *         present; unknown identifiers are simply absent from the map)
 * @throws GdcRestApiException when the lookup POST fails
 */
public Map<String, String> identifierToUri(String projectId, String[] identifiers) {
    l.debug("Executing identifierToUri identifier=" + identifiers);
    Map<String, String> result = new HashMap<String, String>();
    PostMethod p = createPostMethod(getProjectMdUrl(projectId) + IDENTIFIER_URI);
    JSONObject is = getIdentifiersStructure(identifiers);
    InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
            is.toString().getBytes()));
    p.setRequestEntity(request);
    try {
        String resp = executeMethodOk(p);
        JSONObject parsedResp = JSONObject.fromObject(resp);
        // The response carries an "identifiers" array of {identifier, uri} pairs.
        JSONArray idents = parsedResp.getJSONArray("identifiers");
        if (idents != null && !idents.isEmpty()) {
            for (int i = 0; i < idents.size(); i++) {
                JSONObject ident = idents.getJSONObject(i);
                result.put(ident.getString("identifier"), ident.getString("uri"));
            }
        }
    } catch (HttpMethodException ex) {
        l.debug("Failed executing identifierToUri identifier=" + identifiers);
        throw new GdcRestApiException("Failed executing identifierToUri identifier=" + identifiers, ex);
    } finally {
        p.releaseConnection();
    }
    return result;
}
/**
 * Builds the request payload for the identifier-to-URI lookup.
 *
 * @param identifiers MD object identifiers
 * @return JSON structure of the form {@code {"identifierToUri": [...]}}
 */
private JSONObject getIdentifiersStructure(String[] identifiers) {
    JSONObject identifierToUri = new JSONObject();
    JSONArray ids = new JSONArray();
    // Enhanced for-loop instead of an index loop — same order, clearer intent.
    for (String identifier : identifiers) {
        ids.add(identifier);
    }
    identifierToUri.put("identifierToUri", ids);
    return identifierToUri;
}
/**
 * Retrieves a metadata object definition by Uri
 *
 * @param objectUri object uri
 * @return the parsed JSON object
 * @throws GdcRestApiException when the response cannot be parsed into an
 *         object
 */
public JSONObject getObjectByUri(String objectUri) {
    l.debug("Executing getObjectByUri uri=" + objectUri);
    HttpMethod req = createGetMethod(getServerUrl() + objectUri);
    try {
        String resp = executeMethodOk(req);
        // workaround for a possible mess in MAQL source and missing charset in /obj response:
        // strips literal "\\_" escapes and non-breaking spaces before parsing
        resp = resp.replace("\\\\_", " ").replace("\u00A0", " ");
        JSONObject parsedResp = JSONObject.fromObject(resp);
        if (parsedResp.isNullObject()) {
            l.debug("Can't getObjectByUri object uri=" + objectUri);
            throw new GdcRestApiException("Can't getObjectByUri object uri=" + objectUri);
        }
        return parsedResp;
    } finally {
        req.releaseConnection();
    }
}
/**
 * Retrieves a metadata object definition and wraps it in a
 * {@link MetadataObject}.
 *
 * @param objectUri object uri
 * @return the metadata object
 */
public MetadataObject getMetadataObject(String objectUri) {
    l.debug("Executing getMetadataObject uri=" + objectUri);
    return new MetadataObject(getObjectByUri(objectUri));
}
/**
 * Retrieves a metadata object definition by its numeric object id.
 *
 * @param projectId project id (hash)
 * @param objectId object id (integer)
 * @return the metadata object
 */
public MetadataObject getMetadataObject(String projectId, int objectId) {
    l.debug("Executing getMetadataObject id=" + objectId + " on project id=" + projectId);
    final String objectUri = MD_URI + projectId + OBJ_URI + "/" + objectId;
    return getMetadataObject(objectUri);
}
/**
 * Retrieves a metadata object definition by its textual identifier.
 *
 * @param projectId project id (hash)
 * @param identifier object identifier
 * @return the metadata object
 * @throws GdcRestApiException when the identifier cannot be resolved
 */
public MetadataObject getMetadataObject(String projectId, String identifier) {
    l.debug("Executing getObjectByIdentifier identifier=" + identifier);
    Map<String, String> uris = identifierToUri(projectId, new String[]{identifier});
    // A missing map and a missing/empty entry both mean the identifier is
    // unknown; the two previously duplicated failure branches are collapsed
    // into a single fall-through path.
    if (uris != null && uris.size() > 0) {
        String uri = uris.get(identifier);
        if (uri != null && uri.length() > 0) {
            return getMetadataObject(uri);
        }
    }
    l.debug("Can't getObjectByIdentifier identifier=" + identifier + " The identifier doesn't exists.");
    throw new GdcRestApiException("Can't getObjectByIdentifier identifier=" + identifier + " The identifier doesn't exists.");
}
/**
 * Returns the dependent objects
 * <p>
 * Queries the metadata {@code /using/} resource derived from the object's
 * URI and returns the raw JSON node descriptors of the objects it uses.
 *
 * @param uri the uri of the top-level object
 * @return list of dependent objects
 * @throws GdcRestApiException when the response lacks the expected structure
 */
public List<JSONObject> using(String uri) {
    l.debug("Executing using uri=" + uri);
    List<JSONObject> ret = new ArrayList<JSONObject>();
    //HACK! the /using/ resource is derived by rewriting the /obj/ path segment
    String usedUri = uri.replace("/obj/", "/using/");
    HttpMethod req = createGetMethod(getServerUrl() + usedUri);
    try {
        String resp = executeMethodOk(req);
        JSONObject parsedResp = JSONObject.fromObject(resp);
        if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
            l.debug("Can't call using on uri=" + uri + ". Invalid response.");
            throw new GdcRestApiException("Can't call using on uri=" + uri + ". Invalid response.");
        }
        JSONObject using = parsedResp.getJSONObject("using");
        if (using == null || using.isNullObject() || using.isEmpty()) {
            l.debug("Can't call using on uri=" + uri + ". No using data.");
            throw new GdcRestApiException("Can't call using on uri=" + uri + ". No using data.");
        }
        JSONArray nodes = using.getJSONArray("nodes");
        if (nodes == null) {
            l.debug("Can't call using on uri=" + uri + ". No nodes key in the response.");
            throw new GdcRestApiException("Can't call using on uri=" + uri + ". No nodes key in the response.");
        }
        for (Object o : nodes) {
            JSONObject obj = (JSONObject) o;
            ret.add(obj);
        }
        return ret;
    } finally {
        req.releaseConnection();
    }
}
/**
 * Returns the objects that depend on the given object
 * <p>
 * Mirror image of {@code using}: queries the metadata {@code /usedby/}
 * resource derived from the object's URI.
 *
 * @param uri the uri of the top-level object
 * @return list of objects using this object
 * @throws GdcRestApiException when the response lacks the expected structure
 */
public List<JSONObject> usedBy(String uri) {
    l.debug("Executing usedby uri=" + uri);
    List<JSONObject> ret = new ArrayList<JSONObject>();
    //HACK! the /usedby/ resource is derived by rewriting the /obj/ path segment
    String usedUri = uri.replace("/obj/", "/usedby/");
    HttpMethod req = createGetMethod(getServerUrl() + usedUri);
    try {
        String resp = executeMethodOk(req);
        JSONObject parsedResp = JSONObject.fromObject(resp);
        if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
            l.debug("Can't call usedby on uri=" + uri + ". Invalid response.");
            throw new GdcRestApiException("Can't call usedby on uri=" + uri + ". Invalid response.");
        }
        JSONObject usedby = parsedResp.getJSONObject("usedby");
        if (usedby == null || usedby.isNullObject() || usedby.isEmpty()) {
            l.debug("Can't call usedby on uri=" + uri + ". No usedby data.");
            throw new GdcRestApiException("Can't call usedby on uri=" + uri + ". No usedby data.");
        }
        JSONArray nodes = usedby.getJSONArray("nodes");
        if (nodes == null) {
            l.debug("Can't call usedby on uri=" + uri + ". No nodes key in the response.");
            throw new GdcRestApiException("Can't call usedby on uri=" + uri + ". No nodes key in the response.");
        }
        for (Object o : nodes) {
            JSONObject obj = (JSONObject) o;
            ret.add(obj);
        }
        return ret;
    } finally {
        req.releaseConnection();
    }
}
/**
 * Creates a new object in the metadata server
 *
 * @param projectId project id (hash)
 * @param content the new object content
 * @return the new object as returned by the server ({@code createAndGet}
 *         makes the POST return the created object)
 * @throws GdcRestApiException when the creation POST fails
 */
public JSONObject createMetadataObject(String projectId, JSON content) {
    l.debug("Executing createMetadataObject on project id=" + projectId + "content='" + content.toString() + "'");
    PostMethod req = createPostMethod(getProjectMdUrl(projectId) + OBJ_URI + "?createAndGet=true");
    try {
        String str = content.toString();
        InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(str.getBytes("utf-8")));
        req.setRequestEntity(request);
        String resp = executeMethodOk(req);
        JSONObject parsedResp = JSONObject.fromObject(resp);
        return parsedResp;
    } catch (HttpMethodException ex) {
        l.debug("Failed executing createMetadataObject on project id=" + projectId + "content='" + content.toString() + "'");
        throw new GdcRestApiException("Failed executing createMetadataObject on project id=" + projectId + "content='" + content.toString() + "'", ex);
    } catch (UnsupportedEncodingException e) {
        // utf-8 is guaranteed by the JVM spec, so this is effectively unreachable
        l.debug("String#getBytes(\"utf-8\") threw UnsupportedEncodingException", e);
        throw new IllegalStateException(e);
    } finally {
        req.releaseConnection();
    }
}
/**
 * Modifies an object in the metadata server, addressed by project and
 * numeric object id.
 *
 * @param projectId project id (hash)
 * @param objectId object id (integer)
 * @param content the new object content
 * @return the modified object as returned by the server
 */
public JSONObject modifyMetadataObject(String projectId, int objectId, JSON content) {
    l.debug("Executing modifyMetadataObject on project id=" + projectId + " objectId=" + objectId + " content='" + content.toString() + "'");
    final String objectUri = MD_URI + projectId + OBJ_URI + "/" + objectId;
    return modifyMetadataObject(objectUri, content);
}
/**
 * Modifies an object in the metadata server
 *
 * @param uri object uri
 * @param content the new object content
 * @return the modified object as returned by the server
 * @throws GdcRestApiException when the modification POST fails
 */
public JSONObject modifyMetadataObject(String uri, JSON content) {
    l.debug("Executing modifyMetadataObject on uri=" + uri + " content='" + content.toString() + "'");
    PostMethod req = createPostMethod(getServerUrl() + uri);
    try {
        InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
                content.toString().getBytes("utf-8")));
        req.setRequestEntity(request);
        String resp = executeMethodOk(req);
        JSONObject parsedResp = JSONObject.fromObject(resp);
        return parsedResp;
    } catch (HttpMethodException ex) {
        l.debug("Failed executing modifyMetadataObject on uri=" + uri + " content='" + content.toString() + "'");
        throw new GdcRestApiException("Failed executing modifyMetadataObject on uri=" + uri + " content='" + content.toString() + "'", ex);
    } catch (UnsupportedEncodingException e) {
        // utf-8 is guaranteed by the JVM spec, so this is effectively unreachable
        l.debug("String#getBytes(\"utf-8\") threw UnsupportedEncodingException", e);
        throw new IllegalStateException(e);
    } finally {
        req.releaseConnection();
    }
}
/**
 * Deletes an object in the metadata server, addressed by project and
 * numeric object id.
 *
 * @param projectId project id (hash)
 * @param objectId object id (integer)
 */
public void deleteMetadataObject(String projectId, int objectId) {
    l.debug("Executing deleteMetadataObject on project id=" + projectId + " objectId=" + objectId);
    final String objectUri = MD_URI + projectId + OBJ_URI + "/" + objectId;
    deleteMetadataObject(objectUri);
}
/**
 * Deletes an object in the metadata server
 *
 * @param uri object uri
 * @throws GdcRestApiException when the DELETE request fails
 */
public void deleteMetadataObject(String uri) {
    l.debug("Executing deleteMetadataObject on project uri=" + uri);
    DeleteMethod req = createDeleteMethod(getServerUrl() + uri);
    try {
        // The response body is irrelevant for a DELETE; the previously
        // unused local that captured it was removed.
        executeMethodOk(req);
    } catch (HttpMethodException ex) {
        l.debug("Failed executing deleteMetadataObject on project uri=" + uri);
        throw new GdcRestApiException("Failed executing deleteMetadataObject on uri=" + uri, ex);
    } finally {
        req.releaseConnection();
    }
}
/**
 * Determines the projet's ETL mode (SLI/DLI/VOID)
 *
 * @param pid project id
 * @return project's ETL mode
 * @throws GdcRestApiException when the response lacks the expected
 *         {@code etlMode.mode} structure
 */
public String getProjectEtlMode(String pid) {
    l.debug("Getting project etl status.");
    GetMethod req = createGetMethod(getProjectMdUrl(pid) + ETL_MODE_URI);
    try {
        String resp = executeMethodOk(req);
        JSONObject parsedResp = JSONObject.fromObject(resp);
        if (parsedResp != null && !parsedResp.isNullObject() && !parsedResp.isEmpty()) {
            JSONObject etlMode = parsedResp.getJSONObject("etlMode");
            if (etlMode != null && !etlMode.isNullObject() && !etlMode.isEmpty()) {
                String mode = etlMode.getString("mode");
                if (mode != null && mode.length() > 0) {
                    return mode;
                } else {
                    l.debug("Getting project etl status. No mode in the result: " + etlMode.toString());
                    throw new GdcRestApiException("Getting project etl status. No mode in the result: " + etlMode.toString());
                }
            } else {
                l.debug("Getting project etl status. No etlMode in the result: " + parsedResp.toString());
                throw new GdcRestApiException("Getting project etl status. No etlMode in the result: " + parsedResp.toString());
            }
        } else {
            l.debug("Getting project etl status. Empty result.");
            throw new GdcRestApiException("Getting project etl status. Empty result.");
        }
    } finally {
        req.releaseConnection();
    }
}
/**
 * Builds the SLI migration request payload from the dataset manifests.
 *
 * @param manifests dataset manifests to migrate
 * @return JSON structure of the form {@code {"etlMode": {"mode": "SLI", "sli": [...]}}}
 */
protected JSONObject getMigrationRequest(List<String> manifests) {
    final JSONArray sliManifests = new JSONArray();
    sliManifests.addAll(manifests);
    final JSONObject etlMode = new JSONObject();
    etlMode.put("mode", "SLI");
    etlMode.put("sli", sliManifests);
    final JSONObject request = new JSONObject();
    request.put("etlMode", etlMode);
    return request;
}
/**
 * Checks if the migration is finished
 * <p>
 * Polls the task resource until it stops replying 202 (signalled here by
 * {@code HttpMethodNotFinishedYetException}), then extracts the status
 * string from the {@code wTaskStatus} structure.
 *
 * @param link the link returned from the start loading
 * @return the loading status
 * @throws HttpMethodException on a non-success status or transport error
 */
public String getTaskManStatus(String link) throws HttpMethodException {
    l.debug("Getting TaskMan status uri=" + link);
    HttpMethod ptm = createGetMethod(getServerUrl() + link);
    try {
        String response = "";
        boolean isFinished = false;
        while (!isFinished) {
            try {
                response = executeMethodOk(ptm);
                isFinished = true;
            } catch (HttpMethodNotFinishedYetException e) {
                // Task still running on the server; wait and poll again.
                l.debug("getTaskManStatus: Waiting for status");
                try {
                    Thread.sleep(Constants.POLL_INTERVAL);
                } catch (InterruptedException ex) {
                    // do nothing
                    // NOTE(review): interruption is swallowed and polling
                    // continues; consider re-interrupting — TODO confirm
                    // callers' expectations before changing.
                }
            }
        }
        JSONObject task = JSONObject.fromObject(response);
        JSONObject state = task.getJSONObject("wTaskStatus");
        if (state != null && !state.isNullObject() && !state.isEmpty()) {
            String status = state.getString("status");
            l.debug("TaskMan status=" + status);
            return status;
        } else {
            l.debug("No wTaskStatus structure in the taskman status!");
            throw new GdcRestApiException("No wTaskStatus structure in the taskman status!");
        }
    } finally {
        ptm.releaseConnection();
    }
}
/**
 * Mutable value holder for a detailed TaskMan poll result: the overall
 * status string plus any human-readable messages reported by the task.
 */
public static class TaskmanStatus {

    private String status;
    private String[] message;

    public TaskmanStatus(String s, String[] m) {
        status = s;
        message = m;
    }

    /** @return the task status string */
    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    /** @return the task messages, possibly empty */
    public String[] getMessage() {
        return message;
    }

    public void setMessage(String[] message) {
        this.message = message;
    }
}
/**
 * Polls a TaskMan task until it finishes and returns its status together
 * with any messages, with {@code %s} placeholders in each message replaced
 * by the task-supplied parameters.
 *
 * @param link the task status link returned when the task was started
 * @return status plus formatted messages
 * @throws HttpMethodException on a non-success status or transport error
 */
public TaskmanStatus getDetailedTaskManStatus(String link) throws HttpMethodException {
    l.debug("Getting TaskMan status uri=" + link);
    HttpMethod ptm = createGetMethod(getServerUrl() + link);
    try {
        String response = "";
        boolean isFinished = false;
        while (!isFinished) {
            try {
                response = executeMethodOk(ptm);
                isFinished = true;
            } catch (HttpMethodNotFinishedYetException e) {
                // Task still running on the server; wait and poll again.
                l.debug("getTaskManStatus: Waiting for status");
                try {
                    Thread.sleep(Constants.POLL_INTERVAL);
                } catch (InterruptedException ex) {
                    // do nothing
                }
            }
        }
        JSONObject task = JSONObject.fromObject(response);
        JSONObject state = task.getJSONObject("wTaskStatus");
        if (state != null && !state.isNullObject() && !state.isEmpty()) {
            String status = state.getString("status");
            ArrayList<String> messages = new ArrayList<String>();
            l.debug("TaskMan status=" + status);
            if(state.containsKey("messages")) {
                JSONArray msgs = state.getJSONArray("messages");
                if(msgs != null && !msgs.isEmpty()) {
                    for (Object msgo : msgs) {
                        JSONObject msg = (JSONObject)msgo;
                        // Each message is wrapped in a single-key envelope whose
                        // inner object carries "message" and "parameters".
                        String root = (String)msg.keys().next();
                        JSONObject inner = msg.getJSONObject(root);
                        JSONArray prms = inner.getJSONArray("parameters");
                        String message = inner.getString("message");
                        if(prms != null && !prms.isEmpty()) {
                            // Substitute each %s placeholder in order.
                            for(Object prmo : prms) {
                                String prm = (String)prmo;
                                message = message.replaceFirst("\\%s",prm);
                            }
                        }
                        messages.add(message);
                    }
                }
            }
            return new TaskmanStatus(status, (String[])messages.toArray(new String[]{}));
        } else {
            l.debug("No wTaskStatus structure in the taskman status!");
            throw new GdcRestApiException("No wTaskStatus structure in the taskman status!");
        }
    } finally {
        ptm.releaseConnection();
    }
}
/**
 * Migrates project datasets from DLI to SLI
 *
 * @param pid project ID
 * @param manifests array of all dataset's manifests
 * @return the URI of the migration task, or an empty string when the
 *         project is already in SLI mode and no migration is needed
 * @throws GdcRestApiException when the migration POST fails
 */
public String migrateDataSets(String pid, List<String> manifests) {
    l.debug("Migrating project to SLI.");
    String currentMode = getProjectEtlMode(pid);
    l.debug("Migrating project to SLI: current status is " + currentMode);
    // Only DLI and VOID projects need migrating; SLI projects are left alone.
    if (ETL_MODE_DLI.equalsIgnoreCase(currentMode) || ETL_MODE_VOID.equalsIgnoreCase(currentMode)) {
        PostMethod req = createPostMethod(getProjectMdUrl(pid) + ETL_MODE_URI);
        InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(getMigrationRequest(manifests).toString().getBytes()));
        req.setRequestEntity(request);
        try {
            String resp = executeMethodOk(req);
            JSONObject responseObject = JSONObject.fromObject(resp);
            String taskLink = responseObject.getString("uri");
            return taskLink;
        } catch (HttpMethodException ex) {
            l.debug("Migrating project to SLI failed.", ex);
            throw new GdcRestApiException("Migrating project to SLI failed.", ex);
        } finally {
            req.releaseConnection();
        }
    } else {
        l.debug("Migrating project to SLI: no migration needed. Skipping.");
        return "";
    }
}
/**
 * Creates a GET method pre-configured with the common GoodData headers.
 *
 * @param path the request URL
 * @return the configured GET method
 */
private static GetMethod createGetMethod(String path) {
    return configureHttpMethod(new GetMethod(path));
}
/**
 * Creates a POST method pre-configured with the common GoodData headers.
 *
 * @param path the request URL
 * @return the configured POST method
 */
private static PostMethod createPostMethod(String path) {
    return configureHttpMethod(new PostMethod(path));
}
/**
 * Creates a DELETE method pre-configured with the common GoodData headers.
 *
 * @param path the request URL
 * @return the configured DELETE method
 */
private static DeleteMethod createDeleteMethod(String path) {
    return configureHttpMethod(new DeleteMethod(path));
}
/**
 * Applies the request headers and cookie policy shared by all GoodData
 * REST calls.
 *
 * @param request the HTTP method to configure
 * @param <T> the concrete method type
 * @return the same request instance, for chaining
 */
private static <T extends HttpMethod> T configureHttpMethod(T request) {
    request.setRequestHeader("Content-Type", "application/json; charset=utf-8");
    request.setRequestHeader("Accept", "application/json");
    // Fix: the charset token was misspelled "utf-u", which is not a valid
    // charset name; the intended value is "utf-8".
    request.setRequestHeader("Accept-Charset", "utf-8");
    request.setRequestHeader("User-Agent", "GoodData Agent/0.4");
    request.getParams().setCookiePolicy(CookiePolicy.BROWSER_COMPATIBILITY);
    return request;
}
/**
 * Finalizer; the explicit logout call is intentionally disabled.
 * <p>
 * NOTE(review): finalizers are deprecated in modern Java and execution is
 * not guaranteed — do not rely on this for session cleanup.
 */
protected void finalize() throws Throwable {
    try {
        // logout();
    } finally {
        super.finalize();
    }
}
/**
 * API for querying users in a domain
 *
 * @param domain the GoodData account domain name
 * @return map of login to user; empty when the domain holds no users or
 *         the response misses the expected structure
 * @throws RuntimeException when a user entry lacks its self link
 */
public Map<String, GdcUser> getUsers(String domain) {
    Map<String, GdcUser> users = new HashMap<String, GdcUser>();
    String url = "/gdc/account/domains/" + domain + "/users";
    JSONObject jsonObject = getObjectByUri(url);
    if (jsonObject == null) {
        return users;
    }
    JSONObject accountSettings = jsonObject
            .getJSONObject("accountSettings");
    if (accountSettings == null) {
        return users;
    }
    JSONArray items = (JSONArray) accountSettings.get("items");
    if (items == null) {
        return users;
    }
    // Each item wraps one accountSetting object describing a single user.
    for (Object item : items) {
        JSONObject itemJSON = JSONObject.fromObject(item);
        if (itemJSON == null) {
            continue;
        }
        JSONObject accountSetting = itemJSON
                .getJSONObject("accountSetting");
        if (accountSetting == null) {
            continue;
        }
        GdcUser user = new GdcUser();
        user.setLogin(accountSetting.getString("login"));
        user.setFirstName(accountSetting.getString("firstName"));
        user.setLastName(accountSetting.getString("lastName"));
        user.setCompanyName(accountSetting.getString("companyName"));
        user.setPosition(accountSetting.getString("position"));
        user.setCountry(accountSetting.getString("country"));
        user.setTimezone(accountSetting.getString("timezone"));
        user.setPhoneNumber(accountSetting.getString("phoneNumber"));
        user.setEmail(accountSetting.getString("email"));
        // The user's canonical URI comes from the "self" link.
        JSONObject links = accountSetting.getJSONObject("links");
        if (links == null)
            throw new RuntimeException(
                    "The URL link for a user cannot be null: "
                            + user.getLogin());
        String uri = links.getString("self");
        if (uri == null)
            throw new RuntimeException("The URL for a user cannot be null: "
                    + user.getLogin());
        user.setUri(uri);
        users.put(user.getLogin(), user);
    }
    return users;
}
/**
 * Enumerates links of all dimension objects in the project.
 *
 * @param projectId project ID
 * @return list of dimension object links
 */
public List<String> enumerateDimensions(String projectId) {
    return enumerateResource(projectId, QUERY_DIMENSIONS);
}
/**
 * Enumerates links of all dataset objects in the project.
 *
 * @param projectId project ID
 * @return list of dataset object links
 */
public List<String> enumerateDataSets(String projectId) {
    return enumerateResource(projectId, QUERY_DATASETS);
}
/**
 * Enumerates links of all folder objects in the project.
 *
 * @param projectId project ID
 * @return list of folder object links
 */
public List<String> enumerateFolders(String projectId) {
    return enumerateResource(projectId, QUERY_FOLDERS);
}
/**
 * Enumerates links of all project dashboard objects in the project.
 *
 * @param projectId project ID
 * @return list of dashboard object links
 */
public List<String> enumerateDashboards(String projectId) {
    return enumerateResource(projectId, QUERY_PROJECTDASHBOARDS);
}
/**
 * Enumerates all objects of the given metadata resource type in a project.
 *
 * @param projectId project ID
 * @param resource metadata query resource name (e.g. dimensions, datasets)
 * @return list of object links
 * @throws RuntimeException when the query response misses the expected
 *         structure
 */
protected List<String> enumerateResource(String projectId, String resource) {
    // Fix: the original message always logged "attributes" regardless of
    // the queried resource; log the actual resource name instead.
    l.debug("Enumerating " + resource + " for project id=" + projectId);
    List<String> list = new ArrayList<String>();
    String qUri = getProjectMdUrl(projectId) + QUERY_PREFIX + resource;
    HttpMethod qGet = createGetMethod(qUri);
    try {
        String qr = executeMethodOk(qGet);
        JSONObject q = JSONObject.fromObject(qr);
        if (q.isNullObject()) {
            l.debug("Enumerating "+resource+" for project id="+projectId+" failed.");
            throw new RuntimeException(
                    "Enumerating "+resource+" for project id="+projectId+" failed.");
        }
        JSONObject qry = q.getJSONObject("query");
        if (qry.isNullObject()) {
            l.debug("Enumerating "+resource+" for project id="+projectId+" failed.");
            throw new RuntimeException(
                    "Enumerating "+resource+" for project id="+projectId+" failed.");
        }
        JSONArray entries = qry.getJSONArray("entries");
        if (entries == null) {
            l.debug("Enumerating "+resource+" for project id="+projectId+" failed.");
            throw new RuntimeException(
                    "Enumerating "+resource+" for project id="+projectId+" failed.");
        }
        for (Object oentry : entries) {
            JSONObject entry = (JSONObject) oentry;
            list.add(entry.getString("link"));
        }
    } finally {
        qGet.releaseConnection();
    }
    return list;
}
/**
 * Starts a partial metadata export of the given object URIs from a project.
 *
 * @param projectId source project ID
 * @param urls URIs of the metadata objects to export
 * @return export result carrying the poll task URI and the import token
 * @throws GdcRestApiException when the export POST fails
 */
public ProjectExportResult exportMDByUrl(String projectId, List<String> urls) {
    l.debug("Exporting metadata objects with URls " + urls
            + " from project " + projectId);
    PostMethod req = createPostMethod(getProjectMdUrl(projectId)
            + PROJECT_PARTIAL_EXPORT_URI);
    JSONObject param = getMDExportStructureStrings(projectId, urls);
    InputStreamRequestEntity request = new InputStreamRequestEntity(
            new ByteArrayInputStream(param.toString().getBytes(
                    Charset.forName("UTF-8"))));
    req.setRequestEntity(request);
    ProjectExportResult result = null;
    try {
        String response = executeMethodOk(req);
        result = new ProjectExportResult();
        JSONObject responseObject = JSONObject.fromObject(response);
        // The server replies with a partialMDArtifact holding the status
        // link (for polling) and the token (for the subsequent import).
        JSONObject exportArtifact = responseObject
                .getJSONObject("partialMDArtifact");
        JSONObject status = exportArtifact.getJSONObject("status");
        result.setTaskUri(status.getString("uri"));
        result.setExportToken(exportArtifact.getString("token"));
        return result;
    } catch (HttpMethodException ex) {
        l.debug("Error exporting metadata objects with URls " + urls
                + " from project " + projectId, ex);
        throw new GdcRestApiException(
                "Error exporting metadata objects with URls " + urls
                        + " from project " + projectId, ex);
    } finally {
        req.releaseConnection();
    }
}
/**
 * Builds the partial metadata export request payload.
 *
 * @param projectId project ID (kept for signature compatibility; not used
 *        in the payload)
 * @param urls URIs of the metadata objects to export
 * @return JSON structure of the form {@code {"partialMDExport": {"uris": [...]}}}
 */
protected JSONObject getMDExportStructureStrings(String projectId,
                                                 List<String> urls) {
    JSONObject param = new JSONObject();
    JSONObject partialMDExport = new JSONObject();
    JSONArray uris = new JSONArray();
    // Bulk-add instead of an explicit copy loop, consistent with
    // getMigrationRequest().
    uris.addAll(urls);
    partialMDExport.put("uris", uris);
    param.put("partialMDExport", partialMDExport);
    return param;
}
/**
 * Returns the name/password connection configuration this client was
 * created with.
 *
 * @return the connection configuration
 */
public NamePasswordConfiguration getNamePasswordConfiguration() {
    return config;
}
/**
 * Checks if report copying is finished. Workaround implementation due to
 * wrong handling of status code.
 *
 * @param link
 *            the link returned from the start loading
 * @return the loading status; "RUNNING" while the task has not finished
 */
public String getCopyStatus(String link) {
    l.debug("Getting Cloning Status status uri=" + link);
    HttpMethod ptm = createGetMethod(getServerUrl() + link);
    try {
        String response = executeMethodOk(ptm);
        if (response != null && !response.isEmpty()) {
            JSONObject task = JSONObject.fromObject(response);
            JSONObject state = task.getJSONObject("taskState");
            if (state != null && !state.isNullObject() && !state.isEmpty()) {
                String status = state.getString("status");
                l.debug("TaskMan status=" + status);
                return status;
            } else {
                l.debug("No wTaskStatus structure in the migration status!");
                throw new GdcRestApiException(
                        "No wTaskStatus structure in the migration status!");
            }
        }
        // An empty body means the task is still in progress.
        return "RUNNING";
    } catch (HttpMethodException e) {
        // workaround implementation due to wrong handling (at least for
        // this status): a 202 surfaces as an exception (possibly wrapped),
        // which here simply means the task is still running
        if (e instanceof HttpMethodNotFinishedYetException
                || (e.getCause() != null && e.getCause() instanceof HttpMethodNotFinishedYetException)) {
            l.debug("getTaskManStatus: Waiting for status");
            return "RUNNING";
        }
        throw e;
    } finally {
        ptm.releaseConnection();
    }
}
/**
 * Retrieves the project info by the project's name
 * <p>
 * Performs a linear scan over all project links visible to the current
 * user and returns the first whose title equals the name exactly.
 *
 * @param name
 *            the project name
 * @return the GoodDataProjectInfo populated with the project's information
 * @throws HttpMethodException when fetching the project links fails
 * @throws GdcProjectAccessException when no project with that name exists
 */
@Deprecated
public Project getProjectByName(String name) throws HttpMethodException,
        GdcProjectAccessException {
    l.debug("Getting project by name=" + name);
    for (Iterator<JSONObject> linksIter = getProjectsLinks(); linksIter
            .hasNext();) {
        JSONObject link = linksIter.next();
        // Only links in the "project" category describe projects.
        String cat = link.getString("category");
        if (!"project".equalsIgnoreCase(cat)) {
            continue;
        }
        String title = link.getString("title");
        if (title.equals(name)) {
            Project proj = new Project(link);
            l.debug("Got project by name=" + name);
            return proj;
        }
    }
    l.debug("The project name=" + name + " doesn't exists.");
    throw new GdcProjectAccessException("The project name=" + name
            + " doesn't exists.");
}
/**
 * Returns the existing projects links
 *
 * @return iterator over accessible projects links (raw "about/links"
 *         entries; callers filter by category)
 * @throws com.gooddata.exception.HttpMethodException when the metadata
 *         root request fails
 */
@Deprecated
@SuppressWarnings("unchecked")
private Iterator<JSONObject> getProjectsLinks() throws HttpMethodException {
    l.debug("Getting project links.");
    HttpMethod req = createGetMethod(getServerUrl() + MD_URI);
    try {
        String resp = executeMethodOk(req);
        JSONObject parsedResp = JSONObject.fromObject(resp);
        JSONObject about = parsedResp.getJSONObject("about");
        JSONArray links = about.getJSONArray("links");
        l.debug("Got project links " + links);
        return links.iterator();
    } finally {
        req.releaseConnection();
    }
}
/**
 * Create a new GoodData project
 * <p>
 * Delegates to the five-argument overload, passing null for the two
 * remaining parameters.
 *
 * @param name
 *            project name
 * @param desc
 *            project description
 * @param templateUri
 *            project template uri
 * @return the project Id
 * @throws GdcRestApiException when the project creation fails
 */
@Deprecated
public String createProject(String name, String desc, String templateUri)
        throws GdcRestApiException {
    return this.createProject(name, desc, templateUri, null, null);
}
/**
 * Returns the List of GoodDataProjectInfo structures for the accessible
 * projects
 *
 * @return the List of GoodDataProjectInfo structures for the accessible
 *         projects
 * @throws HttpMethodException when fetching the project links fails
 */
@Deprecated
public List<Project> listProjects() throws HttpMethodException {
    l.debug("Listing projects.");
    List<Project> list = new ArrayList<Project>();
    for (Iterator<JSONObject> linksIter = getProjectsLinks(); linksIter
            .hasNext();) {
        JSONObject link = linksIter.next();
        // Only links in the "project" category describe projects.
        String cat = link.getString("category");
        if (!"project".equalsIgnoreCase(cat)) {
            continue;
        }
        Project proj = new Project(link);
        list.add(proj);
    }
    l.debug("Found projects " + list);
    return list;
}
/**
 * Gets the URI of the most recent definition of a report, given the report
 * uri (/gdc/obj...).
 *
 * @param reportUri report uri (/gdc/obj...)
 * @return the URI of the report's last definition
 * @throws GdcProjectAccessException when the report cannot be fetched or
 *         carries no definitions
 */
@Deprecated
public String getReportDefinition(String reportUri) {
    l.debug( "Getting report definition for report uri=" + reportUri );
    String qUri = getServerUrl() + reportUri;
    HttpMethod qGet = createGetMethod( qUri );
    try {
        String qr = executeMethodOk( qGet );
        JSONObject q = JSONObject.fromObject( qr );
        if (q.isNullObject()) {
            l.debug("Error getting report definition for report uri=" + reportUri);
            throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
        }
        JSONObject report = q.getJSONObject("report");
        if (report.isNullObject()) {
            l.debug("Error getting report definition for report uri=" + reportUri);
            throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
        }
        JSONObject content = report.getJSONObject("content");
        if (content.isNullObject()) {
            l.debug("Error getting report definition for report uri=" + reportUri);
            throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
        }
        JSONArray definitions = content.getJSONArray("definitions");
        if (definitions == null) {
            l.debug("Error getting report definition for report uri=" + reportUri);
            throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
        }
        if (definitions.size() > 0) {
            // The last entry is the most recent definition. The dead
            // reassignment of qUri that preceded the return was removed.
            return definitions.getString(definitions.size() - 1);
        }
        l.debug("Error getting report definition for report uri=" + reportUri);
        throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
    } finally {
        // qGet can never be null here; the redundant null check was removed.
        qGet.releaseConnection();
    }
}
public static class GraphExecutionResult {
public final static String OK = "OK";
private final String status;
private final String logUrl;
private GraphExecutionResult(final String status, final String logUrl) {
this.status = status;
this.logUrl = logUrl;
}
public String getStatus() {
return status;
}
public String getLogUrl() {
return logUrl;
}
}
}
| src/main/java/com/gooddata/agent/api/GdcRESTApiWrapper.java | /*
* Copyright (c) 2009, GoodData Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and
* the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of the GoodData Corporation nor the names of its contributors may be used to endorse
* or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.gooddata.agent.api;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.sf.json.JSON;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.cookie.CookiePolicy;
import org.apache.commons.httpclient.methods.DeleteMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.InputStreamRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.log4j.Logger;
import com.gooddata.agent.api.model.Column;
import com.gooddata.agent.api.model.MetadataObject;
import com.gooddata.agent.api.model.Project;
import com.gooddata.agent.api.model.SLI;
import com.gooddata.agent.util.Constants;
import com.gooddata.agent.util.NetUtil;
/**
* The GoodData REST API Java wrapper. Stolen from the CL tool's code.
* Should be replaced by a call of an official GoodData Java library
* once it's available.
*
* @author Zdenek Svoboda <[email protected]>
* @version 1.0
*/
public class GdcRESTApiWrapper {
private static Logger l = Logger.getLogger(GdcRESTApiWrapper.class);
/**
* GDC URIs
*/
private static final String PLATFORM_URI = "/gdc/";
private static final String MD_URI = "/gdc/md/";
private static final String LOGIN_URI = "/gdc/account/login";
private static final String DOMAIN_URI = "/gdc/account/domains";
private static final String DOMAIN_USERS_SUFFIX = "/users";
private static final String PROJECT_USERS_SUFFIX = "/users";
private static final String PROJECT_ROLES_SUFFIX = "/roles";
private static final String TOKEN_URI = "/gdc/account/token";
private static final String DATA_INTERFACES_URI = "/ldm/singleloadinterface";
private static final String PROJECTS_URI = "/gdc/projects";
private static final String PULL_URI = "/etl/pull";
private static final String IDENTIFIER_URI = "/identifiers";
private static final String SLI_DESCRIPTOR_URI = "/descriptor";
public static final String MAQL_EXEC_URI = "/ldm/manage";
public static final String MAQL_ASYNC_EXEC_URI = "/ldm/manage2";
public static final String DML_EXEC_URI = "/dml/manage";
public static final String PROJECT_EXPORT_URI = "/maintenance/export";
public static final String PROJECT_IMPORT_URI = "/maintenance/import";
public static final String PROJECT_PARTIAL_EXPORT_URI = "/maintenance/partialmdexport";
public static final String PROJECT_PARTIAL_IMPORT_URI = "/maintenance/partialmdimport";
public static final String REPORT_QUERY = "/query/reports";
public static final String ATTR_QUERY = "/query/attributes";
public static final String EXECUTOR = "/gdc/xtab2/executor3";
public static final String EXPORT_EXECUTOR = "/gdc/exporter/executor";
public static final String INVITATION_URI = "/invitations";
public static final String ETL_MODE_URI = "/etl/mode";
public static final String OBJ_URI = "/obj";
public static final String ROLES_URI = "/roles";
public static final String USERS_URI = "/users";
public static final String ETL_MODE_DLI = "DLI";
public static final String ETL_MODE_VOID = "VOID";
public static final String LINKS_UPLOADS_KEY = "uploads";
public static final String DLI_MANIFEST_FILENAME = "upload_info.json";
public static final String QUERY_PROJECTDASHBOARDS = "projectdashboards";
public static final String QUERY_FOLDERS = "folders";
public static final String QUERY_DATASETS = "datasets";
public static final String QUERY_DIMENSIONS = "dimensions";
public static final String QUERY_PREFIX = "/query/";
    // HTTP client used for all REST calls; recreated from scratch by logout()
    protected HttpClient client;
    // GoodData username/password configuration consumed by login()
    protected NamePasswordConfiguration config;
    // "userLogin" structure returned by the login resource; null when logged out
    private JSONObject userLogin = null;
    // account profile fetched right after a successful login; null when logged out
    private JSONObject profile;
    // maps human-readable role names to the role identifiers used by the API
    private static HashMap<String, String> ROLES = new HashMap<String, String>();
    /* TODO This is fragile and may not work for all projects and/or future versions.
     * Use /gdc/projects/{projectId}/roles to retrieve roles for a particular project.
     */
    static {
        ROLES.put("ADMIN", "adminRole");
        ROLES.put("EDITOR", "editorRole");
        ROLES.put("DASHBOARD ONLY", "dashboardOnlyRole");
        ROLES.put("UNVERIFIED ADMIN", "unverifiedAdminRole");
        ROLES.put("READONLY", "readOnlyUserRole");
    }
/**
* Constructs the GoodData REST API Java wrapper
*
* @param config NamePasswordConfiguration object with the GDC name and password configuration
*/
public GdcRESTApiWrapper(NamePasswordConfiguration config) {
this.config = config;
client = new HttpClient();
NetUtil.configureHttpProxy(client);
}
    /**
     * GDC login - obtain GDC SSToken.
     * Posts the credentials to the login resource, refreshes the token cookie,
     * caches the returned "userLogin" structure, and then fetches the account
     * profile referenced by it. Fails with GdcRestApiException when the login
     * response carries no profile URI.
     *
     * @throws HttpMethodException on a communication error with the GoodData REST API
     */
    public void login() throws HttpMethodException {
        //logout();
        l.debug("Logging into GoodData.");
        JSONObject loginStructure = getLoginStructure();
        PostMethod loginPost = createPostMethod(getServerUrl() + LOGIN_URI);
        // the login payload is streamed as the raw JSON bytes
        // NOTE(review): getBytes() uses the platform default charset — confirm UTF-8 is intended
        InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(loginStructure.toString().getBytes()));
        loginPost.setRequestEntity(request);
        try {
            String resp = executeMethodOk(loginPost, false); // do not re-login on SC_UNAUTHORIZED
            // enabling this prevents the following message:
            // WARN org.apache.commons.httpclient.HttpMethodDirector -
            // Unable to respond to any of these challenges:
            // {gooddata=GoodData realm="GoodData API" cookie=GDCAuthTT}
            // appearing always after those:
            // DEBUG com.gooddata.integration.rest.GdcRESTApiWrapper -
            // Logging into GoodData.
            // DEBUG com.gooddata.integration.rest.GdcRESTApiWrapper -
            // Successfully logged into GoodData.
            setTokenCookie();
            l.debug("Successfully logged into GoodData.");
            JSONObject rsp = JSONObject.fromObject(resp);
            // cache the login state; logout() uses userLogin["state"] later
            userLogin = rsp.getJSONObject("userLogin");
            String profileUri = userLogin.getString("profile");
            if (profileUri != null && profileUri.length() > 0) {
                // fetch and cache the account profile referenced by the login response
                GetMethod gm = createGetMethod(getServerUrl() + profileUri);
                try {
                    resp = executeMethodOk(gm);
                    this.profile = JSONObject.fromObject(resp);
                }
                finally {
                    gm.releaseConnection();
                }
            } else {
                l.debug("Empty account profile.");
                throw new GdcRestApiException("Empty account profile.");
            }
        } finally {
            loginPost.releaseConnection();
        }
    }
/**
* Creates a new login JSON structure
*
* @return the login JSON structure
*/
private JSONObject getLoginStructure() {
JSONObject credentialsStructure = new JSONObject();
credentialsStructure.put("login", config.getUsername());
credentialsStructure.put("password", config.getPassword());
credentialsStructure.put("remember", 1);
JSONObject loginStructure = new JSONObject();
loginStructure.put("postUserLogin", credentialsStructure);
return loginStructure;
}
/**
* Sets the SS token
*
* @throws HttpMethodException
*/
private void setTokenCookie() throws HttpMethodException {
HttpMethod secutityTokenGet = createGetMethod(getServerUrl() + TOKEN_URI);
try {
executeMethodOk(secutityTokenGet);
} finally {
secutityTokenGet.releaseConnection();
}
}
/**
* GDC logout - remove active session, if any exists
*
* @throws HttpMethodException
*/
public void logout() throws HttpMethodException {
if (userLogin == null)
return;
l.debug("Logging out.");
DeleteMethod logoutDelete = createDeleteMethod(getServerUrl() + userLogin.getString("state"));
try {
String resp = executeMethodOk(logoutDelete, false); // do not re-login on SC_UNAUTHORIZED
userLogin = null;
profile = null;
l.debug("Successfully logged out.");
} finally {
logoutDelete.releaseConnection();
}
this.client = new HttpClient();
NetUtil.configureHttpProxy( client );
}
    /**
     * Retrieves the project info by the project's ID.
     *
     * @param id the project id
     * @return a Project populated with the project's URI, id and title
     * @throws HttpMethodException on a communication error with the GoodData REST API
     * @throws GdcProjectAccessException when the project request fails (e.g. the project doesn't exist)
     */
    public Project getProjectById(String id) throws HttpMethodException, GdcProjectAccessException {
        l.debug("Getting project by id=" + id);
        HttpMethod req = createGetMethod(getServerUrl() + PROJECTS_URI + "/" + id);
        try {
            String resp = executeMethodOk(req);
            JSONObject parsedResp = JSONObject.fromObject(resp);
            if(parsedResp != null && !parsedResp.isEmpty() && !parsedResp.isNullObject()) {
                JSONObject project = parsedResp.getJSONObject("project");
                if(project != null && !project.isEmpty() && !project.isNullObject()) {
                    JSONObject meta = project.getJSONObject("meta");
                    String title = meta.getString("title");
                    if(title != null && title.length() > 0)
                        // NOTE(review): MD_URI already ends with '/', so this URI is
                        // "/gdc/md//<id>" with a double slash — confirm intended.
                        return new Project(MD_URI + "/" + id, id, title);
                    else
                        throw new IllegalArgumentException("getProjectById: The project structure doesn't contain the title key.");
                }
                else {
                    throw new IllegalArgumentException("getProjectById: The project structure doesn't contain the project key.");
                }
            } else {
                throw new IllegalArgumentException("getProjectById: Invalid response.");
            }
        } catch (HttpMethodException e) {
            // NOTE(review): the original cause 'e' is dropped here, which loses the
            // underlying HTTP failure details — consider chaining it.
            l.debug("The project id=" + id + " doesn't exists.");
            throw new GdcProjectAccessException("The project id=" + id + " doesn't exists.");
        } finally {
            req.releaseConnection();
        }
    }
/**
* Returns the global platform links
*
* @return accessible platform links
* @throws com.gooddata.exception.HttpMethodException
*
*/
@SuppressWarnings("unchecked")
private Iterator<JSONObject> getPlatformLinks() throws HttpMethodException {
l.debug("Getting project links.");
HttpMethod req = createGetMethod(getServerUrl() + PLATFORM_URI);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
JSONObject about = parsedResp.getJSONObject("about");
JSONArray links = about.getJSONArray("links");
l.debug("Got platform links " + links);
return links.iterator();
} finally {
req.releaseConnection();
}
}
/**
*
*
* @return the WebDav URL from the platform configuration
*/
public URL getWebDavURL() {
Iterator<JSONObject> links = getPlatformLinks();
while(links.hasNext()) {
JSONObject link = links.next();
if(link != null && !link.isEmpty() && !link.isNullObject()) {
String category = link.getString("category");
if(category != null && category.length() > 0 && category.equalsIgnoreCase(LINKS_UPLOADS_KEY)) {
try {
String uri = link.getString("link");
if(uri != null && uri.length()>0) {
if(uri.startsWith("/")) {
uri = getServerUrl() + uri;
}
return new URL(uri);
}
else {
throw new IllegalArgumentException("No uploads URL configured for the server: "+category);
}
}
catch (MalformedURLException e) {
throw new IllegalArgumentException("Invalid uploads URL configured for the server: "+category);
}
}
}
}
throw new IllegalArgumentException("No uploads platform link configured for the GoodData cluster.");
}
    /**
     * Returns a list of project's SLIs.
     *
     * @param projectId project's ID
     * @return a list of project's SLIs
     * @throws HttpMethodException if there is a communication error
     * @throws GdcProjectAccessException if the SLI doesn't exist
     */
    public List<SLI> getSLIs(String projectId) throws HttpMethodException, GdcProjectAccessException {
        l.debug("Getting SLIs from project id=" + projectId);
        List<SLI> list = new ArrayList<SLI>();
        String ifcUri = getSLIsUri(projectId);
        HttpMethod interfacesGet = createGetMethod(ifcUri);
        try {
            String response = executeMethodOk(interfacesGet);
            JSONObject responseObject = JSONObject.fromObject(response);
            // each guard below treats a missing piece of the about/links
            // structure as "project doesn't exist"
            if (responseObject.isNullObject()) {
                l.debug("The project id=" + projectId + " doesn't exist!");
                throw new GdcProjectAccessException("The project id=" + projectId + " doesn't exist!");
            }
            JSONObject interfaceQuery = responseObject.getJSONObject("about");
            if (interfaceQuery.isNullObject()) {
                l.debug("The project id=" + projectId + " doesn't exist!");
                throw new GdcProjectAccessException("The project id=" + projectId + " doesn't exist!");
            }
            JSONArray links = interfaceQuery.getJSONArray("links");
            if (links == null) {
                l.debug("The project id=" + projectId + " doesn't exist!");
                throw new GdcProjectAccessException("The project id=" + projectId + " doesn't exist!");
            }
            // every link entry becomes one SLI
            for (Object ol : links) {
                JSONObject link = (JSONObject) ol;
                SLI ii = new SLI(link);
                list.add(ii);
            }
            l.debug("Got SLIs " + list + " from project id=" + projectId);
        } finally {
            interfacesGet.releaseConnection();
        }
        return list;
    }
/**
* Retrieves the SLI columns
*
* @param uri the SLI uri
* @return list of SLI columns
* @throws GdcProjectAccessException if the SLI doesn't exist
* @throws HttpMethodException if there is a communication issue with the GDC platform
*/
public List<Column> getSLIColumns(String uri) throws GdcProjectAccessException, HttpMethodException {
l.debug("Retrieveing SLI columns for SLI uri=" + uri);
List<Column> list = new ArrayList<Column>();
HttpMethod sliGet = createGetMethod(getServerUrl() + uri + "/manifest");
try {
String response = executeMethodOk(sliGet);
JSONObject responseObject = JSONObject.fromObject(response);
if (responseObject.isNullObject()) {
l.debug("The SLI uri=" + uri + " doesn't exist!");
throw new GdcProjectAccessException("The SLI uri=" + uri + " doesn't exist!");
}
JSONObject dataSetSLIManifest = responseObject.getJSONObject("dataSetSLIManifest");
if (dataSetSLIManifest.isNullObject()) {
l.debug("The SLI uri=" + uri + " doesn't exist!");
throw new GdcProjectAccessException("The SLI uri=" + uri + " doesn't exist!");
}
JSONArray parts = dataSetSLIManifest.getJSONArray("parts");
for (Object oPart : parts) {
list.add(new Column((JSONObject) oPart));
}
} finally {
sliGet.releaseConnection();
}
return list;
}
    /**
     * Retrieves the SLI column data type by loading the metadata object behind
     * the SLI column identifier and reading its content's "columnType" key.
     *
     * @param projectId projectId
     * @param sliColumnIdentifier SLI column identifier (name in the SLI manifest)
     * @return the SLI column datatype
     * @throws GdcRestApiException when the object, its content, or the columnType key is missing
     */
    public String getSLIColumnDataType(String projectId, String sliColumnIdentifier) {
        l.debug("Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier);
        MetadataObject o = getMetadataObject(projectId, sliColumnIdentifier);
        if (o != null) {
            JSONObject c = o.getContent();
            if (c != null) {
                String type = c.getString("columnType");
                if (type != null && type.length() > 0) {
                    return type;
                } else {
                    l.debug("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " No columnType key in the content.");
                    throw new GdcRestApiException("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " No columnType key in the content.");
                }
            } else {
                l.debug("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " No content structure.");
                throw new GdcRestApiException("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " No content structure.");
            }
        } else {
            l.debug("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " MD object doesn't exist.");
            throw new GdcRestApiException("Error Retrieveing SLI column datatype projectId=" + projectId + " SLI column name=" + sliColumnIdentifier + " MD object doesn't exist.");
        }
    }
/**
* Retrieves the SLI columns
*
* @param uri the SLI uri
* @return JSON manifest
* @throws GdcProjectAccessException if the SLI doesn't exist
* @throws HttpMethodException if there is a communication issue with the GDC platform
*/
public JSONObject getSLIManifest(String uri) throws GdcProjectAccessException, HttpMethodException {
l.debug("Retrieveing SLI columns for SLI uri=" + uri);
List<Column> list = new ArrayList<Column>();
HttpMethod sliGet = createGetMethod(getServerUrl() + uri + "/manifest");
try {
String response = executeMethodOk(sliGet);
JSONObject responseObject = JSONObject.fromObject(response);
if (responseObject.isNullObject()) {
l.debug("The SLI uri=" + uri + " doesn't exist!");
throw new GdcProjectAccessException("The SLI uri=" + uri + " doesn't exist!");
}
return responseObject;
} finally {
sliGet.releaseConnection();
}
}
/**
* Finds a project SLI by it's id
*
* @param id the SLI id
* @param projectId the project id
* @return the SLI
* @throws GdcProjectAccessException if the SLI doesn't exist
* @throws HttpMethodException if there is a communication issue with the GDC platform
*/
public SLI getSLIById(String id, String projectId) throws GdcProjectAccessException, HttpMethodException {
l.debug("Get SLI by id=" + id + " project id=" + projectId);
List<SLI> slis = getSLIs(projectId);
return getSLIById(id, slis, projectId);
}
/**
* Finds a project SLI in list of SLI
*
* @param id the SLI id
* @param slis of SLI (related to one project)
* @param projectId the project id
* @return the SLI
* @throws GdcProjectAccessException if the SLI doesn't exist
*/
public static SLI getSLIById(String id, List<SLI> slis, String projectId) throws GdcProjectAccessException {
l.debug("Get SLI by id=" + id + " project id=" + projectId);
for (SLI sli : slis) {
if (id.equals(sli.getId())) {
l.debug("Got SLI by id=" + id + " project id=" + projectId);
return sli;
}
}
l.debug("The SLI id=" + id + " doesn't exist in the project id=" + projectId);
throw new GdcProjectAccessException("The SLI id=" + id + " doesn't exist in the project id=" + projectId);
}
/**
* Enumerates all attributes in the project
*
* @param projectId project Id
* @return LIst of attr uris
*/
public List<String> enumerateAttributes(String projectId) {
l.debug("Enumerating attributes for project id=" + projectId);
List<String> list = new ArrayList<String>();
String qUri = getProjectMdUrl(projectId) + ATTR_QUERY;
HttpMethod qGet = createGetMethod(qUri);
try {
String qr = executeMethodOk(qGet);
JSONObject q = JSONObject.fromObject(qr);
if (q.isNullObject()) {
l.debug("Enumerating attributes for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating attributes for project id=" + projectId + " failed.");
}
JSONObject qry = q.getJSONObject("query");
if (qry.isNullObject()) {
l.debug("Enumerating attributes for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
JSONArray entries = qry.getJSONArray("entries");
if (entries == null) {
l.debug("Enumerating attributes for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
for (Object oentry : entries) {
JSONObject entry = (JSONObject) oentry;
list.add(entry.getString("link"));
}
} finally {
qGet.releaseConnection();
}
return list;
}
/**
* Gets attribute PK
*
* @param attrUri attribute URI
* @return list of attribute PKs (columns)
*/
public List<JSONObject> getAttributePk(String attrUri) {
List<JSONObject> ret = new ArrayList<JSONObject>();
JSONObject attr = getObjectByUri(attrUri);
JSONObject a = attr.getJSONObject("attribute");
if (a != null && !a.isEmpty() && !a.isEmpty()) {
JSONObject c = a.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
JSONArray pks = c.getJSONArray("pk");
if (pks != null && !pks.isEmpty()) {
Object[] p = pks.toArray();
for (Object pko : p) {
JSONObject pk = (JSONObject) pko;
String columnUri = pk.getString("data");
if (columnUri != null) {
ret.add(getObjectByUri(columnUri));
} else {
l.debug("Error getting attribute PK. No PK data.");
throw new GdcProjectAccessException("Error getting attribute PK. No PK data.");
}
}
}
} else {
l.debug("Error getting attribute PK. No content.");
throw new GdcProjectAccessException("Error getting attribute PK. No content.");
}
} else {
l.debug("Error getting attribute PK. No attribute.");
throw new GdcProjectAccessException("Error getting attribute PK. No attribute.");
}
return ret;
}
/**
* Gets attribute FK
*
* @param attrUri attribute URI
* @return list of attribute FKs (columns)
*/
public List<JSONObject> getAttributeFk(String attrUri) {
List<JSONObject> ret = new ArrayList<JSONObject>();
JSONObject attr = getObjectByUri(attrUri);
JSONObject a = attr.getJSONObject("attribute");
if (a != null && !a.isEmpty() && !a.isEmpty()) {
JSONObject c = a.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
if (c.containsKey("fk")) {
JSONArray pks = c.getJSONArray("fk");
if (pks != null && !pks.isEmpty()) {
Object[] p = pks.toArray();
for (Object pko : p) {
JSONObject pk = (JSONObject) pko;
String columnUri = pk.getString("data");
if (columnUri != null && columnUri.trim().length() > 0) {
ret.add(getObjectByUri(columnUri));
} else {
l.debug("Error getting attribute FK. No FK data.");
throw new GdcProjectAccessException("Error getting attribute FK. No FK data.");
}
}
}
}
} else {
l.debug("Error getting attribute FK. No content.");
throw new GdcProjectAccessException("Error getting attribute FK. No content.");
}
} else {
l.debug("Error getting attribute FK. No attribute.");
throw new GdcProjectAccessException("Error getting attribute FK. No attribute.");
}
return ret;
}
/**
* Gets column DB name
*
* @param column column object
* @return column DB name
*/
public String getColumnDbName(JSONObject column) {
JSONObject cl = column.getJSONObject("column");
if (cl != null && !cl.isEmpty() && !cl.isEmpty()) {
JSONObject c = cl.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
String cn = c.getString("columnDBName");
if (cn != null && cn.trim().length() > 0) {
return cn;
} else {
l.debug("Error getting column name. No columnDBName.");
throw new GdcProjectAccessException("Error getting column name. No columnDBName.");
}
} else {
l.debug("Error getting column name. No content.");
throw new GdcProjectAccessException("Error getting column name. No content.");
}
} else {
l.debug("Error getting column name. No column.");
throw new GdcProjectAccessException("Error getting column name. No column.");
}
}
/**
* Gets column table name
*
* @param column column object
* @return column table name
*/
public String getColumnTableName(JSONObject column) {
JSONObject cl = column.getJSONObject("column");
if (cl != null && !cl.isEmpty() && !cl.isEmpty()) {
JSONObject c = cl.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
String t = c.getString("table");
if (t != null && t.trim().length() > 0) {
JSONObject tbl = getObjectByUri(t);
JSONObject root = tbl.getJSONObject("table");
if (root != null && !root.isEmpty() && !root.isEmpty()) {
c = root.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
String dl = c.getString("activeDataLoad");
if (dl != null && dl.trim().length() > 0) {
JSONObject tdl = getObjectByUri(dl);
root = tdl.getJSONObject("tableDataLoad");
if (root != null && !root.isEmpty() && !root.isEmpty()) {
c = root.getJSONObject("content");
if (c != null && !c.isEmpty() && !c.isEmpty()) {
String tn = c.getString("dataSourceLocation");
if (tn != null && tn.trim().length() > 0) {
return tn;
} else {
l.debug("Error getting column name. No dataSourceLocation.");
throw new GdcProjectAccessException("Error getting column name. No dataSourceLocation.");
}
} else {
l.debug("Error getting column name. No active table data load content.");
throw new GdcProjectAccessException("Error getting column name. No active table data load content.");
}
} else {
l.debug("Error getting column name. No table data load root.");
throw new GdcProjectAccessException("Error getting column name. No table data load root.");
}
} else {
l.debug("Error getting column name. No active data load.");
throw new GdcProjectAccessException("Error getting column name. No active data load.");
}
} else {
l.debug("Error getting column name. No table content.");
throw new GdcProjectAccessException("Error getting column name. No table content.");
}
} else {
l.debug("Error getting column table. No table root.");
throw new GdcProjectAccessException("Error getting column table. No table root.");
}
} else {
l.debug("Error getting column name. No table.");
throw new GdcProjectAccessException("Error getting column name. No table.");
}
} else {
l.debug("Error getting column name. No content.");
throw new GdcProjectAccessException("Error getting column name. No content.");
}
} else {
l.debug("Error getting column name. No column.");
throw new GdcProjectAccessException("Error getting column name. No column.");
}
}
/**
* Enumerates all attributes in the project
*
* @param attrUri attribute URI
* @return attribute object
*/
public JSONObject getAttribute(String attrUri) {
l.debug("Getting attribute uri=" + attrUri);
String qUri = getServerUrl() + attrUri;
HttpMethod qGet = createGetMethod(qUri);
try {
String qr = executeMethodOk(qGet);
return JSONObject.fromObject(qr);
} finally {
qGet.releaseConnection();
}
}
/**
* Enumerates all reports on in a project
*
* @param projectId project Id
* @return LIst of report uris
*/
public List<String> enumerateReports(String projectId) {
l.debug("Enumerating reports for project id=" + projectId);
List<String> list = new ArrayList<String>();
String qUri = getProjectMdUrl(projectId) + REPORT_QUERY;
HttpMethod qGet = createGetMethod(qUri);
try {
String qr = executeMethodOk(qGet);
JSONObject q = JSONObject.fromObject(qr);
if (q.isNullObject()) {
l.debug("Enumerating reports for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
JSONObject qry = q.getJSONObject("query");
if (qry.isNullObject()) {
l.debug("Enumerating reports for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
JSONArray entries = qry.getJSONArray("entries");
if (entries == null) {
l.debug("Enumerating reports for project id=" + projectId + " failed.");
throw new GdcProjectAccessException("Enumerating reports for project id=" + projectId + " failed.");
}
for (Object oentry : entries) {
JSONObject entry = (JSONObject) oentry;
int deprecated = entry.getInt("deprecated");
if (deprecated == 0)
list.add(entry.getString("link"));
}
} finally {
qGet.releaseConnection();
}
return list;
}
private String getProjectIdFromObjectUri(String uri) {
Pattern regexp = Pattern.compile("gdc/md/.*?/");
Matcher m = regexp.matcher(uri);
if (m.find()) {
return m.group().split("/")[2];
} else {
l.debug("The passed string '" + uri + "' doesn't have the GoodData URI structure!");
throw new InvalidParameterException("The passed string '" + uri + "' doesn't have the GoodData URI structure!");
}
}
    /**
     * Computes the metric value by creating a throwaway single-metric
     * reportDefinition metadata object, executing it, and reading the first
     * cell of the resulting xtab data. Polls until the data result is ready
     * (up to Constants.MAX_RETRY attempts).
     *
     * @param metricUri metric URI
     * @return the metric value
     * @throws InvalidParameterException when the result lacks the expected xtab_data/data structure
     */
    public double computeMetric(String metricUri) {
        l.debug("Computing metric uri=" + metricUri);
        double retVal = 0;
        String projectId = getProjectIdFromObjectUri(metricUri);
        // build a minimal report definition: one metric, "metricGroup" column,
        // no rows, no filters, grid format
        JSONObject reportDefinition = new JSONObject();
        JSONObject metric = new JSONObject();
        metric.put("alias", "");
        metric.put("uri", metricUri);
        JSONArray metrics = new JSONArray();
        metrics.add(metric);
        JSONArray columns = new JSONArray();
        columns.add("metricGroup");
        JSONObject grid = new JSONObject();
        grid.put("metrics", metrics);
        grid.put("columns", columns);
        grid.put("rows", new JSONArray());
        grid.put("columnWidths", new JSONArray());
        JSONObject sort = new JSONObject();
        sort.put("columns", new JSONArray());
        sort.put("rows", new JSONArray());
        grid.put("sort", sort);
        JSONObject content = new JSONObject();
        content.put("grid", grid);
        content.put("filters", new JSONArray());
        content.put("format", "grid");
        reportDefinition.put("content", content);
        JSONObject meta = new JSONObject();
        meta.put("category", "reportDefinition");
        meta.put("title", "N/A");
        reportDefinition.put("meta", meta);
        MetadataObject obj = new MetadataObject();
        obj.put("reportDefinition", reportDefinition);
        // persist the definition so it can be executed
        MetadataObject resp = new MetadataObject(createMetadataObject(projectId, obj));
        // poll until the data result is available or the retry budget runs out
        int retryCnt = Constants.MAX_RETRY;
        boolean hasFinished = false;
        while (retryCnt-- > 0 && !hasFinished) {
            try {
                String dataResultUri = executeReportDefinition(resp.getUri());
                JSONObject result = getObjectByUri(dataResultUri);
                hasFinished = true;
                if (result != null && !result.isEmpty() && !result.isNullObject()) {
                    JSONObject xtabData = result.getJSONObject("xtab_data");
                    if (xtabData != null && !xtabData.isEmpty() && !xtabData.isNullObject()) {
                        JSONArray data = xtabData.getJSONArray("data");
                        if (data != null && !data.isEmpty()) {
                            // the single metric value sits in the first cell
                            retVal = data.getJSONArray(0).getDouble(0);
                        } else {
                            l.debug("Can't compute the metric. No data structure in result.");
                            throw new InvalidParameterException("Can't compute the metric. No data structure in result.");
                        }
                    } else {
                        l.debug("Can't compute the metric. No xtab_data structure in result.");
                        throw new InvalidParameterException("Can't compute the metric. No xtab_data structure in result.");
                    }
                } else {
                    l.debug("Can't compute the metric. No result from XTAB.");
                    throw new InvalidParameterException("Can't compute the metric. No result from XTAB.");
                }
            } catch (HttpMethodNotFinishedYetException e) {
                // result not ready yet — wait and retry
                l.debug("computeMetric: Waiting for DataResult");
                try {
                    Thread.sleep(Constants.POLL_INTERVAL);
                } catch (InterruptedException ex) {
                    // do nothing
                    // NOTE(review): swallowing InterruptedException without
                    // re-interrupting loses the interrupt status — consider
                    // Thread.currentThread().interrupt()
                }
            }
        }
        l.debug("Metric uri=" + metricUri + " computed. Result is " + retVal);
        return retVal;
    }
/**
* Computes a simple report and returns the report text
*
* @param reportUri report URI
* @return the report rendered in text
*/
public String computeReport(String reportUri) {
l.debug("Computing report uri=" + reportUri);
String retVal = "";
int retryCnt = Constants.MAX_RETRY;
boolean hasFinished = false;
while (retryCnt-- > 0 && !hasFinished) {
try {
String dataResultUri = executeReport(reportUri).getJSONObject("execResult").getString("dataResult");
JSONObject result = getObjectByUri(dataResultUri);
hasFinished = true;
if (result != null && !result.isEmpty() && !result.isNullObject()) {
JSONObject xtabData = result.getJSONObject("xtab_data");
if (xtabData != null && !xtabData.isEmpty() && !xtabData.isNullObject()) {
JSONArray data = xtabData.getJSONArray("data");
if (data != null && !data.isEmpty()) {
double[] values = new double[data.size()];
for (int i = 0; i < data.size(); i++) {
JSONArray vals = data.getJSONArray(i);
values[i] = vals.getDouble(0);
}
JSONObject rows = xtabData.getJSONObject("rows");
if (rows != null && !rows.isEmpty() && !rows.isNullObject()) {
JSONArray lookups = rows.getJSONArray("lookups");
if (lookups != null && !lookups.isEmpty()) {
Map<String, String> attributes = new HashMap<String, String>();
JSONObject lkpData = lookups.getJSONObject(0);
for (Object key : lkpData.keySet()) {
Object value = lkpData.get(key);
if (key != null && value != null)
attributes.put(key.toString(), value.toString());
}
JSONObject tree = rows.getJSONObject("tree");
if (tree != null && !tree.isEmpty() && !tree.isNullObject()) {
Map<String, Integer> indexes = new HashMap<String, Integer>();
JSONObject index = tree.getJSONObject("index");
if (index != null && !index.isEmpty()) {
for (Object key : index.keySet()) {
if (key != null) {
JSONArray valIdxs = index.getJSONArray(key.toString());
if (valIdxs != null && !valIdxs.isEmpty()) {
indexes.put(key.toString(), valIdxs.getInt(0));
}
}
}
JSONArray children = tree.getJSONArray("children");
if (children != null && !children.isEmpty()) {
for (int i = 0; i < children.size(); i++) {
JSONObject c = children.getJSONObject(i);
String id = c.getString("id");
if (id != null && id.length() > 0) {
String attribute = attributes.get(id);
int v = indexes.get(id);
double vl = values[v];
if (retVal.length() > 0) {
retVal += ", " + attribute + " : " + vl;
} else {
retVal += attribute + " : " + vl;
}
} else {
l.debug("Can't compute the report. No id in children.");
throw new InvalidParameterException("Can't compute the report. No id in children.");
}
}
} else {
l.debug("Can't compute the report. No tree structure in result.");
throw new InvalidParameterException("Can't compute the report. No tree structure in result.");
}
} else {
l.debug("Can't compute the report. No index structure in result.");
throw new InvalidParameterException("Can't compute the report. No index structure in result.");
}
} else {
l.debug("Can't compute the report. No tree structure in result.");
throw new InvalidParameterException("Can't compute the report. No tree structure in result.");
}
} else {
l.debug("Can't compute the report. No lookups structure in result.");
throw new InvalidParameterException("Can't compute the report. No lookups structure in result.");
}
} else {
l.debug("Can't compute the report. No rows structure in result.");
throw new InvalidParameterException("Can't compute the report. No rows structure in result.");
}
} else {
l.debug("Can't compute the report. No data structure in result.");
throw new InvalidParameterException("Can't compute the report. No data structure in result.");
}
} else {
l.debug("Can't compute the report. No xtab_data structure in result.");
throw new InvalidParameterException("Can't compute the report. No xtab_data structure in result.");
}
} else {
l.debug("Can't compute the report. No result from XTAB.");
throw new InvalidParameterException("Can't compute the metric. No result from XTAB.");
}
} catch (HttpMethodNotFinishedYetException e) {
l.debug("computeReport: Waiting for DataResult");
try {
Thread.sleep(Constants.POLL_INTERVAL);
} catch (InterruptedException ex) {
// do nothing
}
}
}
l.debug("Report uri=" + reportUri + " computed.");
return retVal;
}
/**
* Report definition to execute
*
* @param reportDefUri report definition to execute
*/
public String executeReportDefinition(String reportDefUri) {
l.debug("Executing report definition uri=" + reportDefUri);
PostMethod execPost = createPostMethod(getServerUrl() + EXECUTOR);
JSONObject execDef = new JSONObject();
execDef.put("reportDefinition", reportDefUri);
JSONObject exec = new JSONObject();
exec.put("report_req", execDef);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(exec.toString().getBytes()));
execPost.setRequestEntity(request);
try {
String task = executeMethodOk(execPost);
if (task != null && task.length() > 0) {
JSONObject tr = JSONObject.fromObject(task);
if (tr.isNullObject()) {
l.debug("Executing report definition uri=" + reportDefUri + " failed. Returned invalid result result=" + tr);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri + " failed. " +
"Returned invalid result result=" + tr);
}
JSONObject reportResult = tr.getJSONObject("execResult");
if (reportResult.isNullObject()) {
l.debug("Executing report definition uri=" + reportDefUri + " failed. Returned invalid result result=" + tr);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri + " failed. " +
"Returned invalid result result=" + tr);
}
String dataResult = reportResult.getString("dataResult");
if (dataResult == null || dataResult.length()<=0) {
l.debug("Executing report definition uri=" + reportDefUri + " failed. Returned invalid result result=" + tr);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri + " failed. " +
"Returned invalid result result=" + tr);
}
return dataResult;
} else {
l.debug("Executing report definition uri=" + reportDefUri + " failed. Returned invalid task link uri=" + task);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri +
" failed. Returned invalid task link uri=" + task);
}
} catch (HttpMethodException ex) {
l.debug("Executing report definition uri=" + reportDefUri + " failed.", ex);
throw new GdcRestApiException("Executing report definition uri=" + reportDefUri + " failed.");
} finally {
execPost.releaseConnection();
}
}
/**
* Report to execute.
*
* @return JSON representation of the report result (the "execResult" object including the "execResult" root key)
* @param reportUri report definition to execute
*/
public JSONObject executeReport(String reportUri) {
l.debug("Executing report uri=" + reportUri);
PostMethod execPost = createPostMethod(getServerUrl() + EXECUTOR);
JSONObject execDef = new JSONObject();
execDef.put("report", reportUri);
JSONObject exec = new JSONObject();
exec.put("report_req", execDef);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(exec.toString().getBytes()));
execPost.setRequestEntity(request);
String taskLink = null;
try {
String task = executeMethodOk(execPost);
if (task != null && task.length() > 0) {
JSONObject tr = JSONObject.fromObject(task);
if (tr.isNullObject()) {
l.debug("Executing report uri=" + reportUri + " failed. Returned invalid result=" + tr);
throw new GdcRestApiException("Executing report uri=" + reportUri + " failed. " +
"Returned invalid result result=" + tr);
}
JSONObject reportResult = tr.getJSONObject("execResult");
if (reportResult.isNullObject()) {
l.debug("Executing report uri=" + reportUri + " failed. Returned invalid result=" + tr);
throw new GdcRestApiException("Executing report uri=" + reportUri + " failed. " +
"Returned invalid result result=" + tr);
}
String dataResult = reportResult.getString("dataResult");
if (dataResult == null || dataResult.length()<=0) {
l.debug("Executing report uri=" + reportUri + " failed. Returned invalid dataResult=" + tr);
throw new GdcRestApiException("Executing report uri=" + reportUri + " failed. " +
"Returned invalid dataResult=" + tr);
}
return tr;
} else {
l.debug("Executing report uri=" + reportUri + " failed. Returned invalid task link uri=" + task);
throw new GdcRestApiException("Executing report uri=" + reportUri +
" failed. Returned invalid task link uri=" + task);
}
} catch (HttpMethodException ex) {
l.debug("Executing report uri=" + reportUri + " failed.", ex);
throw new GdcRestApiException("Executing report uri=" + reportUri + " failed.");
} finally {
execPost.releaseConnection();
}
}
/**
* Export a report result
*
* @param execResult object returned by the {@link #executeReport(String)} method
* @param format export format (pdf | xls | png | csv)
*/
public byte[] exportReportResult(JSONObject execResult, String format) {
String resultUri = execResult.getJSONObject("execResult").getString("dataResult");
l.debug("Exporting report result uri=" + resultUri);
PostMethod execPost = createPostMethod(getServerUrl() + EXPORT_EXECUTOR);
JSONObject execDef = new JSONObject();
execDef.put("result", execResult);
execDef.put("format", format);
JSONObject exec = new JSONObject();
exec.put("result_req", execDef);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(exec.toString().getBytes()));
execPost.setRequestEntity(request);
String taskLink = null;
try {
String task = executeMethodOk(execPost);
if (task != null && task.length() > 0) {
JSONObject tr = JSONObject.fromObject(task);
if (tr.isNullObject()) {
l.debug("Exporting report result uri=" + resultUri + " failed. Returned invalid result=" + tr);
throw new GdcRestApiException("Exporting report result uri=" + resultUri + " failed. " +
"Returned invalid result=" + tr);
}
String uri = tr.getString("uri");
if (uri != null && uri.length() > 0) {
return getReportResult(uri);
} else {
l.debug("Exporting report result uri=" + resultUri + " failed. Returned invalid result=" + tr);
throw new GdcRestApiException("Exporting report result uri=" + resultUri + " failed. " +
"Returned invalid result=" + tr);
}
} else {
l.debug("Exporting report result uri=" + resultUri + " failed. Returned invalid task link uri=" + task);
throw new GdcRestApiException("Exporting report result uri=" + resultUri +
" failed. Returned invalid task link uri=" + task);
}
} catch (HttpMethodException ex) {
l.debug("Exporting report result uri=" + resultUri + " failed.", ex);
throw new GdcRestApiException("Exporting report result uri=" + resultUri + " failed.");
} finally {
execPost.releaseConnection();
}
}
/**
* Retrieves the report export result
*
* @param uri the export result
* @return attribute object
*/
public byte[] getReportResult(String uri) {
l.debug("Retrieving export result uri=" + uri);
byte[] buf = null;
String qUri = getServerUrl() + uri;
boolean finished = false;
do {
HttpMethod qGet = createGetMethod(qUri);
try {
executeMethodOkOnly(qGet);
finished = true;
buf = qGet.getResponseBody();
} catch (HttpMethodNotFinishedYetException e) {
l.debug("Waiting for exporter to finish.");
try {
Thread.currentThread().sleep(Constants.POLL_INTERVAL);
} catch (InterruptedException ex) {
// do nothing
}
} catch (IOException e) {
l.debug("Network error during the report result export.", e);
throw new GdcRestApiException("Network error during the report result export.", e);
} finally {
qGet.releaseConnection();
}
} while (!finished);
return buf;
}
/**
* Kicks the GDC platform to inform it that the FTP transfer is finished.
*
* @param projectId the project's ID
* @param remoteDir the remote (FTP) directory that contains the data
* @return the link that is used for polling the loading progress
* @throws GdcRestApiException
*/
public String startLoading(String projectId, String remoteDir) throws GdcRestApiException {
l.debug("Initiating data load project id=" + projectId + " remoteDir=" + remoteDir);
PostMethod pullPost = createPostMethod(getProjectMdUrl(projectId) + PULL_URI);
JSONObject pullStructure = getPullStructure(remoteDir);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(pullStructure.toString().getBytes()));
pullPost.setRequestEntity(request);
String taskLink = null;
try {
String response = executeMethodOk(pullPost);
JSONObject responseObject = JSONObject.fromObject(response);
taskLink = responseObject.getJSONObject("pullTask").getString("uri");
} catch (HttpMethodException ex) {
throw new GdcRestApiException("Loading fails: " + ex.getMessage());
} finally {
pullPost.releaseConnection();
}
l.debug("Data load project id=" + projectId + " remoteDir=" + remoteDir + " initiated. Status is on uri=" + taskLink);
return taskLink;
}
/**
* Returns the pull API JSON structure
*
* @param directory the remote directory
* @return the pull API JSON structure
*/
private JSONObject getPullStructure(String directory) {
JSONObject pullStructure = new JSONObject();
pullStructure.put("pullIntegration", directory);
return pullStructure;
}
/**
* Checks if the loading is finished
*
* @param link the link returned from the start loading
* @return the loading status
*/
public String getLoadingStatus(String link) throws HttpMethodException {
l.debug("Getting data loading status uri=" + link);
HttpMethod ptm = createGetMethod(getServerUrl() + link);
try {
String response = executeMethodOk(ptm);
JSONObject task = JSONObject.fromObject(response);
String status = task.getString("taskStatus");
l.debug("Loading status=" + status);
return status;
} finally {
ptm.releaseConnection();
}
}
/**
* Create a new GoodData project
*
* @param name project name
* @param desc project description
* @param templateUri project template uri
* @param driver underlying database driver
* @param accessToken access token
* @return the project Id
* @throws GdcRestApiException
*/
public String createProject(String name, String desc, String templateUri, String driver, String accessToken) throws GdcRestApiException {
l.debug("Creating project name=" + name);
PostMethod createProjectPost = createPostMethod(getServerUrl() + PROJECTS_URI);
JSONObject createProjectStructure = getCreateProject(name, desc, templateUri, driver, accessToken);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
createProjectStructure.toString().getBytes()));
createProjectPost.setRequestEntity(request);
String uri = null;
try {
String response = executeMethodOk(createProjectPost);
JSONObject responseObject = JSONObject.fromObject(response);
uri = responseObject.getString("uri");
} catch (HttpMethodException ex) {
l.debug("Creating project fails: ", ex);
throw new GdcRestApiException("Creating project fails: ", ex);
} finally {
createProjectPost.releaseConnection();
}
if (uri != null && uri.length() > 0) {
String id = getProjectId(uri);
l.debug("Created project id=" + id);
return id;
}
l.debug("Error creating project.");
throw new GdcRestApiException("Error creating project.");
}
/**
* Returns the create project JSON structure
*
* @param name project name
* @param desc project description
* @param templateUri project template uri
* @param driver underlying database driver
* @param accessToken access token
* @return the create project JSON structure
*/
private JSONObject getCreateProject(String name, String desc, String templateUri, String driver, String accessToken) {
JSONObject meta = new JSONObject();
meta.put("title", name);
meta.put("summary", desc);
if (templateUri != null && templateUri.length() > 0) {
meta.put("projectTemplate", templateUri);
}
JSONObject content = new JSONObject();
//content.put("state", "ENABLED");
content.put("guidedNavigation", "1");
if(driver != null && driver.length()>0) {
content.put("driver", driver);
}
if(accessToken != null && accessToken.length()>0) {
content.put("authorizationToken", accessToken);
}
JSONObject project = new JSONObject();
project.put("meta", meta);
project.put("content", content);
JSONObject createStructure = new JSONObject();
createStructure.put("project", project);
return createStructure;
}
/**
* Returns the project status
*
* @param id project ID
* @return current project status
*/
public String getProjectStatus(String id) {
l.debug("Getting project status for project " + id);
HttpMethod req = createGetMethod(getServerUrl() + PROJECTS_URI + "/" + id);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
JSONObject project = parsedResp.getJSONObject("project");
JSONObject content = project.getJSONObject("content");
String state = content.getString("state");
return state;
} catch (HttpMethodException e) {
l.debug("The project id=" + id + " doesn't exists.");
throw new GdcProjectAccessException("The project id=" + id + " doesn't exists.");
} finally {
req.releaseConnection();
}
}
/**
* Drops a GoodData project
*
* @param projectId project id
* @throws GdcRestApiException
*/
public void dropProject(String projectId) throws GdcRestApiException {
l.debug("Dropping project id=" + projectId);
DeleteMethod dropProjectDelete = createDeleteMethod(getServerUrl() + PROJECTS_URI + "/"+projectId);
try {
executeMethodOk(dropProjectDelete);
} catch (HttpMethodException ex) {
l.debug("Dropping project id=" + projectId + " failed.", ex);
throw new GdcRestApiException("Dropping project id=" + projectId + " failed.", ex);
} finally {
dropProjectDelete.releaseConnection();
}
l.debug("Dropped project id=" + projectId);
}
/**
* Retrieves the project id from the URI returned by the create project
*
* @param uri the create project URI
* @return project id
* @throws GdcRestApiException in case the project doesn't exist
*/
protected String getProjectId(String uri) throws GdcRestApiException {
l.debug("Getting project id by uri=" + uri);
if (uri != null && uri.length() > 0) {
String[] cs = uri.split("/");
if (cs != null && cs.length > 0) {
l.debug("Got project id=" + cs[cs.length - 1] + " by uri=" + uri);
return cs[cs.length - 1];
}
}
l.debug("Can't get project from " + uri);
throw new GdcRestApiException("Can't get project from " + uri);
}
/**
* Executes the MAQL and creates/modifies the project's LDM
*
* @param projectId the project's ID
* @param maql String with the MAQL statements
* @return result String
* @throws GdcRestApiException
*/
public String[] executeMAQL(String projectId, String maql) throws GdcRestApiException {
l.debug("Executing MAQL projectId=" + projectId + " MAQL:\n" + maql);
PostMethod maqlPost = createPostMethod(getProjectMdUrl(projectId) + MAQL_EXEC_URI);
JSONObject maqlStructure = getMAQLExecStructure(maql);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
maqlStructure.toString().getBytes()));
maqlPost.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(maqlPost);
JSONObject responseObject = JSONObject.fromObject(response);
JSONArray uris = responseObject.getJSONArray("uris");
return (String[]) uris.toArray(new String[]{""});
} catch (HttpMethodException ex) {
l.debug("MAQL execution: ", ex);
throw new GdcRestApiException("MAQL execution: " + ex.getMessage(), ex);
} finally {
maqlPost.releaseConnection();
}
}
/**
* Executes the MAQL and creates/modifies the project's LDM asynchronously
*
* @param projectId the project's ID
* @param maql String with the MAQL statements
* @return result String
* @throws GdcRestApiException
*/
public void executeMAQLAsync(String projectId, String maql) throws GdcRestApiException {
l.debug("Executing async MAQL projectId=" + projectId + " MAQL:\n" + maql);
PostMethod maqlPost = createPostMethod(getProjectMdUrl(projectId) + MAQL_ASYNC_EXEC_URI);
JSONObject maqlStructure = getMAQLExecStructure(maql);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
maqlStructure.toString().getBytes()));
maqlPost.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(maqlPost);
JSONObject responseObject = JSONObject.fromObject(response);
JSONArray uris = responseObject.getJSONArray("entries");
String taskmanUri = "";
for(Object ouri : uris) {
JSONObject uri = (JSONObject)ouri;
String category = uri.getString("category");
if(category.equals("tasks-status")) {
taskmanUri = uri.getString("link");
}
}
if(taskmanUri != null && taskmanUri.length()>0) {
l.debug("Checking async MAQL DDL execution status.");
TaskmanStatus status = new TaskmanStatus("",new String[]{});
while (!"OK".equalsIgnoreCase(status.getStatus()) && !"ERROR".equalsIgnoreCase(status.getStatus()) &&
!"WARNING".equalsIgnoreCase(status.getStatus())) {
status = getDetailedTaskManStatus(taskmanUri);
l.debug("Async MAQL DDL status = " + status.getStatus());
Thread.sleep(Constants.POLL_INTERVAL);
}
l.info("Async MAQL DDL finished with status " + status.getStatus());
if (!("OK".equalsIgnoreCase(status.getStatus()) || "WARNING".equalsIgnoreCase(status.getStatus()))) {
String[] messages = status.getMessage();
String message = "";
for(String msg : messages) {
if(message.length()>0) message += "\n";
message += msg;
}
throw new GdcRestApiException("Async MAQL execution failed with status "+status.getStatus() +
". Errors: "+message);
}
}
} catch (HttpMethodException ex) {
l.debug("MAQL execution: ", ex);
throw new GdcRestApiException("MAQL execution: " + ex.getMessage(), ex);
} catch (InterruptedException e) {
throw new InternalErrorException(e);
} finally {
maqlPost.releaseConnection();
}
}
    /**
     * Holder for the result of a project export: the URI of the status task to
     * poll and the token needed to import the exported package.
     */
    public static class ProjectExportResult {
        // URI of the export status task (used for polling the export progress).
        private String taskUri;
        // Token identifying the exported package (passed to the import call).
        private String exportToken;
        public String getTaskUri() {
            return taskUri;
        }
        public void setTaskUri(String taskUri) {
            this.taskUri = taskUri;
        }
        public String getExportToken() {
            return exportToken;
        }
        public void setExportToken(String exportToken) {
            this.exportToken = exportToken;
        }
    }
/**
* Exports the project
*
* @param projectId the project's ID
* @param exportUsers flag
* @param exportData flag
* @param authorizedUsers list of authorized users
* @return result the taskUri and the export token
* @throws GdcRestApiException
*/
public ProjectExportResult exportProject(String projectId, boolean exportUsers, boolean exportData, String[] authorizedUsers)
throws GdcRestApiException {
l.debug("Exporting project projectId=" + projectId + " users:" + exportUsers + " data:" + exportData + " authorized users:" +
authorizedUsers);
PostMethod req = createPostMethod(getProjectMdUrl(projectId) + PROJECT_EXPORT_URI);
JSONObject param = getProjectExportStructure(exportUsers, exportData, authorizedUsers);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
param.toString().getBytes()));
req.setRequestEntity(request);
ProjectExportResult result = null;
try {
String response = executeMethodOk(req);
result = new ProjectExportResult();
JSONObject responseObject = JSONObject.fromObject(response);
JSONObject exportArtifact = responseObject.getJSONObject("exportArtifact");
JSONObject status = exportArtifact.getJSONObject("status");
result.setTaskUri(status.getString("uri"));
result.setExportToken(exportArtifact.getString("token"));
return result;
} catch (HttpMethodException ex) {
l.debug("Error exporting project", ex);
throw new GdcRestApiException("Error exporting project", ex);
} finally {
req.releaseConnection();
}
}
private JSONObject getProjectExportStructure(boolean exportUsers, boolean exportData, String[] authorizedUsers) {
JSONObject param = new JSONObject();
JSONObject exportProject = new JSONObject();
exportProject.put("exportUsers", (exportUsers) ? (1) : (0));
exportProject.put("exportData", (exportData) ? (1) : (0));
if (authorizedUsers != null && authorizedUsers.length > 0) {
JSONArray aUsers = new JSONArray();
aUsers.addAll(Arrays.asList(authorizedUsers));
exportProject.put("authorizedUsers", aUsers);
}
param.put("exportProject", exportProject);
return param;
}
private GdcRole getRoleFromUri(String roleUri) {
l.debug("Getting role from uri: " + roleUri);
HttpMethod req = createGetMethod( getServerUrl() + roleUri);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
l.debug("Can't getRoleFromUri for uri " + roleUri + ". Invalid response.");
throw new GdcRestApiException("Can't getRoleFromUri for uri " + roleUri + ". Invalid response.");
}
return new GdcRole(parsedResp);
} catch (HttpMethodException ex) {
l.debug("Error getRoleFromUri.", ex);
throw new GdcRestApiException("Error getRoleFromUri", ex);
} finally {
req.releaseConnection();
}
}
private GdcUser getUserFromUri(String userUri) {
l.debug("Getting user from uri: " + userUri);
HttpMethod req = createGetMethod( getServerUrl() + userUri);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
l.debug("Can't getUserFromUri for uri " + userUri + ". Invalid response.");
throw new GdcRestApiException("Can't getUserFromUri for uri " + userUri + ". Invalid response.");
}
return new GdcUser(parsedResp);
} catch (HttpMethodException ex) {
l.debug("Error getUserFromUri.", ex);
throw new GdcRestApiException("Error getUserFromUri", ex);
} finally {
req.releaseConnection();
}
}
public static class GdcRole{
private String name;
private String identifier;
private String uri;
public GdcRole() {
}
public GdcRole(JSONObject role) {
if (role == null || role.isEmpty() || role.isNullObject()) {
throw new GdcRestApiException("Can't extract role from JSON. The JSON is empty.");
}
JSONObject pr = role.getJSONObject("projectRole");
if (pr == null || pr.isEmpty() || pr.isNullObject()) {
throw new GdcRestApiException("Can't extract role from JSON. No projectRole key in the JSON.");
}
JSONObject m = pr.getJSONObject("meta");
if (m == null || m.isEmpty() || m.isNullObject()) {
throw new GdcRestApiException("Can't extract role from JSON. No meta key in the JSON.");
}
JSONObject l = pr.getJSONObject("links");
if (l == null || l.isEmpty() || l.isNullObject()) {
throw new GdcRestApiException("Can't extract role from JSON. No links key in the JSON.");
}
String title = m.getString("title");
if (title == null || title.trim().length() <= 0) {
throw new GdcRestApiException("Can't extract user from JSON. No email key in the JSON.");
}
this.setName(title);
String u = l.getString("roleUsers");
if (u == null || u.trim().length() <= 0) {
throw new GdcRestApiException("Can't extract role from JSON. No roleUsers key in the JSON.");
}
this.setUri(u.replace(USERS_URI,""));
String i = m.getString("identifier");
if (i == null || i.trim().length() <= 0) {
throw new GdcRestApiException("Can't extract user from JSON. No email key in the JSON.");
}
this.setIdentifier(i);
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getIdentifier() {
return identifier;
}
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
public String getUri() {
return uri;
}
public void setUri(String uri) {
this.uri = uri;
}
public boolean validate() {
if (getName() != null && getIdentifier().length() > 0 && getUri() != null) // email is not mandatory
return true;
return false;
}
}
    /**
     * GoodData user account data. Can be populated either field-by-field (for
     * creating a new user) or parsed from the "user" JSON structure returned
     * by the API.
     * <p>
     * NOTE(review): {@link #toString()} includes the password and verifyPassword
     * fields; avoid logging instances of this class — confirm and consider
     * redacting these fields.
     */
    public static class GdcUser {
        private String login;
        private String email;
        private String licence;
        private String firstName;
        private String lastName;
        private String companyName;
        private String position;
        private String timezone;
        private String country;
        private String phoneNumber;
        private String password;
        private String verifyPassword;
        private String ssoProvider;
        private String status;
        private String uri;

        public GdcUser() {
        }

        /**
         * Parses a user from the "user" JSON returned by the API.
         * The "email" value is stored both as the login and the email.
         *
         * @param user the raw JSON object with a "user" root key
         * @throws GdcRestApiException when a required key (user, content, email, links, self) is missing
         */
        public GdcUser(JSONObject user) {
            if (user == null || user.isEmpty() || user.isNullObject()) {
                throw new GdcRestApiException("Can't extract user from JSON. The JSON is empty.");
            }
            JSONObject u = user.getJSONObject("user");
            if (u == null || u.isEmpty() || u.isNullObject()) {
                throw new GdcRestApiException("Can't extract user from JSON. No user key in the JSON.");
            }
            JSONObject c = u.getJSONObject("content");
            if (c == null || c.isEmpty() || c.isNullObject()) {
                throw new GdcRestApiException("Can't extract user from JSON. No content key in the JSON.");
            }
            String v = c.getString("email");
            if (v == null || v.trim().length() <= 0) {
                throw new GdcRestApiException("Can't extract user from JSON. No email key in the JSON.");
            }
            // The email address doubles as the login.
            this.setLogin(v);
            v = c.getString("firstname");
            if (v != null && v.trim().length() > 0) {
                this.setFirstName(v);
            }
            v = c.getString("lastname");
            if (v != null && v.trim().length() > 0) {
                this.setLastName(v);
            }
            v = c.getString("email");
            if (v != null && v.trim().length() > 0) {
                this.setEmail(v);
            }
            v = c.getString("phonenumber");
            if (v != null && v.trim().length() > 0) {
                this.setPhoneNumber(v);
            }
            v = c.getString("status");
            if (v != null && v.trim().length() > 0) {
                this.setStatus(v);
            }
            JSONObject l = u.getJSONObject("links");
            if (l == null || l.isEmpty() || l.isNullObject()) {
                throw new GdcRestApiException("Can't extract user from JSON. No links key in the JSON.");
            }
            // The "self" link becomes this user's URI.
            v = l.getString("self");
            if (v == null || v.trim().length() <= 0) {
                throw new GdcRestApiException("Can't extract user from JSON. No self key in the JSON.");
            }
            this.setUri(v);
        }

        /**
         * @return true when login, password, verifyPassword, firstName, and
         *         lastName are all non-empty (email is not mandatory)
         */
        public boolean validate() {
            if (getLogin() != null && getLogin().length() > 0 && getPassword() != null
                    && getPassword().length() > 0 && getVerifyPassword() != null
                    && getVerifyPassword().length() > 0 && getFirstName() != null
                    && getFirstName().length() > 0 && getLastName() != null
                    && getLastName().length() > 0) // email is not mandatory
                return true;
            return false;
        }

        public String getLogin() {
            return login;
        }

        public void setLogin(String login) {
            this.login = login;
        }

        public String getUri() {
            return uri;
        }

        public void setUri(String u) {
            this.uri = u;
        }

        public String getStatus() {
            return status;
        }

        public void setStatus(String s) {
            this.status = s;
        }

        public String getLicence() {
            return licence;
        }

        public void setLicence(String licence) {
            this.licence = licence;
        }

        public String getFirstName() {
            return firstName;
        }

        public void setFirstName(String firstName) {
            this.firstName = firstName;
        }

        public String getLastName() {
            return lastName;
        }

        public void setLastName(String lastName) {
            this.lastName = lastName;
        }

        public String getCompanyName() {
            return companyName;
        }

        public void setCompanyName(String companyName) {
            this.companyName = companyName;
        }

        public String getPosition() {
            return position;
        }

        public void setPosition(String position) {
            this.position = position;
        }

        public String getTimezone() {
            return timezone;
        }

        public void setTimezone(String timezone) {
            this.timezone = timezone;
        }

        public String getCountry() {
            return country;
        }

        public void setCountry(String country) {
            this.country = country;
        }

        public String getPhoneNumber() {
            return phoneNumber;
        }

        public void setPhoneNumber(String phoneNumber) {
            this.phoneNumber = phoneNumber;
        }

        public String getPassword() {
            return password;
        }

        public void setPassword(String password) {
            this.password = password;
        }

        public String getVerifyPassword() {
            return verifyPassword;
        }

        public void setVerifyPassword(String verifyPassword) {
            this.verifyPassword = verifyPassword;
        }

        public String getSsoProvider() {
            return ssoProvider;
        }

        public void setSsoProvider(String ssoProvider) {
            this.ssoProvider = ssoProvider;
        }

        public String getEmail() {
            return email;
        }

        public void setEmail(String email) {
            this.email = email;
        }

        // NOTE(review): exposes password/verifyPassword in the output — do not log.
        @Override
        public String toString() {
            return "DWGdcUser [getLogin()=" + getLogin() + ", getUri()=" + getUri() + ", getStatus()=" + getStatus()
                    + ", getLicence()=" + getLicence() + ", getFirstName()=" + getFirstName() + ", getLastName()="
                    + getLastName() + ", getCompanyName()=" + getCompanyName() + ", getPosition()=" + getPosition()
                    + ", getTimezone()=" + getTimezone() + ", getCountry()=" + getCountry() + ", getPhoneNumber()="
                    + getPhoneNumber() + ", getPassword()=" + getPassword() + ", getVerifyPassword()="
                    + getVerifyPassword() + ", getEmail()=" + getEmail() + "," + " getSsoProvider()=" + getSsoProvider() + "]";
        }
    }
/**
* Create a new user
*
* @param domain the domain where the user is going to be created
* @param user new user data
* @return the new user's URI
* @throws GdcRestApiException
*/
public String createUser(String domain, GdcUser user)
throws GdcRestApiException {
if (user != null && user.validate()) {
l.debug("Creating new user " + user.getLogin() + " in domain " + domain);
PostMethod req = createPostMethod(getServerUrl() + DOMAIN_URI + "/" + domain + DOMAIN_USERS_SUFFIX);
JSONObject param = getCreateUserStructure(user);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
param.toString().getBytes()));
req.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(req);
JSONObject responseObject = JSONObject.fromObject(response);
result = responseObject.getString("uri");
return result;
} catch (HttpMethodException ex) {
l.debug("Error creating user ", ex);
throw new GdcRestApiException("Error creating user ", ex);
} finally {
req.releaseConnection();
}
} else {
throw new InvalidParameterException("The new user must contain valid login, firstName, lastName, and password fields.");
}
}
private JSONObject getCreateUserStructure(GdcUser user) {
JSONObject param = new JSONObject();
JSONObject accountSetting = new JSONObject();
accountSetting.put("login", user.getLogin());
accountSetting.put("password", user.getPassword());
accountSetting.put("verifyPassword", user.getVerifyPassword());
accountSetting.put("firstName", user.getFirstName());
accountSetting.put("lastName", user.getLastName());
if (user.getCompanyName() != null && user.getCompanyName().length() > 0)
accountSetting.put("companyName", user.getCompanyName());
if (user.getPosition() != null && user.getPosition().length() > 0)
accountSetting.put("position", user.getPosition());
if (user.getCountry() != null && user.getCountry().length() > 0)
accountSetting.put("country", user.getCountry());
if (user.getTimezone() != null && user.getTimezone().length() > 0)
accountSetting.put("timezone", user.getTimezone());
else
accountSetting.put("timezone", null);
if (user.getPhoneNumber() != null && user.getPhoneNumber().length() > 0)
accountSetting.put("phoneNumber", user.getPhoneNumber());
if (user.getSsoProvider() != null && user.getSsoProvider().length() > 0)
accountSetting.put("ssoProvider", user.getSsoProvider());
if (user.getEmail() != null && user.getEmail().length() > 0)
accountSetting.put("email", user.getEmail());
param.put("accountSetting", accountSetting);
return param;
}
private String getRoleUri(String projectId, String role) {
String roleUri = null;
// for backward compatibility
if(ROLES.containsKey(role.toUpperCase())) {
role = ROLES.get(role.toUpperCase());
}
List<GdcRole> roles = getProjectRoles(projectId);
for(GdcRole r : roles) {
String identifier = r.getIdentifier();
if(identifier.equalsIgnoreCase(role)) {
roleUri = r.getUri();
}
}
return roleUri;
}
/**
* Create a new user
*
* @param projectId project ID
* @param uris user URIs
* @param role user's role
* @return the new user's URI
* @throws GdcRestApiException
*/
public void addUsersToProject(String projectId, List<String> uris, String role)
throws GdcRestApiException {
l.debug("Adding users " + uris + " to project " + projectId + " in role "+ role);
String projectsUrl = getProjectUrl(projectId);
String roleUri = getRoleUri(projectId, role);
addUsersToProjectWithRoleUri(projectId, uris, roleUri);
}
public void addUsersToProjectWithRoleUri(String projectId, List<String> uris, String roleUri)
throws GdcRestApiException {
l.debug("Adding users " + uris + " to project " + projectId + " with roleUri "+ roleUri);
String projectsUrl = getProjectUrl(projectId);
PostMethod req = createPostMethod(projectsUrl + PROJECT_USERS_SUFFIX);
JSONObject param = getAddUsersToProjectStructure(uris, roleUri);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
param.toString().getBytes()));
req.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(req);
JSONObject responseObject = JSONObject.fromObject(response);
JSONObject projectUsersUpdateResult = responseObject.getJSONObject("projectUsersUpdateResult");
JSONArray failed = projectUsersUpdateResult.getJSONArray("failed");
if (!failed.isEmpty()) {
String errMsg = "Following users can't be added to the project:";
for (Object uri : failed.toArray()) {
errMsg += " " + uris.toString();
}
l.debug(errMsg);
throw new GdcRestApiException(errMsg);
}
//JSONArray successful = projectUsersUpdateResult.getJSONArray("successful");
} catch (HttpMethodException ex) {
l.debug("Error adding users " + uris + " to project", ex);
throw new GdcRestApiException("Error adding users " + uris + " to project ", ex);
} finally {
req.releaseConnection();
}
}
private JSONObject getAddUsersToProjectStructure(List<String> uris, String roleUri) {
JSONObject param = new JSONObject();
JSONArray users = new JSONArray();
JSONArray roles = null;
if (roleUri != null && roleUri.trim().length() > 0) {
roles = new JSONArray();
roles.add(roleUri);
}
for (String uri : uris) {
JSONObject user = new JSONObject();
JSONObject content = new JSONObject();
if (roles != null)
content.put("userRoles", roles);
content.put("status", "ENABLED");
user.put("content", content);
JSONObject links = new JSONObject();
links.put("self", uri);
user.put("links", links);
JSONObject item = new JSONObject();
item.put("user", user);
users.add(item);
}
param.put("users", users);
return param;
}
/**
* Disables a user in project
*
* @param projectId project ID
* @param uris user URIs
* @throws GdcRestApiException
*/
public void disableUsersInProject(String projectId, List<String> uris)
throws GdcRestApiException {
l.debug("Disabling users " + uris + " in project " + projectId);
String projectsUrl = getProjectUrl(projectId);
PostMethod req = createPostMethod(projectsUrl + PROJECT_USERS_SUFFIX);
JSONObject param = getDisableUsersInProjectStructure(uris);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
param.toString().getBytes()));
req.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(req);
JSONObject responseObject = JSONObject.fromObject(response);
JSONObject projectUsersUpdateResult = responseObject.getJSONObject("projectUsersUpdateResult");
JSONArray failed = projectUsersUpdateResult.getJSONArray("failed");
if (!failed.isEmpty()) {
String errMsg = "Following users can't be disabled in the project:";
for (Object uri : failed.toArray()) {
errMsg += " " + uris.toString();
}
l.debug(errMsg);
throw new GdcRestApiException(errMsg);
}
//JSONArray successful = projectUsersUpdateResult.getJSONArray("successful");
} catch (HttpMethodException ex) {
l.debug("Error disabling users " + uris + " in project", ex);
throw new GdcRestApiException("Error disabling users " + uris + " in project ", ex);
} finally {
req.releaseConnection();
}
}
private JSONObject getDisableUsersInProjectStructure(List<String> uris) {
JSONObject param = new JSONObject();
JSONArray users = new JSONArray();
for (String uri : uris) {
JSONObject user = new JSONObject();
JSONObject content = new JSONObject();
content.put("status", "DISABLED");
user.put("content", content);
JSONObject links = new JSONObject();
links.put("self", uri);
user.put("links", links);
JSONObject item = new JSONObject();
item.put("user", user);
users.add(item);
}
param.put("users", users);
return param;
}
/**
* Returns the selected project's roles
*
* @param pid project ID
* @return array of the project's users
*/
public ArrayList<GdcRole> getProjectRoles(String pid) {
ArrayList<GdcRole> ret = new ArrayList<GdcRole>();
l.debug("Executing getProjectRoles for project id=" + pid);
HttpMethod req = createGetMethod(getProjectUrl(pid) + ROLES_URI);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
l.debug("Can't getProjectRoles for project id=" + pid + ". Invalid response.");
throw new GdcRestApiException("Can't getProjectRoles for project id=" + pid + ". Invalid response.");
}
JSONObject projectRoles = parsedResp.getJSONObject("projectRoles");
if (projectRoles == null || projectRoles.isNullObject() || projectRoles.isEmpty()) {
l.debug("Can't getProjectRoles for project id=" + pid + ". No projectRoles key in the response.");
throw new GdcRestApiException("Can't getProjectRoles for project id=" + pid + ". No projectRoles key in the response.");
}
JSONArray roles = projectRoles.getJSONArray("roles");
if (roles == null) {
l.debug("Can't getRoleUsers. No getProjectRoles key in the response.");
throw new GdcRestApiException("Can't getProjectRoles. No roles key in the response.");
}
for (Object o : roles) {
String role = (String) o;
GdcRole g = getRoleFromUri(role);
ret.add(g);
}
return ret;
} finally {
req.releaseConnection();
}
}
    /**
     * Returns the user URIs associated with a role.
     *
     * @param role            the role whose users are listed; identifier, uri and name must all be set
     * @param activeUsersOnly currently IGNORED — the associatedUsers resource only returns
     *                        user URIs, so no status filtering happens here.
     *                        NOTE(review): confirm whether callers expect filtering.
     * @return list of user URIs associated with the role
     * @throws GdcRestApiException on an invalid role object or response
     */
    public ArrayList<String> getRoleUsers(GdcRole role, boolean activeUsersOnly) {
        ArrayList<String> ret = new ArrayList<String>();
        if(role == null || role.getIdentifier() == null || role.getIdentifier().length() == 0 || role.getUri() == null
            || role.getUri().length() == 0 || role.getName() == null || role.getName().length() == 0) {
            l.debug("Can't getRoleUsers . Invalid role object passed.");
            throw new GdcRestApiException("Can't getRoleUsers. Invalid role object passed.");
        }
        l.debug("Executing getRoleUsers for role "+role.getIdentifier());
        HttpMethod req = createGetMethod(getServerUrl() + role.getUri() + USERS_URI);
        try {
            String resp = executeMethodOk(req);
            JSONObject parsedResp = JSONObject.fromObject(resp);
            if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
                l.debug("Can't getRoleUsers. Invalid response.");
                throw new GdcRestApiException("Can't getRoleUsers. Invalid response.");
            }
            JSONObject associatedUsers = parsedResp.getJSONObject("associatedUsers");
            if (associatedUsers == null || associatedUsers.isNullObject() || associatedUsers.isEmpty()) {
                l.debug("Can't getRoleUsers. Invalid response. No associatedUsers key.");
                throw new GdcRestApiException("Can't getRoleUsers. Invalid response. No associatedUsers key.");
            }
            JSONArray users = associatedUsers.getJSONArray("users");
            if (users == null) {
                l.debug("Can't getRoleUsers. No users key in the response.");
                throw new GdcRestApiException("Can't getRoleUsers. No users key in the response.");
            }
            for (Object o : users) {
                // Each element is a user URI string.
                String user = (String) o;
                ret.add(user);
            }
            return ret;
        } finally {
            req.releaseConnection();
        }
    }
/**
* Returns the selected project's users
*
* @param pid project ID
* @param activeUsersOnly lists only active users
* @return array of the project's users
*/
public ArrayList<GdcUser> getProjectUsers(String pid, boolean activeUsersOnly) {
ArrayList<GdcUser> ret = new ArrayList<GdcUser>();
l.debug("Executing getProjectUsers for project id=" + pid);
HttpMethod req = createGetMethod(getProjectUrl(pid) + PROJECT_USERS_SUFFIX);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
l.debug("Can't getProjectUsers for project id=" + pid + ". Invalid response.");
throw new GdcRestApiException("Can't getProjectUsers for project id=" + pid + ". Invalid response.");
}
JSONArray users = parsedResp.getJSONArray("users");
if (users == null) {
l.debug("Can't getProjectUsers for project id=" + pid + ". No users key in the response.");
throw new GdcRestApiException("Can't getProjectUsers for project id=" + pid + ". No users key in the response.");
}
for (Object o : users) {
JSONObject user = (JSONObject) o;
GdcUser g = new GdcUser(user);
if ((activeUsersOnly && "ENABLED".equalsIgnoreCase(g.getStatus())) || (!activeUsersOnly)) {
ret.add(g);
}
}
return ret;
} finally {
req.releaseConnection();
}
}
/**
* Imports the project
*
* @param projectId the project's ID
* @param token export token
* @return result the taskUri
* @throws GdcRestApiException
*/
public String importProject(String projectId, String token)
throws GdcRestApiException {
l.debug("Importing project projectId=" + projectId + " token:" + token);
PostMethod req = createPostMethod(getProjectMdUrl(projectId) + PROJECT_IMPORT_URI);
JSONObject param = getImportProjectStructure(token);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
param.toString().getBytes()));
req.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(req);
JSONObject responseObject = JSONObject.fromObject(response);
result = responseObject.getString("uri");
return result;
} catch (HttpMethodException ex) {
l.debug("Error importing project", ex);
throw new GdcRestApiException("Error importing project", ex);
} finally {
req.releaseConnection();
}
}
private JSONObject getImportProjectStructure(String token) {
JSONObject param = new JSONObject();
JSONObject importProject = new JSONObject();
importProject.put("token", token);
param.put("importProject", importProject);
return param;
}
/**
* Imports a MD object to the project
*
* @param projectId the project's ID
* @param token export token
* @param overwrite overwrite existing objects
* @param updateLDM update LDM names, descriptions and tags
* @return result the taskUri
* @throws GdcRestApiException
*/
public String importMD(String projectId, String token, boolean overwrite, boolean updateLDM)
throws GdcRestApiException {
l.debug("Importing metadata objects for projectId=" + projectId + " token:" + token);
PostMethod req = createPostMethod(getProjectMdUrl(projectId) + PROJECT_PARTIAL_IMPORT_URI);
JSONObject param = getMDImportStructure(token, overwrite, updateLDM);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
param.toString().getBytes()));
req.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(req);
JSONObject responseObject = JSONObject.fromObject(response);
result = responseObject.getString("uri");
return result;
} catch (HttpMethodException ex) {
l.debug("Error importing metadata objects for projectId=" + projectId + " token:" + token, ex);
throw new GdcRestApiException("Error importing metadata objects for projectId=" + projectId + " token:" + token, ex);
} finally {
req.releaseConnection();
}
}
private JSONObject getMDImportStructure(String token, boolean overwrite, boolean updateLDM) {
JSONObject param = new JSONObject();
JSONObject importMD = new JSONObject();
importMD.put("token", token);
importMD.put("overwriteNewer", (overwrite) ? (1) : (0));
importMD.put("updateLDMObjects", (updateLDM) ? (1) : (0));
param.put("partialMDImport", importMD);
return param;
}
/**
* Exports selected MD object with dependencies from the project
*
* @param projectId the project's ID
* @param ids - list of the exported MD objects IDs
* @return result the taskUri and the export token
* @throws GdcRestApiException
*/
public ProjectExportResult exportMD(String projectId, List<Integer> ids)
throws GdcRestApiException {
l.debug("Exporting metadata objects with IDs " + ids + " from project " + projectId);
PostMethod req = createPostMethod(getProjectMdUrl(projectId) + PROJECT_PARTIAL_EXPORT_URI);
JSONObject param = getMDExportStructure(projectId, ids);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
param.toString().getBytes()));
req.setRequestEntity(request);
ProjectExportResult result = null;
try {
String response = executeMethodOk(req);
result = new ProjectExportResult();
JSONObject responseObject = JSONObject.fromObject(response);
JSONObject exportArtifact = responseObject.getJSONObject("partialMDArtifact");
JSONObject status = exportArtifact.getJSONObject("status");
result.setTaskUri(status.getString("uri"));
result.setExportToken(exportArtifact.getString("token"));
return result;
} catch (HttpMethodException ex) {
l.debug("Error exporting metadata objects with IDs " + ids + " from project " + projectId, ex);
throw new GdcRestApiException("Error exporting metadata objects with IDs " + ids + " from project " + projectId, ex);
} finally {
req.releaseConnection();
}
}
private JSONObject getMDExportStructure(String projectId, List<Integer> ids) {
JSONObject param = new JSONObject();
String puri = "/gdc/md/" + projectId;
JSONObject partialMDExport = new JSONObject();
JSONArray uris = new JSONArray();
for (Integer id : ids) {
uris.add(puri + "/obj/" + id);
}
partialMDExport.put("uris", uris);
param.put("partialMDExport", partialMDExport);
return param;
}
/**
* Checks if the migration is finished
*
* @param link the link returned from the start loading
* @return the loading status
*/
public String getMigrationStatus(String link) throws HttpMethodException {
l.debug("Getting project migration status uri=" + link);
HttpMethod ptm = createGetMethod(getServerUrl() + link);
try {
String response = executeMethodOk(ptm);
JSONObject task = JSONObject.fromObject(response);
JSONObject state = task.getJSONObject("taskState");
if (state != null && !state.isNullObject() && !state.isEmpty()) {
String status = state.getString("status");
l.debug("Migration status=" + status);
return status;
} else {
l.debug("No taskState structure in the migration status!");
throw new GdcRestApiException("No taskState structure in the migration status!");
}
} finally {
ptm.releaseConnection();
}
}
    /**
     * Starts an ETL graph execution and waits for it to finish.
     * (Javadoc fixed: the previous version documented projectId/maql parameters
     * that this method does not have.)
     *
     * @param processUri the process resource URI
     * @param graph      the graph to execute within the process
     * @param params     key/value parameters passed to the execution
     * @return result {@link GraphExecutionResult} of the finished execution
     * @throws GdcRestApiException when the execution finishes with a non-OK status
     */
    public GraphExecutionResult executeGraph(String processUri, String graph, Map<String,String> params) throws GdcRestApiException {
        l.debug("Executing Graph processUri=" + processUri + " graph = " + graph);
        PostMethod execPost = createPostMethod(processUri + "/executions");
        JSONObject execStructure = getGraphExecStructure(graph, params);
        InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
                execStructure.toString().getBytes()));
        execPost.setRequestEntity(request);
        try {
            String response = executeMethodOk(execPost);
            JSONObject responseObject = JSONObject.fromObject(response);
            // The detail link points at the executionDetail resource that is polled below.
            String detailUri = responseObject.getJSONObject("executionTask").getJSONObject("links").getString("detail");
            GraphExecutionResult execResult = getGraphExecutionResult(detailUri);
            if (!GraphExecutionResult.OK.equals(execResult.getStatus())) {
                throw new GdcRestApiException("ETL error, see log file at " + execResult.logUrl);
            }
            return execResult;
        } finally {
            execPost.releaseConnection();
        }
    }
/**
* Checks if the migration is finished
*
* @param link the link returned from the start loading
* @return the loading status
*/
public GraphExecutionResult getGraphExecutionResult(String link) throws HttpMethodException {
l.debug("Getting Graph execution status uri=" + link);
HttpMethod ptm = createGetMethod(getServerUrl() + link);
try {
String response = "";
while (true) {
response = executeMethodOk(ptm);
JSONObject task = JSONObject.fromObject(response);
JSONObject state = task.getJSONObject("executionDetail");
if (state != null && !state.isNullObject() && !state.isEmpty()) {
String status = state.getString("status");
l.debug("TaskMan status=" + status);
if (!"RUNNING".equals(status)) {
String logUrl = state.getString("logFileName");
return new GraphExecutionResult(status, logUrl);
}
} else {
l.debug("No executionDetail structure in the execution status!");
throw new GdcRestApiException("No execution structure in the execution status!");
}
}
} finally {
ptm.releaseConnection();
}
}
/**
* Executes the MAQL and creates/modifies the project's LDM
*
* @param projectId the project's ID
* @param maql String with the MAQL statements
* @return result String
* @throws GdcRestApiException
*/
public String executeDML(String projectId, String maql) throws GdcRestApiException {
l.debug("Executing MAQL DML projectId=" + projectId + " MAQL DML:\n" + maql);
PostMethod maqlPost = createPostMethod(getProjectMdUrl(projectId) + DML_EXEC_URI);
JSONObject maqlStructure = getMAQLExecStructure(maql);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
maqlStructure.toString().getBytes()));
maqlPost.setRequestEntity(request);
String result = null;
try {
String response = executeMethodOk(maqlPost);
JSONObject responseObject = JSONObject.fromObject(response);
String uris = responseObject.getString("uri");
return uris;
} catch (HttpMethodException ex) {
l.debug("MAQL DML execution: ", ex);
throw new GdcRestApiException("MAQL DML execution: ", ex);
} finally {
maqlPost.releaseConnection();
}
}
/**
* Returns the pull API JSON structure
*
* @param maql String with the MAQL statements
* @return the MAQL API JSON structure
*/
private JSONObject getMAQLExecStructure(String maql) {
JSONObject maqlStructure = new JSONObject();
JSONObject maqlObj = new JSONObject();
maqlObj.put("maql", maql);
maqlStructure.put("manage", maqlObj);
return maqlStructure;
}
private JSONObject getGraphExecStructure(String graph, Map<String, String> params) {
JSONObject structure = new JSONObject();
JSONObject execution = new JSONObject();
execution.put("graph", graph);
JSONObject paramsStructure = new JSONObject();
for (Map.Entry<String, String> entry : params.entrySet()) {
paramsStructure.put(entry.getKey(), entry.getValue());
}
execution.put("params", paramsStructure);
structure.put("execution", execution);
return structure;
}
    /** Executes the HTTP method with relogin-on-401 enabled and returns the response body. */
    protected String executeMethodOk(HttpMethod method) throws HttpMethodException {
        return executeMethodOk(method, true);
    }
    /** Executes the HTTP method with the default retry budget (16) and returns the response body. */
    protected String executeMethodOk(HttpMethod method, boolean reloginOn401) throws HttpMethodException {
        return executeMethodOk(method, reloginOn401, 16);
    }
    /**
     * Executes HttpMethod and test if the response if 200(OK)
     *
     * @param method the HTTP method
     * @param reloginOn401 when true, a 401 response triggers a token refresh and one retry
     * @param retries remaining retry budget for 503 responses carrying a Retry-After header
     * @return response body as String
     * @throws HttpMethodException when the call fails or the response body can't be read
     */
    private String executeMethodOk(HttpMethod method, boolean reloginOn401, int retries) throws HttpMethodException {
        try {
            executeMethodOkOnly(method, reloginOn401, retries);
            return method.getResponseBodyAsString();
        } catch (IOException e) {
            l.debug("Error invoking GoodData REST API.", e);
            throw new HttpMethodException("Error invoking GoodData REST API.", e);
        }
    }
    /** Executes the method for its side effects only (response body discarded). */
    private void executeMethodOkOnly(HttpMethod method) throws HttpMethodException {
        executeMethodOkOnly(method, true);
    }
    /**
     * Executes the method for its side effects only.
     * Intentionally delegates to executeMethodOk (not ...OkOnly) so that the
     * checked IOException from the 3-arg variant is converted to
     * HttpMethodException here; the response body is discarded.
     */
    private void executeMethodOkOnly(HttpMethod method, boolean reloginOn401) throws HttpMethodException {
        executeMethodOk(method, reloginOn401, 16);
    }
/**
* Executes HttpMethod and test if the response if 200(OK)
*
* @param method the HTTP method
* @return response as Stream
* @throws HttpMethodException
*/
private void executeMethodOkOnly(HttpMethod method, boolean reloginOn401, int retries) throws HttpMethodException, IOException {
try {
client.executeMethod(method);
/* HttpClient is rather unsupportive when it comes to robust interpreting
* of response classes; which is mandated by RFC and extensively used in
* GoodData API. Let us grok the classes ourselves. */
/* 2xx success class */
if (method.getStatusCode() == HttpStatus.SC_CREATED) {
return;
} else if (method.getStatusCode() == HttpStatus.SC_ACCEPTED) {
throw new HttpMethodNotFinishedYetException(method.getResponseBodyAsString());
} else if (method.getStatusCode() == HttpStatus.SC_NO_CONTENT) {
return;
} else if (method.getStatusCode() >= HttpStatus.SC_OK
&& method.getStatusCode() < HttpStatus.SC_BAD_REQUEST) {
return;
/* 4xx user errors and
* 5xx backend trouble */
} else if (method.getStatusCode() == HttpStatus.SC_UNAUTHORIZED && reloginOn401) {
// refresh the temporary token
setTokenCookie();
executeMethodOkOnly(method, false, retries);
return;
} else if (method.getStatusCode() == HttpStatus.SC_SERVICE_UNAVAILABLE && retries-- > 0
&& method.getResponseHeader("Retry-After") != null) {
/* This is recommended by RFC 2616 and should probably be dealt with by the
* client library. May god have mercy with it. */
int timeout = Integer.parseInt(method.getResponseHeader("Retry-After").getValue());
l.debug("Remote asked us to retry after " + timeout + " seconds, sleeping.");
l.debug(retries + " more retries");
try {
Thread.currentThread().sleep(Constants.RETRY_INTERVAL * timeout);
} catch (java.lang.InterruptedException e) {
}
executeMethodOkOnly(method, false, retries);
return;
} else if (method.getStatusCode() == HttpStatus.SC_GONE) {
throw new GdcProjectAccessException("Invalid project.");
} else if (method.getStatusCode() >= HttpStatus.SC_BAD_REQUEST
&& method.getStatusCode() < 600) {
throw new HttpMethodException(method);
/* 1xx informational responses class and
* 3xx redirects should not get past the client library internals. */
} else {
throw new HttpMethodException("Unsupported HTTP status received from remote: " +
method.getStatusCode());
}
} catch (HttpException e) {
l.debug("Error invoking GoodData REST API.", e);
throw new HttpMethodException("Error invoking GoodData REST API.", e);
}
}
    /**
     * Returns the data interfaces (SLI collection) URI for a project.
     *
     * @param projectId project ID
     * @return SLI collection URI
     */
    public String getSLIsUri(String projectId) {
        return getProjectMdUrl(projectId) + DATA_INTERFACES_URI;
    }
    /**
     * Returns the SLI descriptor URI.
     *
     * @param sliId SLI ID
     * @param projectId project ID
     * @return DLI URI
     */
    public String getSLIUri(String sliId, String projectId) {
        return getProjectMdUrl(projectId) + DATA_INTERFACES_URI + "/" + sliId + SLI_DESCRIPTOR_URI;
    }
    /** Returns the configured GoodData server base URL. */
    protected String getServerUrl() {
        return config.getUrl();
    }
    /**
     * Constructs project's metadata uri
     *
     * @param projectId project ID
     * @return the absolute project metadata URL
     */
    protected String getProjectMdUrl(String projectId) {
        return getServerUrl() + MD_URI + projectId;
    }
    /**
     * Constructs project's projects uri
     *
     * @param projectId project ID
     * @return the absolute project resource URL
     */
    protected String getProjectUrl(String projectId) {
        return getServerUrl() + PROJECTS_URI + "/" + projectId;
    }
/**
* Gets the project ID from the project URI
*
* @param projectUri project URI
* @return the project id
*/
public String getProjectIdFromUri(String projectUri) {
String[] cmpnts = projectUri.split("/");
if (cmpnts != null && cmpnts.length > 0) {
String id = cmpnts[cmpnts.length - 1];
return id;
} else
throw new GdcRestApiException("Invalid project uri structure uri=" + projectUri);
}
    /**
     * Gets the project delete URI (server-relative path) from the project id.
     *
     * @param projectId project ID
     * @return the project delete URI
     */
    public String getProjectDeleteUri(String projectId) {
        return PROJECTS_URI + "/" + projectId;
    }
    /**
     * Profile getter
     *
     * @return the profile of the currently logged user
     */
    protected JSONObject getProfile() {
        return profile;
    }
    /**
     * Invites a new user to a project without assigning a specific role.
     *
     * @param projectId project ID
     * @param eMail invited user e-mail
     * @param message invitation message
     */
    public void inviteUser(String projectId, String eMail, String message) {
        // Delegate with role=null: no role is attached to the invitation.
        this.inviteUser(projectId, eMail, message, null);
    }
/**
* Invites a new user to a project
*
* @param projectId project ID
* @param eMail invited user e-mail
* @param message invitation message
*/
public void inviteUser(String projectId, String eMail, String message, String role) {
l.debug("Executing inviteUser projectId=" + projectId + " e-mail=" + eMail + " message=" + message);
PostMethod invitePost = createPostMethod(getServerUrl() + getProjectDeleteUri(projectId) + INVITATION_URI);
JSONObject inviteStructure = getInviteStructure(projectId, eMail, message, role);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
inviteStructure.toString().getBytes()));
invitePost.setRequestEntity(request);
try {
executeMethodOk(invitePost);
} catch (HttpMethodException ex) {
l.debug("Failed executing inviteUser projectId=" + projectId + " e-mail=" + eMail + " message=" + message);
throw new GdcRestApiException("Failed executing inviteUser projectId=" + projectId + " e-mail=" + eMail + " message=" + message, ex);
} finally {
invitePost.releaseConnection();
}
}
/**
* Creates a new invitation structure
*
* @param pid project id
* @param eMail e-mail
* @param msg invitation message
* @return the new invitation structure
*/
private JSONObject getInviteStructure(String pid, String eMail, String msg, String role) {
JSONObject content = new JSONObject();
content.put("firstname", "");
content.put("lastname", "");
content.put("email", eMail);
if (role != null && role.length() > 0) {
String roleUri = getRoleUri(pid, role);
if (roleUri == null)
throw new InvalidParameterException("The role '" + role + "' is not recognized by the GoodData platform.");
content.put("role", roleUri);
}
JSONObject action = new JSONObject();
action.put("setMessage", msg);
content.put("action", action);
JSONObject invitation = new JSONObject();
invitation.put("content", content);
JSONObject invitations = new JSONObject();
JSONArray ia = new JSONArray();
JSONObject inve = new JSONObject();
inve.put("invitation", invitation);
ia.add(inve);
invitations.put("invitations", ia);
return invitations;
}
/**
* Converst MD identifier to uri
*
* @param projectId project ID
* @param identifiers MD object identifiers
* @return map identifier:uri
*/
public Map<String, String> identifierToUri(String projectId, String[] identifiers) {
l.debug("Executing identifierToUri identifier=" + identifiers);
Map<String, String> result = new HashMap<String, String>();
PostMethod p = createPostMethod(getProjectMdUrl(projectId) + IDENTIFIER_URI);
JSONObject is = getIdentifiersStructure(identifiers);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
is.toString().getBytes()));
p.setRequestEntity(request);
try {
String resp = executeMethodOk(p);
JSONObject parsedResp = JSONObject.fromObject(resp);
JSONArray idents = parsedResp.getJSONArray("identifiers");
if (idents != null && !idents.isEmpty()) {
for (int i = 0; i < idents.size(); i++) {
JSONObject ident = idents.getJSONObject(i);
result.put(ident.getString("identifier"), ident.getString("uri"));
}
}
} catch (HttpMethodException ex) {
l.debug("Failed executing identifierToUri identifier=" + identifiers);
throw new GdcRestApiException("Failed executing identifierToUri identifier=" + identifiers, ex);
} finally {
p.releaseConnection();
}
return result;
}
/**
* Creates a new identifiers structure
*
* @param identifiers MD object identifier
* @return the new identifiers structure
*/
private JSONObject getIdentifiersStructure(String[] identifiers) {
JSONObject identifierToUri = new JSONObject();
JSONArray ids = new JSONArray();
for (int i = 0; i < identifiers.length; i++) {
ids.add(identifiers[i]);
}
identifierToUri.put("identifierToUri", ids);
return identifierToUri;
}
    /**
     * Retrieves a metadata object definition by Uri
     *
     * @param objectUri object uri (server-relative; the server URL is prepended here)
     * @return the parsed object
     * @throws GdcRestApiException when the response parses to a null object
     */
    public JSONObject getObjectByUri(String objectUri) {
        l.debug("Executing getObjectByUri uri=" + objectUri);
        HttpMethod req = createGetMethod(getServerUrl() + objectUri);
        try {
            String resp = executeMethodOk(req);
            // workaround for a possible mess in MAQL source and missing charset in /obj response:
            // escaped underscores and non-breaking spaces (U+00A0) are normalized to plain
            // spaces before parsing.
            resp = resp.replace("\\\\_", " ").replace("\u00A0", " ");
            JSONObject parsedResp = JSONObject.fromObject(resp);
            if (parsedResp.isNullObject()) {
                l.debug("Can't getObjectByUri object uri=" + objectUri);
                throw new GdcRestApiException("Can't getObjectByUri object uri=" + objectUri);
            }
            return parsedResp;
        } finally {
            req.releaseConnection();
        }
    }
/**
* Retrieves a metadata object definition
*
* @param objectUri object uri
* @return the object to get
*/
public MetadataObject getMetadataObject(String objectUri) {
l.debug("Executing getMetadataObject uri=" + objectUri);
MetadataObject o = new MetadataObject(getObjectByUri(objectUri));
return o;
}
/**
* Retrieves a metadata object definition
*
* @param projectId project id (hash)
* @param objectId object id (integer)
* @return the object to get
*/
public MetadataObject getMetadataObject(String projectId, int objectId) {
l.debug("Executing getMetadataObject id=" + objectId + " on project id=" + projectId);
return getMetadataObject(MD_URI + projectId + OBJ_URI + "/" + objectId);
}
/**
* Retrieves a metadata object definition
*
* @param projectId project id (hash)
* @param identifier object identifier
* @return the object to get
*/
public MetadataObject getMetadataObject(String projectId, String identifier) {
l.debug("Executing getObjectByIdentifier identifier=" + identifier);
Map<String, String> uris = identifierToUri(projectId, new String[]{identifier});
if (uris != null && uris.size() > 0) {
String uri = uris.get(identifier);
if (uri != null && uri.length() > 0)
return getMetadataObject(uri);
else {
l.debug("Can't getObjectByIdentifier identifier=" + identifier + " The identifier doesn't exists.");
throw new GdcRestApiException("Can't getObjectByIdentifier identifier=" + identifier + " The identifier doesn't exists.");
}
} else {
l.debug("Can't getObjectByIdentifier identifier=" + identifier + " The identifier doesn't exists.");
throw new GdcRestApiException("Can't getObjectByIdentifier identifier=" + identifier + " The identifier doesn't exists.");
}
}
/**
* Returns the dependent objects
*
* @param uri the uri of the top-level object
* @return list of dependent objects
*/
public List<JSONObject> using(String uri) {
l.debug("Executing using uri=" + uri);
List<JSONObject> ret = new ArrayList<JSONObject>();
//HACK!
String usedUri = uri.replace("/obj/", "/using/");
HttpMethod req = createGetMethod(getServerUrl() + usedUri);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
l.debug("Can't call using on uri=" + uri + ". Invalid response.");
throw new GdcRestApiException("Can't call using on uri=" + uri + ". Invalid response.");
}
JSONObject using = parsedResp.getJSONObject("using");
if (using == null || using.isNullObject() || using.isEmpty()) {
l.debug("Can't call using on uri=" + uri + ". No using data.");
throw new GdcRestApiException("Can't call using on uri=" + uri + ". No using data.");
}
JSONArray nodes = using.getJSONArray("nodes");
if (nodes == null) {
l.debug("Can't call using on uri=" + uri + ". No nodes key in the response.");
throw new GdcRestApiException("Can't call using on uri=" + uri + ". No nodes key in the response.");
}
for (Object o : nodes) {
JSONObject obj = (JSONObject) o;
ret.add(obj);
}
return ret;
} finally {
req.releaseConnection();
}
}
/**
* Returns the dependent objects
*
* @param uri the uri of the top-level object
* @return list of dependent objects
*/
public List<JSONObject> usedBy(String uri) {
l.debug("Executing usedby uri=" + uri);
List<JSONObject> ret = new ArrayList<JSONObject>();
//HACK!
String usedUri = uri.replace("/obj/", "/usedby/");
HttpMethod req = createGetMethod(getServerUrl() + usedUri);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if (parsedResp == null || parsedResp.isNullObject() || parsedResp.isEmpty()) {
l.debug("Can't call usedby on uri=" + uri + ". Invalid response.");
throw new GdcRestApiException("Can't call usedby on uri=" + uri + ". Invalid response.");
}
JSONObject usedby = parsedResp.getJSONObject("usedby");
if (usedby == null || usedby.isNullObject() || usedby.isEmpty()) {
l.debug("Can't call usedby on uri=" + uri + ". No usedby data.");
throw new GdcRestApiException("Can't call usedby on uri=" + uri + ". No usedby data.");
}
JSONArray nodes = usedby.getJSONArray("nodes");
if (nodes == null) {
l.debug("Can't call usedby on uri=" + uri + ". No nodes key in the response.");
throw new GdcRestApiException("Can't call usedby on uri=" + uri + ". No nodes key in the response.");
}
for (Object o : nodes) {
JSONObject obj = (JSONObject) o;
ret.add(obj);
}
return ret;
} finally {
req.releaseConnection();
}
}
/**
* Creates a new object in the metadata server
*
* @param projectId project id (hash)
* @param content the new object content
* @return the new object
*/
public JSONObject createMetadataObject(String projectId, JSON content) {
l.debug("Executing createMetadataObject on project id=" + projectId + "content='" + content.toString() + "'");
PostMethod req = createPostMethod(getProjectMdUrl(projectId) + OBJ_URI + "?createAndGet=true");
try {
String str = content.toString();
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(str.getBytes("utf-8")));
req.setRequestEntity(request);
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
return parsedResp;
} catch (HttpMethodException ex) {
l.debug("Failed executing createMetadataObject on project id=" + projectId + "content='" + content.toString() + "'");
throw new GdcRestApiException("Failed executing createMetadataObject on project id=" + projectId + "content='" + content.toString() + "'", ex);
} catch (UnsupportedEncodingException e) {
l.debug("String#getBytes(\"utf-8\") threw UnsupportedEncodingException", e);
throw new IllegalStateException(e);
} finally {
req.releaseConnection();
}
}
/**
* Modifies an object in the metadata server
*
* @param projectId project id (hash)
* @param objectId object id (integer)
* @param content the new object content
* @return the new object
*/
public JSONObject modifyMetadataObject(String projectId, int objectId, JSON content) {
l.debug("Executing modifyMetadataObject on project id=" + projectId + " objectId=" + objectId + " content='" + content.toString() + "'");
return modifyMetadataObject(MD_URI + projectId + OBJ_URI + "/" + objectId, content);
}
/**
* Modifies an object in the metadata server
*
* @param uri object uri
* @param content the new object content
* @return the new object
*/
public JSONObject modifyMetadataObject(String uri, JSON content) {
l.debug("Executing modifyMetadataObject on uri=" + uri + " content='" + content.toString() + "'");
PostMethod req = createPostMethod(getServerUrl() + uri);
try {
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(
content.toString().getBytes("utf-8")));
req.setRequestEntity(request);
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
return parsedResp;
} catch (HttpMethodException ex) {
l.debug("Failed executing modifyMetadataObject on uri=" + uri + " content='" + content.toString() + "'");
throw new GdcRestApiException("Failed executing modifyMetadataObject on uri=" + uri + " content='" + content.toString() + "'", ex);
} catch (UnsupportedEncodingException e) {
l.debug("String#getBytes(\"utf-8\") threw UnsupportedEncodingException", e);
throw new IllegalStateException(e);
} finally {
req.releaseConnection();
}
}
/**
* Deletes an object in the metadata server
*
* @param projectId project id (hash)
* @param objectId object id (integer)
* @return the new object
*/
public void deleteMetadataObject(String projectId, int objectId) {
l.debug("Executing deleteMetadataObject on project id=" + projectId + " objectId=" + objectId);
deleteMetadataObject(MD_URI + projectId + OBJ_URI + "/" + objectId);
}
/**
* Deletes an object in the metadata server
*
* @param uri object uri
* @return the new object
*/
public void deleteMetadataObject(String uri) {
l.debug("Executing deleteMetadataObject on project uri=" + uri);
DeleteMethod req = createDeleteMethod(getServerUrl() + uri);
try {
String resp = executeMethodOk(req);
} catch (HttpMethodException ex) {
l.debug("Failed executing deleteMetadataObject on project uri=" + uri);
throw new GdcRestApiException("Failed executing deleteMetadataObject on uri=" + uri, ex);
} finally {
req.releaseConnection();
}
}
/**
* Determines the projet's ETL mode (SLI/DLI/VOID)
*
* @param pid project id
* @return project's ETL mode
*/
public String getProjectEtlMode(String pid) {
l.debug("Getting project etl status.");
GetMethod req = createGetMethod(getProjectMdUrl(pid) + ETL_MODE_URI);
try {
String resp = executeMethodOk(req);
JSONObject parsedResp = JSONObject.fromObject(resp);
if (parsedResp != null && !parsedResp.isNullObject() && !parsedResp.isEmpty()) {
JSONObject etlMode = parsedResp.getJSONObject("etlMode");
if (etlMode != null && !etlMode.isNullObject() && !etlMode.isEmpty()) {
String mode = etlMode.getString("mode");
if (mode != null && mode.length() > 0) {
return mode;
} else {
l.debug("Getting project etl status. No mode in the result: " + etlMode.toString());
throw new GdcRestApiException("Getting project etl status. No mode in the result: " + etlMode.toString());
}
} else {
l.debug("Getting project etl status. No etlMode in the result: " + parsedResp.toString());
throw new GdcRestApiException("Getting project etl status. No etlMode in the result: " + parsedResp.toString());
}
} else {
l.debug("Getting project etl status. Empty result.");
throw new GdcRestApiException("Getting project etl status. Empty result.");
}
} finally {
req.releaseConnection();
}
}
protected JSONObject getMigrationRequest(List<String> manifests) {
JSONObject etlMode = new JSONObject();
etlMode.put("mode", "SLI");
JSONArray mnfsts = new JSONArray();
mnfsts.addAll(manifests);
etlMode.put("sli", mnfsts);
JSONObject ret = new JSONObject();
ret.put("etlMode", etlMode);
return ret;
}
/**
* Checks if the migration is finished
*
* @param link the link returned from the start loading
* @return the loading status
*/
public String getTaskManStatus(String link) throws HttpMethodException {
l.debug("Getting TaskMan status uri=" + link);
HttpMethod ptm = createGetMethod(getServerUrl() + link);
try {
String response = "";
boolean isFinished = false;
while (!isFinished) {
try {
response = executeMethodOk(ptm);
isFinished = true;
} catch (HttpMethodNotFinishedYetException e) {
l.debug("getTaskManStatus: Waiting for status");
try {
Thread.sleep(Constants.POLL_INTERVAL);
} catch (InterruptedException ex) {
// do nothing
}
}
}
JSONObject task = JSONObject.fromObject(response);
JSONObject state = task.getJSONObject("wTaskStatus");
if (state != null && !state.isNullObject() && !state.isEmpty()) {
String status = state.getString("status");
l.debug("TaskMan status=" + status);
return status;
} else {
l.debug("No wTaskStatus structure in the taskman status!");
throw new GdcRestApiException("No wTaskStatus structure in the taskman status!");
}
} finally {
ptm.releaseConnection();
}
}
    /**
     * Result of a detailed TaskMan status query: the task state plus any
     * formatted task messages returned alongside it.
     */
    public static class TaskmanStatus {
        // task messages with parameters already substituted in
        private String[] message;
        // task state string as reported by the server
        private String status;
        public TaskmanStatus(String s, String[] m) {
            this.status = s;
            this.message = m;
        }
        /** Returns the task state string. */
        public String getStatus() {
            return status;
        }
        public void setStatus(String status) {
            this.status = status;
        }
        /** Returns the formatted task messages. */
        public String[] getMessage() {
            return message;
        }
        public void setMessage(String[] message) {
            this.message = message;
        }
    }
public TaskmanStatus getDetailedTaskManStatus(String link) throws HttpMethodException {
l.debug("Getting TaskMan status uri=" + link);
HttpMethod ptm = createGetMethod(getServerUrl() + link);
try {
String response = "";
boolean isFinished = false;
while (!isFinished) {
try {
response = executeMethodOk(ptm);
isFinished = true;
} catch (HttpMethodNotFinishedYetException e) {
l.debug("getTaskManStatus: Waiting for status");
try {
Thread.sleep(Constants.POLL_INTERVAL);
} catch (InterruptedException ex) {
// do nothing
}
}
}
JSONObject task = JSONObject.fromObject(response);
JSONObject state = task.getJSONObject("wTaskStatus");
if (state != null && !state.isNullObject() && !state.isEmpty()) {
String status = state.getString("status");
ArrayList<String> messages = new ArrayList<String>();
l.debug("TaskMan status=" + status);
if(state.containsKey("messages")) {
JSONArray msgs = state.getJSONArray("messages");
if(msgs != null && !msgs.isEmpty()) {
for (Object msgo : msgs) {
JSONObject msg = (JSONObject)msgo;
String root = (String)msg.keys().next();
JSONObject inner = msg.getJSONObject(root);
JSONArray prms = inner.getJSONArray("parameters");
String message = inner.getString("message");
if(prms != null && !prms.isEmpty()) {
for(Object prmo : prms) {
String prm = (String)prmo;
message = message.replaceFirst("\\%s",prm);
}
}
messages.add(message);
}
}
}
return new TaskmanStatus(status, (String[])messages.toArray(new String[]{}));
} else {
l.debug("No wTaskStatus structure in the taskman status!");
throw new GdcRestApiException("No wTaskStatus structure in the taskman status!");
}
} finally {
ptm.releaseConnection();
}
}
/**
* Migrates project datasets from DLI to SLI
*
* @param pid project ID
* @param manifests array of all dataset's manifests
*/
public String migrateDataSets(String pid, List<String> manifests) {
l.debug("Migrating project to SLI.");
String currentMode = getProjectEtlMode(pid);
l.debug("Migrating project to SLI: current status is " + currentMode);
if (ETL_MODE_DLI.equalsIgnoreCase(currentMode) || ETL_MODE_VOID.equalsIgnoreCase(currentMode)) {
PostMethod req = createPostMethod(getProjectMdUrl(pid) + ETL_MODE_URI);
InputStreamRequestEntity request = new InputStreamRequestEntity(new ByteArrayInputStream(getMigrationRequest(manifests).toString().getBytes()));
req.setRequestEntity(request);
try {
String resp = executeMethodOk(req);
JSONObject responseObject = JSONObject.fromObject(resp);
String taskLink = responseObject.getString("uri");
return taskLink;
} catch (HttpMethodException ex) {
l.debug("Migrating project to SLI failed.", ex);
throw new GdcRestApiException("Migrating project to SLI failed.", ex);
} finally {
req.releaseConnection();
}
} else {
l.debug("Migrating project to SLI: no migration needed. Skipping.");
return "";
}
}
    /** Creates a GET method with the standard JSON headers applied. */
    private static GetMethod createGetMethod(String path) {
        return configureHttpMethod(new GetMethod(path));
    }

    /** Creates a POST method with the standard JSON headers applied. */
    private static PostMethod createPostMethod(String path) {
        return configureHttpMethod(new PostMethod(path));
    }

    /** Creates a DELETE method with the standard JSON headers applied. */
    private static DeleteMethod createDeleteMethod(String path) {
        return configureHttpMethod(new DeleteMethod(path));
    }
private static <T extends HttpMethod> T configureHttpMethod(T request) {
request.setRequestHeader("Content-Type", "application/json; charset=utf-8");
request.setRequestHeader("Accept", "application/json");
request.setRequestHeader("Accept-Charset", "utf-u");
request.setRequestHeader("User-Agent", "GoodData CL/1.2.68");
request.getParams().setCookiePolicy(CookiePolicy.BROWSER_COMPATIBILITY);
return request;
}
    // NOTE(review): the logout() call is commented out here; finalizers run at an
    // unpredictable time on an arbitrary thread, so presumably the network call was
    // considered unsafe from finalize() — confirm before re-enabling it.
    protected void finalize() throws Throwable {
        try {
            // logout();
        } finally {
            super.finalize();
        }
    }
/**
* API for querying users in a domain
*
* @param domain
* @return
*/
public Map<String, GdcUser> getUsers(String domain) {
Map<String, GdcUser> users = new HashMap<String, GdcUser>();
String url = "/gdc/account/domains/" + domain + "/users";
JSONObject jsonObject = getObjectByUri(url);
if (jsonObject == null) {
return users;
}
JSONObject accountSettings = jsonObject
.getJSONObject("accountSettings");
if (accountSettings == null) {
return users;
}
JSONArray items = (JSONArray) accountSettings.get("items");
if (items == null) {
return users;
}
for (Object item : items) {
JSONObject itemJSON = JSONObject.fromObject(item);
if (itemJSON == null) {
continue;
}
JSONObject accountSetting = itemJSON
.getJSONObject("accountSetting");
if (accountSetting == null) {
continue;
}
GdcUser user = new GdcUser();
user.setLogin(accountSetting.getString("login"));
user.setFirstName(accountSetting.getString("firstName"));
user.setLastName(accountSetting.getString("lastName"));
user.setCompanyName(accountSetting.getString("companyName"));
user.setPosition(accountSetting.getString("position"));
user.setCountry(accountSetting.getString("country"));
user.setTimezone(accountSetting.getString("timezone"));
user.setPhoneNumber(accountSetting.getString("phoneNumber"));
user.setEmail(accountSetting.getString("email"));
JSONObject links = accountSetting.getJSONObject("links");
if (links == null)
throw new RuntimeException(
"The URL link for a user cannot be null: "
+ user.getLogin());
String uri = links.getString("self");
if (uri == null)
throw new RuntimeException("The URL for a user cannot be null: "
+ user.getLogin());
user.setUri(uri);
users.put(user.getLogin(), user);
}
return users;
}
    /** Lists dimension object links in the project. */
    public List<String> enumerateDimensions(String projectId) {
        return enumerateResource(projectId, QUERY_DIMENSIONS);
    }

    /** Lists dataset object links in the project. */
    public List<String> enumerateDataSets(String projectId) {
        return enumerateResource(projectId, QUERY_DATASETS);
    }

    /** Lists folder object links in the project. */
    public List<String> enumerateFolders(String projectId) {
        return enumerateResource(projectId, QUERY_FOLDERS);
    }

    /** Lists project dashboard object links in the project. */
    public List<String> enumerateDashboards(String projectId) {
        return enumerateResource(projectId, QUERY_PROJECTDASHBOARDS);
    }
protected List<String> enumerateResource(String projectId, String resource) {
l.debug("Enumerating attributes for project id=" + projectId);
List<String> list = new ArrayList<String>();
String qUri = getProjectMdUrl(projectId) + QUERY_PREFIX + resource;
HttpMethod qGet = createGetMethod(qUri);
try {
String qr = executeMethodOk(qGet);
JSONObject q = JSONObject.fromObject(qr);
if (q.isNullObject()) {
l.debug("Enumerating "+resource+" for project id="+projectId+" failed.");
throw new RuntimeException(
"Enumerating "+resource+" for project id="+projectId+" failed.");
}
JSONObject qry = q.getJSONObject("query");
if (qry.isNullObject()) {
l.debug("Enumerating "+resource+" for project id="+projectId+" failed.");
throw new RuntimeException(
"Enumerating "+resource+" for project id="+projectId+" failed.");
}
JSONArray entries = qry.getJSONArray("entries");
if (entries == null) {
l.debug("Enumerating "+resource+" for project id="+projectId+" failed.");
throw new RuntimeException(
"Enumerating "+resource+" for project id="+projectId+" failed.");
}
for (Object oentry : entries) {
JSONObject entry = (JSONObject) oentry;
list.add(entry.getString("link"));
}
} finally {
qGet.releaseConnection();
}
return list;
}
    /**
     * Starts a partial metadata export of the given object URIs.
     *
     * @param projectId project id (hash)
     * @param urls      metadata object URIs to export
     * @return the export result carrying the status task URI and the export token
     * @throws GdcRestApiException if the HTTP call fails
     */
    public ProjectExportResult exportMDByUrl(String projectId, List<String> urls) {
        l.debug("Exporting metadata objects with URls " + urls
                + " from project " + projectId);
        PostMethod req = createPostMethod(getProjectMdUrl(projectId)
                + PROJECT_PARTIAL_EXPORT_URI);
        JSONObject param = getMDExportStructureStrings(projectId, urls);
        // request body is encoded explicitly as UTF-8
        InputStreamRequestEntity request = new InputStreamRequestEntity(
                new ByteArrayInputStream(param.toString().getBytes(
                        Charset.forName("UTF-8"))));
        req.setRequestEntity(request);
        ProjectExportResult result = null;
        try {
            String response = executeMethodOk(req);
            result = new ProjectExportResult();
            JSONObject responseObject = JSONObject.fromObject(response);
            // "partialMDArtifact" wraps the async status link and the token used later for import
            JSONObject exportArtifact = responseObject
                    .getJSONObject("partialMDArtifact");
            JSONObject status = exportArtifact.getJSONObject("status");
            result.setTaskUri(status.getString("uri"));
            result.setExportToken(exportArtifact.getString("token"));
            return result;
        } catch (HttpMethodException ex) {
            l.debug("Error exporting metadata objects with URls " + urls
                    + " from project " + projectId, ex);
            throw new GdcRestApiException(
                    "Error exporting metadata objects with URls " + urls
                            + " from project " + projectId, ex);
        } finally {
            req.releaseConnection();
        }
    }
protected JSONObject getMDExportStructureStrings(String projectId,
List<String> urls) {
JSONObject param = new JSONObject();
JSONObject partialMDExport = new JSONObject();
JSONArray uris = new JSONArray();
for (String url : urls) {
uris.add(url);
}
partialMDExport.put("uris", uris);
param.put("partialMDExport", partialMDExport);
return param;
}
    /** Returns the name/password configuration this API wrapper was created with. */
    public NamePasswordConfiguration getNamePasswordConfiguration() {
        return config;
    }
    /**
     * Checks if report copying is finished. Workaround implementation due to
     * wrong handling of status code.
     *
     * @param link
     *            the link returned from the start loading
     * @return the loading status ("RUNNING" while the task has not produced a state yet)
     */
    public String getCopyStatus(String link) {
        l.debug("Getting Cloning Status status uri=" + link);
        HttpMethod ptm = createGetMethod(getServerUrl() + link);
        try {
            String response = executeMethodOk(ptm);
            if (response != null && !response.isEmpty()) {
                JSONObject task = JSONObject.fromObject(response);
                JSONObject state = task.getJSONObject("taskState");
                if (state != null && !state.isNullObject() && !state.isEmpty()) {
                    String status = state.getString("status");
                    l.debug("TaskMan status=" + status);
                    return status;
                } else {
                    l.debug("No wTaskStatus structure in the migration status!");
                    throw new GdcRestApiException(
                            "No wTaskStatus structure in the migration status!");
                }
            }
            // empty body: the task has not reported a state yet
            return "RUNNING";
        } catch (HttpMethodException e) {
            // workaround implementation due to wrong handling (at least for
            // this status): "not finished yet" may arrive directly or wrapped as a cause
            if (e instanceof HttpMethodNotFinishedYetException
                    || (e.getCause() != null && e.getCause() instanceof HttpMethodNotFinishedYetException)) {
                l.debug("getTaskManStatus: Waiting for status");
                return "RUNNING";
            }
            throw e;
        } finally {
            ptm.releaseConnection();
        }
    }
/**
* Retrieves the project info by the project's name
*
* @param name
* the project name
* @return the GoodDataProjectInfo populated with the project's information
* @throws HttpMethodException
* @throws GdcProjectAccessException
*/
@Deprecated
public Project getProjectByName(String name) throws HttpMethodException,
GdcProjectAccessException {
l.debug("Getting project by name=" + name);
for (Iterator<JSONObject> linksIter = getProjectsLinks(); linksIter
.hasNext();) {
JSONObject link = linksIter.next();
String cat = link.getString("category");
if (!"project".equalsIgnoreCase(cat)) {
continue;
}
String title = link.getString("title");
if (title.equals(name)) {
Project proj = new Project(link);
l.debug("Got project by name=" + name);
return proj;
}
}
l.debug("The project name=" + name + " doesn't exists.");
throw new GdcProjectAccessException("The project name=" + name
+ " doesn't exists.");
}
    /**
     * Returns the existing projects links.
     *
     * @return accessible projects links
     * @throws com.gooddata.exception.HttpMethodException
     */
    @Deprecated
    @SuppressWarnings("unchecked")
    private Iterator<JSONObject> getProjectsLinks() throws HttpMethodException {
        l.debug("Getting project links.");
        HttpMethod req = createGetMethod(getServerUrl() + MD_URI);
        try {
            String resp = executeMethodOk(req);
            JSONObject parsedResp = JSONObject.fromObject(resp);
            // the MD root resource lists everything under about/links
            JSONObject about = parsedResp.getJSONObject("about");
            JSONArray links = about.getJSONArray("links");
            l.debug("Got project links " + links);
            return links.iterator();
        } finally {
            req.releaseConnection();
        }
    }
    /**
     * Create a new GoodData project.
     *
     * @param name
     *            project name
     * @param desc
     *            project description
     * @param templateUri
     *            project template uri
     * @return the project Id
     * @throws GdcRestApiException
     */
    @Deprecated
    public String createProject(String name, String desc, String templateUri)
            throws GdcRestApiException {
        // delegates to the full overload; the two trailing nulls are the extra
        // parameters of that overload — presumably optional, confirm against its javadoc
        return this.createProject(name, desc, templateUri, null, null);
    }
/**
* Returns the List of GoodDataProjectInfo structures for the accessible
* projects
*
* @return the List of GoodDataProjectInfo structures for the accessible
* projects
* @throws HttpMethodException
*/
@Deprecated
public List<Project> listProjects() throws HttpMethodException {
l.debug("Listing projects.");
List<Project> list = new ArrayList<Project>();
for (Iterator<JSONObject> linksIter = getProjectsLinks(); linksIter
.hasNext();) {
JSONObject link = linksIter.next();
String cat = link.getString("category");
if (!"project".equalsIgnoreCase(cat)) {
continue;
}
Project proj = new Project(link);
list.add(proj);
}
l.debug("Found projects " + list);
return list;
}
/**
* Gets a report definition from the report uri (/gdc/obj...)
*
* @param reportUri report uri (/gdc/obj...)
* @return report definition
*/
@Deprecated
public String getReportDefinition(String reportUri) {
l.debug( "Getting report definition for report uri=" + reportUri );
String qUri = getServerUrl() + reportUri;
HttpMethod qGet = createGetMethod( qUri );
try {
String qr = executeMethodOk( qGet );
JSONObject q = JSONObject.fromObject( qr );
if (q.isNullObject()) {
l.debug("Error getting report definition for report uri=" + reportUri);
throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
}
JSONObject report = q.getJSONObject("report");
if (report.isNullObject()) {
l.debug("Error getting report definition for report uri=" + reportUri);
throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
}
JSONObject content = report.getJSONObject("content");
if (content.isNullObject()) {
l.debug("Error getting report definition for report uri=" + reportUri);
throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
}
JSONArray definitions = content.getJSONArray("definitions");
if (definitions == null) {
l.debug("Error getting report definition for report uri=" + reportUri);
throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
}
if (definitions.size() > 0) {
String lastDefUri = definitions.getString(definitions.size() - 1);
qUri = getServerUrl() + lastDefUri;
return lastDefUri;
}
else {
l.debug("Error getting report definition for report uri=" + reportUri);
throw new GdcProjectAccessException("Error getting report definition for report uri=" + reportUri);
}
} finally {
if (qGet != null)
qGet.releaseConnection();
}
}
    /**
     * Immutable result of a graph execution: the final status and the URL of the
     * execution log.
     */
    public static class GraphExecutionResult {
        // status value signalling a successful run
        public final static String OK = "OK";

        private final String status;
        private final String logUrl;

        private GraphExecutionResult(final String status, final String logUrl) {
            this.status = status;
            this.logUrl = logUrl;
        }

        /** Returns the execution status (e.g. {@link #OK}). */
        public String getStatus() {
            return status;
        }

        /** Returns the URL of the execution log. */
        public String getLogUrl() {
            return logUrl;
        }
    }
}
| HTTP User Agent is "GoodData Agent/VERSION" now
(it was GoodData CL/1.2.68 before!)
| src/main/java/com/gooddata/agent/api/GdcRESTApiWrapper.java | HTTP User Agent is "GoodData Agent/VERSION" now |
|
Java | mit | 411d293fe9b9b65b8d8e126dd3111b2fae2cebb1 | 0 | smblott-github/intent_radio,smblott-github/intent_radio,smblott-github/intent_radio | package org.smblott.intentradio;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.content.Context;
import android.os.AsyncTask;;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.text.Spanned;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
public class IntentRadio extends Activity
{
   private static Context context = null;

   // Running tasks are tracked so they can be cancelled when the activity goes away
   // (see onDestroy(); addresses the TODO previously noted in install_tasker()).
   private static AsyncTask<TextView, Void, Spanned> draw_task = null;
   private static AsyncTask<Void, Void, String> install_task = null;

   @Override
   public void onCreate(Bundle savedInstanceState)
   {
      super.onCreate(savedInstanceState);
      context = getApplicationContext();
      Logger.init(context);

      setContentView(R.layout.main);
      TextView view = (TextView) findViewById(R.id.text);
      view.setMovementMethod(LinkMovementMethod.getInstance());
      view.setText("Loading...");

      // Read file contents and build date for main screen asynchronously...
      //
      if ( draw_task != null )
         draw_task.cancel(true);

      draw_task = new AsyncTask<TextView, Void, Spanned>()
      {
         TextView view = null;

         @Override
         protected Spanned doInBackground(TextView... v)
         {
            view = v[0];
            return Html.fromHtml(
               ReadRawTextFile.read(getApplicationContext(),R.raw.message)
                  + "<p>Version: " + getString(R.string.version) + "<br>\n"
                  + "Build: " + Build.getBuildDate(context) + "\n</p>\n"
            );
         }

         @Override
         protected void onPostExecute(Spanned html)
         {
            if ( ! isCancelled() )
               view.setText(html);
         }
      };

      draw_task.execute(view);
   }

   @Override
   protected void onDestroy()
   {
      // Cancel outstanding tasks so their onPostExecute() callbacks cannot touch a
      // destroyed activity.
      if ( draw_task != null )
      {
         draw_task.cancel(true);
         draw_task = null;
      }
      if ( install_task != null )
      {
         install_task.cancel(true);
         install_task = null;
      }
      super.onDestroy();
   }

   /* ********************************************************************
    * Launch clip buttons...
    */

   public void clip_buttons(View v)
   {
      Intent clipper = new Intent(IntentRadio.this, ClipButtons.class);
      startActivity(clipper);
   }

   /* ********************************************************************
    * Install sample Tasker project...
    *
    * This currently assumes that Tasker *always* stores projects in:
    *
    *   - /sdcard/Tasker/projects
    *
    * Does it?
    *
    * File I/O is more blocking than anything else we're doing, so we'll do it
    * asynchronously.
    */

   private static final String project_file = "Tasker/projects/IntentRadio.prj.xml";

   public void install_tasker(View v)
   {
      if ( install_task != null )
         install_task.cancel(true);

      install_task = new AsyncTask<Void, Void, String>()
      {
         @Override
         protected String doInBackground(Void... unused)
         {
            return CopyResource.copy(context, R.raw.tasker, project_file);
         }

         @Override
         protected void onPostExecute(String error)
         {
            // Cancelled tasks (including those cancelled by onDestroy()) skip the
            // UI work below.
            if ( ! isCancelled() )
            {
               if ( error == null )
               {
                  toast("Project file installed...\n\n/sdcard/" + project_file);
                  toast("Next, import this project into Tasker.");
               }
               else
                  toast("Install error:\n" + error + "\n\n/sdcard/" + project_file);
            }
         }
      };

      install_task.execute();
   }

   /* ********************************************************************
    * Toasts...
    */

   static private void toast(String msg)
      { Logger.toast_long(msg); }
}
| src/org/smblott/intentradio/IntentRadio.java | package org.smblott.intentradio;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.content.Context;
import android.os.AsyncTask;;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.text.Spanned;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
public class IntentRadio extends Activity
{
   static Context context = null;

   @Override
   public void onCreate(Bundle savedInstanceState)
   {
      super.onCreate(savedInstanceState);
      context = getApplicationContext();
      Logger.init(context);

      setContentView(R.layout.main);
      TextView text = (TextView) findViewById(R.id.text);
      text.setMovementMethod(LinkMovementMethod.getInstance());
      text.setText("Loading...");

      // Render the message file plus version/build info off the UI thread, then
      // swap it into the text view.
      AsyncTask<TextView, Void, Spanned> drawTask = new AsyncTask<TextView, Void, Spanned>()
      {
         TextView target = null;

         @Override
         protected Spanned doInBackground(TextView... v)
         {
            target = v[0];
            String html = ReadRawTextFile.read(getApplicationContext(), R.raw.message)
               + "<p>Version: " + getString(R.string.version) + "<br>\n"
               + "Build: " + Build.getBuildDate(context) + "\n</p>\n";
            return Html.fromHtml(html);
         }

         @Override
         protected void onPostExecute(Spanned html)
         {
            if ( ! isCancelled() )
               target.setText(html);
         }
      };
      drawTask.execute(text);
   }

   /* ********************************************************************
    * Launch clip buttons...
    */

   public void clip_buttons(View v)
   {
      startActivity(new Intent(IntentRadio.this, ClipButtons.class));
   }

   /* ********************************************************************
    * Install sample Tasker project...
    *
    * This currently assumes that Tasker *always* stores projects in:
    *
    *   - /sdcard/Tasker/projects
    *
    * Does it?
    *
    * File I/O is more blocking than anything else we're doing, so we'll do it
    * asynchronously.
    */

   private static final String project_file = "Tasker/projects/IntentRadio.prj.xml";

   public void install_tasker(View v)
   {
      AsyncTask<Void, Void, String> installTask = new AsyncTask<Void, Void, String>()
      {
         @Override
         protected String doInBackground(Void... unused)
         {
            return CopyResource.copy(context, R.raw.tasker, project_file);
         }

         @Override
         protected void onPostExecute(String error)
         {
            // TODO: Checking whether the task has been cancelled is not
            // sufficient.  Must also check whether activity has been
            // destroyed.
            //
            if ( isCancelled() )
               return;

            if ( error == null )
            {
               toast("Project file installed...\n\n/sdcard/" + project_file);
               toast("Next, import this project into Tasker.");
            }
            else
               toast("Install error:\n" + error + "\n\n/sdcard/" + project_file);
         }
      };
      installTask.execute();
   }

   /* ********************************************************************
    * Toasts...
    */

   static private void toast(String msg)
      { Logger.toast_long(msg); }
}
| Begin task completion tests.
| src/org/smblott/intentradio/IntentRadio.java | Begin task completion tests. |
|
Java | mit | 63b09fa7de19f7edab8178303561bd79e557a574 | 0 | JCThePants/NucleusFramework,JCThePants/NucleusFramework | /*
* This file is part of NucleusFramework for Bukkit, licensed under the MIT License (MIT).
*
* Copyright (c) JCThePants (www.jcwhatever.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.jcwhatever.nucleus.utils;
import com.jcwhatever.nucleus.storage.DeserializeException;
import com.jcwhatever.nucleus.storage.IDataNode;
import com.jcwhatever.nucleus.storage.IDataNodeSerializable;
import com.jcwhatever.nucleus.utils.file.IBinarySerializable;
import com.jcwhatever.nucleus.utils.file.NucleusByteReader;
import com.jcwhatever.nucleus.utils.file.NucleusByteWriter;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.util.Vector;
import java.io.IOException;
import javax.annotation.Nullable;
/**
* 3D immutable integer coordinates with no {@link org.bukkit.World} context.
*/
public class Coords3Di implements IDataNodeSerializable, IBinarySerializable {

    /**
     * Get {@link Coords3Di} from a {@link org.bukkit.Location}.
     *
     * @param location  The location to convert.
     */
    public static Coords3Di fromLocation(Location location) {
        return new Coords3Di(location.getBlockX(), location.getBlockY(), location.getBlockZ());
    }

    /**
     * Get {@link Coords3Di} from a {@link org.bukkit.util.Vector}.
     *
     * @param vector  The vector to convert.
     */
    public static Coords3Di fromVector(Vector vector) {
        return new Coords3Di(vector.getBlockX(), vector.getBlockY(), vector.getBlockZ());
    }

    // Not final only because the serialization hooks below must be able to
    // populate a default-constructed instance exactly once.
    private int _x;
    private int _y;
    private int _z;

    /**
     * Constructor.
     *
     * @param x  The x coordinates.
     * @param y  The y coordinates.
     * @param z  The z coordinates.
     */
    public Coords3Di(int x, int y, int z) {
        _x = x;
        _y = y;
        _z = z;
    }

    /**
     * Constructor.
     *
     * <p>Clones values from source coordinates.</p>
     *
     * @param source  The source coordinates.
     */
    public Coords3Di(Coords3Di source) {
        _x = source._x;
        _y = source._y;
        _z = source._z;
    }

    /**
     * Constructor.
     *
     * <p>Clones values from source coordinates and adds delta values.</p>
     *
     * @param source  The source coordinates.
     * @param deltaX  The X coordinate values to add to the source coordinates.
     * @param deltaY  The Y coordinate values to add to the source coordinates.
     * @param deltaZ  The Z coordinate values to add to the source coordinates.
     */
    public Coords3Di(Coords3Di source, int deltaX, int deltaY, int deltaZ) {
        _x = source._x + deltaX;
        _y = source._y + deltaY;
        _z = source._z + deltaZ;
    }

    /**
     * Protected constructor for serialization.
     */
    protected Coords3Di() {}

    /**
     * Get the X coordinates.
     */
    public int getX() {
        return _x;
    }

    /**
     * Get the Y coordinates.
     */
    public int getY() {
        return _y;
    }

    /**
     * Get the Z coordinates.
     */
    public int getZ() {
        return _z;
    }

    /**
     * Get the distance from this coordinates to another coordinates.
     *
     * @param coords  The other coordinates.
     */
    public double distance(Coords3D coords) {
        PreCon.notNull(coords);

        return Math.sqrt(distanceSquared(coords));
    }

    /**
     * Get the distance from this coordinates to another coordinates.
     *
     * @param coords  The other coordinates.
     */
    public double distance(Coords3Di coords) {
        PreCon.notNull(coords);

        return Math.sqrt(distanceSquared(coords));
    }

    /**
     * Get the distance from this coordinates to another coordinates squared.
     *
     * @param coords  The other coordinates.
     */
    public double distanceSquared(Coords3D coords) {
        PreCon.notNull(coords);

        double deltaX = coords.getX() - _x;
        double deltaY = coords.getY() - _y;
        double deltaZ = coords.getZ() - _z;

        return deltaX * deltaX + deltaY * deltaY + deltaZ * deltaZ;
    }

    /**
     * Get the distance from this coordinates to another coordinates squared.
     *
     * @param coords  The other coordinates.
     */
    public double distanceSquared(Coords3Di coords) {
        PreCon.notNull(coords);

        double deltaX = coords._x - _x;
        double deltaY = coords._y - _y;
        double deltaZ = coords._z - _z;

        return deltaX * deltaX + deltaY * deltaY + deltaZ * deltaZ;
    }

    /**
     * Create delta coordinates by subtracting other coordinates from
     * this coordinates.
     *
     * @param coords  The other coordinates.
     */
    public Coords3Di getDelta(Coords3Di coords) {
        PreCon.notNull(coords);

        int deltaX = getX() - coords.getX();
        int deltaY = getY() - coords.getY();
        int deltaZ = getZ() - coords.getZ();

        return new Coords3Di(deltaX, deltaY, deltaZ);
    }

    /**
     * Get a {@link org.bukkit.block.Block} from the specified {@link org.bukkit.World}
     * using this coordinates.
     *
     * @param world  The {@link org.bukkit.World} the block is in.
     */
    public Block getBlock(World world) {
        PreCon.notNull(world);

        return world.getBlockAt(getX(), getY(), getZ());
    }

    /**
     * Create a new {@link org.bukkit.Location} from the coordinates.
     *
     * @param world  The {@link org.bukkit.World} value of the new location.
     */
    public Location toLocation(@Nullable World world) {
        return toLocation(new Location(world, 0, 0, 0));
    }

    /**
     * Copy coordinate values into an output {@link org.bukkit.Location}.
     *
     * @param output  The output {@link org.bukkit.Location}.
     *
     * @return  The output location.
     */
    public Location toLocation(Location output) {
        PreCon.notNull(output);

        output.setX(_x);
        output.setY(_y);
        output.setZ(_z);
        return output;
    }

    /**
     * Create a new {@link org.bukkit.util.Vector} from the coordinates.
     */
    public Vector toVector() {
        return toVector(new Vector(0, 0, 0));
    }

    /**
     * Copy coordinate values into an output {@link org.bukkit.util.Vector}.
     *
     * @param output  The output {@link org.bukkit.util.Vector}.
     *
     * @return  The output vector.
     */
    public Vector toVector(Vector output) {
        output.setX(_x);
        output.setY(_y);
        output.setZ(_z);
        return output;
    }

    /**
     * Create a new {@link Coords3D} using the coordinate values.
     */
    public Coords3D to3D() {
        return new Coords3D(getX(), getY(), getZ());
    }

    /**
     * Create a new {@link Coords2D} using the coordinate values.
     *
     * <p>Drops the Y coordinate.</p>
     */
    public Coords2D to2D() {
        return new Coords2D(getX(), getZ());
    }

    /**
     * Create a new {@link Coords2Di} using the coordinate values.
     *
     * <p>Drops the Y coordinate.</p>
     */
    public Coords2Di to2Di() {
        return new Coords2Di(getX(), getZ());
    }

    @Override
    public void serialize(IDataNode dataNode) {
        dataNode.set("x", _x);
        dataNode.set("y", _y);
        dataNode.set("z", _z);
    }

    @Override
    public void deserialize(IDataNode dataNode) throws DeserializeException {
        _x = dataNode.getInteger("x");
        _y = dataNode.getInteger("y");
        _z = dataNode.getInteger("z");
    }

    @Override
    public void serializeToBytes(NucleusByteWriter writer) throws IOException {
        writer.write(_x);
        writer.write(_y);
        writer.write(_z);
    }

    @Override
    public void deserializeFromBytes(NucleusByteReader reader)
            throws IOException, ClassNotFoundException, InstantiationException {
        _x = reader.getInteger();
        _y = reader.getInteger();
        _z = reader.getInteger();
    }

    @Override
    public int hashCode() {
        // A plain XOR of the components is order-insensitive, so e.g.
        // (1,2,3) and (3,2,1) collide. Use the standard 31-based mix for
        // better distribution; equal objects still produce equal hashes.
        int hash = 17;
        hash = 31 * hash + _x;
        hash = 31 * hash + _y;
        hash = 31 * hash + _z;
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this)
            return true;

        if (obj instanceof Coords3Di) {
            Coords3Di other = (Coords3Di)obj;

            return other._x == _x &&
                    other._y == _y &&
                    other._z == _z;
        }

        return false;
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + " { x:" + _x + ", y:" + _y + ", z:" + _z + '}';
    }

    /**
     * Populate coordinates from a deserialization hook.
     *
     * <p>Allowed only while all fields are still at their default value of
     * zero; afterwards the instance is treated as immutable.</p>
     *
     * <p>NOTE(review): an instance legitimately constructed at (0,0,0) can be
     * overwritten by this method — confirm callers only invoke it on
     * default-constructed instances.</p>
     */
    protected void deserialize(int x, int y, int z) {
        if (_x == 0 && _y == 0 && _z == 0) {
            _x = x;
            _y = y;
            _z = z;
        }
        else {
            throw new IllegalStateException("Coords3Di is immutable.");
        }
    }
}
| src/com/jcwhatever/nucleus/utils/Coords3Di.java | /*
* This file is part of NucleusFramework for Bukkit, licensed under the MIT License (MIT).
*
* Copyright (c) JCThePants (www.jcwhatever.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.jcwhatever.nucleus.utils;
import com.jcwhatever.nucleus.storage.DeserializeException;
import com.jcwhatever.nucleus.storage.IDataNode;
import com.jcwhatever.nucleus.storage.IDataNodeSerializable;
import com.jcwhatever.nucleus.utils.file.IBinarySerializable;
import com.jcwhatever.nucleus.utils.file.NucleusByteReader;
import com.jcwhatever.nucleus.utils.file.NucleusByteWriter;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.util.Vector;
import java.io.IOException;
import javax.annotation.Nullable;
/**
* 3D immutable integer coordinates with no {@link org.bukkit.World} context.
*/
public class Coords3Di implements IDataNodeSerializable, IBinarySerializable {
/**
* Get {@link Coords3Di} from a {@link org.bukkit.Location}.
*
* @param location The location to convert.
*/
public static Coords3Di fromLocation(Location location) {
return new Coords3Di(location.getBlockX(), location.getBlockY(), location.getBlockZ());
}
/**
* Get {@link Coords3Di} from a {@link org.bukkit.util.Vector}.
*
* @param vector The vector to convert.
*/
public static Coords3Di fromVector(Vector vector) {
return new Coords3Di(vector.getBlockX(), vector.getBlockY(), vector.getBlockZ());
}
private int _x;
private int _y;
private int _z;
/**
* Constructor.
*
* @param x The x coordinates.
* @param y The y coordinates.
* @param z The z coordinates.
*/
public Coords3Di(int x, int y, int z) {
_x = x;
_y = y;
_z = z;
}
/**
* Constructor.
*
* <p>Clones values from source coordinates.</p>
*
* @param source The source coordinates.
*/
public Coords3Di(Coords3Di source) {
_x = source._x;
_y = source._y;
_z = source._z;
}
/**
* Constructor.
*
* <p>Clones values from source coordinates and adds delta values.</p>
*
* @param source The source coordinates.
* @param deltaX The X coordinate values to add to the source coordinates.
* @param deltaY The Y coordinate values to add to the source coordinates.
* @param deltaZ The Z coordinate values to add to the source coordinates.
*/
public Coords3Di(Coords3Di source, int deltaX, int deltaY, int deltaZ) {
_x = source._x + deltaX;
_y = source._y + deltaY;
_z = source._z + deltaZ;
}
/**
* Protected constructor for serialization.
*/
protected Coords3Di() {}
/**
* Get the X coordinates.
*/
public int getX() {
return _x;
}
/**
* Get the Y coordinates.
*/
public int getY() {
return _y;
}
/**
* Get the Z coordinates.
*/
public int getZ() {
return _z;
}
/**
* Get the distance from this coordinates to another coordinates.
*
* @param coords The other coordinates.
*/
public double distance(Coords3D coords) {
PreCon.notNull(coords);
return Math.sqrt(distanceSquared(coords));
}
/**
* Get the distance from this coordinates to another coordinates.
*
* @param coords The other coordinates.
*/
public double distance(Coords3Di coords) {
PreCon.notNull(coords);
return Math.sqrt(distanceSquared(coords));
}
/**
* Get the distance from this coordinates to another coordinates squared.
*
* @param coords The other coordinates.
*/
public double distanceSquared(Coords3D coords) {
PreCon.notNull(coords);
double deltaX = coords.getX() - _x;
double deltaY = coords.getY() - _y;
double deltaZ = coords.getZ() - _z;
return deltaX * deltaX + deltaY * deltaY + deltaZ * deltaZ;
}
/**
* Get the distance from this coordinates to another coordinates squared.
*
* @param coords The other coordinates.
*/
public double distanceSquared(Coords3Di coords) {
PreCon.notNull(coords);
double deltaX = coords._x - _x;
double deltaY = coords._y - _y;
double deltaZ = coords._z - _z;
return deltaX * deltaX + deltaY * deltaY + deltaZ * deltaZ;
}
/**
* Create delta coordinates by subtracting other coordinates from
* this coordinates.
*
* @param coords The other coordinates.
*/
public Coords3D getDelta(Coords3Di coords) {
PreCon.notNull(coords);
double deltaX = getX() - coords.getX();
double deltaY = getY() - coords.getY();
double deltaZ = getZ() - coords.getZ();
return new Coords3D(deltaX, deltaY, deltaZ);
}
/**
* Get a {@link org.bukkit.block.Block} from the specified {@link org.bukkit.World}
* using this coordinates.
*
* @param world The {@link org.bukkit.World} the block is in.
*/
public Block getBlock(World world) {
PreCon.notNull(world);
return world.getBlockAt(getX(), getY(), getZ());
}
/**
* Create a new {@link org.bukkit.Location} from the coordinates.
*
* @param world The {@link org.bukkit.World} value of the new location.
*/
public Location toLocation(@Nullable World world) {
return toLocation(new Location(world, 0, 0, 0));
}
/**
* Copy coordinate values into an output {@link org.bukkit.Location}.
*
* @param output The output {@link org.bukkit.Location}.
*
* @return The output location.
*/
public Location toLocation(Location output) {
PreCon.notNull(output);
output.setX(_x);
output.setY(_y);
output.setZ(_z);
return output;
}
/**
* Create a new {@link org.bukkit.util.Vector} from the coordinates.
*/
public Vector toVector() {
return toVector(new Vector(0, 0, 0));
}
/**
* Copy coordinate values into an output {@link org.bukkit.util.Vector}.
*
* @param output The output {@link org.bukkit.util.Vector}.
*
* @return The output location.
*/
public Vector toVector(Vector output) {
output.setX(_x);
output.setY(_y);
output.setZ(_z);
return output;
}
/**
* Create a new {@link Coords3D} using the coordinate values.
*/
public Coords3D to3D() {
return new Coords3D(getX(), getY(), getZ());
}
/**
* Create a new {@link Coords2D} using the coordinate values.
*
* <p>Drops the Y coordinate.</p>
*/
public Coords2D to2D() {
return new Coords2D(getX(), getZ());
}
/**
* Create a new {@link Coords2Di} using the coordinate values.
*
* <p>Drops the Y coordinate.</p>
*/
public Coords2Di to2Di() {
return new Coords2Di(getX(), getZ());
}
@Override
public void serialize(IDataNode dataNode) {
dataNode.set("x", _x);
dataNode.set("y", _y);
dataNode.set("z", _z);
}
@Override
public void deserialize(IDataNode dataNode) throws DeserializeException {
_x = dataNode.getInteger("x");
_y = dataNode.getInteger("y");
_z = dataNode.getInteger("z");
}
@Override
public void serializeToBytes(NucleusByteWriter writer) throws IOException {
writer.write(_x);
writer.write(_y);
writer.write(_z);
}
@Override
public void deserializeFromBytes(NucleusByteReader reader)
throws IOException, ClassNotFoundException, InstantiationException {
_x = reader.getInteger();
_y = reader.getInteger();
_z = reader.getInteger();
}
@Override
public int hashCode() {
return _x ^ _y ^ _z;
}
@Override
public boolean equals(Object obj) {
if (obj == this)
return true;
if (obj instanceof Coords3Di) {
Coords3Di other = (Coords3Di)obj;
return other._x == _x &&
other._y == _y &&
other._z == _z;
}
return false;
}
@Override
public String toString() {
return getClass().getSimpleName() + " { x:" + _x + ", y:" + _y + ", z:" + _z + '}';
}
protected void deserialize(int x, int y, int z) {
if (_x == 0 && _y == 0 && _z == 0) {
_x = x;
_y = y;
_z = z;
}
else {
throw new IllegalStateException("Coords3Di is immutable.");
}
}
}
| fix incorrect return type in Coords3Di#getDelta
| src/com/jcwhatever/nucleus/utils/Coords3Di.java | fix incorrect return type in Coords3Di#getDelta |
|
Java | mit | b944a9b49d43abaf9aa879f21165f3ce086b0381 | 0 | mym987/CPS308_Game_Final,mym987/CPS308_Game_Final | package voogasalad_GucciGames.gameData;
import java.io.File;
import java.text.DateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.ResourceBundle;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.DomDriver;
import voogasalad_GucciGames.gameData.wrapper.GameInfo;
import voogasalad_GucciGames.gameData.wrapper.GamePlayerSave;
import voogasalad_GucciGames.gameEngine.GameEngineToGamePlayerInterface;
/**
 * Persists games to disk as XStream-serialized XML and loads them back.
 *
 * <p>Handles three kinds of artifacts: game info files, game loader objects,
 * and timestamped game-state (save) files.</p>
 */
public class XStreamGameEngine {

    // TODO: refactor constants to resource bundle

    /** XML serializer used for all game data. */
    XStream serializer = new XStream(new DomDriver());

    String currentTurn = "Current Turn: ";

    /** Performs the raw file reads and writes. */
    private static FileLoader myLoader = new FileLoader();

    /** Keeps the master list of known games up to date. */
    private GameListManager myManager = new GameListManager();

    /** The loader object read alongside a game's info file. */
    private Object loader;

    private final ResourceBundle myConfig = ResourceBundle.getBundle("voogasalad_GucciGames.gameData.config.GameData");

    public XStreamGameEngine(){
    }

    /**
     * Serializes the game info to the given file and registers the game with
     * the game list manager.
     *
     * @param game the game info to save
     * @param file the destination file
     */
    public void saveGameInfo(GameInfo game, File file) {
        try {
            String gameXML = serializer.toXML(game);
            myLoader.save(file, gameXML);
            myManager.addGame(game.getGameName(), sanitizeGameName(game.getGameName())+ myConfig.getString("GameExtension"));
        }
        catch (Exception e) {
            // NOTE(review): failures are only printed; callers cannot tell a
            // save failed.
            e.printStackTrace();
        }
    }

    /**
     * Saves game in the specified file path.
     * @param game
     * @param filePath
     */
    public void saveGameInfo(GameInfo game, String filePath) {
        saveGameInfo(game, new File(filePath));
    }

    /**
     * Saves the GameInfo automatically based on the specified game's name.
     * @param game
     */
    public void saveGameInfo(GameInfo game){
        saveGameInfo(game, new File(gameNameToFileName(game.getGameName())));
    }

    /**
     * Serializes the given loader object next to the game's info file.
     *
     * @param loader the loader object to serialize
     * @param game   the game the loader belongs to (supplies the file name)
     */
    public void saveGameLoader(Object loader, GameInfo game) {
        try {
            String gameXML = serializer.toXML(loader);
            myLoader.save(new File(gameNameToLoaderName(game.getGameName())), gameXML);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Serializes a game state (save) to the given file.
     *
     * @param game the player save to write
     * @param file the destination file
     */
    public void saveGameState(GamePlayerSave game, File file) {
        try {
            String gameXML = serializer.toXML(game);
            myLoader.save(file, gameXML);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Saves a game state to the game's save directory under a timestamped
     * file name (colons replaced with '-' to keep the name filesystem-safe).
     *
     * @param game the player save to write
     */
    public void saveGameState(GamePlayerSave game){
        saveGameState(game, new File(gameNameToPathName(game.getInfo().getGameName()) + myConfig.getString("SaveDirectory") + DateFormat.getDateInstance().format(new Date()) +" "+ DateFormat.getTimeInstance().format(new Date()).replace(':', '-') + myConfig.getString("SaveExtension")));
        System.out.println("SAVED");
    }

    /** Maps a game name to its info file name (sanitized + game extension). */
    public String gameNameToFileName(String name){
        return sanitizeGameName(name).append(myConfig.getString("GameExtension")).toString();
    }

    /** Maps a game name to its loader file name (sanitized + loader extension). */
    public String gameNameToLoaderName(String name){
        return sanitizeGameName(name).append(myConfig.getString("LoaderExtension")).toString();
    }

    /** Maps a game name to its storage directory path (with trailing slash). */
    public String gameNameToPathName(String name) {
        return sanitizeGameName(name).append("/").toString();
    }

    /**
     * Prefixes the storage location and replaces any run of characters
     * outside [a-zA-Z0-9._] with a single underscore.
     */
    private StringBuilder sanitizeGameName(String name) {
        StringBuilder sanitizedName = new StringBuilder();
        sanitizedName.append(myConfig.getString("GameStorageLocation")).append(name.replaceAll("[^a-zA-Z0-9\\._]+", "_"));
        return sanitizedName;
    }

    /** Loads a game's info by game name. */
    public GameInfo loadGameByName(String name){
        return loadGameInfo(new File(gameNameToFileName(name)));
    }

    /** Loads a game's info from the given file path. */
    public GameInfo loadGameInfo(String path) {
        return loadGameInfo(new File(path));
    }

    /**
     * Loads a game's info file, first attempting to read the companion
     * loader object. A loader failure is tolerated so the game info can
     * still be loaded.
     */
    private GameInfo loadGameInfo(File file) {
        GameInfo game = null;
        try {
            // Use File.getName() rather than splitting on File.separator:
            // String.split() takes a regex, and on Windows the separator
            // "\" is an invalid pattern and throws PatternSyntaxException.
            String name = file.getName();
            System.out.println("LOADING LOADER FROMMMMMMM "+name);
            // NOTE(review): 'name' still carries the game file's extension
            // here, so gameNameToLoaderName() appends the loader extension on
            // top of it — confirm this matches how loader files are named.
            loader = myLoader.read(new File(gameNameToLoaderName(name)));
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("Loading engine.");
        try {
            String gameXML = myLoader.read(file);
            game = (GameInfo) serializer.fromXML(gameXML);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("Load complete.");
        return game;
    }

    /**
     * Loads a saved game state from the game's save directory.
     *
     * @param saveName the save file name
     * @param gameName the game the save belongs to
     * @return the deserialized save, or null if loading failed
     */
    public GamePlayerSave loadGameState (String saveName, String gameName) {
        File file = new File(gameNameToPathName(gameName) + myConfig.getString("SaveDirectory") + saveName);
        System.out.println("Loading SAVE."+saveName);
        GamePlayerSave game = null;
        try {
            String gameXML = myLoader.read(file);
            game = (GamePlayerSave) serializer.fromXML(gameXML);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("Load complete.");
        return game;
    }
}
| src/voogasalad_GucciGames/gameData/XStreamGameEngine.java | package voogasalad_GucciGames.gameData;
import java.io.File;
import java.text.DateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.ResourceBundle;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.DomDriver;
import voogasalad_GucciGames.gameData.wrapper.GameInfo;
import voogasalad_GucciGames.gameData.wrapper.GamePlayerSave;
import voogasalad_GucciGames.gameEngine.GameEngineToGamePlayerInterface;
public class XStreamGameEngine {
//@SuppressWarnings("resource")
// TODO: refactor constants to resource bundle
XStream serializer = new XStream(new DomDriver());
String currentTurn = "Current Turn: ";
private static FileLoader myLoader = new FileLoader();
private GameListManager myManager = new GameListManager();
private Object loader;
private final ResourceBundle myConfig = ResourceBundle.getBundle("voogasalad_GucciGames.gameData.config.GameData");
public XStreamGameEngine(){
}
public void saveGameInfo(GameInfo game, File file) {
try {
String gameXML = serializer.toXML(game);
myLoader.save(file, gameXML);
myManager.addGame(game.getGameName(), sanitizeGameName(game.getGameName())+ myConfig.getString("GameExtension"));
}
catch (Exception e) {
e.printStackTrace();
}
}
/**
* Saves game in the specified file path.
* @param game
* @param filePath
*/
public void saveGameInfo(GameInfo game, String filePath) {
saveGameInfo(game, new File(filePath));
}
/**
* Saves the GameInfo automatically based on the specified game's name.
* @param game
*/
public void saveGameInfo(GameInfo game){
saveGameInfo(game, new File(gameNameToFileName(game.getGameName())));
}
public void saveGameLoader(Object loader, GameInfo game) {
try {
String gameXML = serializer.toXML(loader);
myLoader.save(new File(gameNameToLoaderName(game.getGameName())), gameXML);
}
catch (Exception e) {
e.printStackTrace();
}
}
public void saveGameState(GamePlayerSave game, File file) {
try {
String gameXML = serializer.toXML(game);
myLoader.save(file, gameXML);
//myManager.addGame(game.getGameName(), sanitizeGameName(game.getGameName())+ myConfig.getString("GameExtension"));
}
catch (Exception e) {
e.printStackTrace();
}
}
public void saveGameState(GamePlayerSave game){
saveGameState(game, new File(gameNameToPathName(game.getInfo().getGameName()) + myConfig.getString("SaveDirectory") + DateFormat.getDateInstance().format(new Date()) +" "+ DateFormat.getTimeInstance().format(new Date()).replace(':', '-') + myConfig.getString("SaveExtension")));
System.out.println("SAVED");
}
public String gameNameToFileName(String name){
return sanitizeGameName(name).append(myConfig.getString("GameExtension")).toString();
}
public String gameNameToLoaderName(String name){
return sanitizeGameName(name).append(myConfig.getString("LoaderExtension")).toString();
}
public String gameNameToPathName(String name) {
return sanitizeGameName(name).append("/").toString();
}
private StringBuilder sanitizeGameName(String name) {
StringBuilder sanitizedName = new StringBuilder();
sanitizedName.append(myConfig.getString("GameStorageLocation")).append(name.replaceAll("[^a-zA-Z0-9\\._]+", "_"));
return sanitizedName;
}
// public GameEngineToGamePlayerInterface loadEngine() {
// return loadEngine("");
// }
public GameInfo loadGameByName(String name){
return loadGameInfo(new File(gameNameToFileName(name)));
}
public GameInfo loadGameInfo(String path) {
// if (path.isEmpty()) {
// path=defaultEngineLocation;
// }
return loadGameInfo(new File(path));
}
private GameInfo loadGameInfo(File file) {
// if (file==null || !file.canRead()) {
// file = new File(defaultEngineLocation);
// }
System.out.println("Loading engine.");
GameInfo game=null;
try {
List<String> path = Arrays.asList(file.getPath().split(File.separator));
String name=path.get(path.size()-1);
System.out.println("LOADING LOADER FROMMMMMMM "+name);
loader = myLoader.read(new File(gameNameToLoaderName(name)));
//loader stuff
String gameXML = myLoader.read(file);
game = (GameInfo) serializer.fromXML(gameXML);
}
catch (Exception e) {
e.printStackTrace();
}
System.out.println("Load complete.");
return game;
}
public GamePlayerSave loadGameState (String saveName, String gameName) {
// TODO Auto-generated method stub
File file = new File(gameNameToPathName(gameName) + myConfig.getString("SaveDirectory") + saveName);
System.out.println("Loading SAVE."+saveName);
GamePlayerSave game=null;
try {
String gameXML = myLoader.read(file);
game = (GamePlayerSave) serializer.fromXML(gameXML);
}
catch (Exception e) {
e.printStackTrace();
}
System.out.println("Load complete.");
return game;
}
}
| made loading more robust
| src/voogasalad_GucciGames/gameData/XStreamGameEngine.java | made loading more robust |
|
Java | mit | 8b9b4ea89595b8b49f567a35752e6a6ce1db052f | 0 | seqcode/seqcode-core,seqcode/seqcode-core,seqcode/seqcode-core,seqcode/seqcode-core | package edu.psu.compbio.seqcode.projects.akshay.MultiSeq;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.MultiLogistic.ClassRelationStructure.Node;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.pmml.producer.LogisticProducerHelper;
import weka.core.*;
import weka.core.Capabilities.Capability;
import weka.core.pmml.PMMLProducer;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.NominalToBinary;
import weka.filters.unsupervised.attribute.RemoveUseless;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;
public class MultiLogistic extends AbstractClassifier implements OptionHandler, WeightedInstancesHandler, PMMLProducer{
// Model parameters compatible with a normal Multinomial logit model
/**
*
*/
private static final long serialVersionUID = -6260376163872744102L;
/** The coefficients of the optimized Structured Multinomial logit model. However, this only stores the leaf node parameters*/
protected double[][] m_Par; // [NumPredictors+Intercept][NumClasses]
/** The training data saved as a matrix */
protected double[][] m_Data; // [NumInstances][NumPredictors+Intercept]
/** The number of attributes in the model */
protected int m_NumPredictors;
/** The number of class lables or the number of leaf nodes */
protected int m_NumClasses;
/** The regularization parameter, Is multiplied to the logit part of the loss function */
protected double m_Ridge = 100;
/** The index of the class attribute. Usually the last attribute */
protected int m_ClassIndex;
/** An attribute filter */
private RemoveUseless m_AttFilter;
/** The filter used to make attributes numeric. */
private NominalToBinary m_NominalToBinary;
/** The filter used to get rid of missing values. */
private ReplaceMissingValues m_ReplaceMissingValues;
/** Log-likelihood of the searched model */
protected double m_LL;
/** The maximum number of iterations. */
private int m_MaxIts = -1;
/** Wether to use conjugate gradient descent rather than BFGS updates. */
private boolean m_useConjugateGradientDescent = false;
private Instances m_structure;
// Model parameters of the structured multinomial logit not compatible with the weka logit class
/** All model parameters including the internal non-leaf nodes */
protected double[][] sm_Par; //[NumPredictors+Intercept][NumNodes]
/** Total number of nodes in the structured representation of the classes. Including the root node */
protected int sm_NumNodes;
protected int sm_NumLayers;
/** Class Structure relationships */
public ClassRelationStructure sm_ClassStructure;
// Setters
/**
* Sets whether conjugate gradient descent is used.
*
* @param useConjugateGradientDescent true if CGD is to be used.
*/
public void setUseConjugateGradientDescent(boolean useConjugateGradientDescent) {
m_useConjugateGradientDescent = useConjugateGradientDescent;
}
/**
* Sets the Ridge parameter
* @param ridge
*/
public void setRidge(double ridge){
m_Ridge = ridge;
}
/**
* Sets the maximum nuber of training rounds while lerning
* @param newMaxIts
*/
public void setMaxIts(int newMaxIts){
m_MaxIts = newMaxIts;
}
//Gettors
/**
* Gets whether to use conjugate gradient descent rather than BFGS updates.
*
* @return true if CGD is used
*/
public boolean getUseConjugateGradientDescent() {
return m_useConjugateGradientDescent;
}
/**
* Returns the ridge co-eff of the model
* @return
*/
public double getRidge(){
return m_Ridge;
}
/**
* Return the value of m_MaxIts
* @return
*/
public int getMaxIts(){
return m_MaxIts;
}
/**
* Returns the coefficients of the leaf nodes of the model. The first dimension
* indexes the attributes, and the second the classes.
*
* @return the coefficients for this logistic model
*/
public double[][] coefficients() {
return m_Par;
}
/**
* Returns the coefficients of all the nodes of the trained model
* @return
*/
public double[][] coefficientsFull(){
return sm_Par;
}
    /**
     * Returns default capabilities of the classifier.
     *
     * <p>All capabilities are disabled first, then only the attribute and
     * class types this classifier can actually handle are enabled.</p>
     *
     * @return the capabilities of this classifier
     */
    @Override
    public Capabilities getCapabilities() {
        Capabilities result = super.getCapabilities();
        result.disableAll();
        // attributes
        result.enable(Capability.NOMINAL_ATTRIBUTES);
        result.enable(Capability.NUMERIC_ATTRIBUTES);
        result.enable(Capability.DATE_ATTRIBUTES);
        result.enable(Capability.MISSING_VALUES);
        // class
        result.enable(Capability.NOMINAL_CLASS);
        result.enable(Capability.MISSING_CLASS_VALUES);
        return result;
    }
    /**
     * Computes the class membership probabilities for the given instance.
     *
     * <p>The same filters fitted during training (missing-value replacement,
     * useless-attribute removal, nominal-to-binary conversion) are applied to
     * the instance before evaluating the model.</p>
     *
     * @param instance the instance to classify
     * @return the per-class probability distribution
     * @throws Exception if filtering the instance fails
     */
    @Override
    public double[] distributionForInstance(Instance instance) throws Exception {
        // Apply the training-time filters, in the same order they were fitted.
        m_ReplaceMissingValues.input(instance);
        instance = m_ReplaceMissingValues.output();
        m_AttFilter.input(instance);
        instance = m_AttFilter.output();
        m_NominalToBinary.input(instance);
        instance = m_NominalToBinary.output();
        // Extract the predictor columns into an array
        // Index 0 holds the intercept term (constant 1); the class attribute
        // column is skipped.
        double[] instDat = new double[m_NumPredictors + 1];
        int j = 1;
        instDat[0] = 1;
        for (int k = 0; k <= m_NumPredictors; k++) {
            if (k != m_ClassIndex) {
                instDat[j++] = instance.value(k);
            }
        }
        double[] distribution = evaluateProbability(instDat);
        return distribution;
    }
/**
 * Computes per-class probabilities for an already-filtered predictor row.
 * Each class score is an independent logistic sigmoid of the linear
 * predictor; the scores are then normalized so they sum to one.
 *
 * @param data predictor values, with data[0] == 1 for the intercept
 * @return normalized class membership probabilities
 */
private double[] evaluateProbability(double[] data) {
  double[] prob = new double[m_NumClasses];
  double total = 0.0;
  for (int c = 0; c < m_NumClasses; c++) {
    double score = 0.0;
    for (int a = 0; a <= m_NumPredictors; a++) {
      score += m_Par[a][c] * data[a];
    }
    // independent sigmoid per class
    prob[c] = 1.0 / (1.0 + Math.exp(-score));
    total += prob[c];
  }
  // Normalize the sigmoid outputs into a distribution
  // (original author noted uncertainty about this step)
  for (int c = 0; c < m_NumClasses; c++) {
    prob[c] = prob[c] / total;
  }
  return prob;
}
/**
 * Builds the structured multinomial logistic model from the training data.
 * Steps: validate capabilities, drop missing-class instances, apply the
 * filter chain, normalize predictors, accumulate per-node class counts
 * through the class structure, initialize parameters, optimize the leaf
 * parameters (BFGS or conjugate gradient), and finally de-normalize the
 * learned coefficients into m_Par (leafs) and sm_Par (all nodes).
 *
 * @param train the training instances
 * @throws Exception if the data cannot be handled or instance weights are
 *           invalid
 */
@Override
public void buildClassifier(Instances train) throws Exception {
  // can classifier handle the data?
  getCapabilities().testWithFail(train);
  // remove instances with missing class
  train = new Instances(train);
  train.deleteWithMissingClass();
  // Replace missing values
  m_ReplaceMissingValues = new ReplaceMissingValues();
  m_ReplaceMissingValues.setInputFormat(train);
  train = Filter.useFilter(train, m_ReplaceMissingValues);
  // Remove useless attributes
  m_AttFilter = new RemoveUseless();
  m_AttFilter.setInputFormat(train);
  train = Filter.useFilter(train, m_AttFilter);
  // Transform attributes
  m_NominalToBinary = new NominalToBinary();
  m_NominalToBinary.setInputFormat(train);
  train = Filter.useFilter(train, m_NominalToBinary);
  // Save the structure for printing the model
  m_structure = new Instances(train, 0);
  // Extract data
  m_ClassIndex = train.classIndex();
  m_NumClasses = train.numClasses();
  int nK = m_NumClasses; // All K classes needed unlike Weka's logistic framework
  int nR = m_NumPredictors = train.numAttributes() - 1; // Minus 1 to remove the class attribute
  int nC = train.numInstances();
  m_Data = new double[nC][nR + 1]; // Data values
  int[] Y = new int[nC]; // Class labels
  double[] xMean = new double[nR + 1]; // Attribute means
  double[] xSD = new double[nR + 1]; // Attribute stddev's
  double[] sY = new double[nK]; // Number of classes
  double[] sm_sY = new double[sm_NumNodes]; // Number of instances under each node
  double[] weights = new double[nC]; // Weights of instances
  double totWeights = 0; // Total weights of the instances
  m_Par = new double[nR + 1][nK]; // Optimized parameter values of the leaf nodes
  sm_Par = new double[nR+1][sm_NumNodes]; // Optimized parameter values of all the nodes
  for (int i = 0; i < nC; i++) {
    // initialize X[][]: copy predictors and accumulate weighted moments
    Instance current = train.instance(i);
    Y[i] = (int) current.classValue(); // Class value starts from 0
    weights[i] = current.weight(); // Dealing with weights
    totWeights += weights[i];
    m_Data[i][0] = 1;
    int j = 1;
    for (int k = 0; k <= nR; k++) {
      if (k != m_ClassIndex) {
        double x = current.value(k);
        m_Data[i][j] = x;
        xMean[j] += weights[i] * x;
        xSD[j] += weights[i] * x * x;
        j++;
      }
    }
    // Class count
    sY[Y[i]]++;
  }
  if ((totWeights <= 1) && (nC > 1)) {
    throw new Exception(
      "Sum of weights of instances less than 1, please reweight!");
  }
  // Intercept column (index 0) is never normalized
  xMean[0] = 0;
  xSD[0] = 1;
  for (int j = 1; j <= nR; j++) {
    xMean[j] = xMean[j] / totWeights;
    if (totWeights > 1) {
      // weighted sample standard deviation
      xSD[j] = Math.sqrt(Math.abs(xSD[j] - totWeights * xMean[j] * xMean[j])/ (totWeights - 1));
    } else {
      xSD[j] = 0;
    }
  }
  // Normalise input data
  for (int i = 0; i < nC; i++) {
    for (int j = 0; j <= nR; j++) {
      if (xSD[j] != 0) {
        m_Data[i][j] = (m_Data[i][j] - xMean[j]) / xSD[j];
      }
    }
  }
  double x[] = new double[(nR + 1) * nK];
  double[][] b = new double[2][x.length]; // Boundary constraints, N/A here
  // Fill sm_sY
  // First fill the leaf node
  for(Node n : sm_ClassStructure.leafs){
    sm_sY[n.nodeIndex] = sY[n.nodeIndex];
  }
  // Propagate counts upward: each internal node's count is the sum of its
  // children's counts, layer by layer
  for(int l=1; l<sm_ClassStructure.numLayers; l++){
    for(Node n :sm_ClassStructure.layers.get(l)){
      for(Integer cind : n.children){
        sm_sY[n.nodeIndex] = sm_sY[n.nodeIndex]+sm_sY[cind];
      }
    }
  }
  // Initialize leaf parameters and (unused) bound constraints
  for (int p = 0; p < nK; p++) {
    int offset = p * (nR + 1);
    // NOTE(review): sY has length nK, so sY[nK] is out of bounds and will
    // throw ArrayIndexOutOfBoundsException here. This looks carried over
    // from Weka's Logistic, where the last class is a reference class and
    // the counts array is one element longer — TODO confirm and fix.
    x[offset] = Math.log(sY[p] + 1.0) - Math.log(sY[nK] + 1.0); // Null model
    b[0][offset] = Double.NaN;
    b[1][offset] = Double.NaN;
    for (int q = 1; q <= nR; q++) {
      x[offset + q] = 0.0;
      b[0][offset + q] = Double.NaN;
      b[1][offset + q] = Double.NaN;
    }
  }
  double[] sm_x = new double[(nR+1)*sm_NumNodes];
  // Initialize the sm_x parameters
  for(int p=0; p<sm_NumNodes; p++) {
    int offset = p*(nR+1);
    // NOTE(review): sm_sY[nK] uses the node at index nK as the reference —
    // presumably the first internal node; verify this is intended.
    sm_x[offset] = Math.log(sm_sY[p] + 1.0) - Math.log(sm_sY[nK] + 1.0); // Null model
    for (int q = 1; q <= nR; q++) {
      sm_x[offset+q] = 0.0;
    }
  }
  OptObject oO = new OptObject();
  oO.setCls(Y);
  oO.setWeights(weights);
  oO.setsmX(sm_x);
  Optimization opt = null;
  if (m_useConjugateGradientDescent) {
    opt = new OptEngCG(oO);
  } else {
    opt = new OptEng(oO);
  }
  if (m_MaxIts == -1) { // Search until convergence
    x = opt.findArgmin(x, b);
    while (x == null) {
      // findArgmin returns null when it hit the iteration cap of one
      // invocation; resume from the current variable values
      x = opt.getVarbValues();
      x = opt.findArgmin(x, b);
    }
  } else {
    opt.setMaxIteration(m_MaxIts);
    x = opt.findArgmin(x, b);
    if (x == null) {
      x = opt.getVarbValues();
    }
  }
  m_LL = -opt.getMinFunction(); // Log-likelihood
  // Don't need data matrix anymore
  m_Data = null;
  // Now update the m_Par and sm_Par with the learned parameters,
  // undoing the attribute normalization applied above
  for (int i = 0; i < nK; i++) {
    m_Par[0][i] = x[i * (nR + 1)];
    for (int j = 1; j <= nR; j++) {
      m_Par[j][i] = x[i * (nR + 1) + j];
      if (xSD[j] != 0) {
        m_Par[j][i] /= xSD[j];
        m_Par[0][i] -= m_Par[j][i] * xMean[j];
      }
    }
  }
  oO.updateInternalNodes(x);
  //Now update the sm_Par with the learned parameters (same de-normalization)
  for(Node n : sm_ClassStructure.allNodes.values()){
    sm_Par[0][n.nodeIndex] = oO.sm_x[n.nodeIndex*(nR+1)];
    for(int j=1; j<=nR; j++){
      sm_Par[j][n.nodeIndex] = oO.sm_x[n.nodeIndex*(nR+1)+j];
      if(xSD[j] != 0 ){
        sm_Par[j][n.nodeIndex] /=xSD[j];
        sm_Par[0][n.nodeIndex] -= sm_Par[j][n.nodeIndex]*xMean[j];
      }
    }
  }
}
/**
 * Produces a PMML representation of the trained leaf-level logistic model.
 *
 * @param train the training data header used to describe the model
 * @return the PMML document as a string
 */
@Override
public String toPMML(Instances train) {
  String pmml =
      LogisticProducerHelper.toPMML(train, m_structure, m_Par, m_NumClasses);
  return pmml;
}
/**
 * Returns a human-readable description of the trained model: a table of
 * coefficients and a table of odds ratios, one column per node in the
 * class structure.
 *
 * @return the model description, or a "no model built" message
 */
@Override
public String toString() {
  StringBuffer temp = new StringBuffer();
  String result = "";
  temp.append("Structured MutiLogistic Regression with ridge parameter of " + m_Ridge);
  if (sm_Par == null) {
    return result + ": No model built yet.";
  }
  // find longest attribute name
  int attLength = 0;
  for (int i = 0; i < m_structure.numAttributes(); i++) {
    if (i != m_structure.classIndex() && m_structure.attribute(i).name().length() > attLength) {
      attLength = m_structure.attribute(i).name().length();
    }
  }
  if ("Intercept".length() > attLength) {
    attLength = "Intercept".length();
  }
  if ("Variable".length() > attLength) {
    attLength = "Variable".length();
  }
  attLength += 2;
  int colWidth = 0;
  // check length of class names
  for (Node n : sm_ClassStructure.allNodes.values()) {
    if (n.nodeName.length() > colWidth) {
      colWidth = n.nodeName.length();
    }
  }
  // check against coefficients and odds ratios
  for (int j = 1; j <= m_NumPredictors; j++) {
    for (Node n : sm_ClassStructure.allNodes.values()) {
      if (Utils.doubleToString(sm_Par[j][n.nodeIndex], 12, 4).trim().length() > colWidth) {
        colWidth = Utils.doubleToString(sm_Par[j][n.nodeIndex], 12, 4).trim().length();
      }
      // BUG FIX: use sm_Par here. This loop visits ALL nodes, whose indexes
      // can exceed the leaf-class count; indexing m_Par (sized by the number
      // of leaf classes) could throw ArrayIndexOutOfBoundsException.
      double ORc = Math.exp(sm_Par[j][n.nodeIndex]);
      String t = " "
        + ((ORc > 1e10) ? "" + ORc : Utils.doubleToString(ORc, 12, 4));
      if (t.trim().length() > colWidth) {
        colWidth = t.trim().length();
      }
    }
  }
  if ("Class".length() > colWidth) {
    colWidth = "Class".length();
  }
  colWidth += 2;
  // Coefficient table
  temp.append("\nCoefficients...\n");
  temp.append(Utils.padLeft(" ", attLength)
    + Utils.padLeft("Class", colWidth) + "\n");
  temp.append(Utils.padRight("Variable", attLength));
  for (Node n : sm_ClassStructure.allNodes.values()) {
    String className = n.nodeName;
    temp.append(Utils.padLeft(className, colWidth));
  }
  temp.append("\n");
  int separatorL = attLength + ((sm_NumNodes) * colWidth);
  for (int i = 0; i < separatorL; i++) {
    temp.append("=");
  }
  temp.append("\n");
  int j = 1;
  for (int i = 0; i < m_structure.numAttributes(); i++) {
    if (i != m_structure.classIndex()) {
      temp.append(Utils.padRight(m_structure.attribute(i).name(), attLength));
      for (Node n : sm_ClassStructure.allNodes.values()) {
        temp.append(Utils.padLeft(Utils.doubleToString(sm_Par[j][n.nodeIndex], 12, 4).trim(), colWidth));
      }
      temp.append("\n");
      j++;
    }
  }
  temp.append(Utils.padRight("Intercept", attLength));
  for (Node n : sm_ClassStructure.allNodes.values()) {
    temp.append(Utils.padLeft(Utils.doubleToString(sm_Par[0][n.nodeIndex], 10, 4).trim(), colWidth));
  }
  temp.append("\n");
  // Odds-ratio table
  temp.append("\n\nOdds Ratios...\n");
  temp.append(Utils.padLeft(" ", attLength)
    + Utils.padLeft("Class", colWidth) + "\n");
  temp.append(Utils.padRight("Variable", attLength));
  for (Node n: sm_ClassStructure.allNodes.values()) {
    String className = n.nodeName;
    temp.append(Utils.padLeft(className, colWidth));
  }
  temp.append("\n");
  for (int i = 0; i < separatorL; i++) {
    temp.append("=");
  }
  temp.append("\n");
  j = 1;
  for (int i = 0; i < m_structure.numAttributes(); i++) {
    if (i != m_structure.classIndex()) {
      temp.append(Utils.padRight(m_structure.attribute(i).name(), attLength));
      for (Node n : sm_ClassStructure.allNodes.values()) {
        double ORc = Math.exp(sm_Par[j][n.nodeIndex]);
        String ORs = " "+ ((ORc > 1e10) ? "" + ORc : Utils.doubleToString(ORc, 12, 4));
        temp.append(Utils.padLeft(ORs.trim(), colWidth));
      }
      temp.append("\n");
      j++;
    }
  }
  return temp.toString();
}
/**
 * BFGS-based optimization engine; delegates objective-function and
 * gradient evaluation to an {@link OptObject}.
 */
private class OptEng extends Optimization {

  /** Delegate providing the objective and its gradient. */
  OptObject m_oO = null;

  private OptEng(OptObject oO) {
    this.m_oO = oO;
  }

  @Override
  protected double objectiveFunction(double[] x) {
    return this.m_oO.objectiveFunction(x);
  }

  @Override
  protected double[] evaluateGradient(double[] x) {
    return this.m_oO.evaluateGradient(x);
  }

  @Override
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 11247 $");
  }
}
/**
 * Conjugate-gradient optimization engine; delegates objective-function and
 * gradient evaluation to an {@link OptObject}.
 */
private class OptEngCG extends ConjugateGradientOptimization {

  /** Delegate providing the objective and its gradient. */
  OptObject m_oO = null;

  private OptEngCG(OptObject oO) {
    this.m_oO = oO;
  }

  @Override
  protected double objectiveFunction(double[] x) {
    return this.m_oO.objectiveFunction(x);
  }

  @Override
  protected double[] evaluateGradient(double[] x) {
    return this.m_oO.evaluateGradient(x);
  }

  @Override
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 11247 $");
  }
}
/**
 * Holds the optimization state (objective function and gradient) for the
 * structured logistic model. The optimizer varies only the leaf-node
 * parameters; internal-node parameters are derived from them by averaging
 * over each node's neighbors (see updateInternalNodes).
 */
private class OptObject {
  /** Weights of the instances */
  protected double[] weights;
  /** Current parameter values for ALL nodes (leafs and internal).
   *  Refreshed whenever updateInternalNodes is called. */
  protected double[] sm_x;
  /** Instance class membership */
  protected int[] cls;

  // Setters
  public void setWeights(double[] w){weights = w;}
  public void setCls(int[] c){cls=c;}
  public void setsmX(double[] smx){sm_x=smx;}

  /**
   * Propagates the current leaf parameters into the internal nodes:
   * copies x into the leaf slots of sm_x, then sets each internal node's
   * parameters to the average of its parents' and children's parameters —
   * first all odd-numbered layers, then all even-numbered layers except
   * the leaf layer (layer 0).
   *
   * @param x the current leaf-layer parameter vector
   */
  protected void updateInternalNodes(double[] x){
    int dim = m_NumPredictors + 1;
    // Copy the current x (leaf params) to sm_x
    for (Node l : sm_ClassStructure.leafs) {
      int offset = l.nodeIndex * dim;
      for (int w = 0; w < dim; w++) {
        sm_x[offset + w] = x[offset + w];
      }
    }
    // First update all odd-numbered layers
    for (int l = 1; l < sm_ClassStructure.numLayers; l += 2) {
      for (Node n : sm_ClassStructure.layers.get(l)) { // nodes in this layer
        int offset = n.nodeIndex * dim;
        double den = n.parents.size() + n.children.size();
        for (int w = 0; w < dim; w++) {
          double num = 0.0;
          for (int pind : n.parents) {
            num = num + sm_x[pind * dim + w];
          }
          for (int cind : n.children) {
            num = num + sm_x[cind * dim + w];
          }
          sm_x[offset + w] = num / den;
        }
      }
    }
    // Now update all the even numbered layers except the leaf layer
    for (int l = 2; l < sm_ClassStructure.numLayers; l += 2) {
      for (Node n : sm_ClassStructure.layers.get(l)) {
        int offset = n.nodeIndex * dim;
        double den = n.parents.size() + n.children.size();
        for (int w = 0; w < dim; w++) {
          double num = 0;
          for (int pind : n.parents) {
            num = num + sm_x[pind * dim + w];
          }
          // BUG FIX: this loop must visit the node's children (it previously
          // iterated n.parents a second time), since den counts parents AND
          // children — mirroring the odd-layer loop above.
          for (int cind : n.children) {
            num = num + sm_x[cind * dim + w];
          }
          sm_x[offset + w] = num / den;
        }
      }
    }
  }

  /**
   * Evaluates the gradient of the objective with respect to the leaf
   * parameters: the logistic-loss term summed over all instances, plus the
   * structural regularizer pulling each leaf toward its parents.
   *
   * @param x the current leaf-layer parameter vector
   * @return the gradient vector (same length as x)
   */
  public double[] evaluateGradient(double[] x){
    // Update the internal nodes first
    updateInternalNodes(x);
    // Class membership indicator variable y[m][k] in {0,1}
    int[][] y = new int[cls.length][m_NumClasses];
    for (int i = 0; i < cls.length; i++) {
      for (int j = 0; j < m_NumClasses; j++) {
        y[i][j] = (cls[i] == j) ? 1 : 0;
      }
    }
    double[] grad = new double[x.length];
    int dim = m_NumPredictors + 1;
    for (int m = 0; m < cls.length; m++) {
      for (Node n : sm_ClassStructure.leafs) {
        int offset = n.nodeIndex * dim;
        double exp = 0;
        for (int i = 0; i < dim; i++) {
          exp = exp + y[m][n.nodeIndex] * sm_x[offset + i] * m_Data[m][i];
        }
        exp = Math.exp(exp);
        for (int i = 0; i < dim; i++) {
          // BUG FIX: accumulate the per-instance contributions (this was
          // '=', which silently discarded all but the last instance).
          grad[offset + i] += -1 * m_Ridge * weights[m]
              * (1 * y[m][n.nodeIndex] * m_Data[m][i]) / (1 + exp);
        }
      }
    }
    // Now add the regularization part: d/dw of the squared distance between
    // each leaf and its parents
    for (Node n : sm_ClassStructure.leafs) {
      int offset = n.nodeIndex * dim;
      for (int i = 0; i < dim; i++) {
        grad[offset + i] = grad[offset + i] + sm_x[offset + i];
        for (int pind : n.parents) {
          grad[offset + i] = grad[offset + i] - sm_x[pind * dim + i];
        }
      }
    }
    return grad;
  }

  /**
   * Evaluates the objective (negative penalized log-likelihood): a
   * structural term (squared distance between every node and its parents)
   * plus the ridge-weighted logistic loss over all training instances.
   *
   * @param x the current leaf-layer parameter vector
   * @return the objective value to minimize
   */
  public double objectiveFunction(double[] x){
    double nll = 0.0;
    double firstpart = 0.0;
    double secondpart = 0.0;
    // Class membership indicator variable y[m][k] in {0,1}
    int[][] y = new int[cls.length][m_NumClasses];
    for (int i = 0; i < cls.length; i++) {
      for (int j = 0; j < m_NumClasses; j++) {
        y[i][j] = (cls[i] == j) ? 1 : 0;
      }
    }
    int dim = m_NumPredictors + 1;
    // update all the internal nodes again
    updateInternalNodes(x);
    // Calculate the first part: 0.5 * sum of squared parent-child distances
    for (Node n : sm_ClassStructure.allNodes.values()) {
      int offset = n.nodeIndex * dim;
      for (Integer pind : n.parents) {
        int p_offset = pind * dim;
        for (int w = 0; w < dim; w++) {
          // BUG FIX: was '1/2*...', which is integer division (== 0) and
          // silently zeroed out the entire structural term.
          firstpart = firstpart + (0.5
              * (sm_x[offset + w] - sm_x[p_offset + w])
              * (sm_x[offset + w] - sm_x[p_offset + w]));
        }
      }
    }
    // Calculate the second part: ridge-weighted logistic loss
    for (int c = 0; c < cls.length; c++) { // Over all training instances
      double exp = 0.0;
      for (Node l : sm_ClassStructure.leafs) { // Over all leaf nodes
        int offset = l.nodeIndex * dim;
        for (int d = 0; d < dim; d++) {
          exp = exp + sm_x[offset + d] * m_Data[c][d];
        }
        exp = exp * y[c][l.nodeIndex] * -1;
        exp = Math.log(1 + Math.exp(exp));
      }
      exp = exp * weights[c] * m_Ridge;
      secondpart = secondpart + exp;
    }
    nll = firstpart + secondpart;
    return nll;
  }
}
/**
 * In-memory representation of the class hierarchy, parsed from a
 * tab-separated structure file. Tracks the root, the leaf nodes, a
 * per-layer grouping, and an index → node map.
 */
protected class ClassRelationStructure implements Serializable{

  private static final long serialVersionUID = -4392515471314248045L;

  /** Leaf nodes (the observable classes) */
  protected List<Node> leafs = new ArrayList<Node>();
  /** Root of the hierarchy */
  protected Node root;
  /** Nodes grouped by layer; index 0 is the leaf layer */
  protected List<List<Node>> layers = new ArrayList<List<Node>>();
  /** All nodes keyed by node index */
  protected Map<Integer,Node> allNodes = new HashMap<Integer,Node>();
  /** Number of layers in the hierarchy */
  protected int numLayers;

  /**
   * Parses the structure file. Lines starting with '#' are comments; other
   * lines are tab-separated with at least 6 fields: node index, node name,
   * isLeaf (0/1), parent indexes (comma-separated), child indexes
   * (comma-separated), and layer number.
   *
   * @param structureFile the tab-separated structure description
   * @param nLayers number of layers in the structure
   * @throws IOException if the file cannot be read
   */
  public ClassRelationStructure(File structureFile,int nLayers) throws IOException {
    BufferedReader br = new BufferedReader(new FileReader(structureFile));
    try {
      String line;
      numLayers=nLayers;
      for(int i=0; i<numLayers; i++){
        layers.add(new ArrayList<Node>());
      }
      while ((line = br.readLine()) != null) {
        if(!line.startsWith("#")){
          String[] pieces = line.split("\t");
          if(pieces.length <6){System.err.println("Incorrect class structure format!!!"); System.exit(1);}
          // (redundant '== 1 ? true : false' simplified to the comparison)
          Node currNode = new Node(Integer.parseInt(pieces[0]), pieces[1],
              Integer.parseInt(pieces[2]) == 1);
          // Add parents indexes; the root (name contains "Root") has none
          if(!currNode.nodeName.contains("Root")){
            String[] pInds = pieces[3].split(",");
            for(String s : pInds){
              currNode.addParent(Integer.parseInt(s));
            }
          }else{
            currNode.isRoot = true;
          }
          // Add children indexes; leaves have none
          if(!currNode.isLeaf){
            String[] cInds = pieces[4].split(",");
            for(String s : cInds){
              currNode.addChild(Integer.parseInt(s));
            }
          }
          allNodes.put(currNode.nodeIndex, currNode);
          if(currNode.isRoot)
            root = currNode;
          if(currNode.isLeaf)
            leafs.add(currNode);
          layers.get(Integer.parseInt(pieces[5])).add(currNode);
        }
      }
    } finally {
      // BUG FIX: close the reader even when readLine/parsing throws
      // (previously the reader leaked on any exception path).
      br.close();
    }
  }

  /** A single node of the class hierarchy. Ordered by node index. */
  protected class Node implements Comparable<Node>{
    protected int nodeIndex;
    protected String nodeName;
    protected boolean isLeaf=false;
    /** Layer of this node (currently unset by the parser) */
    protected int level;
    protected boolean isRoot=false;
    /** Indexes of parent nodes */
    protected List<Integer> parents = new ArrayList<Integer>();
    /** Indexes of child nodes */
    protected List<Integer> children = new ArrayList<Integer>();

    protected Node(int nInd, String nName, boolean leaf) {
      nodeIndex = nInd;
      nodeName = nName;
      isLeaf = leaf;
    }

    protected void addParent(Integer pInd){
      parents.add(pInd);
    }

    protected void addChild(Integer cInd){
      children.add(cInd);
    }

    @Override
    public int compareTo(Node o) {
      // Ordering is by nodeIndex only
      if(o.nodeIndex == nodeIndex){return 0;}
      else if(nodeIndex > o.nodeIndex){return 1;}
      else{return -1;}
    }
  }
}
/**
 * Returns an enumeration describing the available options.
 *
 * @return an enumeration of all the available options
 */
@Override
public Enumeration<Option> listOptions() {
  Vector<Option> newVector = new Vector<Option>(6);
  newVector.addElement(new Option(
    "\tUse conjugate gradient descent rather than BFGS updates.", "C", 0,
    "-C"));
  newVector.addElement(new Option("\tSet the ridge in the log-likelihood.",
    "R", 1, "-R <ridge>"));
  newVector.addElement(new Option("\tSet the maximum number of iterations"
    + " (default -1, until convergence).", "M", 1, "-M <number>"));
  // BUG FIX: -CLS and -NL each consume one argument (see setOptions), so
  // their Option descriptors must declare 1 argument, not 0.
  newVector.addElement(new Option("\tProvide the class structure file for the multiclasses","CLS",1,"-CLS <File name>"));
  newVector.addElement(new Option("\tProvide the number of layers in the class structur","NL",1,"-NL <Num of Layers>"));
  newVector.addAll(Collections.list(super.listOptions()));
  return newVector.elements();
}
/**
 * Parses the option array: -C (use CGD), -R &lt;ridge&gt;, -M &lt;max
 * iterations&gt;, -NL &lt;number of layers&gt; (required) and -CLS
 * &lt;structure file&gt; (required, parsed into sm_ClassStructure).
 *
 * @param options the list of options as an array of strings
 * @throws Exception if an option is not supported
 */
@Override
public void setOptions(String[] options) throws Exception {
  setUseConjugateGradientDescent(Utils.getFlag('C', options));
  String ridgeString = Utils.getOption('R', options);
  if (ridgeString.length() != 0) {
    m_Ridge = Double.parseDouble(ridgeString);
  } else {
    m_Ridge = 1.0e-8;
  }
  String maxItsString = Utils.getOption('M', options);
  if (maxItsString.length() != 0) {
    m_MaxIts = Integer.parseInt(maxItsString);
  } else {
    m_MaxIts = -1;
  }
  String numLayerString = Utils.getOption("NL", options);
  // BUG FIX: compare string contents, not references — '== ""' only works
  // for interned literals and is unreliable; match the .length() style used
  // for the other options above.
  if (numLayerString.length() == 0) {
    System.err.println("Please provide number of layers!!");
    System.exit(1);
  }
  sm_NumLayers = Integer.parseInt(numLayerString);
  String classStructureFilename = Utils.getOption("CLS", options);
  // BUG FIX: same reference-comparison issue as above.
  if (classStructureFilename.length() == 0) {
    System.err.println("Please provide the class structure file!!");
    System.exit(1);
  } else {
    File f = new File(classStructureFilename);
    sm_ClassStructure = new ClassRelationStructure(f, sm_NumLayers);
    sm_NumNodes = sm_ClassStructure.allNodes.size();
  }
  super.setOptions(options);
  Utils.checkForRemainingOptions(options);
}
/**
 * Gets the current settings of the classifier as an option array.
 *
 * @return an array of strings suitable for passing to setOptions
 */
@Override
public String[] getOptions() {
  Vector<String> options = new Vector<String>();
  if (getUseConjugateGradientDescent()) {
    options.add("-C");
  }
  options.add("-R");
  options.add("" + m_Ridge);
  options.add("-M");
  options.add("" + m_MaxIts);
  // BUG FIX: -NL takes an argument; previously only the bare flag was
  // emitted, which corrupted option round-tripping through setOptions.
  options.add("-NL");
  options.add("" + sm_NumLayers);
  // NOTE(review): -CLS also takes a file argument, but the path is not
  // retained after setOptions, so only the bare flag can be emitted here —
  // round-tripping -CLS remains broken. TODO store the structure-file path
  // in a field so it can be written out.
  options.add("-CLS");
  Collections.addAll(options, super.getOptions());
  return options.toArray(new String[0]);
}
/**
 * Main method for testing this class.
 *
 * @param argv should contain the command line arguments to the scheme (see
 *          Evaluation)
 */
public static void main(String[] argv) {
  // Delegates to Weka's standard runner: parses options, trains, evaluates.
  runClassifier(new MultiLogistic(), argv);
}
}
| src/edu/psu/compbio/seqcode/projects/akshay/MultiSeq/MultiLogistic.java | package edu.psu.compbio.seqcode.projects.akshay.MultiSeq;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.MultiLogistic.ClassRelationStructure.Node;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.pmml.producer.LogisticProducerHelper;
import weka.core.*;
import weka.core.Capabilities.Capability;
import weka.core.pmml.PMMLProducer;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.NominalToBinary;
import weka.filters.unsupervised.attribute.RemoveUseless;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;
@SuppressWarnings("serial")
public class MultiLogistic extends AbstractClassifier implements OptionHandler, WeightedInstancesHandler, PMMLProducer{
// Model parameters compatible with a normal Multinomial logit model
/** The coefficients of the optimized Structured Multinomial logit model. However, this only stores the leaf node parameters*/
protected double[][] m_Par; // [NumPredictors+Intercept][NumClasses]
/** The training data saved as a matrix */
protected double[][] m_Data; // [NumInstances][NumPredictors+Intercept]
/** The number of attributes in the model */
protected int m_NumPredictors;
/** The number of class lables or the number of leaf nodes */
protected int m_NumClasses;
/** The regularization parameter, Is multiplied to the logit part of the loss function */
protected double m_Ridge = 100;
/** The index of the class attribute. Usually the last attribute */
protected int m_ClassIndex;
/** An attribute filter */
private RemoveUseless m_AttFilter;
/** The filter used to make attributes numeric. */
private NominalToBinary m_NominalToBinary;
/** The filter used to get rid of missing values. */
private ReplaceMissingValues m_ReplaceMissingValues;
/** Log-likelihood of the searched model */
protected double m_LL;
/** The maximum number of iterations. */
private int m_MaxIts = -1;
/** Wether to use conjugate gradient descent rather than BFGS updates. */
private boolean m_useConjugateGradientDescent = false;
private Instances m_structure;
// Model parameters of the structured multinomial logit not compatible with the weka logit class
/** All model parameters including the internal non-leaf nodes */
protected double[][] sm_Par; //[NumPredictors+Intercept][NumNodes]
/** Total number of nodes in the structured representation of the classes. Including the root node */
protected int sm_NumNodes;
protected int sm_NumLayers;
/** Class Structure relationships */
public ClassRelationStructure sm_ClassStructure;
// Setters
/**
* Sets whether conjugate gradient descent is used.
*
* @param useConjugateGradientDescent true if CGD is to be used.
*/
public void setUseConjugateGradientDescent(boolean useConjugateGradientDescent) {
m_useConjugateGradientDescent = useConjugateGradientDescent;
}
/**
* Sets the Ridge parameter
* @param ridge
*/
public void setRidge(double ridge){
m_Ridge = ridge;
}
/**
* Sets the maximum nuber of training rounds while lerning
* @param newMaxIts
*/
public void setMaxIts(int newMaxIts){
m_MaxIts = newMaxIts;
}
//Gettors
/**
* Gets whether to use conjugate gradient descent rather than BFGS updates.
*
* @return true if CGD is used
*/
public boolean getUseConjugateGradientDescent() {
return m_useConjugateGradientDescent;
}
/**
* Returns the ridge co-eff of the model
* @return
*/
public double getRidge(){
return m_Ridge;
}
/**
* Return the value of m_MaxIts
* @return
*/
public int getMaxIts(){
return m_MaxIts;
}
/**
* Returns the coefficients of the leaf nodes of the model. The first dimension
* indexes the attributes, and the second the classes.
*
* @return the coefficients for this logistic model
*/
public double[][] coefficients() {
return m_Par;
}
/**
* Returns the coefficients of all the nodes of the trained model
* @return
*/
public double[][] coefficientsFull(){
return sm_Par;
}
/**
* Returns default capabilities of the classifier.
* @return the capabilities of this classifier
*/
@Override
public Capabilities getCapabilities() {
Capabilities result = super.getCapabilities();
result.disableAll();
// attributes
result.enable(Capability.NOMINAL_ATTRIBUTES);
result.enable(Capability.NUMERIC_ATTRIBUTES);
result.enable(Capability.DATE_ATTRIBUTES);
result.enable(Capability.MISSING_VALUES);
// class
result.enable(Capability.NOMINAL_CLASS);
result.enable(Capability.MISSING_CLASS_VALUES);
return result;
}
@Override
public double[] distributionForInstance(Instance instance) throws Exception {
m_ReplaceMissingValues.input(instance);
instance = m_ReplaceMissingValues.output();
m_AttFilter.input(instance);
instance = m_AttFilter.output();
m_NominalToBinary.input(instance);
instance = m_NominalToBinary.output();
// Extract the predictor columns into an array
double[] instDat = new double[m_NumPredictors + 1];
int j = 1;
instDat[0] = 1;
for (int k = 0; k <= m_NumPredictors; k++) {
if (k != m_ClassIndex) {
instDat[j++] = instance.value(k);
}
}
double[] distribution = evaluateProbability(instDat);
return distribution;
}
private double[] evaluateProbability(double[] data) {
double[] prob = new double[m_NumClasses];
for(int i=0; i<m_NumClasses; i++){
double exp = 0.0;
for(int k=0; k<=m_NumPredictors; k++){
exp = exp+m_Par[k][i]*data[k];
}
exp = 1/(1+Math.exp(-1*exp));
prob[i] = exp;
}
// Now normalize (Not sure this is a good idea at the moment)
double sum=0.0;
for(int p=0; p<prob.length; p++){
sum=sum+prob[p];
}
for(int p=0; p<prob.length;p++){
prob[p] = prob[p]/sum;
}
return prob;
}
@Override
public void buildClassifier(Instances train) throws Exception {
// can classifier handle the data?
getCapabilities().testWithFail(train);
// remove instances with missing class
train = new Instances(train);
train.deleteWithMissingClass();
// Replace missing values
m_ReplaceMissingValues = new ReplaceMissingValues();
m_ReplaceMissingValues.setInputFormat(train);
train = Filter.useFilter(train, m_ReplaceMissingValues);
// Remove useless attributes
m_AttFilter = new RemoveUseless();
m_AttFilter.setInputFormat(train);
train = Filter.useFilter(train, m_AttFilter);
// Transform attributes
m_NominalToBinary = new NominalToBinary();
m_NominalToBinary.setInputFormat(train);
train = Filter.useFilter(train, m_NominalToBinary);
// Save the structure for printing the model
m_structure = new Instances(train, 0);
// Extract data
m_ClassIndex = train.classIndex();
m_NumClasses = train.numClasses();
int nK = m_NumClasses; // All K classes needed unlike Weka's logistic framework
int nR = m_NumPredictors = train.numAttributes() - 1; // Minus 1 to remove the class attribute
int nC = train.numInstances();
m_Data = new double[nC][nR + 1]; // Data values
int[] Y = new int[nC]; // Class labels
double[] xMean = new double[nR + 1]; // Attribute means
double[] xSD = new double[nR + 1]; // Attribute stddev's
double[] sY = new double[nK]; // Number of classes
double[] sm_sY = new double[sm_NumNodes]; // Number of instances under each node
double[] weights = new double[nC]; // Weights of instances
double totWeights = 0; // Total weights of the instances
m_Par = new double[nR + 1][nK]; // Optimized parameter values of the leaf nodes
sm_Par = new double[nR+1][sm_NumNodes]; // Optimized parameter values of all the nodes
for (int i = 0; i < nC; i++) {
// initialize X[][]
Instance current = train.instance(i);
Y[i] = (int) current.classValue(); // Class value starts from 0
weights[i] = current.weight(); // Dealing with weights
totWeights += weights[i];
m_Data[i][0] = 1;
int j = 1;
for (int k = 0; k <= nR; k++) {
if (k != m_ClassIndex) {
double x = current.value(k);
m_Data[i][j] = x;
xMean[j] += weights[i] * x;
xSD[j] += weights[i] * x * x;
j++;
}
}
// Class count
sY[Y[i]]++;
}
if ((totWeights <= 1) && (nC > 1)) {
throw new Exception(
"Sum of weights of instances less than 1, please reweight!");
}
xMean[0] = 0;
xSD[0] = 1;
for (int j = 1; j <= nR; j++) {
xMean[j] = xMean[j] / totWeights;
if (totWeights > 1) {
xSD[j] = Math.sqrt(Math.abs(xSD[j] - totWeights * xMean[j] * xMean[j])/ (totWeights - 1));
} else {
xSD[j] = 0;
}
}
// Normalise input data
for (int i = 0; i < nC; i++) {
for (int j = 0; j <= nR; j++) {
if (xSD[j] != 0) {
m_Data[i][j] = (m_Data[i][j] - xMean[j]) / xSD[j];
}
}
}
double x[] = new double[(nR + 1) * nK];
double[][] b = new double[2][x.length]; // Boundary constraints, N/A here
// Fill sm_sY
// First fill the leaf node
for(Node n : sm_ClassStructure.leafs){
sm_sY[n.nodeIndex] = sY[n.nodeIndex];
}
for(int l=1; l<sm_ClassStructure.numLayers; l++){
for(Node n :sm_ClassStructure.layers.get(l)){
for(Integer cind : n.children){
sm_sY[n.nodeIndex] = sm_sY[n.nodeIndex]+sm_sY[cind];
}
}
}
// Initialize
for (int p = 0; p < nK; p++) {
int offset = p * (nR + 1);
x[offset] = Math.log(sY[p] + 1.0) - Math.log(sY[nK] + 1.0); // Null model
b[0][offset] = Double.NaN;
b[1][offset] = Double.NaN;
for (int q = 1; q <= nR; q++) {
x[offset + q] = 0.0;
b[0][offset + q] = Double.NaN;
b[1][offset + q] = Double.NaN;
}
}
double[] sm_x = new double[(nR+1)*sm_NumNodes];
// Initialize the sm_x parameters
for(int p=0; p<sm_NumNodes; p++) {
int offset = p*(nR+1);
sm_x[offset] = Math.log(sm_sY[p] + 1.0) - Math.log(sm_sY[nK] + 1.0); // Null model
for (int q = 1; q <= nR; q++) {
sm_x[offset+q] = 0.0;
}
}
OptObject oO = new OptObject();
oO.setCls(Y);
oO.setWeights(weights);
oO.setsmX(sm_x);
Optimization opt = null;
if (m_useConjugateGradientDescent) {
opt = new OptEngCG(oO);
} else {
opt = new OptEng(oO);
}
if (m_MaxIts == -1) { // Search until convergence
x = opt.findArgmin(x, b);
while (x == null) {
x = opt.getVarbValues();
x = opt.findArgmin(x, b);
}
} else {
opt.setMaxIteration(m_MaxIts);
x = opt.findArgmin(x, b);
if (x == null) {
x = opt.getVarbValues();
}
}
m_LL = -opt.getMinFunction(); // Log-likelihood
// Don't need data matrix anymore
m_Data = null;
// Now update the m_Par and sm_Par with the learned parameters
for (int i = 0; i < nK; i++) {
m_Par[0][i] = x[i * (nR + 1)];
for (int j = 1; j <= nR; j++) {
m_Par[j][i] = x[i * (nR + 1) + j];
if (xSD[j] != 0) {
m_Par[j][i] /= xSD[j];
m_Par[0][i] -= m_Par[j][i] * xMean[j];
}
}
}
oO.updateInternalNodes(x);
//Now update the sm_Par with the learned parameters
for(Node n : sm_ClassStructure.allNodes.values()){
sm_Par[0][n.nodeIndex] = oO.sm_x[n.nodeIndex*(nR+1)];
for(int j=1; j<=nR; j++){
sm_Par[j][n.nodeIndex] = oO.sm_x[n.nodeIndex*(nR+1)+j];
if(xSD[j] != 0 ){
sm_Par[j][n.nodeIndex] /=xSD[j];
sm_Par[0][n.nodeIndex] -= sm_Par[j][n.nodeIndex]*xMean[j];
}
}
}
}
@Override
public String toPMML(Instances train) {
return LogisticProducerHelper.toPMML(train, m_structure, m_Par,
m_NumClasses);
}
/**
 * Produces a human-readable description of the fitted model: the ridge
 * parameter, a per-class-node coefficient table and an odds-ratio table.
 *
 * @return textual summary of the model, or a "no model" notice when
 *         training has not been performed yet
 */
@Override
public String toString() {
    StringBuilder temp = new StringBuilder();
    temp.append("Structured MultiLogistic Regression with ridge parameter of " + m_Ridge);
    if (sm_Par == null) {
        // Fix: the header accumulated in temp was previously discarded here
        // (an empty "result" string was returned instead).
        return temp.toString() + ": No model built yet.";
    }

    // Find the longest attribute name so the variable column lines up.
    int attLength = 0;
    for (int i = 0; i < m_structure.numAttributes(); i++) {
        if (i != m_structure.classIndex()
                && m_structure.attribute(i).name().length() > attLength) {
            attLength = m_structure.attribute(i).name().length();
        }
    }
    if ("Intercept".length() > attLength) {
        attLength = "Intercept".length();
    }
    if ("Variable".length() > attLength) {
        attLength = "Variable".length();
    }
    attLength += 2;

    // Column width: the widest of the node names, coefficients and odds ratios.
    int colWidth = 0;
    for (Node n : sm_ClassStructure.allNodes.values()) {
        if (n.nodeName.length() > colWidth) {
            colWidth = n.nodeName.length();
        }
    }
    for (int j = 1; j <= m_NumPredictors; j++) {
        for (Node n : sm_ClassStructure.allNodes.values()) {
            if (Utils.doubleToString(sm_Par[j][n.nodeIndex], 12, 4).trim().length() > colWidth) {
                colWidth = Utils.doubleToString(sm_Par[j][n.nodeIndex], 12, 4).trim().length();
            }
            // Fix: measure the odds ratio on sm_Par — the table printed
            // below uses sm_Par, but this width check previously read
            // m_Par and could mis-size the column.
            double ORc = Math.exp(sm_Par[j][n.nodeIndex]);
            String t = " "
                + ((ORc > 1e10) ? "" + ORc : Utils.doubleToString(ORc, 12, 4));
            if (t.trim().length() > colWidth) {
                colWidth = t.trim().length();
            }
        }
    }
    if ("Class".length() > colWidth) {
        colWidth = "Class".length();
    }
    colWidth += 2;

    // Coefficient table.
    temp.append("\nCoefficients...\n");
    temp.append(Utils.padLeft(" ", attLength)
        + Utils.padLeft("Class", colWidth) + "\n");
    temp.append(Utils.padRight("Variable", attLength));
    for (Node n : sm_ClassStructure.allNodes.values()) {
        temp.append(Utils.padLeft(n.nodeName, colWidth));
    }
    temp.append("\n");
    int separatorL = attLength + (sm_NumNodes * colWidth);
    for (int i = 0; i < separatorL; i++) {
        temp.append("=");
    }
    temp.append("\n");

    int j = 1;
    for (int i = 0; i < m_structure.numAttributes(); i++) {
        if (i != m_structure.classIndex()) {
            temp.append(Utils.padRight(m_structure.attribute(i).name(), attLength));
            for (Node n : sm_ClassStructure.allNodes.values()) {
                temp.append(Utils.padLeft(
                    Utils.doubleToString(sm_Par[j][n.nodeIndex], 12, 4).trim(), colWidth));
            }
            temp.append("\n");
            j++;
        }
    }
    temp.append(Utils.padRight("Intercept", attLength));
    for (Node n : sm_ClassStructure.allNodes.values()) {
        // Width 12 for consistency with the coefficient rows above
        // (previously 10, which could round/format differently).
        temp.append(Utils.padLeft(
            Utils.doubleToString(sm_Par[0][n.nodeIndex], 12, 4).trim(), colWidth));
    }
    temp.append("\n");

    // Odds-ratio table.
    temp.append("\n\nOdds Ratios...\n");
    temp.append(Utils.padLeft(" ", attLength)
        + Utils.padLeft("Class", colWidth) + "\n");
    temp.append(Utils.padRight("Variable", attLength));
    for (Node n : sm_ClassStructure.allNodes.values()) {
        temp.append(Utils.padLeft(n.nodeName, colWidth));
    }
    temp.append("\n");
    for (int i = 0; i < separatorL; i++) {
        temp.append("=");
    }
    temp.append("\n");

    j = 1;
    for (int i = 0; i < m_structure.numAttributes(); i++) {
        if (i != m_structure.classIndex()) {
            temp.append(Utils.padRight(m_structure.attribute(i).name(), attLength));
            for (Node n : sm_ClassStructure.allNodes.values()) {
                double ORc = Math.exp(sm_Par[j][n.nodeIndex]);
                String ORs = " " + ((ORc > 1e10) ? "" + ORc : Utils.doubleToString(ORc, 12, 4));
                temp.append(Utils.padLeft(ORs.trim(), colWidth));
            }
            temp.append("\n");
            j++;
        }
    }
    return temp.toString();
}
/**
 * Quasi-Newton (BFGS) optimizer wrapper that delegates the objective
 * function and gradient evaluation to an {@link OptObject}.
 */
private class OptEng extends Optimization {

    /** Provides the objective function and its gradient. */
    OptObject m_oO = null;

    private OptEng(OptObject oO) {
        m_oO = oO;
    }

    @Override
    protected double objectiveFunction(double[] x) {
        return m_oO.objectiveFunction(x);
    }

    @Override
    protected double[] evaluateGradient(double[] x) {
        return m_oO.evaluateGradient(x);
    }

    /** @return the revision string */
    @Override
    public String getRevision() {
        return RevisionUtils.extract("$Revision: 11247 $");
    }
}
/**
 * Conjugate-gradient optimizer wrapper that delegates the objective
 * function and gradient evaluation to an {@link OptObject}.
 */
private class OptEngCG extends ConjugateGradientOptimization {

    /** Provides the objective function and its gradient. */
    OptObject m_oO = null;

    private OptEngCG(OptObject oO) {
        m_oO = oO;
    }

    @Override
    protected double objectiveFunction(double[] x) {
        return m_oO.objectiveFunction(x);
    }

    @Override
    protected double[] evaluateGradient(double[] x) {
        return m_oO.evaluateGradient(x);
    }

    /** @return the revision string */
    @Override
    public String getRevision() {
        return RevisionUtils.extract("$Revision: 11247 $");
    }
}
/**
 * Bundles the objective function and its gradient for the optimizers.
 * Holds the training-instance weights, the instance class memberships and
 * the current parameter vector (sm_x) for every node in the class
 * structure; one block of (m_NumPredictors + 1) values per node.
 */
private class OptObject {

    /** Weights of the training instances. */
    protected double[] weights;

    /**
     * Current parameter values for all nodes; the internal-node entries are
     * recomputed from the leaf parameters every time the objective or the
     * gradient is evaluated.
     */
    protected double[] sm_x;

    /** Class membership (leaf-node index) of each training instance. */
    protected int[] cls;

    // Setters
    public void setWeights(double[] w) { weights = w; }
    public void setCls(int[] c) { cls = c; }
    public void setsmX(double[] smx) { sm_x = smx; }

    /**
     * Propagates the leaf parameters into the internal nodes of the class
     * structure: all odd-numbered layers first, then all even-numbered
     * layers except the leaf layer (the leaf layer itself is handled by the
     * gradient code). Each internal node becomes the average of its
     * parents' and children's parameter vectors.
     *
     * @param x parameters of the leaf-layer nodes
     */
    protected void updateInternalNodes(double[] x) {
        int dim = m_NumPredictors + 1;
        // Copy the current leaf parameters into sm_x.
        for (Node l : sm_ClassStructure.leafs) {
            int offset = l.nodeIndex * dim;
            for (int w = 0; w < dim; w++) {
                sm_x[offset + w] = x[offset + w];
            }
        }
        // First update all odd-numbered layers.
        for (int l = 1; l < sm_ClassStructure.numLayers; l += 2) {
            for (Node n : sm_ClassStructure.layers.get(l)) {
                int offset = n.nodeIndex * dim;
                double den = n.parents.size() + n.children.size();
                for (int w = 0; w < dim; w++) {
                    double num = 0.0;
                    for (int pind : n.parents) {
                        num = num + sm_x[pind * dim + w];
                    }
                    for (int cind : n.children) {
                        num = num + sm_x[cind * dim + w];
                    }
                    sm_x[offset + w] = num / den;
                }
            }
        }
        // Now update all even-numbered layers except the leaf layer.
        for (int l = 2; l < sm_ClassStructure.numLayers; l += 2) {
            for (Node n : sm_ClassStructure.layers.get(l)) {
                int offset = n.nodeIndex * dim;
                double den = n.parents.size() + n.children.size();
                for (int w = 0; w < dim; w++) {
                    double num = 0;
                    for (int pind : n.parents) {
                        num = num + sm_x[pind * dim + w];
                    }
                    // Fix: this loop previously iterated n.parents again
                    // (copy-paste error), double-counting the parents and
                    // never summing the children.
                    for (int cind : n.children) {
                        num = num + sm_x[cind * dim + w];
                    }
                    sm_x[offset + w] = num / den;
                }
            }
        }
    }

    /**
     * Gradient of the objective with respect to the leaf-node parameters:
     * the ridge- and instance-weighted logistic-loss term summed over all
     * instances, plus the structural term pulling each leaf towards its
     * parents.
     *
     * @param x leaf-layer parameters
     * @return gradient vector (same layout as x)
     */
    public double[] evaluateGradient(double[] x) {
        // Refresh the internal nodes from the current leaf parameters.
        updateInternalNodes(x);
        // Class membership indicator: y[i][j] == 1 iff instance i has class j.
        int[][] y = new int[cls.length][m_NumClasses];
        for (int i = 0; i < cls.length; i++) {
            for (int j = 0; j < m_NumClasses; j++) {
                y[i][j] = (cls[i] == j) ? 1 : 0;
            }
        }
        double[] grad = new double[x.length];
        int dim = m_NumPredictors + 1;
        for (int m = 0; m < cls.length; m++) {
            for (Node n : sm_ClassStructure.leafs) {
                int offset = n.nodeIndex * dim;
                double exp = 0;
                for (int i = 0; i < dim; i++) {
                    exp = exp + y[m][n.nodeIndex] * sm_x[offset + i] * m_Data[m][i];
                }
                exp = Math.exp(exp);
                for (int i = 0; i < dim; i++) {
                    // Fix: accumulate over instances — the original plain
                    // assignment ("=") discarded the contribution of every
                    // instance but the last one.
                    grad[offset + i] += -1 * m_Ridge * weights[m]
                        * (y[m][n.nodeIndex] * m_Data[m][i]) / (1 + exp);
                }
            }
        }
        // Structural part: each leaf is pulled towards its parents.
        for (Node n : sm_ClassStructure.leafs) {
            int offset = n.nodeIndex * dim;
            for (int i = 0; i < dim; i++) {
                grad[offset + i] = grad[offset + i] + sm_x[offset + i];
                for (int pind : n.parents) {
                    grad[offset + i] = grad[offset + i] - sm_x[pind * dim + i];
                }
            }
        }
        return grad;
    }

    /**
     * Objective value: a structural smoothness term (half squared
     * difference between every node and each of its parents) plus the
     * ridge- and instance-weighted logistic loss over all instances and
     * leaf nodes.
     *
     * @param x leaf-layer parameters
     * @return objective value
     */
    public double objectiveFunction(double[] x) {
        double nll = 0.0;
        double firstpart = 0.0;
        double secondpart = 0.0;
        // Class membership indicator: y[i][j] == 1 iff instance i has class j.
        int[][] y = new int[cls.length][m_NumClasses];
        for (int i = 0; i < cls.length; i++) {
            for (int j = 0; j < m_NumClasses; j++) {
                y[i][j] = (cls[i] == j) ? 1 : 0;
            }
        }
        int dim = m_NumPredictors + 1;
        // Refresh the internal nodes from the current leaf parameters.
        updateInternalNodes(x);
        // First part: 0.5 * squared parent/child parameter differences.
        for (Node n : sm_ClassStructure.allNodes.values()) {
            int offset = n.nodeIndex * dim;
            for (Integer pind : n.parents) {
                int p_offset = pind * dim;
                for (int w = 0; w < dim; w++) {
                    double diff = sm_x[offset + w] - sm_x[p_offset + w];
                    // Fix: "1/2" is integer division and always evaluated
                    // to zero, silently erasing this entire term.
                    firstpart = firstpart + 0.5 * diff * diff;
                }
            }
        }
        // Second part: weighted logistic loss over instances and leaves.
        for (int c = 0; c < cls.length; c++) {
            double loss = 0.0;
            for (Node l : sm_ClassStructure.leafs) {
                int offset = l.nodeIndex * dim;
                double dot = 0.0;
                for (int d = 0; d < dim; d++) {
                    dot = dot + sm_x[offset + d] * m_Data[c][d];
                }
                // Fix: the running value was previously carried over into
                // the next leaf's dot product; each leaf's log-loss is now
                // computed independently and summed, mirroring the per-leaf
                // treatment in evaluateGradient.
                loss = loss + Math.log(1 + Math.exp(dot * y[c][l.nodeIndex] * -1));
            }
            secondpart = secondpart + loss * weights[c] * m_Ridge;
        }
        nll = firstpart + secondpart;
        return nll;
    }
}
/**
 * Parsed representation of the hierarchical class structure: all nodes by
 * index, the root, the leaves and the nodes grouped by layer.
 */
protected class ClassRelationStructure {

    protected List<Node> leafs = new ArrayList<Node>();
    protected Node root;
    protected List<List<Node>> layers = new ArrayList<List<Node>>();
    protected Map<Integer, Node> allNodes = new HashMap<Integer, Node>();
    protected int numLayers;

    /**
     * Loads the class structure from a tab-separated file. Lines starting
     * with '#' are ignored; every other line must contain at least six
     * fields: node index, name, leaf flag (1/0), parent indexes, child
     * indexes and layer number.
     *
     * @param structureFile the structure description file
     * @param nLayers number of layers in the hierarchy
     * @throws IOException if the file cannot be read
     */
    public ClassRelationStructure(File structureFile, int nLayers) throws IOException {
        numLayers = nLayers;
        for (int i = 0; i < numLayers; i++) {
            layers.add(new ArrayList<Node>());
        }
        // Fix: try-with-resources — the reader was previously leaked when
        // any exception occurred before the explicit close() call.
        try (BufferedReader br = new BufferedReader(new FileReader(structureFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                if (line.startsWith("#")) {
                    continue;
                }
                String[] pieces = line.split("\t");
                if (pieces.length < 6) {
                    System.err.println("Incorrect class structure format!!!");
                    System.exit(1);
                }
                Node currNode = new Node(Integer.parseInt(pieces[0]), pieces[1],
                    Integer.parseInt(pieces[2]) == 1);
                // Parent indexes (the root has none).
                if (!currNode.nodeName.contains("Root")) {
                    // NOTE(review): pieces[3] cannot contain '\t' because the
                    // line was already split on tabs, so this inner split is a
                    // no-op and only a single parent index is ever parsed —
                    // confirm the intended in-field delimiter (e.g. ',').
                    String[] pInds = pieces[3].split("\t");
                    for (String s : pInds) {
                        currNode.addParent(Integer.parseInt(s));
                    }
                } else {
                    currNode.isRoot = true;
                }
                // Child indexes (leaves have none).
                if (!currNode.isLeaf) {
                    // NOTE(review): same in-field delimiter concern as above.
                    String[] cInds = pieces[4].split("\t");
                    for (String s : cInds) {
                        currNode.addChild(Integer.parseInt(s));
                    }
                }
                allNodes.put(currNode.nodeIndex, currNode);
                if (currNode.isRoot) {
                    root = currNode;
                }
                if (currNode.isLeaf) {
                    leafs.add(currNode);
                }
                layers.get(Integer.parseInt(pieces[5])).add(currNode);
            }
        }
    }

    /** A single node in the class hierarchy, ordered by node index. */
    protected class Node implements Comparable<Node> {

        protected int nodeIndex;
        protected String nodeName;
        protected boolean isLeaf = false;
        protected int level;
        protected boolean isRoot = false;
        protected List<Integer> parents = new ArrayList<Integer>();
        protected List<Integer> children = new ArrayList<Integer>();

        protected Node(int nInd, String nName, boolean leaf) {
            nodeIndex = nInd;
            nodeName = nName;
            isLeaf = leaf;
        }

        protected void addParent(Integer pInd) {
            parents.add(pInd);
        }

        protected void addChild(Integer cInd) {
            children.add(cInd);
        }

        /** Orders nodes by their index (overflow-safe comparison). */
        @Override
        public int compareTo(Node o) {
            return Integer.compare(nodeIndex, o.nodeIndex);
        }
    }
}
/**
 * Returns an enumeration describing the available options.
 *
 * @return an enumeration of all the available options
 */
@Override
public Enumeration<Option> listOptions() {
    Vector<Option> newVector = new Vector<Option>(6);
    newVector.addElement(new Option(
        "\tUse conjugate gradient descent rather than BFGS updates.", "C", 0,
        "-C"));
    newVector.addElement(new Option("\tSet the ridge in the log-likelihood.",
        "R", 1, "-R <ridge>"));
    newVector.addElement(new Option("\tSet the maximum number of iterations"
        + " (default -1, until convergence).", "M", 1, "-M <number>"));
    // Fix: -CLS and -NL both consume one argument (see setOptions), so they
    // must be declared with argument count 1, not 0.
    newVector.addElement(new Option(
        "\tProvide the class structure file for the multiclasses", "CLS", 1,
        "-CLS <File name>"));
    newVector.addElement(new Option(
        "\tProvide the number of layers in the class structure", "NL", 1,
        "-NL <Num of Layers>"));
    newVector.addAll(Collections.list(super.listOptions()));
    return newVector.elements();
}
/**
 * Parses a given list of options: -C (conjugate gradient descent),
 * -R ridge, -M max-iterations, -NL number-of-layers and -CLS
 * class-structure-file, then delegates the remainder to the superclass.
 *
 * @param options the list of options as an array of strings
 * @throws Exception if an option is not supported or the class structure
 *         file cannot be read
 */
@Override
public void setOptions(String[] options) throws Exception {
    setUseConjugateGradientDescent(Utils.getFlag('C', options));

    String ridgeString = Utils.getOption('R', options);
    if (ridgeString.length() != 0) {
        m_Ridge = Double.parseDouble(ridgeString);
    } else {
        m_Ridge = 1.0e-8;
    }

    String maxItsString = Utils.getOption('M', options);
    if (maxItsString.length() != 0) {
        m_MaxIts = Integer.parseInt(maxItsString);
    } else {
        m_MaxIts = -1;
    }

    // Fix: compare string contents with isEmpty() instead of reference
    // equality (==), which only worked by accident of string interning.
    String numLayerString = Utils.getOption("NL", options);
    if (numLayerString.isEmpty()) {
        System.err.println("Please provide number of layers!!");
        System.exit(1);
    }
    sm_NumLayers = Integer.parseInt(numLayerString);

    String classStructureFilename = Utils.getOption("CLS", options);
    if (classStructureFilename.isEmpty()) {
        System.err.println("Please provide the class structure file!!");
        System.exit(1);
    } else {
        File f = new File(classStructureFilename);
        sm_ClassStructure = new ClassRelationStructure(f, sm_NumLayers);
        sm_NumNodes = sm_ClassStructure.allNodes.size();
    }

    super.setOptions(options);
    Utils.checkForRemainingOptions(options);
}
/**
 * Gets the current settings of the classifier as an option array that can
 * be fed back into setOptions.
 *
 * @return an array of strings suitable for passing to setOptions
 */
@Override
public String[] getOptions() {
    Vector<String> options = new Vector<String>();
    if (getUseConjugateGradientDescent()) {
        options.add("-C");
    }
    options.add("-R");
    options.add("" + m_Ridge);
    options.add("-M");
    options.add("" + m_MaxIts);
    // Fix: -NL takes a value (see setOptions); previously only the bare
    // flag was emitted, which made the output unparseable.
    options.add("-NL");
    options.add("" + sm_NumLayers);
    // NOTE(review): the -CLS file name is not retained after parsing, so it
    // cannot be emitted here. The bare "-CLS" flag that used to be added
    // would have swallowed the following option token as its value; store
    // the file name in a field to restore -CLS round-tripping.
    Collections.addAll(options, super.getOptions());
    return options.toArray(new String[0]);
}
/**
 * Main method for testing this class from the command line.
 *
 * @param argv should contain the command line arguments to the scheme (see
 *          Evaluation)
 */
public static void main(String[] argv) {
    runClassifier(new MultiLogistic(), argv);
}
}
| Generated serialVersionUID's MultiLogistic class that implements
Serializable | src/edu/psu/compbio/seqcode/projects/akshay/MultiSeq/MultiLogistic.java | Generated serialVersionUID's MultiLogistic class that implements Serializable |
|
Java | mit | 2f143f498fab9e9af0716a965e8b6a396a73ff36 | 0 | ihongs/HongsCORE.new,ihongs/HongsCORE.new,ihongs/HongsCORE.new,ihongs/HongsCORE.new,ihongs/HongsCORE.new | package app.hongs.action.anno;
import app.hongs.Cnst;
import app.hongs.HongsException;
import app.hongs.action.ActionHelper;
import app.hongs.action.ActionRunner;
import app.hongs.action.FormSet;
import app.hongs.action.SelectHelper;
import app.hongs.util.Synt;
import java.lang.annotation.Annotation;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Option completion handler.
 * <pre>
 * Meaning of the "ab" parameter flags:
 * !enum  do not execute the action; ab is treated as .enum only
 * .enum  request the option data
 * _enum  add the option display texts
 * _time  add numeric timestamps
 * _link  add fully qualified links
 * </pre>
 * @author Hong
 */
public class SelectInvoker implements FilterInvoker {

    /**
     * Runs the wrapped action (unless only enum data was requested) and
     * enriches the successful response with option data according to the
     * mode flags resolved from the "ab" request parameter.
     *
     * @param helper current action context
     * @param chains action chain to execute
     * @param anno   the Select annotation carrying conf/form/mode defaults
     * @throws HongsException when loading the form configuration fails for
     *         reasons other than a missing conf/form entry
     */
    @Override
    public void invoke(ActionHelper helper, ActionRunner chains, Annotation anno)
    throws HongsException {
        Select ann = (Select) anno;
        String conf = ann.conf();
        String form = ann.form();
        byte mode = ann.mode();

        // Resolve the mode from the "ab" request terms when the annotation
        // leaves it at the default (-1).
        if (mode == -1) {
            Set ab = Synt.toTerms(helper.getRequestData().get(Cnst.AB_KEY));
            if (ab != null) {
                if (ab.contains("!enum")) {
                    mode = 0;
                } else {
                    if (ab.contains(".enum")) {
                        mode += 1;
                    }
                    if (ab.contains("_enum")) {
                        mode += 2;
                    }
                    if (ab.contains("_time")) {
                        mode += 4;
                    }
                    if (ab.contains("_link")) {
                        mode += 8;
                    }
                    // NOTE(review): mode started at -1, so the first flag
                    // above contributes one less than its nominal weight;
                    // this +1 compensates when at least one flag was set.
                    if (mode >= 0) {
                        mode += 1;
                    }
                }
            }
        }

        // Mode 0 ("!enum"): do not run the action, only fetch the enum data.
        Map rsp;
        if (mode == 0) {
            mode = 1;
            rsp = new HashMap();
        } else
        if (mode == -1) {
            // No enrichment requested: just run the action.
            chains.doAction( );
            return;
        } else {
            chains.doAction( );
            rsp = helper.getResponseData( );
            if (! Synt.declare(rsp.get("ok"), false)) {
                return;
            }
        }

        // Resolve the conf/form paths from the action chain when unset.
        if (form.length() == 0) {
            form = chains.getEntity();
        }
        if (conf.length() == 0) {
            conf = chains.getModule();
            // Honor the Module Action configuration naming convention.
            if (FormSet.hasConfFile(conf+"/"+form)) {
                conf = conf+"/"+form ;
            }
        }

        // Fill in the option data.
        try {
            Map data = (Map) helper.getAttribute("form:"+conf+"."+form);
            if (data == null) {
                data = FormSet.getInstance(conf).getForm(form);
            }
            SelectHelper sup;
            sup = new SelectHelper().addItemsByForm(conf, data);
            sup.select ( rsp, mode );
        } catch (HongsException ex) {
            // Ignore "missing conf/form" errors; anything else is re-thrown.
            int ec = ex.getErrno();
            if (ec != 0x10e8 && ec != 0x10e9 && ec != 0x10ea ) {
                throw ex;
            }
        }

        // Send back the (possibly enriched) response.
        helper.reply(rsp);
    }

}
| hongs-core/src/main/java/app/hongs/action/anno/SelectInvoker.java | package app.hongs.action.anno;
import app.hongs.Cnst;
import app.hongs.HongsException;
import app.hongs.action.ActionHelper;
import app.hongs.action.ActionRunner;
import app.hongs.action.FormSet;
import app.hongs.action.SelectHelper;
import app.hongs.util.Synt;
import java.lang.annotation.Annotation;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Option completion handler.
 * <pre>
 * Meaning of the "ab" parameter flags:
 * :enum  do not execute the action; ab is reset to .enum
 * .enum  request the option data
 * _enum  add the option display texts
 * _time  add numeric timestamps
 * _link  add fully qualified links
 * </pre>
 * @author Hong
 */
public class SelectInvoker implements FilterInvoker {

    /**
     * Runs the wrapped action (unless only enum data was requested) and
     * enriches the successful response with option data according to the
     * mode flags resolved from the "ab" request parameter.
     *
     * @param helper current action context
     * @param chains action chain to execute
     * @param anno   the Select annotation carrying conf/form/mode defaults
     * @throws HongsException when loading the form configuration fails for
     *         reasons other than a missing conf/form entry
     */
    @Override
    public void invoke(ActionHelper helper, ActionRunner chains, Annotation anno)
    throws HongsException {
        Select ann = (Select) anno;
        String conf = ann.conf();
        String form = ann.form();
        byte mode = ann.mode();

        // Resolve the mode from the "ab" request terms when the annotation
        // leaves it at the default (-1).
        if (mode == -1) {
            Set ab = Synt.toTerms(helper.getRequestData().get(Cnst.AB_KEY));
            if (ab != null) {
                if (ab.contains("!enum")) {
                    mode = 0;
                } else {
                    if (ab.contains(".enum")) {
                        mode += 1;
                    }
                    if (ab.contains("_enum")) {
                        mode += 2;
                    }
                    if (ab.contains("_time")) {
                        mode += 4;
                    }
                    if (ab.contains("_link")) {
                        mode += 8;
                    }
                    // NOTE(review): mode started at -1, so the first flag
                    // above contributes one less than its nominal weight;
                    // this +1 compensates when at least one flag was set.
                    if (mode >= 0) {
                        mode += 1;
                    }
                }
            }
        }

        // Mode 0 ("!enum"): do not run the action, only fetch the enum data.
        Map rsp;
        if (mode == 0) {
            mode = 1;
            rsp = new HashMap();
        } else
        if (mode == -1) {
            // No enrichment requested: just run the action.
            chains.doAction( );
            return;
        } else {
            chains.doAction( );
            rsp = helper.getResponseData( );
            if (! Synt.declare(rsp.get("ok"), false)) {
                return;
            }
        }

        // Resolve the conf/form paths from the action chain when unset.
        if (form.length() == 0) {
            form = chains.getEntity();
        }
        if (conf.length() == 0) {
            conf = chains.getModule();
            // Honor the Module Action configuration naming convention.
            if (FormSet.hasConfFile(conf+"/"+form)) {
                conf = conf+"/"+form ;
            }
        }

        // Fill in the option data.
        try {
            Map data = (Map) helper.getAttribute("form:"+conf+"."+form);
            if (data == null) {
                data = FormSet.getInstance(conf).getForm(form);
            }
            SelectHelper sup;
            sup = new SelectHelper().addItemsByForm(conf, data);
            sup.select ( rsp, mode );
        } catch (HongsException ex) {
            // Ignore "missing conf/form" errors; anything else is re-thrown.
            int ec = ex.getErrno();
            if (ec != 0x10e8 && ec != 0x10e9 && ec != 0x10ea ) {
                throw ex;
            }
        }

        // Send back the (possibly enriched) response.
        helper.reply(rsp);
    }

}
| 更改会话参数 | hongs-core/src/main/java/app/hongs/action/anno/SelectInvoker.java | 更改会话参数 |
|
Java | mit | ff3989da1883cd0feb839f280847ee9c3d1fcc4f | 0 | opennars/opennars,opennars/opennars,opennars/opennars,opennars/opennars | /**
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opennars.entity;
import org.opennars.inference.BudgetFunctions;
import org.opennars.io.Symbols;
import org.opennars.io.Texts;
import java.io.Serializable;
import static org.opennars.inference.UtilityFunctions.*;
import org.opennars.main.Parameters;
/**
* A triple of priority (current), durability (decay), and quality (long-term average).
*/
public class BudgetValue implements Cloneable, Serializable {

    /** character that marks the two ends of a budget value */
    private static final char MARK = Symbols.BUDGET_VALUE_MARK;

    /** character that separates the factors in a budget value */
    private static final char SEPARATOR = Symbols.VALUE_SEPARATOR;

    /** relative share of time resource to be allocated */
    private float priority;

    /**
     * The percent of priority to be kept in a constant period; All priority
     * values "decay" over time, though at different rates. Each item is given a
     * "durability" factor in (0, 1) to specify the percentage of priority level
     * left after each reevaluation
     */
    private float durability;

    /** overall (context-independent) evaluation */
    private float quality;

    /** time at which this budget was last forgotten, for calculating accurate memory decay rates; -1 means never */
    private long lastForgetTime = -1;

    /** reasoner parameters supplying TRUTH_EPSILON and BUDGET_THRESHOLD */
    private Parameters narParameters;

    /**
     * Constructor that derives the quality from a truth value via
     * {@link BudgetFunctions#truthToQuality}.
     *
     * @param p Initial priority
     * @param d Initial durability
     * @param qualityFromTruth truth value the quality is derived from
     * @param narParameters reasoner parameters
     */
    public BudgetValue(final float p, final float d, final TruthValue qualityFromTruth, Parameters narParameters) {
        this(p, d, BudgetFunctions.truthToQuality(qualityFromTruth), narParameters);
    }

    /**
     * Constructor with initialization.
     * <p>
     * NOTE(review): out-of-range arguments are clamped silently here, while
     * {@link #setPriority(float)} throws for priority above 1 — confirm the
     * asymmetry is intentional.
     *
     * @param p Initial priority
     * @param d Initial durability
     * @param q Initial quality
     * @param narParameters reasoner parameters
     */
    public BudgetValue(final float p, final float d, final float q, Parameters narParameters) {
        this.narParameters = narParameters;
        priority = p;
        durability = d;
        quality = q;

        // clamp durability just below 1 so it remains a valid decay factor
        if(d>=1.0) {
            durability=(float) (1.0-narParameters.TRUTH_EPSILON);
            //throw new IllegalStateException("durability value above or equal 1");
        }
        // clamp priority to its maximum of 1
        if(p>1.0) {
            priority=1.0f;
            //throw new IllegalStateException("priority value above 1");
        }
    }

    /**
     * Cloning constructor
     * @param v Budget value to be cloned
     */
    public BudgetValue(final BudgetValue v) {
        this(v.getPriority(), v.getDurability(), v.getQuality(), v.narParameters);
    }

    /**
     * Cloning method
     * (implemented via the copy constructor; super.clone() is deliberately
     * not called)
     */
    @Override
    public BudgetValue clone() {
        return new BudgetValue(this.getPriority(), this.getDurability(), this.getQuality(), this.narParameters);
    }

    /**
     * Get priority value
     * @return The current priority
     */
    public float getPriority() {
        return priority;
    }

    /**
     * Change priority value
     * @param v The new priority; must not exceed 1
     * @throws IllegalStateException if v is greater than 1
     */
    public final void setPriority(final float v) {
        if(v>1.0f) {
            throw new IllegalStateException("Priority > 1.0: " + v);
            //v=1.0f;
        }
        priority = v;
    }

    /**
     * Increase priority value by a percentage of the remaining range
     * @param v The increasing percent
     */
    public void incPriority(final float v) {
        setPriority( (float) Math.min(1.0, or(priority, v)));
    }

    /** AND's (multiplies) priority with another value */
    public void andPriority(final float v) {
        setPriority( and(priority, v) );
    }

    /**
     * Decrease the priority by multiplying it with the given factor.
     * (Note: this is the same operation as {@link #andPriority}, not a
     * "remaining range" operation.)
     * @param v The scaling factor in [0, 1]
     */
    public void decPriority(final float v) {
        setPriority( and(priority, v) );
    }

    /**
     * Get durability value
     * @return The current durability
     */
    public float getDurability() {
        return durability;
    }

    /**
     * Change durability value (values at or above 1 are clamped to just
     * below 1)
     * @param d The new durability
     */
    public void setDurability(float d) {
        if(d>=1.0f) {
            d=1.0f-this.narParameters.TRUTH_EPSILON;
        }
        durability = d;
    }

    /**
     * Increase durability value by a percentage of the remaining range
     * @param v The increasing percent
     */
    public void incDurability(final float v) {
        float durability2 = or(durability, v);
        if(durability2>=1.0f) {
            durability2=1.0f-this.narParameters.TRUTH_EPSILON; //put into allowed range
        }
        durability=durability2;
    }

    /**
     * Decrease the durability by multiplying it with the given factor.
     * @param v The scaling factor in [0, 1]
     */
    public void decDurability(final float v) {
        durability = and(durability, v);
    }

    /**
     * Get quality value
     * @return The current quality
     */
    public float getQuality() {
        return quality;
    }

    /**
     * Change quality value
     * @param v The new quality
     */
    public void setQuality(final float v) {
        quality = v;
    }

    /**
     * Increase quality value by a percentage of the remaining range
     * @param v The increasing percent
     */
    public void incQuality(final float v) {
        quality = or(quality, v);
    }

    /**
     * Decrease the quality by multiplying it with the given factor.
     * @param v The scaling factor in [0, 1]
     */
    public void decQuality(final float v) {
        quality = and(quality, v);
    }

    /**
     * Merge one BudgetValue into another
     * (delegates to {@link BudgetFunctions#merge})
     * @param that The other Budget
     */
    public void merge(final BudgetValue that) {
        BudgetFunctions.merge(this, that);
    }

    /**
     * @param rhs compared budget value
     * @return whether this budget exceeds the other in all three components
     *         by more than BUDGET_THRESHOLD
     */
    // used to prevent a merge that would have no consequence
    public boolean greaterThan(final BudgetValue rhs) {
        return (getPriority() - rhs.getPriority() > this.narParameters.BUDGET_THRESHOLD) &&
                (getDurability()- rhs.getDurability()> this.narParameters.BUDGET_THRESHOLD) &&
                (getQuality() - rhs.getQuality() > this.narParameters.BUDGET_THRESHOLD);
    }

    /**
     * To summarize a BudgetValue into a single number in [0, 1]
     * (geometric mean of the three components)
     * @return The summary value
     */
    public float summary() {
        return aveGeo(priority, durability, quality);
    }

    /**
     * Component-wise comparison within TRUTH_EPSILON tolerance.
     * Note: this is not an equals(Object) override and does not affect
     * hashing.
     *
     * @param that the object to compare with
     * @return true when that is a BudgetValue whose three components all
     *         differ by less than TRUTH_EPSILON
     */
    public boolean equalsByPrecision(final Object that) {
        if (that instanceof BudgetValue) {
            final BudgetValue t = ((BudgetValue) that);

            final float dPrio = Math.abs(getPriority() - t.getPriority());
            if (dPrio >= this.narParameters.TRUTH_EPSILON) return false;
            final float dDura = Math.abs(getDurability() - t.getDurability());
            if (dDura >= this.narParameters.TRUTH_EPSILON) return false;
            final float dQual = Math.abs(getQuality() - t.getQuality());
            return dQual < this.narParameters.TRUTH_EPSILON;
        }
        return false;
    }

    /**
     * Whether the budget should get any processing at all
     * <p>
     * to be revised to depend on how busy the system is
     * @return The decision on whether to process the Item
     */
    public boolean aboveThreshold() {
        return (summary() >= this.narParameters.BUDGET_THRESHOLD);
    }

    /**
     * Fully display the BudgetValue
     * @return String representation of the value
     */
    @Override
    public String toString() {
        return MARK + Texts.n4(priority) + SEPARATOR + Texts.n4(durability) + SEPARATOR + Texts.n4(quality) + MARK;
    }

    /**
     * Briefly display the BudgetValue
     * @return String representation of the value with 2-digit accuracy
     */
    public String toStringExternal() {
        //return MARK + priority.toStringBrief() + SEPARATOR + durability.toStringBrief() + SEPARATOR + quality.toStringBrief() + MARK;
        final CharSequence priorityString = Texts.n2(priority);
        final CharSequence durabilityString = Texts.n2(durability);
        final CharSequence qualityString = Texts.n2(quality);
        return new StringBuilder(1 + priorityString.length() + 1 + durabilityString.length() + 1 + qualityString.length() + 1)
                .append(MARK)
                .append(priorityString).append(SEPARATOR)
                .append(durabilityString).append(SEPARATOR)
                .append(qualityString)
                .append(MARK)
                .toString();
    }

    /**
     * computes the period and sets the current time to the period
     *
     * @param currentTime the current time
     * @return period in time: currentTime - lastForgetTime
     */
    // TODO< split this into two methods >
    public long setLastForgetTime(final long currentTime) {
        final long period;
        if (this.lastForgetTime == -1)
            period = 0;
        else
            period = currentTime - lastForgetTime;

        lastForgetTime = currentTime;
        return period;
    }

    /** @return the time this budget was last forgotten, or -1 if never */
    public long getLastForgetTime() {
        return lastForgetTime;
    }
}
| src/main/java/org/opennars/entity/BudgetValue.java | /**
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opennars.entity;
import org.opennars.inference.BudgetFunctions;
import org.opennars.io.Symbols;
import org.opennars.io.Texts;
import java.io.Serializable;
import static org.opennars.inference.UtilityFunctions.*;
import org.opennars.main.Parameters;
/**
* A triple of priority (current), durability (decay), and quality (long-term average).
*/
public class BudgetValue implements Cloneable, Serializable {

    /** The character that marks the two ends of a budget value */
    private static final char MARK = Symbols.BUDGET_VALUE_MARK;

    /** The character that separates the factors in a budget value */
    private static final char SEPARATOR = Symbols.VALUE_SEPARATOR;

    /** The relative share of time resource to be allocated */
    private float priority;

    /**
     * The percent of priority to be kept in a constant period; All priority
     * values "decay" over time, though at different rates. Each item is given a
     * "durability" factor in (0, 1) to specify the percentage of priority level
     * left after each reevaluation
     */
    private float durability;

    /** The overall (context-independent) evaluation */
    private float quality;

    /** time at which this budget was last forgotten, for calculating accurate memory decay rates; -1 means never */
    private long lastForgetTime = -1;

    /** reasoner parameters supplying TRUTH_EPSILON and BUDGET_THRESHOLD */
    private Parameters narParameters;

    /**
     * Constructor that derives the quality from a truth value via
     * {@link BudgetFunctions#truthToQuality}.
     *
     * @param p Initial priority
     * @param d Initial durability
     * @param qualityFromTruth truth value the quality is derived from
     * @param narParameters reasoner parameters
     */
    public BudgetValue(final float p, final float d, final TruthValue qualityFromTruth, Parameters narParameters) {
        this(p, d, BudgetFunctions.truthToQuality(qualityFromTruth), narParameters);
    }

    /**
     * Constructor with initialization.
     * <p>
     * NOTE(review): out-of-range arguments are clamped silently here, while
     * {@link #setPriority(float)} throws for priority above 1 — confirm the
     * asymmetry is intentional.
     *
     * @param p Initial priority
     * @param d Initial durability
     * @param q Initial quality
     * @param narParameters reasoner parameters
     */
    public BudgetValue(final float p, final float d, final float q, Parameters narParameters) {
        this.narParameters = narParameters;
        priority = p;
        durability = d;
        quality = q;

        // clamp durability just below 1 so it remains a valid decay factor
        if(d>=1.0) {
            durability=(float) (1.0-narParameters.TRUTH_EPSILON);
            //throw new IllegalStateException("durability value above or equal 1");
        }
        // clamp priority to its maximum of 1
        if(p>1.0) {
            priority=1.0f;
            //throw new IllegalStateException("priority value above 1");
        }
    }

    /**
     * Cloning constructor
     * @param v Budget value to be cloned
     */
    public BudgetValue(final BudgetValue v) {
        this(v.getPriority(), v.getDurability(), v.getQuality(), v.narParameters);
    }

    /**
     * Cloning method
     * (implemented via the copy constructor; super.clone() is deliberately
     * not called)
     */
    @Override
    public BudgetValue clone() {
        return new BudgetValue(this.getPriority(), this.getDurability(), this.getQuality(), this.narParameters);
    }

    /**
     * Get priority value
     * @return The current priority
     */
    public float getPriority() {
        return priority;
    }

    /**
     * Change priority value
     * @param v The new priority; must not exceed 1
     * @throws IllegalStateException if v is greater than 1
     */
    public final void setPriority(final float v) {
        if(v>1.0f) {
            throw new IllegalStateException("Priority > 1.0: " + v);
            //v=1.0f;
        }
        priority = v;
    }

    /**
     * Increase priority value by a percentage of the remaining range
     * @param v The increasing percent
     */
    public void incPriority(final float v) {
        setPriority( (float) Math.min(1.0, or(priority, v)));
    }

    /** AND's (multiplies) priority with another value */
    public void andPriority(final float v) {
        setPriority( and(priority, v) );
    }

    /**
     * Decrease the priority by multiplying it with the given factor.
     * (Note: this is the same operation as {@link #andPriority}, not a
     * "remaining range" operation.)
     * @param v The scaling factor in [0, 1]
     */
    public void decPriority(final float v) {
        setPriority( and(priority, v) );
    }

    /**
     * Get durability value
     * @return The current durability
     */
    public float getDurability() {
        return durability;
    }

    /**
     * Change durability value (values at or above 1 are clamped to just
     * below 1)
     * @param d The new durability
     */
    public void setDurability(float d) {
        if(d>=1.0f) {
            d=1.0f-this.narParameters.TRUTH_EPSILON;
        }
        durability = d;
    }

    /**
     * Increase durability value by a percentage of the remaining range
     * @param v The increasing percent
     */
    public void incDurability(final float v) {
        float durability2 = or(durability, v);
        if(durability2>=1.0f) {
            durability2=1.0f-this.narParameters.TRUTH_EPSILON; //put into allowed range
        }
        durability=durability2;
    }

    /**
     * Decrease the durability by multiplying it with the given factor.
     * @param v The scaling factor in [0, 1]
     */
    public void decDurability(final float v) {
        durability = and(durability, v);
    }

    /**
     * Get quality value
     * @return The current quality
     */
    public float getQuality() {
        return quality;
    }

    /**
     * Change quality value
     * @param v The new quality
     */
    public void setQuality(final float v) {
        quality = v;
    }

    /**
     * Increase quality value by a percentage of the remaining range
     * @param v The increasing percent
     */
    public void incQuality(final float v) {
        quality = or(quality, v);
    }

    /**
     * Decrease the quality by multiplying it with the given factor.
     * @param v The scaling factor in [0, 1]
     */
    public void decQuality(final float v) {
        quality = and(quality, v);
    }

    /**
     * Merge one BudgetValue into another
     * (delegates to {@link BudgetFunctions#merge})
     * @param that The other Budget
     */
    public void merge(final BudgetValue that) {
        BudgetFunctions.merge(this, that);
    }

    /**
     * @param rhs compared budget value
     * @return whether this budget exceeds the other in all three components
     *         by more than BUDGET_THRESHOLD
     */
    // used to prevent a merge that would have no consequence
    public boolean greaterThan(final BudgetValue rhs) {
        return (getPriority() - rhs.getPriority() > this.narParameters.BUDGET_THRESHOLD) &&
                (getDurability()- rhs.getDurability()> this.narParameters.BUDGET_THRESHOLD) &&
                (getQuality() - rhs.getQuality() > this.narParameters.BUDGET_THRESHOLD);
    }

    /**
     * To summarize a BudgetValue into a single number in [0, 1]
     * (geometric mean of the three components)
     * @return The summary value
     */
    public float summary() {
        return aveGeo(priority, durability, quality);
    }

    /**
     * Component-wise comparison within TRUTH_EPSILON tolerance.
     * Note: this is not an equals(Object) override and does not affect
     * hashing.
     *
     * @param that the object to compare with
     * @return true when that is a BudgetValue whose three components all
     *         differ by less than TRUTH_EPSILON
     */
    public boolean equalsByPrecision(final Object that) {
        if (that instanceof BudgetValue) {
            final BudgetValue t = ((BudgetValue) that);

            final float dPrio = Math.abs(getPriority() - t.getPriority());
            if (dPrio >= this.narParameters.TRUTH_EPSILON) return false;
            final float dDura = Math.abs(getDurability() - t.getDurability());
            if (dDura >= this.narParameters.TRUTH_EPSILON) return false;
            final float dQual = Math.abs(getQuality() - t.getQuality());
            return dQual < this.narParameters.TRUTH_EPSILON;
        }
        return false;
    }

    /**
     * Whether the budget should get any processing at all
     * <p>
     * to be revised to depend on how busy the system is
     * @return The decision on whether to process the Item
     */
    public boolean aboveThreshold() {
        return (summary() >= this.narParameters.BUDGET_THRESHOLD);
    }

    /**
     * Fully display the BudgetValue
     * @return String representation of the value
     */
    @Override
    public String toString() {
        return MARK + Texts.n4(priority) + SEPARATOR + Texts.n4(durability) + SEPARATOR + Texts.n4(quality) + MARK;
    }

    /**
     * Briefly display the BudgetValue
     * @return String representation of the value with 2-digit accuracy
     */
    public String toStringExternal() {
        //return MARK + priority.toStringBrief() + SEPARATOR + durability.toStringBrief() + SEPARATOR + quality.toStringBrief() + MARK;
        final CharSequence priorityString = Texts.n2(priority);
        final CharSequence durabilityString = Texts.n2(durability);
        final CharSequence qualityString = Texts.n2(quality);
        return new StringBuilder(1 + priorityString.length() + 1 + durabilityString.length() + 1 + qualityString.length() + 1)
                .append(MARK)
                .append(priorityString).append(SEPARATOR)
                .append(durabilityString).append(SEPARATOR)
                .append(qualityString)
                .append(MARK)
                .toString();
    }

    /**
     * computes the period and sets the current time to the period
     *
     * @param currentTime the current time
     * @return period in time: currentTime - lastForgetTime
     */
    // TODO< split this into two methods >
    public long setLastForgetTime(final long currentTime) {
        final long period;
        if (this.lastForgetTime == -1)
            period = 0;
        else
            period = currentTime - lastForgetTime;

        lastForgetTime = currentTime;
        return period;
    }

    /** @return the time this budget was last forgotten, or -1 if never */
    public long getLastForgetTime() {
        return lastForgetTime;
    }
}
| Docs: removed 'the'
| src/main/java/org/opennars/entity/BudgetValue.java | Docs: removed 'the' |
|
Java | agpl-3.0 | 600bea970d7058348c141f918f669f253ec842b7 | 0 | agentcontest/massim,agentcontest/massim,agentcontest/massim,agentcontest/massim | package massim.scenario.city;
import massim.config.TeamConfig;
import massim.protocol.messagecontent.Action;
import massim.protocol.messagecontent.RequestAction;
import massim.protocol.messagecontent.SimStart;
import massim.protocol.scenario.city.data.ActionData;
import massim.protocol.scenario.city.data.JobData;
import massim.protocol.scenario.city.percept.CityInitialPercept;
import massim.protocol.scenario.city.percept.CityStepPercept;
import massim.scenario.city.data.*;
import massim.scenario.city.data.facilities.*;
import massim.util.IOUtil;
import massim.util.Log;
import massim.util.RNG;
import org.json.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
/**
* (Integration) Testing (important aspects of) the City scenario.
*/
public class CitySimulationTest {
/**
* The shared simulation object (since creating a new one for each test is kind of expensive)
*/
private static CitySimulation sim;
// fixed RNG seed so test runs are reproducible
private static int seed = 17;
// number of agents created per team in setup()
private static int agentsPerTeam = 30;
// total number of steps the match is initialized with
private static int steps = 10000;
// current step counter; shared by all tests and advanced after each one
private static int step = 0;
/**
* Sets up a blank initialized simulation that can be used for all tests.
* Builds two teams (A and B) with {@code agentsPerTeam} agents each and
* verifies that the initial percept sent to an agent is well-formed.
*/
@BeforeClass
public static void setup() throws IOException {
RNG.initialize(seed);
sim = new CitySimulation();
// create config
JSONObject matchConf = IOUtil.readJSONObject("conf/QuickTest.json").getJSONArray("match").getJSONObject(0);
// setup teams
Set<TeamConfig> teams = new HashSet<>(Arrays.asList(new TeamConfig("A"), new TeamConfig("B")));
for(int i = 1; i <= agentsPerTeam; i++){
for (TeamConfig team : teams) {
// agent names follow the pattern "agent" + team + index, e.g. "agentA1"
team.addAgent("agent" + team.getName() + i, "1");
}
}
Map<String, SimStart> initialPercepts = sim.init(steps, matchConf, teams);
SimStart percept = initialPercepts.get("agentA1");
// Log.log(Log.Level.NORMAL, Conversions.docToString(new Message(System.currentTimeMillis(), percept).toXML(), true));
assert percept instanceof CityInitialPercept;
CityInitialPercept initialPercept = (CityInitialPercept) percept;
// the initial percept must carry item data, a non-empty map name and role data
assert initialPercept.getItemData().size() > 0;
assert initialPercept.getMapName() != null;
assert !initialPercept.getMapName().equals("");
assert initialPercept.getRoleData() != null;
}
/**
* Logs the current (shared) step number before each test, to make
* interleaved test output easier to follow.
*/
@Before
public void logInfo(){
Log.log(Log.Level.NORMAL, "Current step: " + step);
}
/**
* Make sure each test method uses a new step number.
* (Tests that step the simulation multiple times advance {@code step} themselves.)
*/
@After
public void incrementStep(){
step++;
}
/**
* An executed action must show up (type, parameters and result) in the
* agent's self-data percept of the following step.
*/
@Test
public void actionIsPerceived() {
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("give", "agentA2", "item0", "1"));
sim.preStep(step);
sim.step(step++, actions);
Map<String, RequestAction> percepts = sim.preStep(step);
ActionData action = getPercept("agentA1", percepts).getSelfData().getLastAction();
assert(action.getType().equals("give"));
assert(action.getParams().get(0).equals("agentA2"));
assert(action.getParams().get(1).equals("item0"));
assert(action.getParams().get(2).equals("1"));
// the give fails since agentA2 did not perform a matching receive
assert(action.getResult().equals("failed_counterpart"));
sim.step(step, buildActionMap());
}
/**
* Checks whether most things are included in the percept as expected.
*/
@Test
public void perceptIsComplete(){
// add some perceivable jobs
Storage storage = (Storage) sim.getWorldState().getFacility("storage1");
TeamState teamA = sim.getWorldState().getTeam("A");
Item item = sim.getWorldState().getItemByName("item0");
ItemBox items = new ItemBox();
items.store(item, 3);
Mission mission = new Mission(1000, storage, step + 1, step + 100, 1000, items, teamA, "myMission");
sim.getWorldState().addJob(mission);
AuctionJob auction = new AuctionJob(1001, storage, step + 1, step + 100, items, 2, 10002);
sim.getWorldState().addJob(auction);
Job job = new Job(777, storage, step + 1, step + 100, items, JobData.POSTER_SYSTEM);
sim.getWorldState().addJob(job);
// store something
storage.store(item, 2, "A");
// move agent to resource node
Entity e1 = sim.getWorldState().getEntity("agentA1");
ResourceNode node = (ResourceNode) sim.getWorldState().getFacility("node1");
e1.setLocation(node.getLocation());
//move another entity in perception range
Entity e2 = sim.getWorldState().getEntity("agentA2");
e2.setLocation(e1.getLocation());
// give an item to the agent
e1.addItem(item, 1);
e1.addItem(sim.getWorldState().getItemByName("item1"), 1);
// one step for activating jobs
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
sim.step(step, actions);
step++;
sim.preStep(step);
// let the agent execute an action
actions = buildActionMap();
actions.put("agentA1", new Action("goto", "shop1"));
sim.step(step, actions);
step++;
// one step for getting the final percept(s)
Map<String, RequestAction> percepts = sim.preStep(step);
CityStepPercept percept = (CityStepPercept) percepts.get("agentA1");
// uncomment to print example request-action message
// Log.log(Log.Level.NORMAL, Conversions.docToString(new Message(System.currentTimeMillis(), percept).toXML(), true));
// check if percept contains the important things
assert percept.getShopData().size() > 0;
assert percept.getWorkshops().size() > 0;
assert percept.getDumps().size() > 0;
assert percept.getChargingStations().size() > 0;
assert percept.getStorage().size() > 0;
assert percept.getAuctions().size() > 0;
assert percept.getJobs().size() > 0;
assert percept.getMissions().size() > 0;
assert percept.getEntityData().size() > 0;
assert percept.getSelfData() != null;
assert percept.getSimData() != null;
assert percept.getSelfData().getCharge() == e1.getCurrentBattery();
assert percept.getSelfData().getItems().size() > 0;
sim.step(step, buildActionMap());
}
/**
* The goto action must accept coordinates as well as facility names,
* and fail for an unknown facility name.
*/
@Test
public void gotoWorks(){
// determine a shop as goto target
Shop shop = sim.getWorldState().getShops().iterator().next();
// let all agents go somewhere
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
// goto by lat/lon coordinates
actions.put("agentA1", new Action("goto",
String.valueOf(shop.getLocation().getLat()), String.valueOf(shop.getLocation().getLon())));
// goto with a facility name that does not exist
actions.put("agentA2", new Action("goto", "resourceNode1"));
// goto by facility name
actions.put("agentA3", new Action("goto", shop.getName()));
sim.step(step, actions);
// check results and new locations
assert(sim.getWorldState().getEntity("agentA1").getLocation().equals(shop.getLocation()));
assert(sim.getWorldState().getEntity("agentA2").getLastActionResult().equals("failed_unknown_facility"));
assert(sim.getWorldState().getEntity("agentA3").getLocation().equals(shop.getLocation()));
}
/**
* A matching give/receive pair in the same step must transfer the item.
*/
@Test
public void giveReceiveWorks(){
// move one agent to another and give her some item
Entity e4 = sim.getWorldState().getEntity("agentA4");
Entity e5 = sim.getWorldState().getEntity("agentA5");
Item item = sim.getWorldState().getItems().get(0);
e4.setLocation(e5.getLocation());
e4.clearInventory();
e5.clearInventory();
e4.addItem(item, 1);
// assert preconditions
assert(e4.getItemCount(item) == 1);
assert(e5.getItemCount(item) == 0);
// give and receive some items
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA4", new Action("give", "agentA5", item.getName(), "1"));
actions.put("agentA5", new Action("receive"));
sim.step(step, actions);
// the item must have changed hands
assert(e4.getItemCount(item) == 0);
assert(e5.getItemCount(item) == 1);
}
/**
* Tests store/retrieve at a storage facility, including the failure cases
* of retrieving more than stored and storing beyond the storage capacity.
*/
@Test
public void storeRetrieveWorks(){
WorldState world = sim.getWorldState();
Entity e2 = world.getEntity("agentA2");
Item item = world.getItems().get(0);
Storage storage = world.getStorages().iterator().next();
e2.setLocation(storage.getLocation());
e2.clearInventory();
e2.addItem(item, 2);
// drain any leftovers from previous tests
storage.removeStored(item, 100, "A");
// preconditions
assert(storage.getStored(item, "A") == 0);
assert(e2.getItemCount(item) == 2);
// store something
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA2", new Action("store", item.getName(), "1"));
sim.step(step, actions);
assert(storage.getStored(item, "A") == 1);
assert(e2.getItemCount(item) == 1);
step++;
// retrieve it
sim.preStep(step);
actions = buildActionMap();
actions.put("agentA2", new Action("retrieve", item.getName(), "1"));
sim.step(step, actions);
assert(storage.getStored(item, "A") == 0);
assert(e2.getItemCount(item) == 2);
step++;
// retrieve too much
sim.preStep(step);
sim.step(step, actions); // actions can be reused
assert(storage.getStored(item, "A") == 0);
assert(e2.getItemCount(item) == 2);
assert(e2.getLastActionResult().equals("failed_item_amount"));
step++;
// store too much
int fill = storage.getCapacity() / item.getVolume();
storage.store(item, fill, "A");
e2.addItem(item, 1);
int carrying = e2.getItemCount(item);
sim.preStep(step);
actions = buildActionMap();
actions.put("agentA2", new Action("store", item.getName(), "1"));
sim.step(step, actions);
// storage full: nothing stored, inventory unchanged
assert(storage.getStored(item, "A") == fill);
assert(e2.getItemCount(item) == carrying);
assert(e2.getLastActionResult().equals("failed_capacity"));
}
/**
* Assembly of an item requiring multiple materials and roles must succeed
* when the parts are spread over the assembler and its assistants, and must
* consume all required items.
*/
@Test
public void assembleWorks(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
Entity e2 = world.getEntity("agentA2");
Entity e3 = world.getEntity("agentA3");
Entity e4 = world.getEntity("agentA20");
Workshop workshop = world.getWorkshops().iterator().next();
Optional<Item> optItem = world.getItems().stream() // find item that needs roles and materials
.filter(item -> item.getRequiredItems().size() > 1 && item.getRequiredRoles().size() > 0)
.findAny();
assert optItem.isPresent();
Item item = optItem.get();
e1.clearInventory();
e1.setLocation(workshop.getLocation());
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("assemble", item.getName()));
// all other team-A agents assist agentA1 at the workshop
for (int i = 2; i < 30; i++){
String agName = "agentA" + i;
Entity ent = world.getEntity(agName);
ent.clearInventory();
ent.setLocation(workshop.getLocation());
actions.put(agName, new Action("assist_assemble", "agentA1"));
}
// distribute the required items round-robin over three assistants
Entity[] assistants = new Entity[]{e2, e3, e4};
List<Item> requiredItems = new ArrayList<>(item.getRequiredItems());
e1.addItem(requiredItems.get(0), 1);
for(int i = 1; i < requiredItems.size(); i++)
assistants[i%assistants.length].addItem(requiredItems.get(i), 1);
// check assembly
sim.preStep(step);
sim.step(step, actions);
assert e1.getLastActionResult().equals("successful");
assert e2.getLastActionResult().equals("successful");
assert e1.getItemCount(item) == 1;
// all required materials must have been consumed
item.getRequiredItems().forEach(req -> {
assert e1.getItemCount(req) == 0;
assert e2.getItemCount(req) == 0;
});
}
/**
* Dumping items at a dump facility must remove exactly the given amount
* from the agent's inventory.
*/
@Test
public void dumpWorks(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
Dump dump = world.getDumps().iterator().next();
Item item = world.getItems().get(0);
e1.clearInventory();
e1.addItem(item, 7);
e1.setLocation(dump.getLocation());
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("dump", item.getName(), "4"));
sim.step(step, actions);
// 7 - 4 = 3 items remain
assert(e1.getItemCount(item) == 3);
}
/**
* Charging at a charging station must raise the battery by the station's
* rate (capped at the role's maximum battery).
*/
@Test
public void chargeWorks(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
ChargingStation station = world.getChargingStations().iterator().next();
e1.discharge();
e1.setLocation(station.getLocation());
assert e1.getCurrentBattery() == 0;
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("charge"));
sim.step(step, actions);
assert e1.getCurrentBattery() == Math.min(e1.getRole().getMaxBattery(), station.getRate());
}
/**
* Recharging (solar, away from a charging station) must restore some
* battery, though the exact amount is not deterministic.
*/
@Test
public void rechargeWorks(){
WorldState world = sim.getWorldState();
Entity e2 = world.getEntity("agentA2");
Shop shop = world.getShops().iterator().next();
e2.setLocation(shop.getLocation()); // make sure the agent is not in a charging station
e2.discharge();
assert e2.getCurrentBattery() == 0;
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA2", new Action("recharge"));
sim.step(step, actions);
assert e2.getCurrentBattery() > 0; // actual amount is unpredictable
}
/**
* Gathering at a resource node must yield the node's resource within a few
* attempts (gathering success is probabilistic, so 10 steps are allowed).
*/
@Test
public void gatherWorks(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
ResourceNode node = world.getResourceNodes().iterator().next();
Item item = node.getResource();
e1.clearInventory();
e1.setLocation(node.getLocation());
assert e1.getItemCount(item) == 0;
// check if the agent gathers at least once in 10 steps
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("gather"));
for(int i = 0; i < 10; i++){
sim.preStep(step);
sim.step(step, actions);
step++;
}
assert e1.getItemCount(item) > 0;
}
/**
* Tests the regular job life cycle: partial delivery by both teams, job
* completion (reward paid to the completing team), and retrieval of the
* losing team's already-delivered items.
*/
@Test
public void jobActionsWork(){
Map<String, Action> actions = buildActionMap();
WorldState world = sim.getWorldState();
Storage storage = world.getStorages().iterator().next();
Entity eA = world.getEntity("agentA1");
Entity eB = world.getEntity("agentB1");
Item item = world.getItems().get(0);
// remember team A's massium to verify the reward later
// (team B never completes the job, so its massium is not checked)
long moneyA = world.getTeam("A").getMassium();
int reward = 77777;
ItemBox requirements = new ItemBox();
requirements.store(item, 5);
eA.clearInventory();
eB.clearInventory();
eA.setLocation(storage.getLocation());
eB.setLocation(storage.getLocation());
eA.addItem(item, 3);
eB.addItem(item, 1);
// clear any deliveries left over from other tests
storage.removeDelivered(item, 10000, "A");
storage.removeDelivered(item, 10000, "B");
Job job = new Job(reward, storage, step + 1, step + 4, requirements, JobData.POSTER_SYSTEM);
world.addJob(job);
// activate job
sim.preStep(step);
sim.step(step++, actions);
// check delivering (partial)
actions = buildActionMap();
actions.put("agentA1", new Action("deliver_job", job.getName()));
actions.put("agentB1", new Action("deliver_job", job.getName()));
sim.preStep(step);
sim.step(step++, actions);
assert eA.getItemCount(item) == 0;
assert eB.getItemCount(item) == 0;
assert eA.getLastActionResult().equals("successful_partial");
assert eB.getLastActionResult().equals("successful_partial");
// check completion: A delivers 3 more, only 2 of which are needed
eA.addItem(item, 3);
sim.preStep(step);
sim.step(step, actions);
assert eA.getItemCount(item) == 1;
assert eA.getLastActionResult().equals("successful");
assert world.getTeam("A").getMassium() == moneyA + reward;
assert storage.getDelivered(item, "A") == 0;
assert storage.getDelivered(item, "B") == 1;
step++;
// retrieve delivery (team B gets its partial delivery back)
actions = buildActionMap();
actions.put("agentB1", new Action("retrieve_delivered", item.getName(), "1"));
sim.preStep(step);
sim.step(step, actions);
assert eB.getItemCount(item) == 1;
}
/**
* Tests auction jobs: bidding (including a rejected bid), completing a won
* auction, and fine/reward bookkeeping after the auctions end.
*/
@Test
public void bidWorks(){
WorldState world = sim.getWorldState();
Storage storage = world.getStorages().iterator().next();
Item item = world.getItems().get(0);
ItemBox itemsRequired = new ItemBox();
itemsRequired.store(item, 1);
AuctionJob auction = new AuctionJob(999, storage, step + 1, step + 4, itemsRequired, 2, 888);
AuctionJob auction2 = new AuctionJob(999, storage, step + 1, step + 4, itemsRequired, 2, 888);
world.addJob(auction);
world.addJob(auction2);
long moneyA = world.getTeam("A").getMassium();
long moneyB = world.getTeam("B").getMassium();
Entity eB = world.getEntity("agentB1");
Map<String, Action> actions = buildActionMap();
sim.preStep(step);
sim.step(step, actions); // let auctions get names and be registered
step++;
actions.put("agentA1", new Action("bid_for_job", auction.getName(), "1000"));
actions.put("agentB1", new Action("bid_for_job", auction2.getName(), "998"));
sim.preStep(step);
sim.step(step, actions);
// NOTE(review): A's bid of 1000 is not registered — presumably because it
// exceeds the auction's max reward of 999; B's 998 is accepted
assert auction.getLowestBid() == null;
assert auction2.getLowestBid() == 998;
step++;
actions = buildActionMap();
actions.put("agentA1", new Action("bid_for_job", auction.getName(), "778"));
sim.preStep(step);
sim.step(step, actions);
assert auction.getLowestBid() == 778;
step++;
// complete auction for team B
eB.addItem(item, 1);
eB.setLocation(storage.getLocation());
actions = buildActionMap();
actions.put("agentB1", new Action("deliver_job", auction2.getName()));
sim.preStep(step);
sim.step(step, actions);
assert eB.getLastActionResult().equals("successful");
step++;
// check if team A paid the fine and B got the reward
sim.preStep(step);
sim.step(step, buildActionMap());
assert world.getTeam("A").getMassium() == moneyA - auction.getFine();
assert world.getTeam("B").getMassium() == moneyB + auction2.getLowestBid();
}
// TODO test facility creation/generation
/** The generated world must contain at least one charging station. */
@Test
public void chargingStationsWork(){
//there is at least one charging station in the simulation
assert !sim.getWorldState().getChargingStations().isEmpty();
}
/**
* Trading at a shop: only assembled items can be traded away, resources
* stay in the inventory.
*/
@Test
public void shopsWork(){
WorldState world = sim.getWorldState();
Item resource = world.getResources().get(0);
Item assembledItem = world.getAssembledItems().get(0);
Shop shop = world.getShops().get(0);
Entity agentA1 = world.getEntity("agentA1");
Entity agentB1 = world.getEntity("agentB1");
agentA1.setLocation(shop.getLocation());
agentB1.setLocation(shop.getLocation());
agentA1.clearInventory();
agentB1.clearInventory();
agentA1.addItem(resource, 3);
agentB1.addItem(assembledItem, 3);
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("trade", resource.getName(), "2"));
actions.put("agentB1", new Action("trade", assembledItem.getName(), "2"));
sim.preStep(step);
sim.step(step++, actions);
// trading the raw resource has no effect; the assembled item is sold
assert agentA1.getItemCount(resource) == 3;
assert agentB1.getItemCount(assembledItem) == 1;
}
/** The generated world must contain at least one dump. */
@Test
public void dumpsWork(){
//there is at least one dump in the simulation
assert !sim.getWorldState().getDumps().isEmpty();
}
/** The generated world must contain at least one workshop. */
@Test
public void workshopsWork(){
//there is at least one workshop in the simulation
assert !sim.getWorldState().getWorkshops().isEmpty();
}
/** The generated world must contain at least one storage facility. */
@Test
public void storageWork(){
//there is at least one storage in the simulation
assert !sim.getWorldState().getStorages().isEmpty();
}
/** Placeholder for resource-node generation checks (not yet implemented). */
@Test
public void resourceNodesWork(){
// TODO
}
// @Test
// public void jobsWork(){
// WorldState world = sim.getWorldState();
// Set<Job> jobs = new HashSet<>();
// for(int i=0; i<20; i++){
// jobs = world.getGenerator().generateJobs(i,world);
// if(!jobs.isEmpty()){
// break;
// }
// }
// if(!jobs.isEmpty()){
// Job job = jobs.iterator().next();
// assert !job.getRequiredItems().getStoredTypes().isEmpty();
// assert job.getReward()>0;
// }
// }
/**
* An agent placed at an unroutable location fails its first goto with
* FAILED_NO_ROUTE, after which the simulation is expected to relocate
* ("rescue") it so that a second goto attempt can find a route.
*/
@Test
public void stuckAgentsAreRescued(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
// a location from which no route exists
e1.setLocation(new Location(2.34953, 48.86091));
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("goto", "shop3"));
sim.step(step, actions);
assert e1.getLastActionResult().equals(ActionExecutor.FAILED_NO_ROUTE);
// NOTE(review): the same step number is deliberately(?) reused for the
// retry here — confirm this is intended rather than a missing step++
sim.preStep(step);
actions.put("agentA1", new Action("goto", "shop3"));
sim.step(step, actions);
assert !e1.getLastActionResult().equals(ActionExecutor.FAILED_NO_ROUTE);
}
/**
* Builds a full action map covering every agent in the world, so that no
* agent is left without an action when stepping the simulation.
* @return a new action-map where each agent just skips
*/
private static Map<String, Action> buildActionMap(){
return sim.getWorldState().getAgents().stream()
.collect(Collectors.toMap(ag -> ag, ag -> new Action("skip")));
}
/**
* Checks if a request action contains the correct percept and returns it.
* @param agent name of an agent
* @param messages all req act messages received in preStep
* @return the req act message of the agent cast to the correct percept
*/
private static CityStepPercept getPercept(String agent, Map<String, RequestAction> messages){
RequestAction reqAct = messages.get(agent);
// instanceof is false for null, so no separate null check is needed
assert(reqAct instanceof CityStepPercept);
return (CityStepPercept) reqAct;
}
} | server/src/test/java/massim/scenario/city/CitySimulationTest.java | package massim.scenario.city;
import massim.config.TeamConfig;
import massim.protocol.messagecontent.Action;
import massim.protocol.messagecontent.RequestAction;
import massim.protocol.messagecontent.SimStart;
import massim.protocol.scenario.city.data.ActionData;
import massim.protocol.scenario.city.data.JobData;
import massim.protocol.scenario.city.percept.CityInitialPercept;
import massim.protocol.scenario.city.percept.CityStepPercept;
import massim.scenario.city.data.*;
import massim.scenario.city.data.facilities.*;
import massim.util.IOUtil;
import massim.util.Log;
import massim.util.RNG;
import org.json.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
/**
* (Integration) Testing (important aspects of) the City scenario.
*/
public class CitySimulationTest {
/**
* The shared simulation object (since creating a new one for each test is kind of expensive)
*/
private static CitySimulation sim;
private static int seed = 17;
private static int agentsPerTeam = 30;
private static int steps = 10000;
private static int step = 0;
/**
* Sets up a blank initialized simulation that can be used for all tests.
*/
@BeforeClass
public static void setup() throws IOException {
RNG.initialize(seed);
sim = new CitySimulation();
// create config
JSONObject matchConf = IOUtil.readJSONObject("conf/QuickTest.json").getJSONArray("match").getJSONObject(0);
// setup teams
Set<TeamConfig> teams = new HashSet<>(Arrays.asList(new TeamConfig("A"), new TeamConfig("B")));
for(int i = 1; i <= agentsPerTeam; i++){
for (TeamConfig team : teams) {
team.addAgent("agent" + team.getName() + i, "1");
}
}
Map<String, SimStart> initialPercepts = sim.init(steps, matchConf, teams);
SimStart percept = initialPercepts.get("agentA1");
// Log.log(Log.Level.NORMAL, Conversions.docToString(new Message(System.currentTimeMillis(), percept).toXML(), true));
assert percept instanceof CityInitialPercept;
CityInitialPercept initialPercept = (CityInitialPercept) percept;
assert initialPercept.getItemData().size() > 0;
assert initialPercept.getMapName() != null;
assert !initialPercept.getMapName().equals("");
assert initialPercept.getRoleData() != null;
}
/**
* Logs info before each test
*/
@Before
public void logInfo(){
Log.log(Log.Level.NORMAL, "Current step: " + step);
}
/**
* Make sure each test method uses a new step number.
*/
@After
public void incrementStep(){
step++;
}
@Test
public void actionIsPerceived() {
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("give", "agentA2", "item0", "1"));
sim.preStep(step);
sim.step(step++, actions);
Map<String, RequestAction> percepts = sim.preStep(step);
ActionData action = getPercept("agentA1", percepts).getSelfData().getLastAction();
assert(action.getType().equals("give"));
assert(action.getParams().get(0).equals("agentA2"));
assert(action.getParams().get(1).equals("item0"));
assert(action.getParams().get(2).equals("1"));
assert(action.getResult().equals("failed_counterpart"));
sim.step(step, buildActionMap());
}
/**
* Checks whether most things are included in the percept as expected.
*/
@Test
public void perceptIsComplete(){
// add some perceivable jobs
Storage storage = (Storage) sim.getWorldState().getFacility("storage1");
TeamState teamA = sim.getWorldState().getTeam("A");
Item item = sim.getWorldState().getItemByName("item0");
ItemBox items = new ItemBox();
items.store(item, 3);
Mission mission = new Mission(1000, storage, step + 1, step + 100, 1000, items, teamA, "myMission");
sim.getWorldState().addJob(mission);
AuctionJob auction = new AuctionJob(1001, storage, step + 1, step + 100, items, 2, 10002);
sim.getWorldState().addJob(auction);
Job job = new Job(777, storage, step + 1, step + 100, items, JobData.POSTER_SYSTEM);
sim.getWorldState().addJob(job);
// store something
storage.store(item, 2, "A");
// move agent to resource node
Entity e1 = sim.getWorldState().getEntity("agentA1");
ResourceNode node = (ResourceNode) sim.getWorldState().getFacility("node1");
e1.setLocation(node.getLocation());
//move another entity in perception range
Entity e2 = sim.getWorldState().getEntity("agentA2");
e2.setLocation(e1.getLocation());
// give an item to the agent
e1.addItem(item, 1);
e1.addItem(sim.getWorldState().getItemByName("item1"), 1);
// one step for activating jobs
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
sim.step(step, actions);
step++;
sim.preStep(step);
// let the agent execute an action
actions = buildActionMap();
actions.put("agentA1", new Action("goto", "shop1"));
sim.step(step, actions);
step++;
// one step for getting the final percept(s)
Map<String, RequestAction> percepts = sim.preStep(step);
CityStepPercept percept = (CityStepPercept) percepts.get("agentA1");
// uncomment to print example request-action message
// Log.log(Log.Level.NORMAL, Conversions.docToString(new Message(System.currentTimeMillis(), percept).toXML(), true));
// check if percept contains the important things
assert percept.getShopData().size() > 0;
assert percept.getWorkshops().size() > 0;
assert percept.getDumps().size() > 0;
assert percept.getChargingStations().size() > 0;
assert percept.getStorage().size() > 0;
assert percept.getAuctions().size() > 0;
assert percept.getJobs().size() > 0;
assert percept.getMissions().size() > 0;
assert percept.getEntityData().size() > 0;
assert percept.getSelfData() != null;
assert percept.getSimData() != null;
assert percept.getSelfData().getCharge() == e1.getCurrentBattery();
assert percept.getSelfData().getItems().size() > 0;
sim.step(step, buildActionMap());
}
@Test
public void gotoWorks(){
// determine a shop as goto target
Shop shop = sim.getWorldState().getShops().iterator().next();
// let all agents go somewhere
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("goto",
String.valueOf(shop.getLocation().getLat()), String.valueOf(shop.getLocation().getLon())));
actions.put("agentA2", new Action("goto", "resourceNode1"));
actions.put("agentA3", new Action("goto", shop.getName()));
sim.step(step, actions);
// check results and new locations
assert(sim.getWorldState().getEntity("agentA1").getLocation().equals(shop.getLocation()));
assert(sim.getWorldState().getEntity("agentA2").getLastActionResult().equals("failed_unknown_facility"));
assert(sim.getWorldState().getEntity("agentA3").getLocation().equals(shop.getLocation()));
}
@Test
public void giveReceiveWorks(){
// move one agent to another and give her some item
Entity e4 = sim.getWorldState().getEntity("agentA4");
Entity e5 = sim.getWorldState().getEntity("agentA5");
Item item = sim.getWorldState().getItems().get(0);
e4.setLocation(e5.getLocation());
e4.clearInventory();
e5.clearInventory();
e4.addItem(item, 1);
// assert preconditions
assert(e4.getItemCount(item) == 1);
assert(e5.getItemCount(item) == 0);
// give and receive some items
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA4", new Action("give", "agentA5", item.getName(), "1"));
actions.put("agentA5", new Action("receive"));
sim.step(step, actions);
assert(e4.getItemCount(item) == 0);
assert(e5.getItemCount(item) == 1);
}
@Test
public void storeRetrieveWorks(){
WorldState world = sim.getWorldState();
Entity e2 = world.getEntity("agentA2");
Item item = world.getItems().get(0);
Storage storage = world.getStorages().iterator().next();
e2.setLocation(storage.getLocation());
e2.clearInventory();
e2.addItem(item, 2);
storage.removeStored(item, 100, "A");
// preconditions
assert(storage.getStored(item, "A") == 0);
assert(e2.getItemCount(item) == 2);
// store something
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA2", new Action("store", item.getName(), "1"));
sim.step(step, actions);
assert(storage.getStored(item, "A") == 1);
assert(e2.getItemCount(item) == 1);
step++;
// retrieve it
sim.preStep(step);
actions = buildActionMap();
actions.put("agentA2", new Action("retrieve", item.getName(), "1"));
sim.step(step, actions);
assert(storage.getStored(item, "A") == 0);
assert(e2.getItemCount(item) == 2);
step++;
// retrieve too much
sim.preStep(step);
sim.step(step, actions); // actions can be reused
assert(storage.getStored(item, "A") == 0);
assert(e2.getItemCount(item) == 2);
assert(e2.getLastActionResult().equals("failed_item_amount"));
step++;
// store too much
int fill = storage.getCapacity() / item.getVolume();
storage.store(item, fill, "A");
e2.addItem(item, 1);
int carrying = e2.getItemCount(item);
sim.preStep(step);
actions = buildActionMap();
actions.put("agentA2", new Action("store", item.getName(), "1"));
sim.step(step, actions);
assert(storage.getStored(item, "A") == fill);
assert(e2.getItemCount(item) == carrying);
assert(e2.getLastActionResult().equals("failed_capacity"));
}
@Test
public void assembleWorks(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
Entity e2 = world.getEntity("agentA2");
Entity e3 = world.getEntity("agentA3");
Entity e4 = world.getEntity("agentA20");
Workshop workshop = world.getWorkshops().iterator().next();
Optional<Item> optItem = world.getItems().stream() // find item that needs roles and materials
.filter(item -> item.getRequiredItems().size() > 1 && item.getRequiredRoles().size() > 0)
.findAny();
assert optItem.isPresent();
Item item = optItem.get();
e1.clearInventory();
e1.setLocation(workshop.getLocation());
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("assemble", item.getName()));
for (int i = 2; i < 30; i++){
String agName = "agentA" + i;
Entity ent = world.getEntity(agName);
ent.clearInventory();
ent.setLocation(workshop.getLocation());
actions.put(agName, new Action("assist_assemble", "agentA1"));
}
Entity[] assistants = new Entity[]{e2, e3, e4};
List<Item> requiredItems = new ArrayList<>(item.getRequiredItems());
e1.addItem(requiredItems.get(0), 1);
for(int i = 1; i < requiredItems.size(); i++)
assistants[i%assistants.length].addItem(requiredItems.get(i), 1);
// check assembly
sim.preStep(step);
sim.step(step, actions);
assert e1.getLastActionResult().equals("successful");
assert e2.getLastActionResult().equals("successful");
assert e1.getItemCount(item) == 1;
item.getRequiredItems().forEach(req -> {
assert e1.getItemCount(req) == 0;
assert e2.getItemCount(req) == 0;
});
}
@Test
public void dumpWorks(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
Dump dump = world.getDumps().iterator().next();
Item item = world.getItems().get(0);
e1.clearInventory();
e1.addItem(item, 7);
e1.setLocation(dump.getLocation());
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("dump", item.getName(), "4"));
sim.step(step, actions);
assert(e1.getItemCount(item) == 3);
}
@Test
public void chargeWorks(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
ChargingStation station = world.getChargingStations().iterator().next();
e1.discharge();
e1.setLocation(station.getLocation());
assert e1.getCurrentBattery() == 0;
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("charge"));
sim.step(step, actions);
assert e1.getCurrentBattery() == Math.min(e1.getRole().getMaxBattery(), station.getRate());
}
@Test
public void rechargeWorks(){
WorldState world = sim.getWorldState();
Entity e2 = world.getEntity("agentA2");
Shop shop = world.getShops().iterator().next();
e2.setLocation(shop.getLocation()); // make sure the agent is not in a charging station
e2.discharge();
assert e2.getCurrentBattery() == 0;
sim.preStep(step);
Map<String, Action> actions = buildActionMap();
actions.put("agentA2", new Action("recharge"));
sim.step(step, actions);
assert e2.getCurrentBattery() > 0; // actual amount is unpredictable
}
@Test
public void gatherWorks(){
WorldState world = sim.getWorldState();
Entity e1 = world.getEntity("agentA1");
ResourceNode node = world.getResourceNodes().iterator().next();
Item item = node.getResource();
e1.clearInventory();
e1.setLocation(node.getLocation());
assert e1.getItemCount(item) == 0;
// check if the agent gathers at least once in 10 steps
Map<String, Action> actions = buildActionMap();
actions.put("agentA1", new Action("gather"));
for(int i = 0; i < 10; i++){
sim.preStep(step);
sim.step(step, actions);
step++;
}
assert e1.getItemCount(item) > 0;
}
/**
 * Exercises the full job workflow: posting a job requiring 5 items, partial
 * deliveries by two competing teams, completion and payout for team A, and
 * retrieval of the item team B had already delivered to the losing effort.
 */
@Test
public void jobActionsWork(){
    Map<String, Action> actions = buildActionMap();
    WorldState world = sim.getWorldState();
    Storage storage = world.getStorages().iterator().next();
    Entity eA = world.getEntity("agentA1");
    Entity eB = world.getEntity("agentB1");
    Item item = world.getItems().get(0);
    long moneyA = world.getTeam("A").getMassium();
    long moneyB = world.getTeam("B").getMassium(); // NOTE(review): unused in this test
    int reward = 77777;
    ItemBox requirements = new ItemBox();
    requirements.store(item, 5); // the job asks for 5 units of the item
    // give each agent fewer items than required so the first delivery is partial
    eA.clearInventory();
    eB.clearInventory();
    eA.setLocation(storage.getLocation());
    eB.setLocation(storage.getLocation());
    eA.addItem(item, 3);
    eB.addItem(item, 1);
    // clear any previously delivered stock so the counts asserted below are exact
    storage.removeDelivered(item, 10000, "A");
    storage.removeDelivered(item, 10000, "B");
    Job job = new Job(reward, storage, step + 1, step + 4, requirements, JobData.POSTER_SYSTEM);
    world.addJob(job);
    // activate job
    sim.preStep(step);
    sim.step(step++, actions);
    // check delivering (partial)
    actions = buildActionMap();
    actions.put("agentA1", new Action("deliver_job", job.getName()));
    actions.put("agentB1", new Action("deliver_job", job.getName()));
    sim.preStep(step);
    sim.step(step++, actions);
    assert eA.getItemCount(item) == 0;
    assert eB.getItemCount(item) == 0;
    assert eA.getLastActionResult().equals("successful_partial");
    assert eB.getLastActionResult().equals("successful_partial");
    // check completion
    eA.addItem(item, 3); // A already delivered 3 of 5, so only 2 of these 3 should be consumed
    sim.preStep(step);
    sim.step(step, actions);
    assert eA.getItemCount(item) == 1;
    assert eA.getLastActionResult().equals("successful");
    assert world.getTeam("A").getMassium() == moneyA + reward;
    assert storage.getDelivered(item, "A") == 0;
    assert storage.getDelivered(item, "B") == 1;
    step++;
    // retrieve delivery
    actions = buildActionMap();
    actions.put("agentB1", new Action("retrieve_delivered", item.getName(), "1"));
    sim.preStep(step);
    sim.step(step, actions);
    assert eB.getItemCount(item) == 1;
}
/**
 * Exercises auction jobs: a bid above the reward ceiling is rejected, valid
 * and lowered bids are registered, the winning team is paid on delivery, and
 * a team that wins an auction but never delivers pays the fine.
 */
@Test
public void bidWorks(){
    WorldState world = sim.getWorldState();
    Storage storage = world.getStorages().iterator().next();
    Item item = world.getItems().get(0);
    ItemBox itemsRequired = new ItemBox();
    itemsRequired.store(item, 1);
    // assumes ctor args are (maxReward=999, storage, begin, end, items, auctionSteps=2, fine=888) — TODO confirm
    AuctionJob auction = new AuctionJob(999, storage, step + 1, step + 4, itemsRequired, 2, 888);
    AuctionJob auction2 = new AuctionJob(999, storage, step + 1, step + 4, itemsRequired, 2, 888);
    world.addJob(auction);
    world.addJob(auction2);
    long moneyA = world.getTeam("A").getMassium();
    long moneyB = world.getTeam("B").getMassium();
    Entity eB = world.getEntity("agentB1");
    Map<String, Action> actions = buildActionMap();
    sim.preStep(step);
    sim.step(step, actions); // let auctions get names and be registered
    step++;
    // 1000 exceeds the 999 ceiling and must be ignored; 998 is a valid bid
    actions.put("agentA1", new Action("bid_for_job", auction.getName(), "1000"));
    actions.put("agentB1", new Action("bid_for_job", auction2.getName(), "998"));
    sim.preStep(step);
    sim.step(step, actions);
    assert auction.getLowestBid() == null;
    assert auction2.getLowestBid() == 998;
    step++;
    actions = buildActionMap();
    actions.put("agentA1", new Action("bid_for_job", auction.getName(), "778"));
    sim.preStep(step);
    sim.step(step, actions);
    assert auction.getLowestBid() == 778;
    step++;
    // complete auction for team B
    eB.addItem(item, 1);
    eB.setLocation(storage.getLocation());
    actions = buildActionMap();
    actions.put("agentB1", new Action("deliver_job", auction2.getName()));
    sim.preStep(step);
    sim.step(step, actions);
    assert eB.getLastActionResult().equals("successful");
    step++;
    // check if team A paid the fine and B got the reward
    sim.preStep(step);
    sim.step(step, buildActionMap());
    assert world.getTeam("A").getMassium() == moneyA - auction.getFine();
    assert world.getTeam("B").getMassium() == moneyB + auction2.getLowestBid();
}
// TODO test facility creation/generation
/**
 * The generated world must contain at least one charging station.
 */
@Test
public void chargingStationsWork(){
    boolean anyStations = !sim.getWorldState().getChargingStations().isEmpty();
    assert anyStations;
}
/**
 * Placeholder for shop/trade behavior tests; no checks implemented yet.
 */
@Test
public void shopsWork(){
    // TODO
}
/**
 * The generated world must contain at least one dump facility.
 */
@Test
public void dumpsWork(){
    boolean anyDumps = !sim.getWorldState().getDumps().isEmpty();
    assert anyDumps;
}
/**
 * The generated world must contain at least one workshop.
 */
@Test
public void workshopsWork(){
    boolean anyWorkshops = !sim.getWorldState().getWorkshops().isEmpty();
    assert anyWorkshops;
}
/**
 * The generated world must contain at least one storage facility.
 */
@Test
public void storageWork(){
    boolean anyStorages = !sim.getWorldState().getStorages().isEmpty();
    assert anyStorages;
}
/**
 * Placeholder for resource-node behavior tests; no checks implemented yet.
 */
@Test
public void resourceNodesWork(){
    // TODO
}
// @Test
// public void jobsWork(){
// WorldState world = sim.getWorldState();
// Set<Job> jobs = new HashSet<>();
// for(int i=0; i<20; i++){
// jobs = world.getGenerator().generateJobs(i,world);
// if(!jobs.isEmpty()){
// break;
// }
// }
// if(!jobs.isEmpty()){
// Job job = jobs.iterator().next();
// assert !job.getRequiredItems().getStoredTypes().isEmpty();
// assert job.getReward()>0;
// }
// }
/**
 * Checks that an agent placed at a location with no route to its goal fails
 * the first goto with FAILED_NO_ROUTE, but is "rescued" by the simulation so
 * that a subsequent goto no longer fails with that error.
 */
@Test
public void stuckAgentsAreRescued(){
    WorldState world = sim.getWorldState();
    Entity e1 = world.getEntity("agentA1");
    // a coordinate assumed to be unreachable from "shop3" — TODO confirm
    e1.setLocation(new Location(2.34953, 48.86091));
    sim.preStep(step);
    Map<String, Action> actions = buildActionMap();
    actions.put("agentA1", new Action("goto", "shop3"));
    sim.step(step, actions);
    assert e1.getLastActionResult().equals(ActionExecutor.FAILED_NO_ROUTE);
    // NOTE(review): unlike the other tests, 'step' is not incremented between
    // the two attempts here — confirm this is deliberate.
    sim.preStep(step);
    actions.put("agentA1", new Action("goto", "shop3"));
    sim.step(step, actions);
    assert !e1.getLastActionResult().equals(ActionExecutor.FAILED_NO_ROUTE);
}
/**
 * Builds a default action map in which every agent performs "skip".
 *
 * @return a new map from each agent's name to a fresh "skip" action
 */
private static Map<String, Action> buildActionMap(){
    return sim.getWorldState().getAgents().stream()
            .collect(Collectors.toMap(
                    agentName -> agentName,
                    agentName -> new Action("skip")));
}
/**
 * Checks that a request-action message for the given agent exists and is the
 * city scenario's step percept, then returns it with the correct type.
 *
 * @param agent name of an agent
 * @param messages all request-action messages received in preStep, keyed by agent name
 * @return the agent's request-action message cast to {@link CityStepPercept}
 */
private static CityStepPercept getPercept(String agent, Map<String, RequestAction> messages){
    RequestAction reqAct = messages.get(agent);
    // instanceof already yields false for null, so the former explicit
    // null check was redundant and has been removed.
    assert reqAct instanceof CityStepPercept;
    return (CityStepPercept) reqAct;
}
} | add test for shops/trade
| server/src/test/java/massim/scenario/city/CitySimulationTest.java | add test for shops/trade |
|
Java | agpl-3.0 | f01221e696cd25955bcbdca0407d00531734e2d9 | 0 | sabarish14/agreementmaker,Stanwar/agreementmaker,sabarish14/agreementmaker,Stanwar/agreementmaker,Stanwar/agreementmaker,sabarish14/agreementmaker,Stanwar/agreementmaker,sabarish14/agreementmaker | package am;
import java.io.PrintStream;
/**
 * GSM class contains global static variables that are used throughout the program.
 *
 * @author ADVIS Research Laboratory
 * @version 11/27/2004
 */
public class GlobalStaticVariables
{
    /** AgreementMaker version string, shown in the UI. */
    public static final String AgreementMakerVersion = "v0.23";
    /**
     * IMPORTANT!: USE_PROGRESS_BAR determines if the matcher will be sending setProgress() messages to the progress dialog.
     *
     * The progress dialog will always be shown, but whether the progress bar will be used can be set here, using the USE_PROGRESS_BAR variable.
     * This option has been added because using the progress bar adds a little overhead (stepDone() and setProgress() is called from inside the algorithm).
     * So in order to allow for the developer users to achieve the best running time, the progress bar can be toggled on and off.
     */
    public static final boolean USE_PROGRESS_BAR = true;
    /** Title used for the target ontology. */
    public static final String TARGETTITLE = "Target Ontology";
    /** Title used for the source ontology. */
    public static final String SOURCETITILE = "Source Ontology";
    /** Marks a node as belonging to the source ontology. */
    public static final int SOURCENODE = 0;
    /** Marks a node as belonging to the target ontology. */
    public static final int TARGETNODE = 1;
    // File-language indices; must match the order of languageStrings below.
    public static final int RDFSFILE = 0;
    public static final int OWLFILE = 1;
    public static final int XMLFILE = 2;
    public static final int TABBEDTEXT = 3;
    //public static final int DAMLFILE = 3;
    // Serialization-syntax indices; must match the order of syntaxStrings below.
    public static final int RDFXML = 0;
    public static final int RDFXMLABBREV = 1;
    public static final int NTRIPLE = 2;
    public static final int N3 = 3;
    public static final int TURTLE = 4;
    public static final String SYNTAX_RDFXML = "RDF/XML";
    public static final String SYNTAX_RDFXMLABBREV = "RDF/XML-ABBREV";
    public static final String SYNTAX_NTRIPLE = "N-TRIPLE";
    public static final String SYNTAX_N3 = "N3";
    public static final String SYNTAX_TURTLE = "TURTLE";
    /** Syntax names indexed by {@link #RDFXML} .. {@link #TURTLE}. */
    public static final String[] syntaxStrings = {SYNTAX_RDFXML, SYNTAX_RDFXMLABBREV, SYNTAX_NTRIPLE, SYNTAX_N3, SYNTAX_TURTLE};
    public static final String LANG_RDFS = "RDFS";
    public static final String LANG_OWL = "OWL";
    public static final String LANG_XML = "XML";
    public static final String LANG_TABBEDTEXT = "Tabbed TEXT";
    /** Language names indexed by {@link #RDFSFILE} .. {@link #TABBEDTEXT}. */
    public static final String[] languageStrings = {LANG_RDFS, LANG_OWL, LANG_XML, LANG_TABBEDTEXT};
    /** Shared auto-flushing stream wrapping standard output. */
    public static final PrintStream out = new PrintStream(System.out, true);

    /**
     * Returns the syntax name for the given syntax index.
     *
     * @param syntaxIndex one of {@link #RDFXML} .. {@link #TURTLE}
     * @return the corresponding syntax name from {@link #syntaxStrings}
     */
    public static String getSyntaxString(int syntaxIndex) {
        // N3 and TURTLE are considered the same syntax for the reasoner, so
        // TURTLE is mapped onto N3 (named constants instead of magic 4/3).
        if (syntaxIndex == TURTLE) {
            syntaxIndex = N3;
        }
        return syntaxStrings[syntaxIndex];
    }

    /**
     * Returns the language name for the given language index.
     *
     * @param l one of {@link #RDFSFILE} .. {@link #TABBEDTEXT}
     * @return the corresponding language name from {@link #languageStrings}
     */
    public static String getLanguageString(int l) {
        return languageStrings[l];
    }
}
| AgreementMaker/src/am/GlobalStaticVariables.java | package am;
import java.io.PrintStream;
/**
 * GSM class holding the global static constants used throughout the program.
 *
 * @author ADVIS Research Laboratory
 * @version 11/27/2004
 */
public class GlobalStaticVariables {
    /** AgreementMaker version string. */
    public static final String AgreementMakerVersion = "v0.22";
    /**
     * IMPORTANT!: USE_PROGRESS_BAR determines if the matcher will be sending setProgress() messages to the progress dialog.
     *
     * The progress dialog will always be shown, but whether the progress bar will be used can be set here, using the USE_PROGRESS_BAR variable.
     * This option has been added because using the progress bar adds a little overhead (stepDone() and setProgress() is called from inside the algorithm).
     * So in order to allow for the developer users to achieve the best running time, the progress bar can be toggled on and off.
     */
    public static final boolean USE_PROGRESS_BAR = true;
    /** Title of the target ontology pane. */
    public static final String TARGETTITLE = "Target Ontology";
    /** Title of the source ontology pane. */
    public static final String SOURCETITILE = "Source Ontology";
    /** Node belongs to the source ontology. */
    public static final int SOURCENODE = 0;
    /** Node belongs to the target ontology. */
    public static final int TARGETNODE = 1;
    public static final int XMLFILE = 2;
    public static final int ONTFILE = 1;
    public static final int RDFSFILE = 0;
    public static final int TABBEDTEXT = 3;
    //public static final int DAMLFILE = 3;
    public static final int RDFXML = 0;
    public static final int RDFXMLABBREV = 1;
    public static final int NTRIPLE = 2;
    public static final int N3 = 3;
    public static final int TURTLE = 4;
    public static final String SYNTAX_RDFXML = "RDF/XML";
    public static final String SYNTAX_RDFXMLABBREV = "RDF/XML-ABBREV";
    public static final String SYNTAX_NTRIPLE = "N-TRIPLE";
    public static final String SYNTAX_N3 = "N3";
    public static final String SYNTAX_TURTLE = "TURTLE";
    /** Syntax names indexed by RDFXML..TURTLE. */
    public static final String[] syntaxStrings = {SYNTAX_RDFXML, SYNTAX_RDFXMLABBREV, SYNTAX_NTRIPLE, SYNTAX_N3, SYNTAX_TURTLE};
    public static final String LANG_RDFS = "RDFS";
    public static final String LANG_OWL = "OWL";
    public static final String LANG_XML = "XML";
    public static final String LANG_TABBEDTEXT = "Tabbed TEXT";
    /** Language names indexed by RDFSFILE..TABBEDTEXT. */
    public static final String[] languageStrings = {LANG_RDFS, LANG_OWL, LANG_XML, LANG_TABBEDTEXT};
    /** Shared auto-flushing stream wrapping standard output. */
    public static final PrintStream out = new PrintStream(System.out, true);

    /**
     * Returns the syntax name for the given syntax index.
     */
    public static String getSyntaxString(int syntaxIndex) {
        // N3 and TURTLE are treated as the same syntax by the reasoner
        int index = (syntaxIndex == TURTLE) ? N3 : syntaxIndex;
        return syntaxStrings[index];
    }

    /**
     * Returns the language name for the given language index.
     */
    public static String getLanguageString(int languageIndex) {
        return languageStrings[languageIndex];
    }
}
| AgreementMaker/src/am/GlobalStaticVariables.java | Version 0.23 |
|
Java | lgpl-2.1 | 3f90d7887d7d04995b1ec7107c3122dadc9b9388 | 0 | julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine | package org.intermine.web.struts;
/*
* Copyright (C) 2002-2008 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.HashMap;
import java.util.Map;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.pathquery.PathNode;
import org.intermine.web.logic.Constants;
import org.intermine.web.logic.profile.Profile;
import org.intermine.web.logic.query.QueryMonitorTimeout;
import org.intermine.web.logic.session.SessionMethods;
import org.intermine.web.logic.template.TemplateHelper;
import org.intermine.web.logic.template.TemplateQuery;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.Globals;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.util.MessageResources;
/**
 * Struts action handling the quick-search box. Dispatches on the selected
 * search type: "ids" runs the configured browse template against the entered
 * identifier, "bgs" filters the bag list, "tpls" filters the template list.
 *
 * @author Xavier Watkins
 *
 */
public class QuickSearchAction extends InterMineAction
{
    /**
     * Method called when user has submitted search form.
     *
     * @param mapping The ActionMapping used to select this instance
     * @param form The optional ActionForm bean for this request (if any)
     * @param request The HTTP request we are processing
     * @param response The HTTP response we are creating
     * @return an ActionForward object defining where control goes next
     * @exception Exception if the application business logic throws
     * an exception
     */
    @Override
    public ActionForward execute(ActionMapping mapping,
                                 ActionForm form,
                                 HttpServletRequest request,
                                 @SuppressWarnings("unused") HttpServletResponse response)
        throws Exception {
        HttpSession session = request.getSession();
        ServletContext context = session.getServletContext();
        QuickSearchForm qsf = (QuickSearchForm) form;
        String qsType = qsf.getQuickSearchType();
        // remember the chosen search type so the form can preselect it next time
        session.setAttribute("quickSearchType", qsType);
        Profile profile = ((Profile) session.getAttribute(Constants.PROFILE));
        if (qsType.equals("ids")) {
            Map webPropertiesMap = (Map) context.getAttribute(Constants.WEB_PROPERTIES);
            // remove the last query ran, otherwise the old query will show up on the results page
            session.removeAttribute(Constants.QUERY);
            // the template to run comes from the web properties configuration
            String templateName = (String) webPropertiesMap.get("begin.browse.template");
            String templateType = "global";
            SessionMethods.logTemplateQueryUse(session, templateType, templateName);
            String userName = profile.getUsername();
            TemplateQuery template = TemplateHelper.findTemplate(context, session, userName,
                                                                 templateName, templateType);
            QueryMonitorTimeout clientState = new QueryMonitorTimeout(Constants.
                QUERY_TIMEOUT_SECONDS * 1000);
            MessageResources messages =
                (MessageResources) request.getAttribute(Globals.MESSAGES_KEY);
            // constrain the template's first editable node to equal the search value
            Map<String, Object> valuesMap = new HashMap <String, Object> ();
            Map <String, ConstraintOp> constraintOpsMap = new HashMap <String, ConstraintOp> ();
            PathNode node = (template.getEditableNodes().get(0));
            valuesMap.put(node.getPathString(), qsf.getParsedValue());
            constraintOpsMap.put(node.getPathString(), ConstraintOp.EQUALS);
            TemplateQuery queryCopy = TemplateHelper.editTemplate(valuesMap,
                constraintOpsMap, template, null, new HashMap<String, String>());
            String qid = SessionMethods.startQuery(clientState, session, messages, false,
                                                   queryCopy);
            // presumably gives the query thread a head start before redirecting — TODO confirm
            Thread.sleep(200);
            return new ForwardParameters(mapping.findForward("waiting"))
                .addParameter("qid", qid)
                .addParameter("trail", "")
                .forward();
        } else if (qsType.equals("bgs")) {
            // filter the bag list by the entered text
            request.setAttribute("type", "bag");
            request.setAttribute("initialFilterText", qsf.getValue());
            return new ForwardParameters(mapping.findForward("bags"))
                .addParameter("subtab", "view").forward();
        } else if (qsType.equals("tpls")) {
            // filter the template list by the entered text
            request.setAttribute("type", "template");
            request.setAttribute("initialFilterText", qsf.getValue());
            return mapping.findForward("templates");
        } else {
            throw new RuntimeException("Quick search type not valid");
        }
    }
}
| intermine/web/main/src/org/intermine/web/struts/QuickSearchAction.java | package org.intermine.web.struts;
/*
* Copyright (C) 2002-2008 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.HashMap;
import java.util.Map;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.pathquery.PathNode;
import org.intermine.web.logic.Constants;
import org.intermine.web.logic.profile.Profile;
import org.intermine.web.logic.query.QueryMonitorTimeout;
import org.intermine.web.logic.session.SessionMethods;
import org.intermine.web.logic.template.TemplateHelper;
import org.intermine.web.logic.template.TemplateQuery;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.Globals;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.util.MessageResources;
/**
 * Struts action handling the quick-search box. Dispatches on the selected
 * search type: "ids" runs the configured browse template against the entered
 * identifier, "bgs" filters the bag list, "tpls" filters the template list.
 *
 * @author Xavier Watkins
 *
 */
public class QuickSearchAction extends InterMineAction
{
    /**
     * Method called when user has submitted search form.
     *
     * @param mapping The ActionMapping used to select this instance
     * @param form The optional ActionForm bean for this request (if any)
     * @param request The HTTP request we are processing
     * @param response The HTTP response we are creating
     * @return an ActionForward object defining where control goes next
     * @exception Exception if the application business logic throws
     * an exception
     */
    @Override
    public ActionForward execute(ActionMapping mapping,
                                 ActionForm form,
                                 HttpServletRequest request,
                                 @SuppressWarnings("unused") HttpServletResponse response)
        throws Exception {
        HttpSession session = request.getSession();
        ServletContext context = session.getServletContext();
        QuickSearchForm qsf = (QuickSearchForm) form;
        String qsType = qsf.getQuickSearchType();
        // remember the chosen search type so the form can preselect it next time
        session.setAttribute("quickSearchType", qsType);
        Profile profile = ((Profile) session.getAttribute(Constants.PROFILE));
        if (qsType.equals("ids")) {
            Map webPropertiesMap = (Map) context.getAttribute(Constants.WEB_PROPERTIES);
            // NOTE(review): Constants.QUERY is not cleared here, so a previously
            // run query may still appear on the results page — confirm.
            String templateName = (String) webPropertiesMap.get("begin.browse.template");
            String templateType = "global";
            SessionMethods.logTemplateQueryUse(session, templateType, templateName);
            String userName = profile.getUsername();
            TemplateQuery template = TemplateHelper.findTemplate(context, session, userName,
                                                                 templateName, templateType);
            QueryMonitorTimeout clientState = new QueryMonitorTimeout(Constants.
                QUERY_TIMEOUT_SECONDS * 1000);
            MessageResources messages =
                (MessageResources) request.getAttribute(Globals.MESSAGES_KEY);
            // constrain the template's first editable node to equal the search value
            Map<String, Object> valuesMap = new HashMap <String, Object> ();
            Map <String, ConstraintOp> constraintOpsMap = new HashMap <String, ConstraintOp> ();
            PathNode node = (template.getEditableNodes().get(0));
            valuesMap.put(node.getPathString(), qsf.getParsedValue());
            constraintOpsMap.put(node.getPathString(), ConstraintOp.EQUALS);
            TemplateQuery queryCopy = TemplateHelper.editTemplate(valuesMap,
                constraintOpsMap, template, null, new HashMap<String, String>());
            String qid = SessionMethods.startQuery(clientState, session, messages,
                                                   false, queryCopy);
            // presumably gives the query thread a head start before redirecting — TODO confirm
            Thread.sleep(200);
            return new ForwardParameters(mapping.findForward("waiting"))
                .addParameter("qid", qid)
                .addParameter("trail", "")
                .forward();
        } else if (qsType.equals("bgs")) {
            // filter the bag list by the entered text
            request.setAttribute("type", "bag");
            request.setAttribute("initialFilterText", qsf.getValue());
            return new ForwardParameters(mapping.findForward("bags"))
                .addParameter("subtab", "view").forward();
        } else if (qsType.equals("tpls")) {
            // filter the template list by the entered text
            request.setAttribute("type", "template");
            request.setAttribute("initialFilterText", qsf.getValue());
            return mapping.findForward("templates");
        } else {
            throw new RuntimeException("Quick search type not valid");
        }
    }
}
| after quicksearch, remove the last query ran. otherwise the original template query shows up on the results page. there's probably a more graceful way to handle this, but I couldn't think of one. I don't see a way to determine if a user has just run the quick search - other than to add a new special flag.
Former-commit-id: 6574ce0812f06ed45d3cd5c4fab926f06e63164d | intermine/web/main/src/org/intermine/web/struts/QuickSearchAction.java | after quicksearch, remove the last query ran. otherwise the original template query shows up on the results page. there's probably a more graceful way to handle this, but I couldn't think of one. I don't see a way to determine if a user has just run the quick search - other than to add a new special flag. |
|
Java | lgpl-2.1 | 58c722c4379dbd4024907362561a59462a86319e | 0 | SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,viktorbahr/jaer | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package eu.seebetter.ini.chips.davis;
import net.sf.jaer.Description;
import net.sf.jaer.DevelopmentStatus;
import net.sf.jaer.aemonitor.AEPacketRaw;
import net.sf.jaer.chip.Chip;
import net.sf.jaer.event.ApsDvsEvent;
import net.sf.jaer.event.ApsDvsEventPacket;
import net.sf.jaer.event.ApsDvsEventRGBW;
import net.sf.jaer.event.ApsDvsEventRGBW.ColorFilter;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.event.OutputEventIterator;
import net.sf.jaer.event.TypedEvent;
import eu.seebetter.ini.chips.DavisChip;
import eu.seebetter.ini.chips.davis.imu.IMUSample;
/**
* CDAVIS camera with heterogenous mixture of DAVIS and RGB APS global shutter
* pixels camera
*
* @author Chenghan Li, Luca Longinotti, Tobi Delbruck
*/
@Description("DAVIS APS-DVS camera with RGBW CFA color filter array and 640x480 APS pixels and 320x240 DAVIS pixels")
@DevelopmentStatus(DevelopmentStatus.Status.Experimental)
public class DavisRGBW640 extends Davis346BaseCamera {
public static final short WIDTH_PIXELS = 640;
public static final short HEIGHT_PIXELS = 480;
protected DavisRGBW640Config davisConfig;
public DavisRGBW640() {
setName("DavisRGBW640");
setDefaultPreferencesFile("biasgenSettings/DavisRGBW640/DavisRGBW640.xml");
setSizeX(WIDTH_PIXELS);
setSizeY(HEIGHT_PIXELS);
setEventClass(ApsDvsEventRGBW.class);
setBiasgen(davisConfig = new DavisRGBW640Config(this));
setEventExtractor(new DavisRGBWEventExtractor(this));
apsDVSrenderer = new DavisRGBW640Renderer(this); // must be called after configuration is constructed, because
// it needs to know if frames are enabled to reset pixmap
apsDVSrenderer.setMaxADC(DavisChip.MAX_ADC);
setRenderer(apsDVSrenderer);
}
/**
* The event extractor. Each pixel has two polarities 0 and 1.
*
* <p>
* The bits in the raw data coming from the device are as follows.
* <p>
* Bit 0 is polarity, on=1, off=0<br>
* Bits 1-9 are x address (max value 320)<br>
* Bits 10-17 are y address (max value 240) <br>
* <p>
*/
public class DavisRGBWEventExtractor extends DavisBaseCamera.DavisEventExtractor {
public DavisRGBWEventExtractor(DavisBaseCamera chip) {
super(chip);
}
/**
* extracts the meaning of the raw events.
*
* @param in the raw events, can be null
* @return out the processed events. these are partially processed
* in-place. empty packet is returned if null is supplied as in.
*/
@Override
synchronized public EventPacket extractPacket(final AEPacketRaw in) {
if (!(chip instanceof DavisChip)) {
return null;
}
if (out == null) {
out = new ApsDvsEventPacket(chip.getEventClass());
} else {
out.clear();
}
out.setRawPacket(in);
if (in == null) {
return out;
}
final int n = in.getNumEvents(); // addresses.length;
int sx1 = chip.getSizeX() - 1;
int sy1 = chip.getSizeY() - 1;
final int[] datas = in.getAddresses();
final int[] timestamps = in.getTimestamps();
final OutputEventIterator outItr = out.outputIterator();
// NOTE we must make sure we write ApsDvsEventRGBWs when we want them, not reuse the IMUSamples
// at this point the raw data from the USB IN packet has already been digested to extract timestamps,
// including timestamp wrap events and timestamp resets.
// The datas array holds the data, which consists of a mixture of AEs and ADC values.
// Here we extract the datas and leave the timestamps alone.
// TODO entire rendering / processing approach is not very efficient now
// System.out.println("Extracting new packet "+out);
for (int i = 0; i < n; i++) { // TODO implement skipBy/subsampling, but without missing the frame start/end
// events and still delivering frames
final int data = datas[i];
if ((incompleteIMUSampleException != null)
|| ((DavisChip.ADDRESS_TYPE_IMU & data) == DavisChip.ADDRESS_TYPE_IMU)) {
if (IMUSample.extractSampleTypeCode(data) == 0) { // / only start getting an IMUSample at code 0,
// the first sample type
try {
final IMUSample possibleSample = IMUSample.constructFromAEPacketRaw(in, i,
incompleteIMUSampleException);
i += IMUSample.SIZE_EVENTS - 1;
incompleteIMUSampleException = null;
imuSample = possibleSample; // asking for sample from AEChip now gives this value, but no
// access to intermediate IMU samples
imuSample.imuSampleEvent = true;
outItr.writeToNextOutput(imuSample); // also write the event out to the next output event
// slot
continue;
} catch (final IMUSample.IncompleteIMUSampleException ex) {
incompleteIMUSampleException = ex;
if ((missedImuSampleCounter++ % DavisEventExtractor.IMU_WARNING_INTERVAL) == 0) {
Chip.log.warning(String.format("%s (obtained %d partial samples so far)",
ex.toString(), missedImuSampleCounter));
}
break; // break out of loop because this packet only contained part of an IMUSample and
// formed the end of the packet anyhow. Next time we come back here we will complete
// the IMUSample
} catch (final IMUSample.BadIMUDataException ex2) {
if ((badImuDataCounter++ % DavisEventExtractor.IMU_WARNING_INTERVAL) == 0) {
Chip.log.warning(String.format("%s (%d bad samples so far)", ex2.toString(),
badImuDataCounter));
}
incompleteIMUSampleException = null;
continue; // continue because there may be other data
}
}
} else if ((data & DavisChip.ADDRESS_TYPE_MASK) == DavisChip.ADDRESS_TYPE_DVS) {
// DVS event
final ApsDvsEventRGBW e = nextApsDvsEvent(outItr);
if ((data & DavisChip.EVENT_TYPE_MASK) == DavisChip.EXTERNAL_INPUT_EVENT_ADDR) {
e.adcSample = -1; // TODO hack to mark as not an ADC sample
e.special = true; // TODO special is set here when capturing frames which will mess us up if
// this is an IMUSample used as a plain ApsDvsEventRGBW
e.address = data;
e.timestamp = (timestamps[i]);
e.setIsDVS(true);
} else {
e.adcSample = -1; // TODO hack to mark as not an ADC sample
e.special = false;
e.address = data;
e.timestamp = (timestamps[i]);
e.polarity = (data & DavisChip.POLMASK) == DavisChip.POLMASK ? ApsDvsEventRGBW.Polarity.On
: ApsDvsEventRGBW.Polarity.Off;
e.type = (byte) ((data & DavisChip.POLMASK) == DavisChip.POLMASK ? 1 : 0);
e.x = (short) (2 * (319 - ((data & DavisChip.XMASK) >>> DavisChip.XSHIFT)));
e.y = (short) (2 * ((data & DavisChip.YMASK) >>> DavisChip.YSHIFT));
e.setIsDVS(true);
e.setColorFilter(ColorFilter.W);
// System.out.println(data);
// autoshot triggering
autoshotEventsSinceLastShot++; // number DVS events captured here
}
} else if ((data & DavisChip.ADDRESS_TYPE_MASK) == DavisChip.ADDRESS_TYPE_APS) {
// APS event
// We first calculate the positions, so we can put events such as StartOfFrame at their
// right place, before the actual APS event denoting (0, 0) for example.
final int timestamp = timestamps[i];
short x = (short) (((data & DavisChip.XMASK) >>> DavisChip.XSHIFT));
short y = (short) ((data & DavisChip.YMASK) >>> DavisChip.YSHIFT);
ApsDvsEventRGBW.ColorFilter ColorFilter = ApsDvsEventRGBW.ColorFilter.Null;
if (((x % 2) == 1) && ((y % 2) == 1)) {
ColorFilter = ApsDvsEventRGBW.ColorFilter.R;// R
} else if (((x % 2) == 0) && ((y % 2) == 1)) {
ColorFilter = ApsDvsEventRGBW.ColorFilter.G;// G
} else if (((x % 2) == 0) && ((y % 2) == 0)) {
ColorFilter = ApsDvsEventRGBW.ColorFilter.B;// B
} else if (((x % 2) == 1) && ((y % 2) == 0)) {
ColorFilter = ApsDvsEventRGBW.ColorFilter.W;// w
}
final boolean pixFirst = firstFrameAddress(x, y); // First event of frame (addresses get flipped)
final boolean pixLast = lastFrameAddress(x, y); // Last event of frame (addresses get flipped)
ApsDvsEventRGBW.ReadoutType readoutType = ApsDvsEventRGBW.ReadoutType.Null;
switch ((data & DavisChip.ADC_READCYCLE_MASK) >>> ADC_NUMBER_OF_TRAILING_ZEROS) {
case 0:
readoutType = ApsDvsEventRGBW.ReadoutType.ResetRead;
break;
case 1:
readoutType = ApsDvsEventRGBW.ReadoutType.SignalRead;
break;
case 2:
readoutType = ApsDvsEventRGBW.ReadoutType.CpResetRead;
break;
case 3:
Chip.log.warning("Event with readout cycle null was sent out!");
break;
default:
if ((warningCount < 10)
|| ((warningCount % DavisEventExtractor.WARNING_COUNT_DIVIDER) == 0)) {
Chip.log.warning("Event with unknown readout cycle was sent out!.");
}
warningCount++;
break;
}
// Start of Frame (SOF)
if (pixFirst && !getDavisConfig().getApsReadoutControl().isGlobalShutterMode()
&& (readoutType == ApsDvsEventRGBW.ReadoutType.ResetRead)) { // RS
createApsFlagEvent(outItr, ApsDvsEventRGBW.ReadoutType.SOF, timestamp);
frameIntervalUs = timestamp - frameExposureStartTimestampUs;
frameExposureStartTimestampUs = timestamp;
}
if (pixFirst && getDavisConfig().getApsReadoutControl().isGlobalShutterMode()
&& (readoutType == ApsDvsEventRGBW.ReadoutType.SignalRead)) { // GS
createApsFlagEvent(outItr, ApsDvsEventRGBW.ReadoutType.SOF, timestamp);
frameIntervalUs = timestamp - frameExposureStartTimestampUs;
frameExposureStartTimestampUs = timestamp;
}
final ApsDvsEventRGBW e = nextApsDvsEvent(outItr);
e.adcSample = data & DavisChip.ADC_DATA_MASK;
e.readoutType = readoutType;
e.special = false;
e.timestamp = timestamp;
e.address = data;
e.x = x;
e.y = y;
e.type = (byte) (2);
e.setColorFilter(ColorFilter);
// TODO: figure out exposure for both GS and RS, and start of frame for GS.
if (pixLast && !getDavisConfig().getApsReadoutControl().isGlobalShutterMode() && (readoutType == ApsDvsEventRGBW.ReadoutType.SignalRead)) {
// if we use ResetRead+SignalRead+C readout, OR, if we use ResetRead-SignalRead readout and we
// are at last APS pixel, then write EOF event
// insert a new "end of frame" event not present in original data
createApsFlagEvent(outItr, ApsDvsEventRGBW.ReadoutType.EOF, timestamp);
if (snapshot) {
snapshot = false;
getDavisConfig().getApsReadoutControl().setAdcEnabled(false);
}
setFrameCount(getFrameCount() + 1);
}
if (pixLast && getDavisConfig().getApsReadoutControl().isGlobalShutterMode() && (readoutType == ApsDvsEventRGBW.ReadoutType.ResetRead)) {
// if we use ResetRead+SignalRead+C readout, OR, if we use ResetRead-SignalRead readout and we
// are at last APS pixel, then write EOF event
// insert a new "end of frame" event not present in original data
createApsFlagEvent(outItr, ApsDvsEventRGBW.ReadoutType.EOF, timestamp);
if (snapshot) {
snapshot = false;
getDavisConfig().getApsReadoutControl().setAdcEnabled(false);
}
setFrameCount(getFrameCount() + 1);
}
}
}
if ((getAutoshotThresholdEvents() > 0) && (autoshotEventsSinceLastShot > getAutoshotThresholdEvents())) {
takeSnapshot();
autoshotEventsSinceLastShot = 0;
}
return out;
} // extractPacket
@Override
protected ApsDvsEventRGBW nextApsDvsEvent(final OutputEventIterator outItr) {
ApsDvsEvent e = super.nextApsDvsEvent(outItr);
if (e instanceof ApsDvsEventRGBW) {
((ApsDvsEventRGBW) e).setColorFilter(null);
}
return (ApsDvsEventRGBW) e;
}
/**
 * To handle filtered ApsDvsEventRGBWs, this method rewrites the fields
 * of the raw address encoding x and y addresses to reflect the event's
 * x and y fields.
 *
 * @param e the ApsDvsEventRGBW
 * @return the raw address
 */
@Override
public int reconstructRawAddressFromEvent(final TypedEvent e) {
    int address = e.address;
    // if(e.x==0 && e.y==0){
    // log.info("start of frame event "+e);
    // }
    // if(e.x==-1 && e.y==-1){
    // log.info("end of frame event "+e);
    // }
    // e.x came from e.x = (short) (chip.getSizeX()-1-((data & XMASK) >>> XSHIFT)); // for DVS event, no x flip
    // if APS event
    if (((ApsDvsEventRGBW) e).adcSample >= 0) {
        // APS sample: write the column back without mirroring; /2 undoes the 2x
        // coordinate scaling applied during extraction.
        // NOTE(review): the extractor stores APS x UNscaled but DVS x doubled —
        // confirm the /2 here is actually correct for APS samples.
        address = (address & ~DavisChip.XMASK) | (((e.x) / 2) << DavisChip.XSHIFT);
    } else {
        // DVS event: x was mirrored (sizeX-1-x) during extraction, so mirror back.
        address = (address & ~DavisChip.XMASK) | ((getSizeX() - 1 - (e.x / 2)) << DavisChip.XSHIFT);
    }
    // e.y came from e.y = (short) ((data & YMASK) >>> YSHIFT);
    address = (address & ~DavisChip.YMASK) | ((e.y / 2) << DavisChip.YSHIFT);
    return address;
}
/** @return true when (x, y) is the first pixel address of an APS frame readout. */
public boolean firstFrameAddress(short x, short y) {
    return x == 0 && y == 0;
}
/** @return true when (x, y) is the last pixel address of an APS frame readout. */
public boolean lastFrameAddress(short x, short y) {
    return (x == getSizeX() - 1) && (y == getSizeY() - 1);
}
} // extractor
}
| src/eu/seebetter/ini/chips/davis/DavisRGBW640.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package eu.seebetter.ini.chips.davis;
import net.sf.jaer.Description;
import net.sf.jaer.DevelopmentStatus;
import net.sf.jaer.aemonitor.AEPacketRaw;
import net.sf.jaer.chip.Chip;
import net.sf.jaer.event.ApsDvsEvent;
import net.sf.jaer.event.ApsDvsEventPacket;
import net.sf.jaer.event.ApsDvsEventRGBW;
import net.sf.jaer.event.ApsDvsEventRGBW.ColorFilter;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.event.OutputEventIterator;
import net.sf.jaer.event.TypedEvent;
import eu.seebetter.ini.chips.DavisChip;
import eu.seebetter.ini.chips.davis.imu.IMUSample;
/**
* CDAVIS camera with heterogenous mixture of DAVIS and RGB APS global shutter
* pixels camera
*
* @author Chenghan Li, Luca Longinotti, Tobi Delbruck
*/
@Description("DAVIS APS-DVS camera with RGBW CFA color filter array and 640x480 APS pixels and 320x240 DAVIS pixels")
@DevelopmentStatus(DevelopmentStatus.Status.Experimental)
public class DavisRGBW640 extends Davis346BaseCamera {
public static final short WIDTH_PIXELS = 640;
public static final short HEIGHT_PIXELS = 480;
protected DavisRGBW640Config davisConfig;
/**
 * Configures the 640x480 CDAVIS chip: name, preferences file, geometry, the
 * RGBW event class, the biasgen configuration, the RGBW event extractor and
 * the renderer. Initialization order matters (see inline comment).
 */
public DavisRGBW640() {
    setName("DavisRGBW640");
    setDefaultPreferencesFile("biasgenSettings/DavisRGBW640/DavisRGBW640.xml");
    setSizeX(WIDTH_PIXELS);
    setSizeY(HEIGHT_PIXELS);
    setEventClass(ApsDvsEventRGBW.class);
    setBiasgen(davisConfig = new DavisRGBW640Config(this));
    setEventExtractor(new DavisRGBWEventExtractor(this));
    apsDVSrenderer = new DavisRGBW640Renderer(this); // must be called after configuration is constructed, because
    // it needs to know if frames are enabled to reset pixmap
    apsDVSrenderer.setMaxADC(DavisChip.MAX_ADC);
    setRenderer(apsDVSrenderer);
}
/**
* The event extractor. Each pixel has two polarities 0 and 1.
*
* <p>
* The bits in the raw data coming from the device are as follows.
* <p>
* Bit 0 is polarity, on=1, off=0<br>
* Bits 1-9 are x address (max value 320)<br>
* Bits 10-17 are y address (max value 240) <br>
* <p>
*/
public class DavisRGBWEventExtractor extends DavisBaseCamera.DavisEventExtractor {
/**
 * @param chip the DAVIS camera whose raw events this extractor decodes
 */
public DavisRGBWEventExtractor(DavisBaseCamera chip) {
    super(chip);
}
/**
* extracts the meaning of the raw events.
*
* @param in the raw events, can be null
* @return out the processed events. these are partially processed
* in-place. empty packet is returned if null is supplied as in.
*/
@Override
synchronized public EventPacket extractPacket(final AEPacketRaw in) {
    if (!(chip instanceof DavisChip)) {
        return null;
    }
    if (out == null) {
        out = new ApsDvsEventPacket(chip.getEventClass());
    } else {
        out.clear();
    }
    out.setRawPacket(in);
    if (in == null) {
        return out;
    }
    final int n = in.getNumEvents(); // addresses.length;
    // NOTE(review): sx1/sy1 are computed but never used in this method.
    int sx1 = chip.getSizeX() - 1;
    int sy1 = chip.getSizeY() - 1;
    final int[] datas = in.getAddresses();
    final int[] timestamps = in.getTimestamps();
    final OutputEventIterator outItr = out.outputIterator();
    // NOTE we must make sure we write ApsDvsEventRGBWs when we want them, not reuse the IMUSamples
    // at this point the raw data from the USB IN packet has already been digested to extract timestamps,
    // including timestamp wrap events and timestamp resets.
    // The datas array holds the data, which consists of a mixture of AEs and ADC values.
    // Here we extract the datas and leave the timestamps alone.
    // TODO entire rendering / processing approach is not very efficient now
    // System.out.println("Extracting new packet "+out);
    for (int i = 0; i < n; i++) { // TODO implement skipBy/subsampling, but without missing the frame start/end
        // events and still delivering frames
        final int data = datas[i];
        // Raw words are one of three kinds: IMU sample fragments, DVS address
        // events, or APS ADC samples; dispatch on the address-type bits.
        if ((incompleteIMUSampleException != null)
            || ((DavisChip.ADDRESS_TYPE_IMU & data) == DavisChip.ADDRESS_TYPE_IMU)) {
            if (IMUSample.extractSampleTypeCode(data) == 0) { // / only start getting an IMUSample at code 0,
                // the first sample type
                try {
                    final IMUSample possibleSample = IMUSample.constructFromAEPacketRaw(in, i,
                        incompleteIMUSampleException);
                    // an IMUSample spans several raw words; skip past them
                    i += IMUSample.SIZE_EVENTS - 1;
                    incompleteIMUSampleException = null;
                    imuSample = possibleSample; // asking for sample from AEChip now gives this value, but no
                    // access to intermediate IMU samples
                    imuSample.imuSampleEvent = true;
                    outItr.writeToNextOutput(imuSample); // also write the event out to the next output event
                    // slot
                    continue;
                } catch (final IMUSample.IncompleteIMUSampleException ex) {
                    incompleteIMUSampleException = ex;
                    // rate-limited warning to avoid log spam
                    if ((missedImuSampleCounter++ % DavisEventExtractor.IMU_WARNING_INTERVAL) == 0) {
                        Chip.log.warning(String.format("%s (obtained %d partial samples so far)",
                            ex.toString(), missedImuSampleCounter));
                    }
                    break; // break out of loop because this packet only contained part of an IMUSample and
                    // formed the end of the packet anyhow. Next time we come back here we will complete
                    // the IMUSample
                } catch (final IMUSample.BadIMUDataException ex2) {
                    if ((badImuDataCounter++ % DavisEventExtractor.IMU_WARNING_INTERVAL) == 0) {
                        Chip.log.warning(String.format("%s (%d bad samples so far)", ex2.toString(),
                            badImuDataCounter));
                    }
                    incompleteIMUSampleException = null;
                    continue; // continue because there may be other data
                }
            }
        } else if ((data & DavisChip.ADDRESS_TYPE_MASK) == DavisChip.ADDRESS_TYPE_DVS) {
            // DVS event
            final ApsDvsEventRGBW e = nextApsDvsEvent(outItr);
            if ((data & DavisChip.EVENT_TYPE_MASK) == DavisChip.EXTERNAL_INPUT_EVENT_ADDR) {
                // external-input (sync) special event, no pixel address semantics
                e.adcSample = -1; // TODO hack to mark as not an ADC sample
                e.special = true; // TODO special is set here when capturing frames which will mess us up if
                // this is an IMUSample used as a plain ApsDvsEventRGBW
                e.address = data;
                e.timestamp = (timestamps[i]);
                e.setIsDVS(true);
            } else {
                e.adcSample = -1; // TODO hack to mark as not an ADC sample
                e.special = false;
                e.address = data;
                e.timestamp = (timestamps[i]);
                e.polarity = (data & DavisChip.POLMASK) == DavisChip.POLMASK ? ApsDvsEventRGBW.Polarity.On
                    : ApsDvsEventRGBW.Polarity.Off;
                e.type = (byte) ((data & DavisChip.POLMASK) == DavisChip.POLMASK ? 1 : 0);
                // DVS array is 320x240 (half the APS resolution, see class javadoc);
                // mirror x and scale both coordinates by 2 onto the 640x480 APS grid.
                e.x = (short) (2 * (319 - ((data & DavisChip.XMASK) >>> DavisChip.XSHIFT)));
                e.y = (short) (2 * ((data & DavisChip.YMASK) >>> DavisChip.YSHIFT));
                e.setIsDVS(true);
                // DVS pixels sit under the W (white) filter position on this chip
                e.setColorFilter(ColorFilter.W);
                // System.out.println(data);
                // autoshot triggering
                autoshotEventsSinceLastShot++; // number DVS events captured here
            }
        } else if ((data & DavisChip.ADDRESS_TYPE_MASK) == DavisChip.ADDRESS_TYPE_APS) {
            // APS event
            // We first calculate the positions, so we can put events such as StartOfFrame at their
            // right place, before the actual APS event denoting (0, 0) for example.
            final int timestamp = timestamps[i];
            short x = (short) (((data & DavisChip.XMASK) >>> DavisChip.XSHIFT));
            short y = (short) ((data & DavisChip.YMASK) >>> DavisChip.YSHIFT);
            // RGBW Bayer-like CFA: the filter colour is determined by the pixel
            // position parity within each 2x2 cell.
            ApsDvsEventRGBW.ColorFilter ColorFilter = ApsDvsEventRGBW.ColorFilter.Null;
            if (((x % 2) == 1) && ((y % 2) == 1)) {
                ColorFilter = ApsDvsEventRGBW.ColorFilter.R;// R
            } else if (((x % 2) == 0) && ((y % 2) == 1)) {
                ColorFilter = ApsDvsEventRGBW.ColorFilter.G;// G
            } else if (((x % 2) == 0) && ((y % 2) == 0)) {
                ColorFilter = ApsDvsEventRGBW.ColorFilter.B;// B
            } else if (((x % 2) == 1) && ((y % 2) == 0)) {
                ColorFilter = ApsDvsEventRGBW.ColorFilter.W;// w
            }
            final boolean pixFirst = firstFrameAddress(x, y); // First event of frame (addresses get flipped)
            final boolean pixLast = lastFrameAddress(x, y); // Last event of frame (addresses get flipped)
            ApsDvsEventRGBW.ReadoutType readoutType = ApsDvsEventRGBW.ReadoutType.Null;
            switch ((data & DavisChip.ADC_READCYCLE_MASK) >>> ADC_NUMBER_OF_TRAILING_ZEROS) {
                case 0:
                    readoutType = ApsDvsEventRGBW.ReadoutType.ResetRead;
                    break;
                case 1:
                    readoutType = ApsDvsEventRGBW.ReadoutType.SignalRead;
                    break;
                case 2:
                    readoutType = ApsDvsEventRGBW.ReadoutType.CpResetRead;
                    break;
                case 3:
                    Chip.log.warning("Event with readout cycle null was sent out!");
                    break;
                default:
                    // rate-limited warning for unknown readout cycles
                    if ((warningCount < 10)
                        || ((warningCount % DavisEventExtractor.WARNING_COUNT_DIVIDER) == 0)) {
                        Chip.log.warning("Event with unknown readout cycle was sent out!.");
                    }
                    warningCount++;
                    break;
            }
            // Start of Frame (SOF)
            if (pixFirst && !getDavisConfig().getApsReadoutControl().isGlobalShutterMode()
                && (readoutType == ApsDvsEventRGBW.ReadoutType.ResetRead)) { // RS
                createApsFlagEvent(outItr, ApsDvsEventRGBW.ReadoutType.SOF, timestamp);
                frameIntervalUs = timestamp - frameExposureStartTimestampUs;
                frameExposureStartTimestampUs = timestamp;
            }
            if (pixFirst && getDavisConfig().getApsReadoutControl().isGlobalShutterMode()
                && (readoutType == ApsDvsEventRGBW.ReadoutType.SignalRead)) { // GS
                createApsFlagEvent(outItr, ApsDvsEventRGBW.ReadoutType.SOF, timestamp);
                frameIntervalUs = timestamp - frameExposureStartTimestampUs;
                frameExposureStartTimestampUs = timestamp;
            }
            // write the actual ADC sample event AFTER any SOF flag event
            final ApsDvsEventRGBW e = nextApsDvsEvent(outItr);
            e.adcSample = data & DavisChip.ADC_DATA_MASK;
            e.readoutType = readoutType;
            e.special = false;
            e.timestamp = timestamp;
            e.address = data;
            e.x = x;
            e.y = y;
            e.type = (byte) (2);
            e.setColorFilter(ColorFilter);
            // TODO: figure out exposure for both GS and RS, and start of frame for GS.
            // NOTE(review): unlike the SOF logic above, EOF is emitted on SignalRead
            // regardless of shutter mode here — confirm against the GS readout order.
            if (pixLast && (readoutType == ApsDvsEventRGBW.ReadoutType.SignalRead)) {
                // if we use ResetRead+SignalRead+C readout, OR, if we use ResetRead-SignalRead readout and we
                // are at last APS pixel, then write EOF event
                // insert a new "end of frame" event not present in original data
                createApsFlagEvent(outItr, ApsDvsEventRGBW.ReadoutType.EOF, timestamp);
                if (snapshot) {
                    // one-shot capture finished: disable further ADC readout
                    snapshot = false;
                    getDavisConfig().getApsReadoutControl().setAdcEnabled(false);
                }
                setFrameCount(getFrameCount() + 1);
            }
        }
    }
    // autoshot: trigger a frame capture after enough DVS activity
    if ((getAutoshotThresholdEvents() > 0) && (autoshotEventsSinceLastShot > getAutoshotThresholdEvents())) {
        takeSnapshot();
        autoshotEventsSinceLastShot = 0;
    }
    return out;
} // extractPacket
/**
 * Obtains the next pooled output event and resets its colour-filter field so
 * that no value from a previous packet leaks through to the new event.
 */
@Override
protected ApsDvsEventRGBW nextApsDvsEvent(final OutputEventIterator outItr) {
    final ApsDvsEvent next = super.nextApsDvsEvent(outItr);
    if (next instanceof ApsDvsEventRGBW) {
        ((ApsDvsEventRGBW) next).setColorFilter(null);
    }
    return (ApsDvsEventRGBW) next;
}
/**
 * Rebuilds the raw hardware address of a (possibly filtered) event so that its
 * x and y bit fields match the event's current x and y values.
 *
 * @param e the event whose raw address is reconstructed
 * @return the raw address with the x/y fields rewritten
 */
@Override
public int reconstructRawAddressFromEvent(final TypedEvent e) {
    // APS samples keep their column unflipped; DVS events were mirrored
    // horizontally during extraction and must be flipped back.
    final boolean isApsSample = ((ApsDvsEventRGBW) e).adcSample >= 0;
    final int xField = isApsSample ? (e.x / 2) : (getSizeX() - 1 - (e.x / 2));
    int raw = e.address;
    raw = (raw & ~DavisChip.XMASK) | (xField << DavisChip.XSHIFT);
    raw = (raw & ~DavisChip.YMASK) | ((e.y / 2) << DavisChip.YSHIFT);
    return raw;
}
/** @return whether (x, y) denotes the first pixel of an APS frame readout. */
public boolean firstFrameAddress(short x, short y) {
    return (x | y) == 0;
}
/** @return whether (x, y) denotes the last pixel of an APS frame readout. */
public boolean lastFrameAddress(short x, short y) {
    final int lastX = getSizeX() - 1;
    final int lastY = getSizeY() - 1;
    return x == lastX && y == lastY;
}
} // extractor
}
| changed DavisRGBW640 extractor to be compatible with D4A Sm3
git-svn-id: fe6b3b33f0410f5f719dcd9e0c58b92353e7a5d3@6653 b7f4320f-462c-0410-a916-d9f35bb82d52
| src/eu/seebetter/ini/chips/davis/DavisRGBW640.java | changed DavisRGBW640 extractor to be compatible with D4A Sm3 |
|
Java | unlicense | eabb95e101a42eccc977df5a53304181cfd13ab1 | 0 | SleepyTrousers/EnderIO,HenryLoenwind/EnderIO | package crazypants.enderio.conduit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.enderio.core.client.render.BoundingBox;
import appeng.api.networking.IGridNode;
import appeng.api.util.AECableType;
import appeng.api.util.AEPartLocation;
import crazypants.enderio.base.EnderIO;
import crazypants.enderio.base.TileEntityEio;
import crazypants.enderio.base.conduit.ConduitDisplayMode;
import crazypants.enderio.base.conduit.ConduitUtil;
import crazypants.enderio.base.conduit.ConnectionMode;
import crazypants.enderio.base.conduit.IConduit;
import crazypants.enderio.base.conduit.IConduitBundle;
import crazypants.enderio.base.conduit.facade.EnumFacadeType;
import crazypants.enderio.base.conduit.geom.CollidableCache;
import crazypants.enderio.base.conduit.geom.CollidableComponent;
import crazypants.enderio.base.conduit.geom.ConduitConnectorType;
import crazypants.enderio.base.conduit.geom.ConduitGeometryUtil;
import crazypants.enderio.base.conduit.geom.Offset;
import crazypants.enderio.base.conduit.geom.Offsets;
import crazypants.enderio.base.conduit.registry.ConduitRegistry;
import crazypants.enderio.base.config.Config;
import crazypants.enderio.base.paint.PaintUtil;
import crazypants.enderio.base.paint.YetaUtil;
import crazypants.enderio.base.render.IBlockStateWrapper;
import crazypants.enderio.conduit.liquid.ILiquidConduit;
import crazypants.enderio.conduit.me.IMEConduit;
import crazypants.enderio.conduit.oc.IOCConduit;
import crazypants.enderio.conduit.power.IPowerConduit;
import crazypants.enderio.conduit.redstone.InsulatedRedstoneConduit;
import crazypants.enderio.conduit.render.BlockStateWrapperConduitBundle;
import crazypants.enderio.conduit.render.ConduitRenderMapper;
import li.cil.oc.api.network.Message;
import li.cil.oc.api.network.Node;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.FluidTankInfo;
import net.minecraftforge.fml.common.Optional.Method;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import static crazypants.enderio.base.config.Config.transparentFacadesLetThroughBeaconBeam;
import static crazypants.enderio.conduit.init.ConduitObject.block_conduit_bundle;
public class TileConduitBundle extends TileEntityEio implements IConduitBundle, IConduitComponent {
public static final short NBT_VERSION = 1;
// TODO Fix duct-tape
private final List<IConduit> conduits = new CopyOnWriteArrayList<IConduit>(); // <- duct-tape fix
private IBlockState facade = null;
private EnumFacadeType facadeType = EnumFacadeType.BASIC;
private final List<CollidableComponent> cachedCollidables = new CopyOnWriteArrayList<CollidableComponent>(); // <- duct-tape fix
private final List<CollidableComponent> cachedConnectors = new CopyOnWriteArrayList<CollidableComponent>(); // <- duct-tape fix
private boolean conduitsDirty = true;
private boolean collidablesDirty = true;
private boolean connectorsDirty = true;
private boolean clientUpdated = false;
private int lightOpacityOverride = -1;
@SideOnly(Side.CLIENT)
private FacadeRenderState facadeRenderAs;
private ConduitDisplayMode lastMode = ConduitDisplayMode.ALL;
Object covers;
/** Creates a bundle tile whose block type is the conduit bundle block. */
public TileConduitBundle() {
    this.blockType = block_conduit_bundle.getBlock();
}
/**
 * Flags the conduit set and the cached collision geometry as stale; both are
 * rebuilt lazily (next tick / next geometry query).
 */
@Override
public void dirty() {
    conduitsDirty = true;
    collidablesDirty = true;
}
/**
 * Skips TESR rendering entirely when an opaque facade fully hides the bundle
 * and the player's wrench does not reveal it; otherwise defers to super.
 */
@Override
public boolean shouldRenderInPass(int arg0) {
    if(facade != null && facade.isOpaqueCube() && !YetaUtil.isFacadeHidden(this, EnderIO.proxy.getClientPlayer())) {
        return false;
    }
    return super.shouldRenderInPass(arg0);
}
@Override
public World getBundleworld() {
return getWorld();
}
/**
 * Serializes the bundle: one compound per conduit under "conduits", the facade
 * paint source plus its type (only when a facade is present), and the NBT
 * format version tag.
 */
@Override
public void writeCustomNBT(NBTTagCompound nbtRoot) {
    final NBTTagList conduitList = new NBTTagList();
    for (IConduit c : conduits) {
        final NBTTagCompound tag = new NBTTagCompound();
        ConduitUtil.writeToNBT(c, tag);
        conduitList.appendTag(tag);
    }
    nbtRoot.setTag("conduits", conduitList);
    if (facade != null) {
        PaintUtil.writeNbt(nbtRoot, facade);
        nbtRoot.setString("facadeType", facadeType.name());
    }
    nbtRoot.setShort("nbtVersion", NBT_VERSION);
}
/**
 * Restores the bundle from NBT: rebuilds the conduit list and the facade
 * paint source / facade type. On the client, flags the tile so the renderer
 * refreshes on the next tick.
 */
@Override
public synchronized void readCustomNBT(NBTTagCompound nbtRoot) {
    short nbtVersion = nbtRoot.getShort("nbtVersion");
    conduits.clear();
    cachedCollidables.clear();
    NBTTagList conduitTags = (NBTTagList) nbtRoot.getTag("conduits");
    if(conduitTags != null) {
        for (int i = 0; i < conduitTags.tagCount(); i++) {
            NBTTagCompound conduitTag = conduitTags.getCompoundTagAt(i);
            IConduit conduit = ConduitUtil.readConduitFromNBT(conduitTag, nbtVersion);
            if(conduit != null) {
                conduit.setBundle(this);
                conduits.add(conduit);
                // keep conduits sorted so the client side cache key is stable
                // NOTE(review): sorting inside the loop re-sorts on every add;
                // sorting once after the loop would suffice.
                ConduitRegistry.sort(conduits);
            }
        }
    }
    facade = PaintUtil.readNbt(nbtRoot);
    if (facade != null) {
        if (nbtRoot.hasKey("facadeType")) { // backwards compat, never true in freshly placed bundles
            facadeType = EnumFacadeType.valueOf(nbtRoot.getString("facadeType"));
        } else {
            facadeType = EnumFacadeType.BASIC;
        }
    } else {
        // facade is already null here; only the type reset has an effect
        facade = null;
        facadeType = EnumFacadeType.BASIC;
    }
    if(world != null && world.isRemote) {
        clientUpdated = true;
    }
}
@Override
public boolean hasFacade() {
return facade != null;
}
/**
 * Sets (or clears, with null) the facade paint source and forces a blockstate
 * change so lighting is recalculated on both sides.
 *
 * @param paintSource the block state to render as facade, or null for none
 */
@Override
public void setPaintSource(@Nullable IBlockState paintSource) {
    facade = paintSource;
    markDirty();
    // force re-calc of lighting for both client and server
    IBlockState bs = world.getBlockState(pos);
    IBlockState newBs = bs.withProperty(BlockConduitBundle.OPAQUE, getLightOpacity() > 0);
    if (bs == newBs) {
        // withProperty() returned the identical state instance, i.e. OPAQUE did
        // not change; cycle the property once so the final setBlockState below
        // still sees a state transition and triggers a light update.
        // NOTE(review): relies on reference identity of cached block states.
        world.setBlockState(getPos(), newBs.cycleProperty(BlockConduitBundle.OPAQUE));
    }
    world.setBlockState(getPos(), newBs);
}
@Override
public IBlockState getPaintSource() {
return facade;
}
@Override
public void setFacadeType(EnumFacadeType type) {
facadeType = type;
markDirty();
}
@Override
public EnumFacadeType getFacadeType() {
return facadeType;
}
@Override
@SideOnly(Side.CLIENT)
@Nonnull
public FacadeRenderState getFacadeRenderedAs() {
if(facadeRenderAs == null) {
facadeRenderAs = FacadeRenderState.NONE;
}
return facadeRenderAs;
}
@Override
@SideOnly(Side.CLIENT)
public void setFacadeRenderAs(FacadeRenderState state) {
this.facadeRenderAs = state;
}
@SuppressWarnings("deprecation")
@Override
public int getLightOpacity() {
if (world != null && world.isRemote && lightOpacityOverride != -1) {
return lightOpacityOverride;
}
if (facade != null) {
if (getFacadeType().isTransparent() && transparentFacadesLetThroughBeaconBeam) {
return Math.min(facade.getLightOpacity(), 14);
} else {
return facade.getLightOpacity();
}
} else {
return 0;
}
}
@Override
public void setLightOpacityOverride(int opacity) {
lightOpacityOverride = opacity;
}
@Override
public void onChunkUnload() {
for (IConduit conduit : conduits) {
conduit.onChunkUnload();
}
}
/**
 * Per-tick update: ticks every conduit (each in its own profiler section),
 * processes a pending conduit-set change, and on the client checks whether
 * the bundle's rendering state needs a refresh.
 */
@Override
public void doUpdate() {
    getWorld().profiler.startSection("conduitBundle");
    getWorld().profiler.startSection("tick");
    for (IConduit conduit : conduits) {
        getWorld().profiler.startSection(conduit.getClass().toString());
        conduit.updateEntity(world);
        getWorld().profiler.endSection();
    }
    if(conduitsDirty) {
        // NOTE(review): profiler label "neigborUpdate" is misspelled, but it is
        // a runtime string and is intentionally left unchanged here.
        getWorld().profiler.startSection("neigborUpdate");
        doConduitsDirty();
        getWorld().profiler.endSection();
    }
    getWorld().profiler.endSection();
    //client side only, check for changes in rendering of the bundle
    if(world.isRemote) {
        getWorld().profiler.startSection("clientTick");
        updateEntityClient();
        getWorld().profiler.endSection();
    }
    getWorld().profiler.endSection();
}
/**
 * Handles a change in the conduit set: server-side it syncs the tile to
 * watching clients and fires a neighbor update; client-side it only refreshes
 * the cached render geometry. Clears the conduitsDirty flag.
 */
private void doConduitsDirty() {
    if(!world.isRemote) {
        IBlockState bs = world.getBlockState(pos);
        // flag 3 = notify neighbors + send to clients
        world.notifyBlockUpdate(pos, bs, bs, 3);
        world.neighborChanged(pos, getBlockType(), pos);
        markDirty();
    } else {
        geometryChanged(); // Q&D
    }
    conduitsDirty = false;
}
/**
 * Client-side per-tick check: decides whether the bundle must be re-rendered
 * because of (a) freshly received server data, (b) a change in the facade's
 * required render state, or (c) a change of the player's conduit display mode
 * (held yeta wrench). Also keeps the light-opacity override in sync when
 * facades are being hidden.
 */
private void updateEntityClient() {
    boolean markForUpdate = false;
    if(clientUpdated) {
        //TODO: This is not the correct solution here but just marking the block for a render update server side
        //seems to get out of sync with the client sometimes so connections are not rendered correctly
        markForUpdate = true;
        clientUpdated = false;
    }
    FacadeRenderState curRS = getFacadeRenderedAs();
    FacadeRenderState rs = ConduitUtil.getRequiredFacadeRenderState(this, EnderIO.proxy.getClientPlayer());
    if(Config.updateLightingWhenHidingFacades) {
        // a fully rendered facade uses its own opacity (-1 = no override)
        int shouldBeLO = rs == FacadeRenderState.FULL ? -1 : 0;
        if (lightOpacityOverride != shouldBeLO) {
            setLightOpacityOverride(shouldBeLO);
            world.checkLight(getPos());
        }
    }
    if(curRS != rs) {
        setFacadeRenderAs(rs);
        if(!ConduitUtil.forceSkylightRecalculation(world, getPos())) {
            markForUpdate = true;
        }
    }
    ConduitDisplayMode curMode = ConduitDisplayMode.getDisplayMode(EnderIO.proxy.getClientPlayer().getHeldItemMainhand());
    // switching between two "show all" modes needs no re-render
    if (curMode != lastMode && !(lastMode.isAll() && curMode.isAll())) {
        markForUpdate = true;
    }
    lastMode = curMode;
    if(markForUpdate) {
        geometryChanged(); // Q&D
        IBlockState bs = world.getBlockState(pos);
        world.notifyBlockUpdate(pos, bs, bs, 3);
    }
}
/**
 * Propagates a neighbor block change to every conduit; flags the bundle dirty
 * when any conduit reports that its connections changed.
 */
@Override
public void onNeighborBlockChange(Block blockId) {
    boolean changed = false;
    for (IConduit conduit : conduits) {
        changed |= conduit.onNeighborBlockChange(blockId);
    }
    if (changed) {
        dirty();
    }
}
/**
 * Propagates a neighbor tile-entity change to every conduit; flags the bundle
 * dirty when any conduit reports that an update is required.
 */
@Override
public void onNeighborChange(IBlockAccess world, BlockPos posIn, BlockPos neighbor) {
    boolean changed = false;
    for (IConduit conduit : conduits) {
        changed |= conduit.onNeighborChange(neighbor);
    }
    if (changed) {
        dirty();
    }
}
@Override
@Nonnull
public TileConduitBundle getEntity() {
return this;
}
@Override
public boolean hasType(Class<? extends IConduit> type) {
return getConduit(type) != null;
}
@SuppressWarnings("unchecked")
@Override
public <T extends IConduit> T getConduit(Class<T> type) {
if(type == null) {
return null;
}
for (IConduit conduit : conduits) {
if(type.isInstance(conduit)) {
return (T) conduit;
}
}
return null;
}
/**
 * Adds a conduit to this bundle and wires it up. Server-side only; clients
 * receive the new conduit through the NBT sync instead.
 *
 * @param conduit the conduit to add
 */
@Override
public void addConduit(IConduit conduit) {
    if(world.isRemote) {
        return;
    }
    conduits.add(conduit);
    conduit.setBundle(this);
    conduit.onAddedToBundle();
    dirty();
}
/**
 * Removes a conduit and notifies neighbors of the change.
 *
 * @param conduit the conduit to remove; null is a no-op
 */
@Override
public void removeConduit(IConduit conduit) {
    if(conduit != null) {
        removeConduit(conduit, true);
    }
}
/**
 * Removes a conduit from the bundle. Server-side only.
 *
 * @param conduit the conduit to remove
 * @param notify when true, flags the bundle dirty so neighbors/clients learn
 *        of the geometry change; callers removing many conduits pass false
 *        and call {@link #dirty()} once afterwards
 */
public void removeConduit(IConduit conduit, boolean notify) {
    if(world.isRemote) {
        return;
    }
    conduit.onRemovedFromBundle();
    conduits.remove(conduit);
    conduit.setBundle(null);
    if(notify) {
        dirty();
    }
}
/**
 * Detaches every conduit when the bundle block is broken (server-side only),
 * then flags the bundle dirty once at the end instead of per removal.
 */
@Override
public void onBlockRemoved() {
    if (world.isRemote) {
        return;
    }
    // iterate over a snapshot: removeConduit() mutates the live conduit list
    final List<IConduit> snapshot = new ArrayList<IConduit>(conduits);
    for (IConduit conduit : snapshot) {
        removeConduit(conduit, false);
    }
    dirty();
}
@Override
public Collection<IConduit> getConduits() {
return conduits;
}
@Override
public Set<EnumFacing> getConnections(Class<? extends IConduit> type) {
IConduit con = getConduit(type);
if(con != null) {
return con.getConduitConnections();
}
return null;
}
@Override
public boolean containsConnection(Class<? extends IConduit> type, EnumFacing dir) {
IConduit con = getConduit(type);
if(con != null) {
return con.containsConduitConnection(dir);
}
return false;
}
/**
 * @param dir the side to check
 * @return true when at least one conduit in the bundle has a
 *         conduit-to-conduit connection on the given side
 */
@Override
public boolean containsConnection(EnumFacing dir) {
    return conduits.stream().anyMatch(con -> con.containsConduitConnection(dir));
}
/**
 * @return the union of conduit-to-conduit connection directions over all
 *         conduits in this bundle
 */
@Override
public Set<EnumFacing> getAllConnections() {
    final EnumSet<EnumFacing> union = EnumSet.noneOf(EnumFacing.class);
    conduits.forEach(conduit -> union.addAll(conduit.getConduitConnections()));
    return union;
}
// Geometry
@Override
public Offset getOffset(Class<? extends IConduit> type, EnumFacing dir) {
if(getConnectionCount(dir) < 2) {
return Offset.NONE;
}
return Offsets.get(type, dir);
}
/**
 * Returns the (cached) collision geometry of the whole bundle: the
 * collidables of every conduit plus the connector boxes. The cache is rebuilt
 * whenever any conduit reports changed collidables.
 */
@Override
public List<CollidableComponent> getCollidableComponents() {
    // NOTE(review): the || short-circuits once collidablesDirty is true, so not
    // every conduit is polled — addConnectors() below deliberately avoids this
    // by evaluating the flag into a local first. Confirm whether
    // haveCollidablesChangedSinceLastCall() is consume-on-read.
    for (IConduit con : conduits) {
        collidablesDirty = collidablesDirty || con.haveCollidablesChangedSinceLastCall();
    }
    if(collidablesDirty) {
        connectorsDirty = true;
    }
    if(!collidablesDirty && !cachedCollidables.isEmpty()) {
        return cachedCollidables;
    }
    cachedCollidables.clear();
    for (IConduit conduit : conduits) {
        cachedCollidables.addAll(conduit.getCollidableComponents());
    }
    addConnectors(cachedCollidables);
    collidablesDirty = false;
    return cachedCollidables;
}
@Override
public List<CollidableComponent> getConnectors() {
List<CollidableComponent> result = new ArrayList<CollidableComponent>();
addConnectors(result);
return result;
}
@SuppressWarnings("unchecked")
private void addConnectors(List<CollidableComponent> result) {
if(conduits.isEmpty()) {
return;
}
for (IConduit con : conduits) {
boolean b = con.haveCollidablesChangedSinceLastCall();
collidablesDirty = collidablesDirty || b;
connectorsDirty = connectorsDirty || b;
}
if(!connectorsDirty && !cachedConnectors.isEmpty()) {
result.addAll(cachedConnectors);
return;
}
cachedConnectors.clear();
// TODO: What an unholly mess! (and it doesn't even work correctly...)
List<CollidableComponent> coreBounds = new ArrayList<CollidableComponent>();
for (IConduit con : conduits) {
addConduitCores(coreBounds, con);
}
cachedConnectors.addAll(coreBounds);
result.addAll(coreBounds);
// 1st algorithm
List<CollidableComponent> conduitsBounds = new ArrayList<CollidableComponent>();
for (IConduit con : conduits) {
conduitsBounds.addAll(con.getCollidableComponents());
addConduitCores(conduitsBounds, con);
}
Set<Class<IConduit>> collidingTypes = new HashSet<Class<IConduit>>();
for (CollidableComponent conCC : conduitsBounds) {
for (CollidableComponent innerCC : conduitsBounds) {
if(!InsulatedRedstoneConduit.COLOR_CONTROLLER_ID.equals(innerCC.data) && !InsulatedRedstoneConduit.COLOR_CONTROLLER_ID.equals(conCC.data)
&& conCC != innerCC && conCC.bound.intersects(innerCC.bound)) {
collidingTypes.add((Class<IConduit>) conCC.conduitType);
}
}
}
//TODO: Remove the core geometries covered up by this as no point in rendering these
if(!collidingTypes.isEmpty()) {
List<CollidableComponent> colCores = new ArrayList<CollidableComponent>();
for (Class<IConduit> c : collidingTypes) {
IConduit con = getConduit(c);
if(con != null) {
addConduitCores(colCores, con);
}
}
BoundingBox bb = null;
for (CollidableComponent cBB : colCores) {
if(bb == null) {
bb = cBB.bound;
} else {
bb = bb.expandBy(cBB.bound);
}
}
if(bb != null) {
bb = bb.scale(1.05, 1.05, 1.05);
CollidableComponent cc = new CollidableComponent(null, bb, null, ConduitConnectorType.INTERNAL);
result.add(cc);
cachedConnectors.add(cc);
}
}
//2nd algorithm
for (IConduit con : conduits) {
if(con.hasConnections()) {
List<CollidableComponent> cores = new ArrayList<CollidableComponent>();
addConduitCores(cores, con);
if(cores.size() > 1) {
BoundingBox bb = cores.get(0).bound;
double area = bb.getArea();
for (CollidableComponent cc : cores) {
bb = bb.expandBy(cc.bound);
}
if(bb.getArea() > area * 1.5f) {
bb = bb.scale(1.05, 1.05, 1.05);
CollidableComponent cc = new CollidableComponent(null, bb, null, ConduitConnectorType.INTERNAL);
result.add(cc);
cachedConnectors.add(cc);
}
}
}
}
// Merge all internal conduit connectors into one box
BoundingBox conBB = null;
for (int i = 0; i < result.size(); i++) {
CollidableComponent cc = result.get(i);
if (cc.conduitType == null && cc.data == ConduitConnectorType.INTERNAL) {
conBB = conBB == null ? cc.bound : conBB.expandBy(cc.bound);
result.remove(i);
i--;
cachedConnectors.remove(cc);
}
}
if(conBB != null) {
CollidableComponent cc = new CollidableComponent(null, conBB, null, ConduitConnectorType.INTERNAL);
result.add(cc);
cachedConnectors.add(cc);
}
// External Connectors
EnumSet<EnumFacing> externalDirs = EnumSet.noneOf(EnumFacing.class);
for (IConduit con : conduits) {
Set<EnumFacing> extCons = con.getExternalConnections();
if(extCons != null) {
for (EnumFacing dir : extCons) {
if(con.getConnectionMode(dir) != ConnectionMode.DISABLED) {
externalDirs.add(dir);
}
}
}
}
for (EnumFacing dir : externalDirs) {
BoundingBox bb = ConduitGeometryUtil.instance.getExternalConnectorBoundingBox(dir);
CollidableComponent cc = new CollidableComponent(null, bb, dir, ConduitConnectorType.EXTERNAL);
result.add(cc);
cachedConnectors.add(cc);
}
connectorsDirty = false;
}
/**
 * Appends the core (node) boxes of one conduit to {@code result}, one per
 * connected direction — both conduit-to-conduit and external — or a single
 * unconnected core when the conduit has no connections. Boxes come from the
 * shared {@link CollidableCache}, keyed by type and render offset.
 *
 * @param result list the core components are appended to
 * @param con the conduit whose cores are generated
 */
private void addConduitCores(List<CollidableComponent> result, IConduit con) {
    CollidableCache cc = CollidableCache.instance;
    Class<? extends IConduit> type = con.getCollidableType();
    if(con.hasConnections()) {
        for (EnumFacing dir : con.getExternalConnections()) {
            result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), dir), null, false), con));
        }
        for (EnumFacing dir : con.getConduitConnections()) {
            result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), dir), null, false), con));
        }
    } else {
        result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), null), null, false), con));
    }
}
/**
 * Counts the conduits that connect (conduit-to-conduit or externally) on the
 * given side; a null direction returns the total number of conduits.
 *
 * @param dir side to count connections on, or null for the conduit count
 * @return the number of connected conduits
 */
private int getConnectionCount(EnumFacing dir) {
    if (dir == null) {
        return conduits.size();
    }
    int count = 0;
    for (IConduit conduit : conduits) {
        if (conduit.containsConduitConnection(dir) || conduit.containsExternalConnection(dir)) {
            count++;
        }
    }
    return count;
}
// TODO Find a way to separate conduit types
// ------------ Power -----------------------------
@Override
public int receiveEnergy(EnumFacing from, int maxReceive, boolean simulate) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.receiveEnergy(from, maxReceive, simulate);
}
return 0;
}
@Override
public boolean canConnectEnergy(EnumFacing from) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.canConnectEnergy(from);
}
return false;
}
/**
 * RF storage capacity of the bundle's power conduit, or 0 when the bundle has
 * no power conduit.
 *
 * @param from the side being queried
 * @return the maximum energy the power conduit can store
 */
@Override
public int getMaxEnergyStored(EnumFacing from) {
    IPowerConduit pc = getConduit(IPowerConduit.class);
    if(pc != null) {
        // Fix: forward the queried side instead of hard-coding null, consistent
        // with getEnergyStored(from) and getMaxEnergyRecieved(dir) nearby.
        return pc.getMaxEnergyStored(from);
    }
    return 0;
}
@Override
public int getMaxEnergyRecieved(EnumFacing dir) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.getMaxEnergyRecieved(dir);
}
return 0;
}
@Override
public int getEnergyStored(EnumFacing from) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.getEnergyStored(from);
}
return 0;
}
public int getMaxEnergyStored() {
return getMaxEnergyStored(null);
}
@Override
public void setEnergyStored(int stored) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
pc.setEnergyStored(stored);
}
}
//------- Liquids -----------------------------
@Override
public int fill(EnumFacing from, FluidStack resource, boolean doFill) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.fill(from, resource, doFill);
}
return 0;
}
@Override
public FluidStack drain(EnumFacing from, FluidStack resource, boolean doDrain) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.drain(from, resource, doDrain);
}
return null;
}
@Override
public FluidStack drain(EnumFacing from, int maxDrain, boolean doDrain) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.drain(from, maxDrain, doDrain);
}
return null;
}
@Override
public boolean canFill(EnumFacing from, Fluid fluid) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.canFill(from, fluid);
}
return false;
}
@Override
public boolean canDrain(EnumFacing from, Fluid fluid) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.canDrain(from, fluid);
}
return false;
}
@Override
public FluidTankInfo[] getTankInfo(EnumFacing from) {
    // Without a liquid conduit there are no tanks to report.
    final ILiquidConduit liquidConduit = getConduit(ILiquidConduit.class);
    if (liquidConduit == null) {
        return new FluidTankInfo[0];
    }
    return liquidConduit.getTankInfo(from);
}
@Override
public boolean displayPower() {
return false;
}
@Override
public void geometryChanged() {
}
// AE2
private Object node; // IGridNode object, untyped to avoid crash w/o AE2
@Override
@Method(modid = "appliedenergistics2")
// AE2 integration: expose the cached grid node only when an ME conduit exists
// and the queried side is unspecified, internal, or configured for IN_OUT.
public IGridNode getGridNode(AEPartLocation loc) {
IMEConduit cond = getConduit(IMEConduit.class);
if (cond != null) {
if (loc == null || loc == AEPartLocation.INTERNAL || cond.getConnectionMode(loc.getOpposite().getFacing()) == ConnectionMode.IN_OUT) {
// 'node' is stored as a plain Object so this class still loads without AE2.
return (IGridNode) node;
}
}
return null;
}
@SuppressWarnings("cast")
@Override
@Method(modid = "appliedenergistics2")
public void setGridNode(Object node) {
this.node = (IGridNode) node;
}
@Override
@Method(modid = "appliedenergistics2")
public AECableType getCableConnectionType(AEPartLocation loc) {
    // No ME conduit, or a query for the bundle interior, never exposes a cable.
    final IMEConduit meConduit = getConduit(IMEConduit.class);
    if (meConduit == null || loc == AEPartLocation.INTERNAL) {
        return AECableType.NONE;
    }
    if (!meConduit.isConnectedTo(loc.getFacing())) {
        return AECableType.NONE;
    }
    // Connected: dense conduits render as DENSE cable, others as SMART.
    return meConduit.isDense() ? AECableType.DENSE : AECableType.SMART;
}
@Override
@Method(modid = "appliedenergistics2")
public void securityBreak() {
}
// OpenComputers
@Override
@Method(modid = "OpenComputersAPI|Network")
public Node node() {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
return cond.node();
} else {
return null;
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
public void onConnect(Node node) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
cond.onConnect(node);
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
public void onDisconnect(Node node) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
cond.onDisconnect(node);
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
public void onMessage(Message message) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
cond.onMessage(message);
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
public Node sidedNode(EnumFacing side) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
return cond.sidedNode(side);
} else {
return null;
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
@SideOnly(Side.CLIENT)
public boolean canConnect(EnumFacing side) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
return cond.canConnect(side);
} else {
return false;
}
}
@Override
// Tile is being removed/unloaded: let every conduit tear down its state.
public void invalidate() {
super.invalidate();
if (world.isRemote) {
// Server side only; nothing further to do on the client.
return;
}
// Iterate over a snapshot: conduit.invalidate() may mutate the backing list.
List<IConduit> copy = new ArrayList<IConduit>(conduits);
for (IConduit con : copy) {
con.invalidate();
}
}
@SideOnly(Side.CLIENT)
@Override
// Builds the model-cache key for this bundle's client-side rendering.
public void hashCodeForModelCaching(IBlockStateWrapper wrapper, BlockStateWrapperConduitBundle.ConduitCacheKey hashCodes) {
// Pack the render-relevant bundle state into one int:
// bits 16+ facade type, bits 8+ facade render state,
// bits 1+ yeta display mode, bit 0 the hide-facades flag.
hashCodes.add(facadeType.ordinal() << 16 | getFacadeRenderedAs().ordinal() << 8 | wrapper.getYetaDisplayMode().getDisplayMode().ordinal() << 1
| (wrapper.getYetaDisplayMode().isHideFacades() ? 1 : 0));
// Each conduit contributes its own cache key; conduits that do not implement
// IConduitComponent fall back to being added directly (identity/hashCode).
for (IConduit conduit : conduits) {
if (conduit instanceof IConduitComponent) {
((IConduitComponent) conduit).hashCodeForModelCaching(wrapper, hashCodes);
} else {
hashCodes.add(conduit);
}
}
}
@Override
public String toString() {
    // 'world' may be null (e.g. before the tile is attached); use the default form then.
    if (world == null) {
        return super.toString();
    }
    return world.isRemote ? toStringC(this) : toStringS(this);
}
@SideOnly(Side.CLIENT)
public static String toStringC(TileConduitBundle self) {
BlockStateWrapperConduitBundle bsw = new BlockStateWrapperConduitBundle(self.world.getBlockState(self.pos), self.world, self.pos,
ConduitRenderMapper.instance);
bsw.addCacheKey(self);
return "CLIENT: TileConduitBundle [pos=" + self.pos + ", facade=" + self.facade + ", facadeType=" + self.facadeType + ", conduits=" + self.conduits
+ ", cachekey=" + bsw.getCachekey() + ", bsw=" + bsw + "]";
}
public static String toStringS(TileConduitBundle self) {
return "SERVER: TileConduitBundle [pos=" + self.pos + ", conduits=" + self.conduits + "]";
}
}
| src/main/java/crazypants/enderio/conduit/TileConduitBundle.java | package crazypants.enderio.conduit;
import static crazypants.enderio.base.ModObject.blockConduitBundle;
import static crazypants.enderio.base.config.Config.transparentFacadesLetThroughBeaconBeam;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.enderio.core.client.render.BoundingBox;
import com.enderio.core.common.util.BlockCoord;
import appeng.api.networking.IGridNode;
import appeng.api.util.AECableType;
import appeng.api.util.AEPartLocation;
import crazypants.enderio.base.EnderIO;
import crazypants.enderio.base.TileEntityEio;
import crazypants.enderio.base.conduit.ConduitDisplayMode;
import crazypants.enderio.base.conduit.ConduitUtil;
import crazypants.enderio.base.conduit.ConnectionMode;
import crazypants.enderio.base.conduit.IConduit;
import crazypants.enderio.base.conduit.IConduitBundle;
import crazypants.enderio.base.conduit.facade.EnumFacadeType;
import crazypants.enderio.base.conduit.geom.CollidableCache;
import crazypants.enderio.base.conduit.geom.CollidableComponent;
import crazypants.enderio.base.conduit.geom.ConduitConnectorType;
import crazypants.enderio.base.conduit.geom.ConduitGeometryUtil;
import crazypants.enderio.base.conduit.geom.Offset;
import crazypants.enderio.base.conduit.geom.Offsets;
import crazypants.enderio.base.conduit.registry.ConduitRegistry;
import crazypants.enderio.base.config.Config;
import crazypants.enderio.base.paint.PaintUtil;
import crazypants.enderio.base.paint.YetaUtil;
import crazypants.enderio.base.render.IBlockStateWrapper;
import crazypants.enderio.conduit.liquid.ILiquidConduit;
import crazypants.enderio.conduit.me.IMEConduit;
import crazypants.enderio.conduit.oc.IOCConduit;
import crazypants.enderio.conduit.power.IPowerConduit;
import crazypants.enderio.conduit.redstone.InsulatedRedstoneConduit;
import crazypants.enderio.conduit.render.BlockStateWrapperConduitBundle;
import crazypants.enderio.conduit.render.ConduitRenderMapper;
import li.cil.oc.api.network.Message;
import li.cil.oc.api.network.Node;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.FluidTankInfo;
import net.minecraftforge.fml.common.Optional.Method;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
public class TileConduitBundle extends TileEntityEio implements IConduitBundle, IConduitComponent {
public static final short NBT_VERSION = 1;
private final List<IConduit> conduits = new CopyOnWriteArrayList<IConduit>(); // <- duct-tape fix
private IBlockState facade = null;
private EnumFacadeType facadeType = EnumFacadeType.BASIC;
private final List<CollidableComponent> cachedCollidables = new CopyOnWriteArrayList<CollidableComponent>(); // <- duct-tape fix
private final List<CollidableComponent> cachedConnectors = new CopyOnWriteArrayList<CollidableComponent>(); // <- duct-tape fix
private boolean conduitsDirty = true;
private boolean collidablesDirty = true;
private boolean connectorsDirty = true;
private boolean clientUpdated = false;
private int lightOpacityOverride = -1;
@SideOnly(Side.CLIENT)
private FacadeRenderState facadeRenderAs;
private ConduitDisplayMode lastMode = ConduitDisplayMode.ALL;
Object covers;
public TileConduitBundle() {
this.blockType = blockConduitBundle.getBlock();
}
@Override
public void dirty() {
conduitsDirty = true;
collidablesDirty = true;
}
@Override
public boolean shouldRenderInPass(int arg0) {
if(facade != null && facade.isOpaqueCube() && !YetaUtil.isFacadeHidden(this, EnderIO.proxy.getClientPlayer())) {
return false;
}
return super.shouldRenderInPass(arg0);
}
@Override
public World getBundleworld() {
return getWorld();
}
@Override
// Serializes the bundle: all conduits, the optional facade, and a version stamp.
public void writeCustomNBT(NBTTagCompound nbtRoot) {
// Each conduit is written into its own compound inside the "conduits" list tag.
NBTTagList conduitTags = new NBTTagList();
for (IConduit conduit : conduits) {
NBTTagCompound conduitRoot = new NBTTagCompound();
ConduitUtil.writeToNBT(conduit, conduitRoot);
conduitTags.appendTag(conduitRoot);
}
nbtRoot.setTag("conduits", conduitTags);
// Facade (paint) data is only written when a facade is present.
if(facade != null) {
PaintUtil.writeNbt(nbtRoot, facade);
nbtRoot.setString("facadeType", facadeType.name());
}
// Version stamp is read back by readCustomNBT and handed to the conduit reader.
nbtRoot.setShort("nbtVersion", NBT_VERSION);
}
@Override
/*
 * Deserializes the bundle: rebuilds the conduit list, restores the facade and
 * facade type, and flags a client-side render update. Synchronized because it
 * replaces the conduit list wholesale.
 */
public synchronized void readCustomNBT(NBTTagCompound nbtRoot) {
  short nbtVersion = nbtRoot.getShort("nbtVersion");
  conduits.clear();
  cachedCollidables.clear();
  NBTTagList conduitTags = (NBTTagList) nbtRoot.getTag("conduits");
  if(conduitTags != null) {
    for (int i = 0; i < conduitTags.tagCount(); i++) {
      NBTTagCompound conduitTag = conduitTags.getCompoundTagAt(i);
      IConduit conduit = ConduitUtil.readConduitFromNBT(conduitTag, nbtVersion);
      if(conduit != null) {
        conduit.setBundle(this);
        conduits.add(conduit);
      }
    }
    // Sort once after loading everything so the client-side cache key is stable.
    // (Was previously called inside the loop, re-sorting on every insert.)
    ConduitRegistry.sort(conduits);
  }
  facade = PaintUtil.readNbt(nbtRoot);
  if (facade != null) {
    if (nbtRoot.hasKey("facadeType")) { // backwards compat, never true in freshly placed bundles
      facadeType = EnumFacadeType.valueOf(nbtRoot.getString("facadeType"));
    } else {
      facadeType = EnumFacadeType.BASIC;
    }
  } else {
    facade = null; // already null; kept for explicitness
    facadeType = EnumFacadeType.BASIC;
  }
  // On the client, remember that new data arrived so the renderer refreshes.
  if(world != null && world.isRemote) {
    clientUpdated = true;
  }
}
@Override
public boolean hasFacade() {
return facade != null;
}
@Override
// Sets (or clears, with null) the facade paint source and forces a lighting update.
public void setPaintSource(@Nullable IBlockState paintSource) {
facade = paintSource;
markDirty();
// force re-calc of lighting for both client and server
IBlockState bs = world.getBlockState(pos);
IBlockState newBs = bs.withProperty(BlockConduitBundle.OPAQUE, getLightOpacity() > 0);
// NOTE(review): reference equality is used on purpose here? withProperty can
// return the same state instance when the value did not change; cycling OPAQUE
// first then forces a real block-state change. Confirm this is the intent.
if (bs == newBs) {
world.setBlockState(getPos(), newBs.cycleProperty(BlockConduitBundle.OPAQUE));
}
world.setBlockState(getPos(), newBs);
}
@Override
public IBlockState getPaintSource() {
return facade;
}
@Override
public void setFacadeType(EnumFacadeType type) {
facadeType = type;
markDirty();
}
@Override
public EnumFacadeType getFacadeType() {
return facadeType;
}
@Override
@SideOnly(Side.CLIENT)
@Nonnull
public FacadeRenderState getFacadeRenderedAs() {
if(facadeRenderAs == null) {
facadeRenderAs = FacadeRenderState.NONE;
}
return facadeRenderAs;
}
@Override
@SideOnly(Side.CLIENT)
public void setFacadeRenderAs(FacadeRenderState state) {
this.facadeRenderAs = state;
}
@SuppressWarnings("deprecation")
@Override
public int getLightOpacity() {
if (world != null && world.isRemote && lightOpacityOverride != -1) {
return lightOpacityOverride;
}
if (facade != null) {
if (getFacadeType().isTransparent() && transparentFacadesLetThroughBeaconBeam) {
return Math.min(facade.getLightOpacity(), 14);
} else {
return facade.getLightOpacity();
}
} else {
return 0;
}
}
@Override
public void setLightOpacityOverride(int opacity) {
lightOpacityOverride = opacity;
}
@Override
public void onChunkUnload() {
for (IConduit conduit : conduits) {
conduit.onChunkUnload(world);
}
}
@Override
public void doUpdate() {
getWorld().theProfiler.startSection("conduitBundle");
getWorld().theProfiler.startSection("tick");
for (IConduit conduit : conduits) {
getWorld().theProfiler.startSection(conduit.getClass().toString());
conduit.updateEntity(world);
getWorld().theProfiler.endSection();
}
if(conduitsDirty) {
getWorld().theProfiler.startSection("neigborUpdate");
doConduitsDirty();
getWorld().theProfiler.endSection();
}
getWorld().theProfiler.endSection();
//client side only, check for changes in rendering of the bundle
if(world.isRemote) {
getWorld().theProfiler.startSection("clientTick");
updateEntityClient();
getWorld().theProfiler.endSection();
}
getWorld().theProfiler.endSection();
}
private void doConduitsDirty() {
if(!world.isRemote) {
IBlockState bs = world.getBlockState(pos);
world.notifyBlockUpdate(pos, bs, bs, 3);
world.notifyNeighborsOfStateChange(pos, getBlockType());
markDirty();
} else {
geometryChanged(); // Q&D
}
conduitsDirty = false;
}
private void updateEntityClient() {
boolean markForUpdate = false;
if(clientUpdated) {
//TODO: This is not the correct solution here but just marking the block for a render update server side
//seems to get out of sync with the client sometimes so connections are not rendered correctly
markForUpdate = true;
clientUpdated = false;
}
FacadeRenderState curRS = getFacadeRenderedAs();
FacadeRenderState rs = ConduitUtil.getRequiredFacadeRenderState(this, EnderIO.proxy.getClientPlayer());
if(Config.updateLightingWhenHidingFacades) {
int shouldBeLO = rs == FacadeRenderState.FULL ? -1 : 0;
if (lightOpacityOverride != shouldBeLO) {
setLightOpacityOverride(shouldBeLO);
world.checkLight(getPos());
}
}
if(curRS != rs) {
setFacadeRenderAs(rs);
if(!ConduitUtil.forceSkylightRecalculation(world, getPos())) {
markForUpdate = true;
}
}
ConduitDisplayMode curMode = ConduitDisplayMode.getDisplayMode(EnderIO.proxy.getClientPlayer().getHeldItemMainhand());
if (curMode != lastMode && !(lastMode.isAll() && curMode.isAll())) {
markForUpdate = true;
}
lastMode = curMode;
if(markForUpdate) {
geometryChanged(); // Q&D
IBlockState bs = world.getBlockState(pos);
world.notifyBlockUpdate(pos, bs, bs, 3);
}
}
@Override
public void onNeighborBlockChange(Block blockId) {
boolean needsUpdate = false;
for (IConduit conduit : conduits) {
needsUpdate |= conduit.onNeighborBlockChange(blockId);
}
if(needsUpdate) {
dirty();
}
}
@Override
public void onNeighborChange(IBlockAccess world, BlockPos posIn, BlockPos neighbor) {
boolean needsUpdate = false;
for (IConduit conduit : conduits) {
needsUpdate |= conduit.onNeighborChange(world, posIn, neighbor);
}
if(needsUpdate) {
dirty();
}
}
@Override
public TileConduitBundle getEntity() {
return this;
}
@Override
public boolean hasType(Class<? extends IConduit> type) {
return getConduit(type) != null;
}
@SuppressWarnings("unchecked")
@Override
public <T extends IConduit> T getConduit(Class<T> type) {
    // Returns the first conduit of the requested type, or null if absent.
    if (type == null) {
        return null;
    }
    // Linear scan is fine: a bundle only ever holds a handful of conduits.
    for (IConduit candidate : conduits) {
        if (type.isInstance(candidate)) {
            return type.cast(candidate);
        }
    }
    return null;
}
@Override
public void addConduit(IConduit conduit) {
if(world.isRemote) {
return;
}
conduits.add(conduit);
conduit.setBundle(this);
conduit.onAddedToBundle();
dirty();
}
@Override
public void removeConduit(IConduit conduit) {
if(conduit != null) {
removeConduit(conduit, true);
}
}
public void removeConduit(IConduit conduit, boolean notify) {
if(world.isRemote) {
return;
}
conduit.onRemovedFromBundle();
conduits.remove(conduit);
conduit.setBundle(null);
if(notify) {
dirty();
}
}
@Override
public void onBlockRemoved() {
if(world.isRemote) {
return;
}
List<IConduit> copy = new ArrayList<IConduit>(conduits);
for (IConduit con : copy) {
removeConduit(con, false);
}
dirty();
}
@Override
public Collection<IConduit> getConduits() {
return conduits;
}
@Override
public Set<EnumFacing> getConnections(Class<? extends IConduit> type) {
IConduit con = getConduit(type);
if(con != null) {
return con.getConduitConnections();
}
return null;
}
@Override
public boolean containsConnection(Class<? extends IConduit> type, EnumFacing dir) {
IConduit con = getConduit(type);
if(con != null) {
return con.containsConduitConnection(dir);
}
return false;
}
@Override
public boolean containsConnection(EnumFacing dir) {
    // True if any conduit in this bundle has a conduit-to-conduit connection on that side.
    return conduits.stream().anyMatch(conduit -> conduit.containsConduitConnection(dir));
}
@Override
public Set<EnumFacing> getAllConnections() {
EnumSet<EnumFacing> result = EnumSet.noneOf(EnumFacing.class);
for (IConduit con : conduits) {
result.addAll(con.getConduitConnections());
}
return result;
}
// Geometry
@Override
public Offset getOffset(Class<? extends IConduit> type, EnumFacing dir) {
if(getConnectionCount(dir) < 2) {
return Offset.NONE;
}
return Offsets.get(type, dir);
}
@Override
public List<CollidableComponent> getCollidableComponents() {
for (IConduit con : conduits) {
collidablesDirty = collidablesDirty || con.haveCollidablesChangedSinceLastCall();
}
if(collidablesDirty) {
connectorsDirty = true;
}
if(!collidablesDirty && !cachedCollidables.isEmpty()) {
return cachedCollidables;
}
cachedCollidables.clear();
for (IConduit conduit : conduits) {
cachedCollidables.addAll(conduit.getCollidableComponents());
}
addConnectors(cachedCollidables);
collidablesDirty = false;
return cachedCollidables;
}
@Override
public List<CollidableComponent> getConnectors() {
List<CollidableComponent> result = new ArrayList<CollidableComponent>();
addConnectors(result);
return result;
}
@SuppressWarnings("unchecked")
private void addConnectors(List<CollidableComponent> result) {
if(conduits.isEmpty()) {
return;
}
for (IConduit con : conduits) {
boolean b = con.haveCollidablesChangedSinceLastCall();
collidablesDirty = collidablesDirty || b;
connectorsDirty = connectorsDirty || b;
}
if(!connectorsDirty && !cachedConnectors.isEmpty()) {
result.addAll(cachedConnectors);
return;
}
cachedConnectors.clear();
// TODO: What an unholly mess! (and it doesn't even work correctly...)
List<CollidableComponent> coreBounds = new ArrayList<CollidableComponent>();
for (IConduit con : conduits) {
addConduitCores(coreBounds, con);
}
cachedConnectors.addAll(coreBounds);
result.addAll(coreBounds);
// 1st algorithm
List<CollidableComponent> conduitsBounds = new ArrayList<CollidableComponent>();
for (IConduit con : conduits) {
conduitsBounds.addAll(con.getCollidableComponents());
addConduitCores(conduitsBounds, con);
}
Set<Class<IConduit>> collidingTypes = new HashSet<Class<IConduit>>();
for (CollidableComponent conCC : conduitsBounds) {
for (CollidableComponent innerCC : conduitsBounds) {
if(!InsulatedRedstoneConduit.COLOR_CONTROLLER_ID.equals(innerCC.data) && !InsulatedRedstoneConduit.COLOR_CONTROLLER_ID.equals(conCC.data)
&& conCC != innerCC && conCC.bound.intersects(innerCC.bound)) {
collidingTypes.add((Class<IConduit>) conCC.conduitType);
}
}
}
//TODO: Remove the core geometries covered up by this as no point in rendering these
if(!collidingTypes.isEmpty()) {
List<CollidableComponent> colCores = new ArrayList<CollidableComponent>();
for (Class<IConduit> c : collidingTypes) {
IConduit con = getConduit(c);
if(con != null) {
addConduitCores(colCores, con);
}
}
BoundingBox bb = null;
for (CollidableComponent cBB : colCores) {
if(bb == null) {
bb = cBB.bound;
} else {
bb = bb.expandBy(cBB.bound);
}
}
if(bb != null) {
bb = bb.scale(1.05, 1.05, 1.05);
CollidableComponent cc = new CollidableComponent(null, bb, null, ConduitConnectorType.INTERNAL);
result.add(cc);
cachedConnectors.add(cc);
}
}
//2nd algorithm
for (IConduit con : conduits) {
if(con.hasConnections()) {
List<CollidableComponent> cores = new ArrayList<CollidableComponent>();
addConduitCores(cores, con);
if(cores.size() > 1) {
BoundingBox bb = cores.get(0).bound;
double area = bb.getArea();
for (CollidableComponent cc : cores) {
bb = bb.expandBy(cc.bound);
}
if(bb.getArea() > area * 1.5f) {
bb = bb.scale(1.05, 1.05, 1.05);
CollidableComponent cc = new CollidableComponent(null, bb, null, ConduitConnectorType.INTERNAL);
result.add(cc);
cachedConnectors.add(cc);
}
}
}
}
// Merge all internal conduit connectors into one box
BoundingBox conBB = null;
for (int i = 0; i < result.size(); i++) {
CollidableComponent cc = result.get(i);
if (cc.conduitType == null && cc.data == ConduitConnectorType.INTERNAL) {
conBB = conBB == null ? cc.bound : conBB.expandBy(cc.bound);
result.remove(i);
i--;
cachedConnectors.remove(cc);
}
}
if(conBB != null) {
CollidableComponent cc = new CollidableComponent(null, conBB, null, ConduitConnectorType.INTERNAL);
result.add(cc);
cachedConnectors.add(cc);
}
// External Connectors
EnumSet<EnumFacing> externalDirs = EnumSet.noneOf(EnumFacing.class);
for (IConduit con : conduits) {
Set<EnumFacing> extCons = con.getExternalConnections();
if(extCons != null) {
for (EnumFacing dir : extCons) {
if(con.getConnectionMode(dir) != ConnectionMode.DISABLED) {
externalDirs.add(dir);
}
}
}
}
for (EnumFacing dir : externalDirs) {
BoundingBox bb = ConduitGeometryUtil.instance.getExternalConnectorBoundingBox(dir);
CollidableComponent cc = new CollidableComponent(null, bb, dir, ConduitConnectorType.EXTERNAL);
result.add(cc);
cachedConnectors.add(cc);
}
connectorsDirty = false;
}
private void addConduitCores(List<CollidableComponent> result, IConduit con) {
CollidableCache cc = CollidableCache.instance;
Class<? extends IConduit> type = con.getCollidableType();
if(con.hasConnections()) {
for (EnumFacing dir : con.getExternalConnections()) {
result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), dir), null, false), con));
}
for (EnumFacing dir : con.getConduitConnections()) {
result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), dir), null, false), con));
}
} else {
result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), null), null, false), con));
}
}
private int getConnectionCount(EnumFacing dir) {
    // null means "any direction": every conduit counts.
    if (dir == null) {
        return conduits.size();
    }
    // Count conduits that connect on the given side, internally or externally.
    int count = 0;
    for (IConduit conduit : conduits) {
        boolean connected = conduit.containsConduitConnection(dir)
                || conduit.containsExternalConnection(dir);
        if (connected) {
            count++;
        }
    }
    return count;
}
// ------------ Power -----------------------------
@Override
public int receiveEnergy(EnumFacing from, int maxReceive, boolean simulate) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.receiveEnergy(from, maxReceive, simulate);
}
return 0;
}
@Override
public boolean canConnectEnergy(EnumFacing from) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.canConnectEnergy(from);
}
return false;
}
@Override
public int getMaxEnergyStored(EnumFacing from) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.getMaxEnergyStored(null);
}
return 0;
}
@Override
public int getMaxEnergyRecieved(EnumFacing dir) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.getMaxEnergyRecieved(dir);
}
return 0;
}
@Override
public int getEnergyStored(EnumFacing from) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
return pc.getEnergyStored(from);
}
return 0;
}
public int getMaxEnergyStored() {
return getMaxEnergyStored(null);
}
@Override
public void setEnergyStored(int stored) {
IPowerConduit pc = getConduit(IPowerConduit.class);
if(pc != null) {
pc.setEnergyStored(stored);
}
}
//------- Liquids -----------------------------
@Override
public int fill(EnumFacing from, FluidStack resource, boolean doFill) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.fill(from, resource, doFill);
}
return 0;
}
@Override
public FluidStack drain(EnumFacing from, FluidStack resource, boolean doDrain) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.drain(from, resource, doDrain);
}
return null;
}
@Override
public FluidStack drain(EnumFacing from, int maxDrain, boolean doDrain) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.drain(from, maxDrain, doDrain);
}
return null;
}
@Override
public boolean canFill(EnumFacing from, Fluid fluid) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.canFill(from, fluid);
}
return false;
}
@Override
public boolean canDrain(EnumFacing from, Fluid fluid) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.canDrain(from, fluid);
}
return false;
}
@Override
public FluidTankInfo[] getTankInfo(EnumFacing from) {
ILiquidConduit lc = getConduit(ILiquidConduit.class);
if(lc != null) {
return lc.getTankInfo(from);
}
return new FluidTankInfo[0];
}
@Override
public boolean displayPower() {
return false;
}
@Override
public BlockCoord getLocation() {
return new BlockCoord(getPos());
}
@Override
public void geometryChanged() {
}
// AE2
private Object node; // IGridNode object, untyped to avoid crash w/o AE2
@Override
@Method(modid = "appliedenergistics2")
public IGridNode getGridNode(AEPartLocation loc) {
IMEConduit cond = getConduit(IMEConduit.class);
if (cond != null) {
if (loc == null || loc == AEPartLocation.INTERNAL || cond.getConnectionMode(loc.getOpposite().getFacing()) == ConnectionMode.IN_OUT) {
return (IGridNode) node;
}
}
return null;
}
@SuppressWarnings("cast")
@Override
@Method(modid = "appliedenergistics2")
public void setGridNode(Object node) {
this.node = (IGridNode) node;
}
@Override
@Method(modid = "appliedenergistics2")
public AECableType getCableConnectionType(AEPartLocation loc) {
IMEConduit cond = getConduit(IMEConduit.class);
if (cond == null || loc == AEPartLocation.INTERNAL) {
return AECableType.NONE;
} else {
return cond.isConnectedTo(loc.getFacing()) ? cond.isDense() ? AECableType.DENSE : AECableType.SMART : AECableType.NONE;
}
}
@Override
@Method(modid = "appliedenergistics2")
public void securityBreak() {
}
// OpenComputers
@Override
@Method(modid = "OpenComputersAPI|Network")
public Node node() {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
return cond.node();
} else {
return null;
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
public void onConnect(Node node) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
cond.onConnect(node);
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
public void onDisconnect(Node node) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
cond.onDisconnect(node);
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
public void onMessage(Message message) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
cond.onMessage(message);
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
public Node sidedNode(EnumFacing side) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
return cond.sidedNode(side);
} else {
return null;
}
}
@Override
@Method(modid = "OpenComputersAPI|Network")
@SideOnly(Side.CLIENT)
public boolean canConnect(EnumFacing side) {
IOCConduit cond = getConduit(IOCConduit.class);
if (cond != null) {
return cond.canConnect(side);
} else {
return false;
}
}
@Override
public void invalidate() {
super.invalidate();
if (world.isRemote) {
return;
}
List<IConduit> copy = new ArrayList<IConduit>(conduits);
for (IConduit con : copy) {
con.invalidate();
}
}
@SideOnly(Side.CLIENT)
@Override
public void hashCodeForModelCaching(IBlockStateWrapper wrapper, BlockStateWrapperConduitBundle.ConduitCacheKey hashCodes) {
hashCodes.add(facadeType.ordinal() << 16 | getFacadeRenderedAs().ordinal() << 8 | wrapper.getYetaDisplayMode().getDisplayMode().ordinal() << 1
| (wrapper.getYetaDisplayMode().isHideFacades() ? 1 : 0));
for (IConduit conduit : conduits) {
if (conduit instanceof IConduitComponent) {
((IConduitComponent) conduit).hashCodeForModelCaching(wrapper, hashCodes);
} else {
hashCodes.add(conduit);
}
}
}
@Override
public String toString() {
return world == null ? super.toString() : world.isRemote ? toStringC(this) : toStringS(this);
}
@SideOnly(Side.CLIENT)
public static String toStringC(TileConduitBundle self) {
BlockStateWrapperConduitBundle bsw = new BlockStateWrapperConduitBundle(self.world.getBlockState(self.pos), self.world, self.pos,
ConduitRenderMapper.instance);
bsw.addCacheKey(self);
return "CLIENT: TileConduitBundle [pos=" + self.pos + ", facade=" + self.facade + ", facadeType=" + self.facadeType + ", conduits=" + self.conduits
+ ", cachekey=" + bsw.getCachekey() + ", bsw=" + bsw + "]";
}
public static String toStringS(TileConduitBundle self) {
return "SERVER: TileConduitBundle [pos=" + self.pos + ", conduits=" + self.conduits + "]";
}
}
Began work on TileConduitBundle.
Still need to separate the conduit types from the tile, if possible.
|
Java | apache-2.0 | 95ee62f36e093a9528af77901a63ec204db7664e | 0 | valid4j/http-matchers | package org.valid4j.matchers.http;
import org.junit.Test;
import javax.ws.rs.core.*;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.Response.StatusType;
import java.util.Date;
import java.util.Locale;
import static javax.ws.rs.core.MediaType.*;
import static javax.ws.rs.core.Response.Status.*;
import static javax.ws.rs.core.Response.Status.Family.CLIENT_ERROR;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.valid4j.matchers.http.HttpResponseMatchers.*;
import static org.valid4j.matchers.http.helpers.MatcherHelpers.mismatchOf;
import static org.valid4j.matchers.http.helpers.MatcherMatchers.isDescribedBy;
public class HttpResponseMatchersTest {
private static final MediaType TEXT_WILDCARD = new MediaType("text", MEDIA_TYPE_WILDCARD);
@Test
public void shouldMatchOkResponse() {
Response ok = Response.ok().build();
assertThat(ok, isResponseOk());
assertThat(isResponseOk(), isDescribedBy("is response ok"));
}
@Test
public void shouldMatchByStatusCode() {
assertThat(response(BAD_REQUEST), hasStatusCode(400));
assertThat(hasStatusCode(400),
isDescribedBy("has status code <400>"));
assertThat(mismatchOf(response(UNAUTHORIZED), hasStatusCode(400)),
equalTo("was status code <401>"));
}
@Test
public void shouldMatchByStatusCodeMatcher() {
Response response = response(BAD_GATEWAY);
assertThat(response, hasStatusCode(isOneOf(502, 503)));
assertThat(hasStatusCode(isOneOf(502, 503)),
isDescribedBy("has status code one of {<502>, <503>}"));
assertThat(mismatchOf(
response(INTERNAL_SERVER_ERROR),
hasStatusCode(isOneOf(502, 503))),
equalTo("was status code <500>"));
}
@Test
public void shouldMatchByStatusCodeOfFamily() {
Response response = response(UNAUTHORIZED);
assertThat(response, hasStatusCode(ofFamily(CLIENT_ERROR)));
assertThat(response, hasStatusCode(CLIENT_ERROR));
assertThat(hasStatusCode(ofFamily(CLIENT_ERROR)),
isDescribedBy("has status code of family <CLIENT_ERROR>"));
assertThat(hasStatusCode(CLIENT_ERROR),
isDescribedBy("has status code of family <CLIENT_ERROR>"));
assertThat(mismatchOf(response(BAD_GATEWAY), hasStatusCode(CLIENT_ERROR)),
equalTo("was status code <502>"));
}
@Test
public void shouldMatchByStatusCodeOfStatus() {
Response response = response(Status.OK);
HttpStatus okWithIgnoredReason = new HttpStatus(200, "Ignored Reason");
assertThat(response, hasStatusCodeOf(okWithIgnoredReason));
assertThat(hasStatusCodeOf(okWithIgnoredReason),
isDescribedBy("has status code <200>"));
assertThat(mismatchOf(response(BAD_REQUEST), hasStatusCodeOf(okWithIgnoredReason)),
equalTo("was status code <400>"));
}
@Test
public void shouldMatchByStatusCodeAndReason() {
Response response = response(ACCEPTED);
assertThat(response, hasStatus(ACCEPTED));
assertThat(hasStatus(ACCEPTED),
isDescribedBy("has status <202 - Accepted>"));
assertThat(mismatchOf(response(BAD_GATEWAY), hasStatus(ACCEPTED)),
equalTo("was status <502 - Bad Gateway>"));
}
@Test
public void shouldNotMatchByStatusCodeAndReason() {
Response response = response(OK);
assertThat(response, not(hasStatus(new HttpStatus(200, "Mismatched Reason"))));
assertThat(response, not(hasStatus(new HttpStatus(201, "OK"))));
assertThat(response, hasStatus(new HttpStatus(200, "OK")));
}
@Test
public void shouldMatchByContentType() {
    // Content type matches by MediaType object, by string, and via a
    // compatibility matcher (text/* accepts text/plain).
    Response response = Response.ok("content", TEXT_PLAIN_TYPE).build();
    assertThat(response, hasContentType(TEXT_PLAIN_TYPE));
    assertThat(response, hasContentType(TEXT_PLAIN));
    assertThat(response, hasContentType(isCompatibleWith(TEXT_WILDCARD)));
    assertThat(response, not(hasContentType(APPLICATION_JSON_TYPE)));
    assertThat(response, not(hasContentType(APPLICATION_JSON)));
    assertThat(hasContentType(TEXT_PLAIN_TYPE),
            isDescribedBy("has content type <text/plain>"));
    Response jsonResponse = Response.ok("content", APPLICATION_JSON_TYPE).build();
    assertThat(mismatchOf(jsonResponse, hasContentType(TEXT_PLAIN_TYPE)),
            equalTo("was content type <application/json>"));
}
@Test
public void shouldMatchByHeader() {
    // Presence-only header matching: only the key is checked.
    Response response = Response.ok().header("some-key", "some-value").build();
    assertThat(response, hasHeader("some-key"));
    assertThat(response, not(hasHeader("some-other-key")));
    assertThat(hasHeader("some-key"),
            isDescribedBy("has header \"some-key\""));
    assertThat(mismatchOf(response, hasHeader("some-other-key")),
            equalTo("header \"some-other-key\" was missing"));
}
@Test
public void shouldMatchByHeaderWithValue() {
    // Single-value form: only the FIRST value of a multi-valued header is
    // matched ("some-value2" does not match even though it is present).
    MultivaluedMap<String, Object> headers = new MultivaluedHashMap<String, Object>();
    headers.add("some-key", "some-value");
    headers.add("some-key", "some-value2");
    headers.add("some-key", "some-value3");
    headers.add("some-key2", 42);
    Response response = Response.ok().replaceAll(headers).build();
    assertThat(response, hasHeader("some-key", equalTo("some-value")));
    assertThat(response, hasHeader("some-key2", equalTo(42)));
    assertThat(response, not(hasHeader("some-key", equalTo("some-value2"))));
    assertThat(response, not(hasHeader("some-key2", hasItem(equalTo(53)))));
    assertThat(response, not(hasHeader("some-key3", hasItem(equalTo(0)))));
    assertThat(hasHeader("some-key", equalTo("some-value")),
            isDescribedBy("has header \"some-key\" with value \"some-value\""));
    assertThat(mismatchOf(response, hasHeader("some-other-key", equalTo("some-value"))),
            equalTo("header \"some-other-key\" was missing"));
    assertThat(mismatchOf(response, hasHeader("some-key", equalTo("some-other-value"))),
            equalTo("header \"some-key\" contained \"some-value\""));
}
@Test
public void shouldMatchByHeaderWithValues() {
    // Multi-value form: the matcher is applied to the whole value
    // collection, so every added value is reachable via hasItem.
    MultivaluedMap<String, Object> headers = new MultivaluedHashMap<String, Object>();
    headers.add("some-key", "some-value");
    headers.add("some-key", "some-value2");
    headers.add("some-key", "some-value3");
    headers.add("some-key2", 42);
    Response response = Response.ok().replaceAll(headers).build();
    assertThat(response, hasHeaderValues("some-key", hasItem(equalTo("some-value"))));
    assertThat(response, hasHeaderValues("some-key", hasItem(equalTo("some-value2"))));
    assertThat(response, hasHeaderValues("some-key", hasItem(equalTo("some-value3"))));
    assertThat(response, hasHeaderValues("some-key2", hasItem(equalTo(42))));
    assertThat(response, not(hasHeaderValues("some-key", hasItem(equalTo("some-other-value")))));
    assertThat(response, not(hasHeaderValues("some-key2", hasItem(equalTo(53)))));
    assertThat(response, not(hasHeaderValues("some-key3", hasItem(equalTo(0)))));
    assertThat(hasHeaderValues("some-key", hasItem(equalTo("some-value"))),
            isDescribedBy("has header \"some-key\" with a collection containing \"some-value\""));
    assertThat(mismatchOf(response, hasHeaderValues("some-other-key", hasItem(equalTo("some-value")))),
            equalTo("header \"some-other-key\" was missing"));
}
@Test
public void shouldMatchByCookie() {
    // Presence-only cookie matching; repeated names for "cookie1" are fine.
    Response response = Response.ok().cookie(
            new NewCookie("cookie1", "my-value"),
            new NewCookie("cookie1", "my-other-value"),
            new NewCookie("cookie1", "my-yet-another-value"),
            new NewCookie("cookie2", "my-value-2")).build();
    assertThat(response, hasCookie("cookie1"));
    assertThat(response, hasCookie("cookie2"));
    assertThat(response, not(hasCookie("cookie3")));
}
// TODO(review): placeholder — intentionally not annotated with @Test and
// has no body yet; implement cookie-with-value matching when available.
public void shouldMatchByCookieWithValue() {
}
@Test
public void shouldMatchByHasEntity() {
    Response response = Response.ok("entity").build();
    assertThat(response, hasEntity());
    assertThat(hasEntity(), isDescribedBy("has entity"));
    // NOTE(review): mismatchOf on a response that DOES have an entity is
    // asserted to read "has entity" — confirm this is the matcher's
    // intended describeMismatch output rather than a copy/paste slip.
    assertThat(mismatchOf(response, hasEntity()),
            equalTo("has entity"));
}
@Test
public void shouldMatchByHasNoEntity() {
    // A 204 No Content response carries no entity.
    Response response = Response.noContent().build();
    assertThat(response, not(hasEntity()));
    assertThat(mismatchOf(response, hasEntity()),
            equalTo("has no entity"));
}
@Test
public void shouldMatchByLanguage() {
    // Language matches by tag string, by Locale, and by Locale matcher.
    // The self-description uses Locale.toString ("en_CA") while the
    // mismatch uses the language tag ("en-GB").
    Response response = Response.ok("message").language(Locale.UK).build();
    assertThat(response, ofLanguage("en-GB"));
    assertThat(response, ofLanguage(Locale.UK));
    assertThat(response, ofLanguage(equalTo(Locale.UK)));
    assertThat(response, not(ofLanguage("en-US")));
    assertThat(response, not(ofLanguage(Locale.US)));
    assertThat(response, not(ofLanguage(equalTo(Locale.US))));
    assertThat(ofLanguage("en-CA"),
            isDescribedBy("of language <en_CA>"));
    assertThat(mismatchOf(response, ofLanguage("en-CA")),
            equalTo("was language \"en-GB\""));
}
@Test
public void shouldMatchByLastModified() {
    // NOTE(review): the expected strings hard-code the CET timezone, so
    // this test presumably fails on machines with another default
    // timezone/locale — confirm and consider a computed expectation.
    final long lastModMillis = 1452960194707L;
    Date lastModDate = new Date(lastModMillis);
    Response response = Response.ok().lastModified(lastModDate).build();
    assertThat(response, withLastModifiedDate(equalTo(lastModDate)));
    assertThat(withLastModifiedDate(equalTo(lastModDate)),
            isDescribedBy("with last modified date <Sat Jan 16 17:03:14 CET 2016>"));
    assertThat(mismatchOf(response, withLastModifiedDate(equalTo(new Date(0L)))),
            equalTo("last modified date was <Sat Jan 16 17:03:14 CET 2016>"));
}
// TODO(review): placeholders — intentionally not annotated with @Test and
// empty; cover link and location matching when those matchers exist.
public void shouldMatchByHasLink() {
}

public void shouldMatchByLinkByRelation() {
}

public void shouldMatchByLocation() {
}
/** Builds an entity-less response carrying the given HTTP status. */
private static Response response(StatusType status) {
    final Response.ResponseBuilder builder = Response.status(status);
    return builder.build();
}
}
| src/test/java/org/valid4j/matchers/http/HttpResponseMatchersTest.java | package org.valid4j.matchers.http;
import org.junit.Test;
import javax.ws.rs.core.*;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.Response.StatusType;
import java.util.Date;
import java.util.Locale;
import static javax.ws.rs.core.MediaType.*;
import static javax.ws.rs.core.Response.Status.*;
import static javax.ws.rs.core.Response.Status.Family.CLIENT_ERROR;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.valid4j.matchers.http.HttpResponseMatchers.*;
import static org.valid4j.matchers.http.helpers.MatcherHelpers.mismatchOf;
import static org.valid4j.matchers.http.helpers.MatcherMatchers.isDescribedBy;
public class HttpResponseMatchersTest {
private static final MediaType TEXT_WILDCARD = new MediaType("text", MEDIA_TYPE_WILDCARD);
@Test
public void shouldMatchOkResponse() {
Response ok = Response.ok().build();
assertThat(ok, isResponseOk());
assertThat(isResponseOk(), isDescribedBy("is response ok"));
}
@Test
public void shouldMatchByStatusCode() {
assertThat(response(BAD_REQUEST), hasStatusCode(400));
assertThat(hasStatusCode(400),
isDescribedBy("has status code <400>"));
assertThat(mismatchOf(response(UNAUTHORIZED), hasStatusCode(400)),
equalTo("was status code <401>"));
}
@Test
public void shouldMatchByStatusCodeMatcher() {
Response response = response(BAD_GATEWAY);
assertThat(response, hasStatusCode(isOneOf(502, 503)));
assertThat(hasStatusCode(isOneOf(502, 503)),
isDescribedBy("has status code one of {<502>, <503>}"));
assertThat(mismatchOf(
response(INTERNAL_SERVER_ERROR),
hasStatusCode(isOneOf(502, 503))),
equalTo("was status code <500>"));
}
@Test
public void shouldMatchByStatusCodeOfFamily() {
Response response = response(UNAUTHORIZED);
assertThat(response, hasStatusCode(ofFamily(CLIENT_ERROR)));
assertThat(response, hasStatusCode(CLIENT_ERROR));
assertThat(hasStatusCode(ofFamily(CLIENT_ERROR)),
isDescribedBy("has status code of family <CLIENT_ERROR>"));
assertThat(hasStatusCode(CLIENT_ERROR),
isDescribedBy("has status code of family <CLIENT_ERROR>"));
assertThat(mismatchOf(response(BAD_GATEWAY), hasStatusCode(CLIENT_ERROR)),
equalTo("was status code <502>"));
}
@Test
public void shouldMatchByStatusCodeOfStatus() {
Response response = response(Status.OK);
HttpStatus okWithIgnoredReason = new HttpStatus(200, "Ignored Reason");
assertThat(response, hasStatusCodeOf(okWithIgnoredReason));
assertThat(hasStatusCodeOf(okWithIgnoredReason),
isDescribedBy("has status code <200>"));
assertThat(mismatchOf(response(BAD_REQUEST), hasStatusCodeOf(okWithIgnoredReason)),
equalTo("was status code <400>"));
}
@Test
public void shouldMatchByStatusCodeAndReason() {
Response response = response(ACCEPTED);
assertThat(response, hasStatus(ACCEPTED));
assertThat(hasStatus(ACCEPTED),
isDescribedBy("has status <202 - Accepted>"));
assertThat(mismatchOf(response(BAD_GATEWAY), hasStatus(ACCEPTED)),
equalTo("was status <502 - Bad Gateway>"));
}
@Test
public void shouldNotMatchByStatusCodeAndReason() {
Response response = response(OK);
assertThat(response, not(hasStatus(new HttpStatus(200, "Mismatched Reason"))));
assertThat(response, not(hasStatus(new HttpStatus(201, "OK"))));
assertThat(response, hasStatus(new HttpStatus(200, "OK")));
}
@Test
public void shouldMatchByContentType() {
Response response = Response.ok("content", TEXT_PLAIN_TYPE).build();
assertThat(response, hasContentType(TEXT_PLAIN_TYPE));
assertThat(response, hasContentType(TEXT_PLAIN));
assertThat(response, hasContentType(isCompatibleWith(TEXT_WILDCARD)));
assertThat(response, not(hasContentType(APPLICATION_JSON_TYPE)));
assertThat(response, not(hasContentType(APPLICATION_JSON)));
assertThat(hasContentType(TEXT_PLAIN_TYPE),
isDescribedBy("has content type <text/plain>"));
Response jsonResponse = Response.ok("content", APPLICATION_JSON_TYPE).build();
assertThat(mismatchOf(jsonResponse, hasContentType(TEXT_PLAIN_TYPE)),
equalTo("was content type <application/json>"));
}
@Test
public void shouldMatchByHeader() {
Response response = Response.ok().header("some-key", "some-value").build();
assertThat(response, hasHeader("some-key"));
assertThat(response, not(hasHeader("some-other-key")));
assertThat(hasHeader("some-key"),
isDescribedBy("has header \"some-key\""));
assertThat(mismatchOf(response, hasHeader("some-other-key")),
equalTo("header \"some-other-key\" was missing"));
}
@Test
public void shouldMatchByHeaderWithValue() {
MultivaluedMap<String, Object> headers = new MultivaluedHashMap<String, Object>();
headers.add("some-key", "some-value");
headers.add("some-key", "some-value2");
headers.add("some-key", "some-value3");
headers.add("some-key2", 42);
Response response = Response.ok().replaceAll(headers).build();
assertThat(response, hasHeader("some-key", equalTo("some-value")));
assertThat(response, hasHeader("some-key2", equalTo(42)));
assertThat(response, not(hasHeader("some-key", equalTo("some-value2"))));
assertThat(response, not(hasHeader("some-key2", hasItem(equalTo(53)))));
assertThat(response, not(hasHeader("some-key3", hasItem(equalTo(0)))));
assertThat(hasHeader("some-key", equalTo("some-value")),
isDescribedBy("has header \"some-key\" with value \"some-value\""));
assertThat(mismatchOf(response, hasHeader("some-other-key", equalTo("some-value"))),
equalTo("header \"some-other-key\" was missing"));
assertThat(mismatchOf(response, hasHeader("some-key", equalTo("some-other-value"))),
equalTo("header \"some-key\" contained \"some-value\""));
}
@Test
public void shouldMatchByHeaderWithValues() {
MultivaluedMap<String, Object> headers = new MultivaluedHashMap<String, Object>();
headers.add("some-key", "some-value");
headers.add("some-key", "some-value2");
headers.add("some-key", "some-value3");
headers.add("some-key2", 42);
Response response = Response.ok().replaceAll(headers).build();
assertThat(response, hasHeaderValues("some-key", hasItem(equalTo("some-value"))));
assertThat(response, hasHeaderValues("some-key", hasItem(equalTo("some-value2"))));
assertThat(response, hasHeaderValues("some-key", hasItem(equalTo("some-value3"))));
assertThat(response, hasHeaderValues("some-key2", hasItem(equalTo(42))));
assertThat(response, not(hasHeaderValues("some-key", hasItem(equalTo("some-other-value")))));
assertThat(response, not(hasHeaderValues("some-key2", hasItem(equalTo(53)))));
assertThat(response, not(hasHeaderValues("some-key3", hasItem(equalTo(0)))));
assertThat(hasHeaderValues("some-key", hasItem(equalTo("some-value"))),
isDescribedBy("has header \"some-key\" with a collection containing \"some-value\""));
assertThat(mismatchOf(response, hasHeaderValues("some-other-key", hasItem(equalTo("some-value")))),
equalTo("header \"some-other-key\" was missing"));
}
@Test
public void shouldMatchByCookie() {
Response response = Response.ok().cookie(
new NewCookie("cookie1", "my-value"),
new NewCookie("cookie1", "my-other-value"),
new NewCookie("cookie1", "my-yet-another-value"),
new NewCookie("cookie2", "my-value-2")).build();
assertThat(response, hasCookie("cookie1"));
assertThat(response, hasCookie("cookie2"));
assertThat(response, not(hasCookie("cookie3")));
}
public void shouldMatchByCookieWithValue() {
}
@Test
public void shouldMatchByHasEntity() {
Response response = Response.ok("entity").build();
assertThat(response, hasEntity());
assertThat(hasEntity(), isDescribedBy("has entity"));
assertThat(mismatchOf(response, hasEntity()),
equalTo("has entity"));
}
@Test
public void shouldMatchByHasNoEntity() {
Response response = Response.noContent().build();
assertThat(response, not(hasEntity()));
assertThat(mismatchOf(response, hasEntity()),
equalTo("has no entity"));
}
@Test
public void shouldMatchByLanguage() {
Response response = Response.ok("message").language(Locale.UK).build();
assertThat(response, ofLanguage("en-GB"));
assertThat(response, ofLanguage(Locale.UK));
assertThat(response, ofLanguage(equalTo(Locale.UK)));
assertThat(response, not(ofLanguage("en-US")));
assertThat(response, not(ofLanguage(Locale.US)));
assertThat(response, not(ofLanguage(equalTo(Locale.US))));
assertThat(ofLanguage("en-CA"),
isDescribedBy("of language <en_CA>"));
assertThat(mismatchOf(response, ofLanguage("en-CA")),
equalTo("was language \"en-GB\""));
}
@Test
public void shouldMatchByLastModified() {
    final long lastModMillis = 1452960194707L;
    Date lastModDate = new Date(lastModMillis);
    Response response = Response.ok().lastModified(lastModDate).build();
    assertThat(response, withLastModifiedDate(equalTo(lastModDate)));
    assertThat(withLastModifiedDate(equalTo(lastModDate)),
            isDescribedBy("with last modified date <Sat Jan 16 17:03:14 CET 2016>"));
    // Fix: the matcher's mismatch text is "last modified date was <...>"
    // with no "=" sign; the previous expectation made this test fail.
    assertThat(mismatchOf(response, withLastModifiedDate(equalTo(new Date(0L)))),
            equalTo("last modified date was <Sat Jan 16 17:03:14 CET 2016>"));
}
public void shouldMatchByHasLink() {
}
public void shouldMatchByLinkByRelation() {
}
public void shouldMatchByLocation() {
}
private static Response response(StatusType status) {
return Response.status(status).build();
}
}
| Corrected failing unit test
| src/test/java/org/valid4j/matchers/http/HttpResponseMatchersTest.java | Corrected failing unit test |
|
Java | apache-2.0 | abbfdfe7476a3116b7d4deb1b0408df1a39e2e40 | 0 | leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc | package com.saaavsaaa.client.zookeeper;
import com.saaavsaaa.client.cache.PathStatus;
import com.saaavsaaa.client.cache.PathTree;
import com.saaavsaaa.client.election.LeaderElection;
import com.saaavsaaa.client.utility.PathUtil;
import com.saaavsaaa.client.utility.constant.Constants;
import com.saaavsaaa.client.utility.section.ClientTask;
import com.saaavsaaa.client.utility.section.Properties;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import java.io.IOException;
import java.util.List;
import java.util.Stack;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* Created by aaa
*/
/**
 * Zookeeper client that layers a locally cached {@link PathTree} over the
 * operations inherited from {@link UsualClient}. Writes go to zookeeper and
 * are mirrored into the cache; reads are served from the cache once it has
 * finished loading, otherwise straight from zookeeper (and cached).
 */
public final class CacheClient extends UsualClient {
    /** Single background thread that periodically (re)loads the path cache. */
    private final ScheduledExecutorService cacheService = Executors.newSingleThreadScheduledExecutor();

    protected PathTree pathTree = null;

    CacheClient(String servers, int sessionTimeoutMilliseconds) {
        super(servers, sessionTimeoutMilliseconds);
    }

    @Override
    public synchronized void start() throws IOException, InterruptedException {
        super.start();
        pathTree = new PathTree(rootNode);
        // Periodically refresh the cache, but only when the tree is not
        // currently being loaded (status RELEASE marks it as replaceable).
        cacheService.scheduleAtFixedRate(new ClientTask(strategy.getProvider()) {
            @Override
            public void run(Provider provider) throws KeeperException, InterruptedException {
                if (PathStatus.RELEASE == pathTree.getStatus()) {
                    loadCache(provider);
                }
            }
        }, Properties.INSTANCE.getThreadInitialDelay(), Properties.INSTANCE.getThreadPeriod(), TimeUnit.MILLISECONDS);
    }

    // Update the cache by replacing the whole tree; contention through a
    // leader election ensures only one client performs the load.
    private synchronized void loadCache(final Provider provider) throws KeeperException, InterruptedException {
        LeaderElection election = new LeaderElection() {
            @Override
            public void action() throws KeeperException, InterruptedException {
                pathTree.loading(provider);
            }
        };
        provider.executeContention(election);
    }

    @Override
    public void createCurrentOnly(final String key, final String value, final CreateMode createMode) throws KeeperException, InterruptedException {
        strategy.createCurrentOnly(key, value, createMode);
        // Fix: removed leftover System.out.println debug statement.
        pathTree.put(PathUtil.getRealPath(rootNode, key), value);
    }

    @Override
    public void deleteOnlyCurrent(final String key) throws KeeperException, InterruptedException {
        strategy.deleteOnlyCurrent(key);
        pathTree.delete(PathUtil.getRealPath(rootNode, key));
    }

    @Override
    public void deleteOnlyCurrent(final String key, final AsyncCallback.VoidCallback callback, final Object ctx) throws KeeperException, InterruptedException {
        strategy.deleteOnlyCurrent(key, callback, ctx);
        pathTree.delete(PathUtil.getRealPath(rootNode, key));
    }

    //==================================================================

    /** @return true when the cache has finished loading and may serve reads. */
    private boolean cacheReady() {
        return PathStatus.RELEASE == pathTree.getStatus();
    }

    @Override
    public byte[] getData(final String key) throws KeeperException, InterruptedException {
        String path = PathUtil.getRealPath(rootNode, key);
        if (cacheReady()) {
            return pathTree.getValue(path);
        }
        // without watcher ensure cache execute result consistency
        byte[] data = zooKeeper.getData(path, false, null);
        pathTree.put(path, new String(data));
        return data;
    }

    @Override
    public boolean checkExists(final String key) throws KeeperException, InterruptedException {
        String path = PathUtil.getRealPath(rootNode, key);
        if (cacheReady()) {
            return null != pathTree.getValue(path);
        }
        return null != zooKeeper.exists(PathUtil.getRealPath(rootNode, key), false);
    }

    @Override
    public List<String> getChildren(final String key) throws KeeperException, InterruptedException {
        String path = PathUtil.getRealPath(rootNode, key);
        if (cacheReady()) {
            return pathTree.getChildren(path);
        }
        return zooKeeper.getChildren(PathUtil.getRealPath(rootNode, key), false);
    }
}
| src/main/java/com/saaavsaaa/client/zookeeper/CacheClient.java | package com.saaavsaaa.client.zookeeper;
/**
* Created by aaa on 18-5-2.
* todo log
*/
/**
 * Zookeeper client intended to add caching on top of {@link UsualClient}.
 * Currently it only forwards construction; caching behaviour is still to come.
 */
public final class CacheClient extends UsualClient {
    // Package-private constructor mirroring the superclass signature.
    CacheClient(String servers, int sessionTimeoutMilliseconds) {
        super(servers, sessionTimeoutMilliseconds);
    }
}
| src/main/java/com/saaavsaaa/client/zookeeper/CacheClient.java | cache restart |
|
Java | apache-2.0 | 3e7ca1550b17f595ee8034a426e3d0ff8d17736b | 0 | ox-it/wl-course-signup,ox-it/wl-course-signup,ox-it/wl-course-signup,ox-it/wl-course-signup,ox-it/wl-course-signup | package uk.ac.ox.oucs.vle;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.jdom.Document;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.sakaiproject.util.FormattedText;
import org.xcri.Extension;
import org.xcri.common.Description;
import org.xcri.common.ExtensionManager;
import org.xcri.common.OverrideManager;
import org.xcri.common.descriptive.Regulations;
import org.xcri.core.Catalog;
import org.xcri.core.Course;
import org.xcri.core.Presentation;
import org.xcri.core.Provider;
import org.xcri.exceptions.InvalidElementException;
import org.xcri.presentation.Venue;
import uk.ac.ox.oucs.vle.xcri.daisy.Bookable;
import uk.ac.ox.oucs.vle.xcri.daisy.CourseSubUnit;
import uk.ac.ox.oucs.vle.xcri.daisy.DepartmentThirdLevelApproval;
import uk.ac.ox.oucs.vle.xcri.daisy.DepartmentalSubUnit;
import uk.ac.ox.oucs.vle.xcri.daisy.DivisionWideEmail;
import uk.ac.ox.oucs.vle.xcri.daisy.EmployeeEmail;
import uk.ac.ox.oucs.vle.xcri.daisy.EmployeeName;
import uk.ac.ox.oucs.vle.xcri.daisy.Identifier;
import uk.ac.ox.oucs.vle.xcri.daisy.ModuleApproval;
import uk.ac.ox.oucs.vle.xcri.daisy.OtherDepartment;
import uk.ac.ox.oucs.vle.xcri.daisy.Sessions;
import uk.ac.ox.oucs.vle.xcri.daisy.SupervisorApproval;
import uk.ac.ox.oucs.vle.xcri.daisy.TeachingDetails;
import uk.ac.ox.oucs.vle.xcri.daisy.TermCode;
import uk.ac.ox.oucs.vle.xcri.daisy.TermLabel;
import uk.ac.ox.oucs.vle.xcri.daisy.WebAuthCode;
import uk.ac.ox.oucs.vle.xcri.oxcap.MemberApplyTo;
import uk.ac.ox.oucs.vle.xcri.oxcap.OxcapCourse;
import uk.ac.ox.oucs.vle.xcri.oxcap.OxcapPresentation;
import uk.ac.ox.oucs.vle.xcri.oxcap.Session;
import uk.ac.ox.oucs.vle.xcri.oxcap.Subject;
public class XcriOxCapPopulatorImpl implements Populator {
/**
* The DAO to update our entries through.
*/
private CourseDAO dao;
/**
 * Injects the DAO used to create and update course entries.
 *
 * @param dao the course DAO
 */
public void setCourseDao(CourseDAO dao) {
    this.dao = dao;
}
/**
* The proxy for getting users.
*/
private SakaiProxy proxy;
/**
 * Injects the Sakai proxy used to resolve users.
 *
 * @param proxy the Sakai proxy
 */
public void setProxy(SakaiProxy proxy) {
    this.proxy = proxy;
}
private static final Log log = LogFactory.getLog(XcriOxCapPopulatorImpl.class);
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd MMMM yyyy hh:mm");
// Register the Daisy and OxCAP vocabulary extensions with the XCRI parser
// once per class load, and override the stock course/presentation elements
// with the OxCAP-aware variants.
static {
    ExtensionManager.registerExtension(new WebAuthCode());
    ExtensionManager.registerExtension(new DepartmentalSubUnit());
    ExtensionManager.registerExtension(new DepartmentThirdLevelApproval());
    ExtensionManager.registerExtension(new DivisionWideEmail());
    ExtensionManager.registerExtension(new CourseSubUnit());
    ExtensionManager.registerExtension(new ModuleApproval());
    ExtensionManager.registerExtension(new SupervisorApproval());
    ExtensionManager.registerExtension(new OtherDepartment());
    ExtensionManager.registerExtension(new Sessions());
    ExtensionManager.registerExtension(new Bookable());
    ExtensionManager.registerExtension(new TermCode());
    ExtensionManager.registerExtension(new TermLabel());
    ExtensionManager.registerExtension(new EmployeeName());
    ExtensionManager.registerExtension(new EmployeeEmail());
    ExtensionManager.registerExtension(new Identifier());
    ExtensionManager.registerExtension(new MemberApplyTo());
    ExtensionManager.registerExtension(new TeachingDetails());
    ExtensionManager.registerExtension(new Subject());
    ExtensionManager.registerExtension(new Session());
    OverrideManager.registerOverride(Course.class, new OxcapCourse());
    OverrideManager.registerOverride(Presentation.class, new OxcapPresentation());
}
/**
* @throws MalformedURLException
*
*/
/**
 * Fetches the XCRI feed identified by the context over HTTP, using basic
 * credentials from the context, and hands the response stream to
 * {@link #process}. Every checked failure is rewrapped as a
 * {@link PopulatorException} after being logged.
 *
 * @param context connection details (URI, user, password, feed name)
 * @throws PopulatorException on any network, parse or validation failure
 */
public void update(PopulatorContext context) throws PopulatorException {

    DefaultHttpClient httpclient = new DefaultHttpClient();
    try {
        URL xcri = new URL(context.getURI());

        HttpHost targetHost = new HttpHost(xcri.getHost(), xcri.getPort(), xcri.getProtocol());
        // Basic-auth credentials scoped to the feed's host and port.
        httpclient.getCredentialsProvider().setCredentials(
                new AuthScope(targetHost.getHostName(), targetHost.getPort()),
                new UsernamePasswordCredentials(context.getUser(), context.getPassword()));

        HttpGet httpget = new HttpGet(xcri.toURI());
        HttpResponse response = httpclient.execute(targetHost, httpget);
        HttpEntity entity = response.getEntity();

        if (HttpStatus.SC_OK != response.getStatusLine().getStatusCode()) {
            // Caught by the IllegalStateException handler below and
            // converted into a PopulatorException.
            throw new IllegalStateException(
                    "Invalid response ["+response.getStatusLine().getStatusCode()+"]");
        }

        process(context.getName(), entity.getContent());

    } catch (MalformedURLException e) {
        log.warn("MalformedURLException ["+context.getURI()+"]", e);
        throw new PopulatorException(e.getLocalizedMessage());

    } catch (IllegalStateException e) {
        log.warn("IllegalStateException ["+context.getURI()+"]", e);
        throw new PopulatorException(e.getLocalizedMessage());

    } catch (IOException e) {
        log.warn("IOException ["+context.getURI()+"]", e);
        throw new PopulatorException(e.getLocalizedMessage());

    } catch (URISyntaxException e) {
        log.warn("URISyntaxException ["+context.getURI()+"]", e);
        throw new PopulatorException(e.getLocalizedMessage());

    } catch (JDOMException e) {
        log.warn("JDOMException ["+context.getURI()+"]", e);
        throw new PopulatorException(e.getLocalizedMessage());

    } catch (InvalidElementException e) {
        log.warn("InvalidElementException ["+context.getURI()+"]", e);
        throw new PopulatorException(e.getLocalizedMessage());

    } finally {
        // When HttpClient instance is no longer needed,
        // shut down the connection manager to ensure
        // immediate deallocation of all system resources
        httpclient.getConnectionManager().shutdown();
    }
}
/**
*
* @param inputStream
* @throws IOException
* @throws JDOMException
* @throws InvalidElementException
*/
/**
 * Parses an XCRI catalog from the stream and populates the data in two
 * passes over the providers: the first creates course groups, the second
 * creates course components.
 *
 * @param name the feed name, recorded against created rows
 * @param inputStream the XCRI-CAP XML document
 * @throws JDOMException if the XML cannot be parsed
 * @throws IOException if the stream cannot be read or logging fails
 * @throws InvalidElementException if the catalog violates the XCRI schema
 */
public void process(String name, InputStream inputStream)
        throws JDOMException, IOException, InvalidElementException {

    Catalog catalog = new Catalog();
    SAXBuilder builder = new SAXBuilder();
    Document document = builder.build(inputStream);
    catalog.fromXml(document);

    XcriOxcapPopulatorInstanceData data =
            new XcriOxcapPopulatorInstanceData(proxy,name, simpleDateFormat.format(catalog.getGenerated()));

    Provider[] providers = catalog.getProviders();

    // First pass to create course groups
    for (Provider provider : providers) {
        provider(provider, data, true);
    }

    // Second pass to create course components
    for (Provider provider : providers) {
        provider(provider, data, false);
    }
    data.endTasks();
}
/**
*
* @param provider
* @param createGroups
* @throws IOException
*/
/**
 * Processes one XCRI &lt;provider&gt; element. Department metadata is pulled
 * from the provider's extensions; on the group-creation pass the department
 * and its sub-units are created/updated, and on both passes every course is
 * handed on to {@link #course}.
 *
 * @param provider the XCRI provider element
 * @param data per-run statistics and logging sink
 * @param createGroups true on the group pass, false on the component pass
 * @throws IOException if logging fails
 */
private void provider(Provider provider, XcriOxcapPopulatorInstanceData data, boolean createGroups)
        throws IOException {

    String departmentName = null;
    if (provider.getTitles().length > 0) {
        departmentName = provider.getTitles()[0].getValue();
    }
    String departmentCode = null;
    String divisionEmail = null;
    boolean departmentApproval = false;
    Set<String> departmentApprovers = new HashSet<String>();
    Collection<String> divisionSuperUsers = new HashSet<String>();
    Map<String, String> subunits = new HashMap<String, String>();

    for (Extension extension : provider.getExtensions()) {

        if (extension instanceof Identifier) {
            Identifier identifier = (Identifier) extension;
            if (typeProviderId(identifier.getType())) {
                // A fallback id only applies when no id has been seen yet.
                if (typeProviderFallbackId(identifier.getType()) &&
                        null != departmentCode) {
                    continue;
                }
                departmentCode = identifier.getValue();
                continue;
            }
            if (typeProviderDivision(identifier.getType())) {
                // Division identifier is recognised but its value is
                // currently unused (was stored in a dead local before).
                continue;
            }
        }

        if (extension instanceof DivisionWideEmail) {
            divisionEmail = extension.getValue();
            continue;
        }

        if (extension instanceof DepartmentThirdLevelApproval) {
            departmentApproval = parseBoolean(extension.getValue());
            continue;
        }

        if (extension instanceof ModuleApproval) {
            departmentApprovers.add(getUser(extension.getValue()));
            continue;
        }

        if (extension instanceof WebAuthCode) {
            WebAuthCode webAuthCode = (WebAuthCode) extension;
            if (webAuthCode.getWebAuthCodeType() == WebAuthCode.WebAuthCodeType.superUser) {
                divisionSuperUsers.add(getUser(webAuthCode.getValue()));
            }
            continue;
        }

        if (extension instanceof DepartmentalSubUnit) {
            DepartmentalSubUnit subUnit = (DepartmentalSubUnit) extension;
            subunits.put(subUnit.getCode(), subUnit.getValue());
            continue;
        }
    }

    // A provider without an identifier cannot be stored — log and bail out.
    if (null == departmentCode) {
        data.logMe(
                "Log Failure Provider ["+departmentCode+":"+departmentName+"] No Provider Identifier");
        return;
    }

    if (createGroups) {
        data.incrDepartmentSeen();
        if (updateDepartment(departmentCode, departmentName, departmentApproval,
                departmentApprovers)) {
            data.incrDepartmentCreated();
        } else {
            data.incrDepartmentUpdated();
        }

        for (Map.Entry<String, String> entry : subunits.entrySet()) {
            data.incrSubunitSeen();
            if (updateSubUnit(entry.getKey(), entry.getValue(), departmentCode)) {
                data.incrSubunitCreated();
            } else {
                data.incrSubunitUpdated();
            }
        }
    }

    for (Course course : provider.getCourses()) {
        course(course, departmentCode, departmentName, divisionEmail, divisionSuperUsers, data, !createGroups);
    }
}
/**
*
* @param type
* @return
*/
/**
 * True when the identifier type names a provider (department) identifier,
 * i.e. either "ns:department" or "ns:twoThree".
 *
 * @param type the type attribute of an identifier element
 * @return whether the identifier carries the department code
 */
protected static boolean typeProviderId(String type) {
    // Idiom: return the boolean expression directly instead of if/return.
    return "ns:department".equals(type) || "ns:twoThree".equals(type);
}
/**
 * True when the identifier type is the fallback provider id
 * ("ns:department"), which only applies if no id has been seen yet.
 *
 * @param type the type attribute of an identifier element
 * @return whether the identifier is the fallback department code
 */
protected static boolean typeProviderFallbackId(String type) {
    // Idiom: return the boolean expression directly instead of if/return.
    return "ns:department".equals(type);
}
/**
*
* @param type
* @return
*/
/**
 * True when the identifier type names a division identifier ("ns:division").
 *
 * @param type the type attribute of an identifier element
 * @return whether the identifier carries the division code
 */
protected static boolean typeProviderDivision(String type) {
    // Idiom: return the boolean expression directly instead of if/return.
    return "ns:division".equals(type);
}
/**
 * Process a &lt;course&gt; tag.
 *
 * Builds a CourseGroupDAO from the XCRI course element plus the department
 * context gathered by provider(). On the components pass
 * (createComponents == true) only the presentations are processed; otherwise
 * the course group itself is validated and written, once per course id.
 *
 * @param course the XCRI course element
 * @param departmentCode provider (department) identifier
 * @param departmentName provider display name
 * @param divisionEmail division-wide contact address
 * @param divisionSuperUsers ids of division super-users
 * @param data per-run counters and logging
 * @param createComponents true to create course components (presentations),
 *        false to create/update the course group
 * @throws IOException if writing to the population log fails
 */
private void course(Course course,
		String departmentCode, String departmentName,
		String divisionEmail, Collection<String> divisionSuperUsers,
		XcriOxcapPopulatorInstanceData data,
		boolean createComponents)
		throws IOException {

	// Seed the DAO with the department-level context gathered by provider().
	CourseGroupDAO myCourse = new CourseGroupDAO();
	myCourse.setSource(data.getFeed());
	myCourse.setDept(departmentCode);
	myCourse.setDepartmentName(departmentName);
	myCourse.setContactEmail(divisionEmail);
	myCourse.setTitle(course.getTitles()[0].getValue());

	// Visibility comes from the OxCaP override of the XCRI course element.
	OxcapCourse oxCourse = (OxcapCourse)course;
	myCourse.setVisibility(oxCourse.getVisibility().toString());

	// Only the first <regulations> element is used; plain text is run
	// through parse() to format it and auto-link e-mails/URLs.
	if (course.getRegulations().length > 0) {
		Regulations xRegulations = course.getRegulations()[0];
		if (!xRegulations.isXhtml()) {
			myCourse.setRegulations(parse(xRegulations.getValue()));
		} else {
			myCourse.setRegulations(xRegulations.getValue());
		}
	}

	Collection<Subject> researchCategories = new HashSet<Subject>();
	Collection<Subject> skillsCategories = new HashSet<Subject>();
	Collection<Subject> jacsCategories = new HashSet<Subject>();
	String teachingcomponentId = null;
	Collection<String> administrators = new HashSet<String>();
	Collection<String> otherDepartments = new HashSet<String>();

	// Harvest the Daisy/OxCaP extensions attached to the course.
	for (Extension extension : course.getExtensions()) {
		if (extension instanceof Identifier) {
			Identifier identifier = (Identifier) extension;
			if (typeCourseId(identifier.getType())) {
				myCourse.setCourseId(identifier.getValue());
			}
			if ("teachingComponentId".equals(identifier.getType())) {
				teachingcomponentId = identifier.getValue();
			}
			continue;
		}
		if (extension instanceof SupervisorApproval) {
			myCourse.setSupervisorApproval(parseBoolean(extension.getValue()));
			continue;
		}
		if (extension instanceof ModuleApproval) {
			myCourse.setAdministratorApproval(parseBoolean(extension.getValue()));
			continue;
		}
		if (extension instanceof CourseSubUnit) {
			CourseSubUnit subUnit = (CourseSubUnit)extension;
			myCourse.setSubunit(subUnit.getCode());
			myCourse.setSubunitName(subUnit.getValue());
			continue;
		}
		if (extension instanceof WebAuthCode) {
			// Only administrator codes matter at course level; presenter
			// codes are handled per presentation.
			WebAuthCode webAuthCode = (WebAuthCode) extension;
			if (webAuthCode.getWebAuthCodeType() == WebAuthCode.WebAuthCodeType.administrator) {
				administrators.add(getUser(webAuthCode.getValue()));
			}
			continue;
		}
		if (extension instanceof OtherDepartment) {
			if (!extension.getValue().isEmpty()) {
				otherDepartments.add(extension.getValue());
			}
			continue;
		}
		if (extension instanceof Subject) {
			// A subject can belong to several category schemes at once.
			Subject subject = (Subject) extension;
			if (subject.isRDFCategory()) {
				skillsCategories.add(subject);
			}
			if (subject.isRMCategory()) {
				researchCategories.add(subject);
			}
			if (subject.isJACSCategory()) {
				jacsCategories.add(subject);
			}
			continue;
		}
	}

	// Without a course id nothing can be stored.
	if (null == myCourse.getCourseId()) {
		data.logMe(
				"Log Failure Course ["+myCourse.getCourseId()+":"+myCourse.getTitle()+"] No Course Identifier");
		return;
	}

	// Only the first <description> is used; a missing description is a
	// warning, not a failure.
	if (course.getDescriptions().length > 0) {
		Description xDescription = course.getDescriptions()[0];
		if (!xDescription.isXhtml()) {
			myCourse.setDescription(parse(xDescription.getValue()));
		} else {
			myCourse.setDescription(xDescription.getValue());
		}
	} else {
		data.logMe(
				"Log Warning Course ["+myCourse.getCourseId()+":"+myCourse.getTitle()+"] has no description");
	}

	if (createComponents) {
		// Components pass: process every presentation of this course.
		Presentation[] presentations = course.getPresentations();
		for (int i=0; i<presentations.length; i++) {
			presentation(presentations[i], myCourse.getCourseId(), teachingcomponentId, data);
		}
	} else {
		// Groups pass: write the course group once per distinct course id.
		if (!myCourse.getCourseId().equals(data.getLastGroup())) {
			data.incrGroupSeen();
			data.setLastGroup(myCourse.getCourseId());
			// The downcasts to Set rely on the locals being HashSets, and on
			// divisionSuperUsers actually being a Set — TODO confirm callers.
			if (validCourse(data, myCourse,
					(Set<String>) administrators,
					(Set<String>) divisionSuperUsers,
					(Set<String>) otherDepartments,
					(Set<Subject>) researchCategories,
					(Set<Subject>) skillsCategories,
					(Set<Subject>) jacsCategories)) {
				if (updateCourse(data, myCourse,
						(Set<String>) administrators,
						(Set<String>) divisionSuperUsers,
						(Set<String>) otherDepartments,
						(Set<Subject>) researchCategories,
						(Set<Subject>) skillsCategories,
						(Set<Subject>) jacsCategories)) {
					data.incrGroupCreated();
				} else {
					data.incrGroupUpdated();
				}
			}
		}
	}
}
/**
 * Tells whether an identifier type attribute carries a course id in one of
 * the known source schemes (Daisy, ITLP, Careers, Language Centre, MedSci,
 * SharePoint).
 *
 * @param type the identifier type attribute
 * @return true when the type names a recognised course-id scheme
 */
protected static boolean typeCourseId(String type) {
	return "ns:daisy-course".equals(type)
			|| "ns:itlp-course".equals(type)
			|| "ns:careers-course".equals(type)
			|| "ns:language-centre-course".equals(type)
			|| "ns:medsci-course".equals(type)
			|| "ns:sharepoint-course".equals(type);
}
/**
 * Process a &lt;presentation&gt; tag into a CourseComponentDAO and, if valid,
 * write it via updateComponent().
 *
 * @param presentation the XCRI presentation element
 * @param assessmentunitCode the course id this component belongs to
 * @param teachingcomponentId the teaching component id harvested by course()
 * @param data per-run counters and logging
 * @throws IOException if writing to the population log fails
 */
private void presentation(Presentation presentation,
		String assessmentunitCode, String teachingcomponentId, XcriOxcapPopulatorInstanceData data)
		throws IOException {
	CourseComponentDAO myPresentation = new CourseComponentDAO();
	myPresentation.setComponentId(teachingcomponentId);
	myPresentation.setSource(data.getFeed());
	myPresentation.setTitle(presentation.getTitles()[0].getValue());
	if (null != presentation.getAttendanceMode()) {
		myPresentation.setAttendanceMode(presentation.getAttendanceMode().getIdentifier());
		myPresentation.setAttendanceModeText(presentation.getAttendanceMode().getValue());
	}
	if (null != presentation.getAttendancePattern()) {
		myPresentation.setAttendancePattern(presentation.getAttendancePattern().getIdentifier());
		myPresentation.setAttendancePatternText(presentation.getAttendancePattern().getValue());
	}
	if (null != presentation.getApplyTo()) {
		myPresentation.setApplyTo(presentation.getApplyTo().getValue());
	}
	// Each XCRI date carries a machine-readable value (dtf) and a display
	// string; both are stored.
	if (null != presentation.getStart()) {
		myPresentation.setStarts(presentation.getStart().getDtf());
		myPresentation.setStartsText(presentation.getStart().getValue());
	}
	if (null != presentation.getEnd()) {
		myPresentation.setEnds(presentation.getEnd().getDtf());
		myPresentation.setEndsText(presentation.getEnd().getValue());
	}
	if (null != presentation.getApplyFrom()) {
		myPresentation.setOpens(presentation.getApplyFrom().getDtf());
		myPresentation.setOpensText(presentation.getApplyFrom().getValue());
	}
	if (null != presentation.getApplyUntil()) {
		myPresentation.setCloses(presentation.getApplyUntil().getDtf());
		myPresentation.setClosesText(presentation.getApplyUntil().getValue());
	}
	// Location is taken from the first venue's provider title, if any.
	if (0 != presentation.getVenues().length) {
		Venue venue = presentation.getVenues()[0];
		if (null != venue.getProvider() && venue.getProvider().getTitles().length > 0) {
			myPresentation.setLocation(venue.getProvider().getTitles()[0].getValue());
		}
	}
	String teacherId = null;
	Collection<Session> sessions = new HashSet<Session>();
	// Harvest the Daisy/OxCaP extensions attached to the presentation.
	for (Extension extension : presentation.getExtensions()) {
		if (extension instanceof Identifier) {
			Identifier identifier = (Identifier) extension;
			if ("presentationURI".equals(identifier.getType())) {
				// presentationURI is recognised but currently unused.
				//uri = identifier.getValue();
				continue;
			}
			if (typePresentationId(identifier.getType())) {
				myPresentation.setPresentationId(identifier.getValue());
				continue;
			}
		}
		if (extension instanceof Bookable) {
			myPresentation.setBookable(parseBoolean(extension.getValue()));
			continue;
		}
		if (extension instanceof EmployeeName) {
			myPresentation.setTeacherName(extension.getValue());
			continue;
		}
		if (extension instanceof EmployeeEmail) {
			myPresentation.setTeacherEmail(extension.getValue());
			continue;
		}
		if (extension instanceof MemberApplyTo) {
			myPresentation.setMemberApplyTo(extension.getValue());
			continue;
		}
		if (extension instanceof Sessions) {
			myPresentation.setSessions(extension.getValue());
			continue;
		}
		if (extension instanceof TermCode) {
			myPresentation.setTermcode(extension.getValue());
			continue;
		}
		if (extension instanceof TermLabel) {
			myPresentation.setSlot(extension.getValue());
			continue;
		}
		if (extension instanceof TeachingDetails) {
			myPresentation.setTeachingDetails(extension.getValue());
			continue;
		}
		if (extension instanceof WebAuthCode) {
			// Presenter codes identify the teacher; resolved to a user later
			// in updateComponent().
			WebAuthCode webAuthCode = (WebAuthCode) extension;
			if (webAuthCode.getWebAuthCodeType() == WebAuthCode.WebAuthCodeType.presenter) {
				teacherId = webAuthCode.getValue();
			}
			continue;
		}
		if (extension instanceof Session) {
			Session session = (Session)extension;
			// Only sessions carrying at least one identifier are kept.
			if (session.getIdentifiers().length > 0) {
				sessions.add(session);
				continue;
			}
		}
	}
	if (null != presentation.getPlaces() &&
			!presentation.getPlaces().getValue().isEmpty()) {
		try {
			myPresentation.setSize(Integer.parseInt(presentation.getPlaces().getValue()));
		} catch (Exception e) {
			// A non-numeric <places> value is reported but is not fatal.
			data.logMe(
					"Log Warning Presentation ["+
					myPresentation.getPresentationId()+":"+myPresentation.getTitle()+
					"] value in places tag is not a number ["+presentation.getPlaces().getValue()+"]");
		}
	}
	// Resolve the owning course group from the assessment unit code.
	Set<String> groups = new HashSet<String>();
	groups.add(assessmentunitCode);
	Collection<CourseGroupDAO> courseGroups = getCourseGroups(groups);
	data.incrComponentSeen();
	// The Set casts rely on both collections being HashSets (sessions is
	// created above; getCourseGroups returns a HashSet).
	if (validComponent(data, myPresentation, teacherId,
			(Set<Session>) sessions, (Set<CourseGroupDAO>) courseGroups)) {
		if (updateComponent(data, myPresentation, teacherId,
				(Set<Session>) sessions,
				(Set<CourseGroupDAO>) courseGroups)) {
			data.incrComponentCreated();
		} else {
			data.incrComponentUpdated();
		}
	}
}
/**
 * Tells whether an identifier type attribute carries a presentation id in
 * one of the known source schemes.
 *
 * @param type the identifier type attribute
 * @return true when the type names a recognised presentation-id scheme
 */
protected static boolean typePresentationId(String type) {
	return "ns:daisy-presentation".equals(type)
			|| "ns:careers-presentation".equals(type)
			|| "ns:itlp-presentation".equals(type)
			|| "ns:language-centre-presentation".equals(type)
			|| "ns:medsci-presentation".equals(type)
			|| "ns:sharepoint-presentation".equals(type);
}
/**
 * Creates or refreshes the department record for the given code.
 * Does nothing (and reports "updated") when no DAO has been injected.
 *
 * @param code department code (primary key)
 * @param name department display name
 * @param approve whether third-level approval is required
 * @param approvers user ids allowed to approve
 * @return true if a new department record was created, false if an
 *         existing one was updated (or no DAO is available)
 */
private boolean updateDepartment(String code, String name, boolean approve, Set<String> approvers) {
	log.debug("XcriPopulatorImpl.updateDepartment ["+code+":"+name+":"+
			approve+":"+approvers.size()+"]");
	if (null == dao) {
		return false;
	}
	CourseDepartmentDAO department = dao.findDepartmentByCode(code);
	boolean created = (null == department);
	if (created) {
		department = new CourseDepartmentDAO(code);
	}
	department.setName(name);
	department.setApprove(approve);
	department.setApprovers(approvers);
	dao.save(department);
	return created;
}
/**
 * Creates or refreshes a departmental sub-unit record.
 * Does nothing (and reports "updated") when no DAO has been injected.
 *
 * @param code sub-unit code (primary key)
 * @param name sub-unit display name
 * @param departmentCode owning department code
 * @return true if a new sub-unit record was created, false otherwise
 */
private boolean updateSubUnit(String code, String name, String departmentCode) {
	log.debug("XcriPopulatorImpl.updateSubUnit ["+
			code+":"+name+":"+departmentCode+"]");
	if (null == dao) {
		return false;
	}
	CourseSubunitDAO subunit = dao.findSubunitByCode(code);
	boolean created = (null == subunit);
	if (created) {
		subunit = new CourseSubunitDAO(code);
	}
	subunit.setSubunitName(name);
	subunit.setDepartmentCode(departmentCode);
	dao.save(subunit);
	return created;
}
/**
 * Sanity-checks a course group before it is written. Each failed check is
 * logged and counted; the group is valid only when no check fails.
 *
 * NOTE(review): only the course id is currently checked — the remaining
 * parameters are accepted for symmetry with updateCourse() but unused here.
 *
 * @param data per-run counters and logging (may be null for logMe)
 * @param myCourse the course group to validate
 * @return true when the course group passes all checks
 */
protected boolean validCourse(XcriOxcapPopulatorInstanceData data,
		CourseGroupDAO myCourse,
		Set<String> administrators, Set<String> superusers, Set<String> otherDepartments,
		Set<Subject> researchCategories, Set<Subject> skillsCategories, Set<Subject> jacsCategories) {
	// Number of failed checks.
	int i=0;
	try {
		if (null == myCourse.getCourseId()) {
			logMe(data, "Log Failure Assessment Unit ["+myCourse.getCourseId()+":"+myCourse.getTitle()+"] No AssessmentUnit code");
			i++;
		}
		if (i == 0) {
			return true;
		}
	} catch (IOException e) {
		// NOTE(review): an IOException from logMe is silently swallowed and
		// the course is then reported invalid — confirm this is intentional
		// rather than lost error handling.
	}
	return false;
}
/**
 * Creates or refreshes the persistent course group from the values gathered
 * out of the XCRI feed.
 *
 * @param data per-run counters and logging
 * @param myCourse course group populated by course()
 * @param administrators user ids of course administrators
 * @param superusers division super-users (null treated as empty)
 * @param otherDepartments other departments sharing the course (null treated as empty)
 * @param researchCategories RM subject categories
 * @param skillsCategories RDF subject categories
 * @param jacsCategories JACS subject categories
 * @return true if the group was created, false if an existing one was updated
 * @throws IOException if writing to the population log fails
 */
private boolean updateCourse(XcriOxcapPopulatorInstanceData data,
		CourseGroupDAO myCourse,
		Set<String> administrators,
		Set<String> superusers,
		Set<String> otherDepartments,
		Set<Subject> researchCategories,
		Set<Subject> skillsCategories,
		Set<Subject> jacsCategories) throws IOException {
	boolean created = false;
	if (null != dao) {
		CourseGroupDAO groupDao = dao.findCourseGroupById(myCourse.getCourseId());
		if (groupDao == null) {
			groupDao = dao.newCourseGroup(myCourse.getCourseId(), myCourse.getTitle(), myCourse.getDept(), myCourse.getSubunit());
			created = true;
		} else {
			groupDao.setDept(myCourse.getDept());
			groupDao.setSubunit(myCourse.getSubunit());
			groupDao.setTitle(myCourse.getTitle());
		}
		groupDao.setDescription(myCourse.getDescription());
		groupDao.setDepartmentName(myCourse.getDepartmentName());
		groupDao.setSubunitName(myCourse.getSubunitName());
		groupDao.setVisibility(myCourse.getVisibility());
		groupDao.setSource(myCourse.getSource());
		groupDao.setSupervisorApproval(myCourse.getSupervisorApproval());
		groupDao.setAdministratorApproval(myCourse.getAdministratorApproval());
		groupDao.setContactEmail(myCourse.getContactEmail());
		groupDao.setAdministrators(administrators);
		groupDao.setRegulations(myCourse.getRegulations());
		// Re-importing a group resurrects it if it was previously deleted.
		groupDao.setDeleted(false);
		if (null==superusers) {
			superusers = Collections.<String>emptySet();
		}
		groupDao.setSuperusers(superusers);
		if (null==otherDepartments) {
			otherDepartments = Collections.<String>emptySet();
		}
		groupDao.setOtherDepartments(otherDepartments);
		// Build the full desired category set across all three schemes.
		Set<CourseCategoryDAO> categories = new HashSet<CourseCategoryDAO>();
		for (Subject subject : researchCategories) {
			categories.add(new CourseCategoryDAO(
					CourseGroup.Category_Type.RM, subject.getIdentifier(), subject.getValue()));
		}
		for (Subject subject : skillsCategories) {
			categories.add(new CourseCategoryDAO(
					CourseGroup.Category_Type.RDF, subject.getIdentifier(), subject.getValue()));
		}
		for (Subject subject : jacsCategories) {
			categories.add(new CourseCategoryDAO(
					CourseGroup.Category_Type.JACS, subject.getIdentifier(), subject.getValue()));
		}
		//remove unwanted categories
		// done this way to avoid java.util.ConcurrentModificationException
		for (Iterator<CourseCategoryDAO> itr = groupDao.getCategories().iterator(); itr.hasNext();) {
			CourseCategoryDAO category = itr.next();
			if (!categories.contains(category)) {
				itr.remove();
			}
		}
		//add any new categories
		for (CourseCategoryDAO category : categories) {
			if (!groupDao.getCategories().contains(category)) {
				groupDao.getCategories().add(category);
			}
		}
		dao.save(groupDao);
	}
	// NOTE(review): when dao is null nothing is saved, yet a success message
	// is still logged below — confirm this is the intended dry-run behaviour.
	if (created) {
		logMs(data, "Log Success Course Group created ["+myCourse.getCourseId()+":"+myCourse.getTitle()+"]");
	} else {
		logMs(data, "Log Success Course Group updated ["+myCourse.getCourseId()+":"+myCourse.getTitle()+"]");
	}
	return created;
}
/**
 * Sanity-checks a course component before it is written. Each failed check
 * is logged and counted; the component is valid only when no check fails.
 *
 * Checks: open date must not be after close date, the title must be
 * non-blank, and at least one owning course group must have been resolved.
 *
 * NOTE(review): teacherId and sessions are accepted for symmetry with
 * updateComponent() but are not validated here.
 *
 * @param data per-run counters and logging (may be null for logMe)
 * @param myPresentation the component to validate
 * @param groups the course groups the component belongs to
 * @return true when the component passes all checks
 */
protected boolean validComponent(XcriOxcapPopulatorInstanceData data,
		CourseComponentDAO myPresentation,
		String teacherId,
		Set<Session> sessions,
		Set<CourseGroupDAO> groups) {
	// Number of failed checks.
	int i=0;
	try {
		if (null != myPresentation.getOpens() && null != myPresentation.getCloses()) {
			if (myPresentation.getOpens().after(myPresentation.getCloses())){
				logMe(data, "Log Failure Teaching Instance ["+myPresentation.getPresentationId()+":"+myPresentation.getTitle()+"] Open date is after close date");
				i++;
			}
		}
		if (myPresentation.getTitle() == null || myPresentation.getTitle().trim().length() == 0) {
			logMe(data, "Log Failure Teaching Instance ["+myPresentation.getPresentationId()+":"+myPresentation.getTitle()+"] Title isn't set");
			i++;
		}
		if (groups.isEmpty()) {
			logMe(data, "Log Failure Teaching Instance ["+myPresentation.getPresentationId()+":"+myPresentation.getTitle()+"] No Assessment Unit codes");
			i++;
		}
		if (i == 0) {
			return true;
		}
	} catch (IOException e) {
		// NOTE(review): an IOException from logMe is silently swallowed and
		// the component is then reported invalid — confirm this is
		// intentional rather than lost error handling.
	}
	return false;
}
/**
 * Creates or refreshes the persistent course component from the values
 * gathered out of the XCRI feed.
 *
 * Teacher details supplied by the feed are overridden by the matching
 * WebLearn user's details when the presenter's WebAuth code resolves.
 *
 * @param data per-run counters and logging
 * @param myPresentation component populated by presentation()
 * @param teacherId presenter's WebAuth code (may be null/empty)
 * @param sessions sessions to attach to the component
 * @param groups course groups the component belongs to
 * @return true if the component was created, false if an existing one was updated
 * @throws IOException if writing to the population log fails
 */
private boolean updateComponent(XcriOxcapPopulatorInstanceData data,
		CourseComponentDAO myPresentation,
		String teacherId,
		Set<Session> sessions, Set<CourseGroupDAO> groups) throws IOException {
	boolean created = false;
	if (null != dao) {
		CourseComponentDAO componentDao = dao.findCourseComponent(myPresentation.getPresentationId());
		if (componentDao == null) {
			componentDao = dao.newCourseComponent(myPresentation.getPresentationId());
			created = true;
		}
		componentDao.setTitle(myPresentation.getTitle());
		componentDao.setSubject(myPresentation.getSubject());
		componentDao.setOpens(myPresentation.getOpens());
		componentDao.setOpensText(myPresentation.getOpensText());
		componentDao.setCloses(myPresentation.getCloses());
		componentDao.setClosesText(myPresentation.getClosesText());
		componentDao.setStarts(myPresentation.getStarts());
		componentDao.setStartsText(myPresentation.getStartsText());
		componentDao.setEnds(myPresentation.getEnds());
		componentDao.setEndsText(myPresentation.getEndsText());
		componentDao.setBookable(myPresentation.isBookable());
		componentDao.setSize(myPresentation.getSize());
		componentDao.setTermcode(myPresentation.getTermcode());
		componentDao.setAttendanceMode(myPresentation.getAttendanceMode());
		componentDao.setAttendanceModeText(myPresentation.getAttendanceModeText());
		componentDao.setAttendancePattern(myPresentation.getAttendancePattern());
		componentDao.setAttendancePatternText(myPresentation.getAttendancePatternText());
		// The stored component id is qualified by term code so the same
		// teaching component can recur in different terms.
		componentDao.setComponentId(myPresentation.getComponentId()+":"+myPresentation.getTermcode());
		componentDao.setTeacherName(myPresentation.getTeacherName());
		componentDao.setTeacherEmail(myPresentation.getTeacherEmail());
		componentDao.setWhen(myPresentation.getWhen());
		componentDao.setSlot(myPresentation.getSlot());
		componentDao.setSessions(myPresentation.getSessions());
		componentDao.setLocation(myPresentation.getLocation());
		componentDao.setApplyTo(myPresentation.getApplyTo());
		componentDao.setMemberApplyTo(myPresentation.getMemberApplyTo());
		componentDao.setTeachingDetails(myPresentation.getTeachingDetails());
		componentDao.setBaseDate(baseDate(componentDao));
		componentDao.setSource(myPresentation.getSource());
		// Cleanout existing groups.
		componentDao.setGroups(new HashSet<CourseGroupDAO>());
		// Populate teacher details.
		// Look for details in WebLearn first then fallback to details in DAISY.
		if (teacherId != null && teacherId.length() > 0) {
			UserProxy teacher = proxy.findUserByEid(teacherId);
			if (teacher != null) {
				componentDao.setTeacherName(teacher.getDisplayName());
				componentDao.setTeacherEmail(teacher.getEmail());
			}
		}
		componentDao.setGroups(groups);
		// Re-importing a component resurrects it if previously deleted.
		componentDao.setDeleted(false);
		// Sessions are appended to whatever the DAO already holds; each
		// session's first identifier becomes the session id.
		Collection<CourseComponentSessionDAO> componentSessions = componentDao.getComponentSessions();
		for (Session session : sessions) {
			componentSessions.add(
					new CourseComponentSessionDAO(session.getIdentifiers()[0].getValue(),
							session.getStart().getDtf(), session.getStart().getValue(),
							session.getEnd().getDtf(), session.getEnd().getValue()));
		}
		dao.save(componentDao);
	}
	// NOTE(review): when dao is null nothing is saved, yet a success message
	// is still logged below — confirm this is the intended dry-run behaviour.
	if (created) {
		logMs(data, "Log Success Course Component created ["+myPresentation.getPresentationId()+":"+myPresentation.getTitle()+"]");
	} else {
		logMs(data, "Log Success Course Component updated ["+myPresentation.getPresentationId()+":"+myPresentation.getTitle()+"]");
	}
	return created;
}
/**
 * Records an import error: always warns via the class log and, when a data
 * holder is present, appends the message to the run's error log.
 *
 * @param data per-run log sink, may be null
 * @param message the message to record
 * @throws IOException if the run log cannot be written
 */
private void logMe(XcriOxcapPopulatorInstanceData data, String message) throws IOException {
	log.warn(message);
	if (data != null) {
		data.logMe(message);
	}
}
/**
 * Records an import success: always warns via the class log and, when a
 * data holder is present, appends the message to the run's success log.
 *
 * @param data per-run log sink, may be null
 * @param message the message to record
 * @throws IOException if the run log cannot be written
 */
private void logMs(XcriOxcapPopulatorInstanceData data, String message) throws IOException {
	log.warn(message);
	if (data != null) {
		data.logMs(message);
	}
}
/**
 * Resolves a WebAuth user code to an internal user id via the Sakai proxy.
 *
 * @param userCode the external user code (eid)
 * @return the internal id; the unresolved code when no proxy is configured;
 *         null when the proxy cannot find the user
 */
private String getUser (String userCode) {
	if (proxy == null) {
		// No proxy configured (e.g. in tests): pass the code through.
		return userCode;
	}
	UserProxy user = proxy.findUserByEid(userCode);
	if (user != null) {
		return user.getId();
	}
	log.warn("Failed to find User [" + userCode +"]");
	return null;
}
/**
 * Looks up the course-group records for a set of group ids. Ids that cannot
 * be found are reported and dropped from the result.
 *
 * NOTE(review): failures are written to System.out rather than the class
 * log, unlike the rest of this class.
 *
 * @param groups course group ids to resolve
 * @return the matching records (stub records when no DAO is configured)
 */
private Collection<CourseGroupDAO> getCourseGroups (Collection<String> groups) {
	Set<CourseGroupDAO> found = new HashSet<CourseGroupDAO>();
	for (String groupId : groups) {
		CourseGroupDAO group;
		if (dao == null) {
			// No DAO (e.g. in tests): fabricate a stub holding just the id.
			group = new CourseGroupDAO();
			group.setCourseId(groupId);
		} else {
			group = dao.findCourseGroupById(groupId);
		}
		if (group == null) {
			System.out.println("Failed to find Group [" + groupId +"]");
		} else {
			found.add(group);
		}
	}
	return found;
}
/**
 * Formats a date for display as {@code text[dd-MM-yyyy]}, or
 * {@code text[null]} when the date is absent.
 *
 * @param date the date to render, may be null
 * @param text label prefixed to the bracketed value
 * @return the label followed by the bracketed date (or "[null]")
 */
protected static String viewDate(Date date, String text) {
	if (date == null) {
		return text + "[null]";
	}
	return text + "[" + new SimpleDateFormat("dd-MM-yyyy").format(date) + "]";
}
/**
 * Lenient boolean parse for feed values: "1" is true, "0" is false, and
 * anything else falls back to {@link Boolean#parseBoolean} (so only "true",
 * case-insensitively, is true).
 *
 * @param data the raw feed value, may be null
 * @return the parsed boolean
 */
private static boolean parseBoolean(String data) {
	// "0" is already false under Boolean.parseBoolean, so only the "1"
	// case needs special treatment.
	return "1".equals(data) || Boolean.parseBoolean(data);
}
/**
 * Chooses the date a component is ordered/expired by: the start date when
 * present, otherwise the close date, otherwise null.
 *
 * @param component the component to inspect
 * @return the base date, or null when neither date is set
 */
public static Date baseDate(CourseComponentDAO component) {
	Date starts = component.getStarts();
	if (starts != null) {
		return starts;
	}
	// getCloses() may itself be null, matching the original fall-through.
	return component.getCloses();
}
/**
 * Converts a plain-text description to formatted text and turns e-mail
 * addresses and http/https/ftp URLs into anchor tags.
 *
 * @param data the plain-text input
 * @return the formatted, auto-linked markup
 */
protected static String parse(String data) {
	// NOTE(review): these two calls replace a character with itself and are
	// no-ops. They look like HTML escaping ("&lt;"/"&gt;") whose entities
	// were lost in an encoding round-trip — confirm against the original
	// source before relying on this method for untrusted input.
	data = data.replaceAll("<", "<");
	data = data.replaceAll(">", ">");
	data = FormattedText.convertPlaintextToFormattedText(data);
	// Wrap e-mail addresses in mailto: links.
	Pattern pattern = Pattern.compile("[A-Z0-9._%+-]+@[A-Z0-9.-]+\\.[A-Z]{2,4}", Pattern.CASE_INSENSITIVE);
	Matcher matcher = pattern.matcher(data);
	StringBuffer sb = new StringBuffer(data.length());
	while (matcher.find()) {
		String text = matcher.group(0);
		matcher.appendReplacement(sb, "<a class=\"email\" href=\"mailto:"+text+"\">"+text+"</a>" );
	}
	matcher.appendTail(sb);
	// Wrap URLs in anchors opening in a new window.
	pattern = Pattern.compile("(https?|ftps?):\\/\\/[a-z_0-9\\\\\\-]+(\\.([\\w#!:?+=&%@!\\-\\/])+)+", Pattern.CASE_INSENSITIVE);
	matcher = pattern.matcher(sb.toString());
	sb = new StringBuffer(data.length());
	while (matcher.find()) {
		String text = matcher.group(0);
		matcher.appendReplacement(sb, "<a class=\"url\" href=\""+text+"\" target=\"_blank\">"+text+"</a>" );
	}
	matcher.appendTail(sb);
	return sb.toString();
}
}
| impl/src/main/java/uk/ac/ox/oucs/vle/XcriOxCapPopulatorImpl.java | package uk.ac.ox.oucs.vle;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.jdom.Document;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.sakaiproject.util.FormattedText;
import org.xcri.Extension;
import org.xcri.common.Description;
import org.xcri.common.ExtensionManager;
import org.xcri.common.OverrideManager;
import org.xcri.common.descriptive.Regulations;
import org.xcri.core.Catalog;
import org.xcri.core.Course;
import org.xcri.core.Presentation;
import org.xcri.core.Provider;
import org.xcri.exceptions.InvalidElementException;
import org.xcri.presentation.Venue;
import uk.ac.ox.oucs.vle.xcri.daisy.Bookable;
import uk.ac.ox.oucs.vle.xcri.daisy.CourseSubUnit;
import uk.ac.ox.oucs.vle.xcri.daisy.DepartmentThirdLevelApproval;
import uk.ac.ox.oucs.vle.xcri.daisy.DepartmentalSubUnit;
import uk.ac.ox.oucs.vle.xcri.daisy.DivisionWideEmail;
import uk.ac.ox.oucs.vle.xcri.daisy.EmployeeEmail;
import uk.ac.ox.oucs.vle.xcri.daisy.EmployeeName;
import uk.ac.ox.oucs.vle.xcri.daisy.Identifier;
import uk.ac.ox.oucs.vle.xcri.daisy.ModuleApproval;
import uk.ac.ox.oucs.vle.xcri.daisy.OtherDepartment;
import uk.ac.ox.oucs.vle.xcri.daisy.Sessions;
import uk.ac.ox.oucs.vle.xcri.daisy.SupervisorApproval;
import uk.ac.ox.oucs.vle.xcri.daisy.TeachingDetails;
import uk.ac.ox.oucs.vle.xcri.daisy.TermCode;
import uk.ac.ox.oucs.vle.xcri.daisy.TermLabel;
import uk.ac.ox.oucs.vle.xcri.daisy.WebAuthCode;
import uk.ac.ox.oucs.vle.xcri.oxcap.MemberApplyTo;
import uk.ac.ox.oucs.vle.xcri.oxcap.OxcapCourse;
import uk.ac.ox.oucs.vle.xcri.oxcap.OxcapPresentation;
import uk.ac.ox.oucs.vle.xcri.oxcap.Session;
import uk.ac.ox.oucs.vle.xcri.oxcap.Subject;
public class XcriOxCapPopulatorImpl implements Populator {
/**
 * The DAO to update our entries through.
 */
private CourseDAO dao;

public void setCourseDao(CourseDAO dao) {
	this.dao = dao;
}

/**
 * The proxy for getting users.
 */
private SakaiProxy proxy;

public void setProxy(SakaiProxy proxy) {
	this.proxy = proxy;
}

private static final Log log = LogFactory.getLog(XcriOxCapPopulatorImpl.class);

// Format for the catalog "generated" timestamp recorded against a run.
// NOTE(review): SimpleDateFormat is not thread-safe and this instance is
// shared; also "hh" is the 12-hour field — "HH" may have been intended.
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd MMMM yyyy hh:mm");

static {
	// Register every Daisy/OxCaP extension element so the XCRI parser can
	// materialise them while reading the feed.
	ExtensionManager.registerExtension(new WebAuthCode());
	ExtensionManager.registerExtension(new DepartmentalSubUnit());
	ExtensionManager.registerExtension(new DepartmentThirdLevelApproval());
	ExtensionManager.registerExtension(new DivisionWideEmail());
	ExtensionManager.registerExtension(new CourseSubUnit());
	ExtensionManager.registerExtension(new ModuleApproval());
	ExtensionManager.registerExtension(new SupervisorApproval());
	ExtensionManager.registerExtension(new OtherDepartment());
	ExtensionManager.registerExtension(new Sessions());
	ExtensionManager.registerExtension(new Bookable());
	ExtensionManager.registerExtension(new TermCode());
	ExtensionManager.registerExtension(new TermLabel());
	ExtensionManager.registerExtension(new EmployeeName());
	ExtensionManager.registerExtension(new EmployeeEmail());
	ExtensionManager.registerExtension(new Identifier());
	ExtensionManager.registerExtension(new MemberApplyTo());
	ExtensionManager.registerExtension(new TeachingDetails());
	ExtensionManager.registerExtension(new Subject());
	ExtensionManager.registerExtension(new Session());
	// Replace the stock course/presentation classes with the OxCaP ones.
	OverrideManager.registerOverride(Course.class, new OxcapCourse());
	OverrideManager.registerOverride(Presentation.class, new OxcapPresentation());
}
/**
 * Fetches the XCRI feed named in the context over HTTP (basic auth) and
 * feeds it to {@link #process}.
 *
 * @param context holds the feed URI, credentials and run name
 * @throws PopulatorException wrapping any failure. NOTE(review): only the
 *         exception's localized message survives into PopulatorException —
 *         the cause/stack trace is dropped (though it is logged first).
 */
public void update(PopulatorContext context) throws PopulatorException {
	DefaultHttpClient httpclient = new DefaultHttpClient();
	try {
		URL xcri = new URL(context.getURI());
		HttpHost targetHost = new HttpHost(xcri.getHost(), xcri.getPort(), xcri.getProtocol());
		// Basic-auth credentials scoped to the feed's host and port.
		httpclient.getCredentialsProvider().setCredentials(
				new AuthScope(targetHost.getHostName(), targetHost.getPort()),
				new UsernamePasswordCredentials(context.getUser(), context.getPassword()));
		HttpGet httpget = new HttpGet(xcri.toURI());
		HttpResponse response = httpclient.execute(targetHost, httpget);
		HttpEntity entity = response.getEntity();
		// Anything other than 200 OK aborts the import.
		if (HttpStatus.SC_OK != response.getStatusLine().getStatusCode()) {
			throw new IllegalStateException(
					"Invalid response ["+response.getStatusLine().getStatusCode()+"]");
		}
		process(context.getName(), entity.getContent());
	} catch (MalformedURLException e) {
		log.warn("MalformedURLException ["+context.getURI()+"]", e);
		throw new PopulatorException(e.getLocalizedMessage());
	} catch (IllegalStateException e) {
		log.warn("IllegalStateException ["+context.getURI()+"]", e);
		throw new PopulatorException(e.getLocalizedMessage());
	} catch (IOException e) {
		log.warn("IOException ["+context.getURI()+"]", e);
		throw new PopulatorException(e.getLocalizedMessage());
	} catch (URISyntaxException e) {
		log.warn("URISyntaxException ["+context.getURI()+"]", e);
		throw new PopulatorException(e.getLocalizedMessage());
	} catch (JDOMException e) {
		log.warn("JDOMException ["+context.getURI()+"]", e);
		throw new PopulatorException(e.getLocalizedMessage());
	} catch (InvalidElementException e) {
		log.warn("InvalidElementException ["+context.getURI()+"]", e);
		throw new PopulatorException(e.getLocalizedMessage());
	} finally {
		// When HttpClient instance is no longer needed,
		// shut down the connection manager to ensure
		// immediate deallocation of all system resources
		httpclient.getConnectionManager().shutdown();
	}
}
/**
 * Parses an XCRI catalog from the stream and populates the database in two
 * passes over the providers: the first creates departments/sub-units and
 * course groups, the second creates course components.
 *
 * @param name source name recorded against this import run
 * @param inputStream the XCRI-CAP XML document
 * @throws IOException if reading the stream or writing the run log fails
 * @throws JDOMException if the XML cannot be parsed
 * @throws InvalidElementException if the catalog contains invalid elements
 */
public void process(String name, InputStream inputStream)
		throws JDOMException, IOException, InvalidElementException {
	Catalog catalog = new Catalog();
	SAXBuilder builder = new SAXBuilder();
	Document document = builder.build(inputStream);
	catalog.fromXml(document);
	// The run is stamped with the catalog's "generated" timestamp.
	XcriOxcapPopulatorInstanceData data =
			new XcriOxcapPopulatorInstanceData(proxy,name, simpleDateFormat.format(catalog.getGenerated()));
	Provider[] providers = catalog.getProviders();
	// First pass to create course groups
	for (Provider provider : providers) {
		provider(provider, data, true);
	}
	// Second pass to create course components
	for (Provider provider : providers) {
		provider(provider, data, false);
	}
	data.endTasks();
}
/**
 * Process a &lt;provider&gt; tag: harvests department metadata from the
 * provider's extensions, then either creates/updates the department and its
 * sub-units (createGroups pass) or walks the provider's courses.
 *
 * @param provider the XCRI provider element
 * @param data per-run counters and logging
 * @param createGroups true on the first pass (departments, sub-units and
 *        course groups), false on the second (course components)
 * @throws IOException if writing to the population log fails
 */
private void provider(Provider provider, XcriOxcapPopulatorInstanceData data, boolean createGroups)
		throws IOException {
	String departmentName = null;
	if (provider.getTitles().length > 0) {
		departmentName = provider.getTitles()[0].getValue();
	}
	String departmentCode = null;
	String divisionEmail = null;
	boolean departmentApproval = false;
	// NOTE(review): divisionCode is collected below but never used.
	String divisionCode = null;
	Set<String> departmentApprovers = new HashSet<String>();
	Collection<String> divisionSuperUsers = new HashSet<String>();
	Map<String, String> subunits = new HashMap<String, String>();
	// Harvest the Daisy extensions attached to the provider.
	for (Extension extension : provider.getExtensions()) {
		if (extension instanceof Identifier) {
			Identifier identifier = (Identifier) extension;
			if (typeProviderId(identifier.getType())) {
				// A fallback id never overwrites a code already found.
				if (typeProviderFallbackId(identifier.getType()) &&
						null != departmentCode) {
					continue;
				}
				departmentCode = identifier.getValue();
				continue;
			}
			if (typeProviderDivision(identifier.getType())) {
				divisionCode = identifier.getValue();
				continue;
			}
		}
		if (extension instanceof DivisionWideEmail) {
			divisionEmail = extension.getValue();
			continue;
		}
		if (extension instanceof DepartmentThirdLevelApproval) {
			departmentApproval = parseBoolean(extension.getValue());
			continue;
		}
		if (extension instanceof ModuleApproval) {
			departmentApprovers.add(getUser(extension.getValue()));
			continue;
		}
		if (extension instanceof WebAuthCode) {
			WebAuthCode webAuthCode = (WebAuthCode) extension;
			if (webAuthCode.getWebAuthCodeType() == WebAuthCode.WebAuthCodeType.superUser) {
				divisionSuperUsers.add(getUser(webAuthCode.getValue()));
			}
			continue;
		}
		if (extension instanceof DepartmentalSubUnit) {
			DepartmentalSubUnit subUnit = (DepartmentalSubUnit) extension;
			subunits.put(subUnit.getCode(), subUnit.getValue());
			continue;
		}
	}
	// Without a provider identifier nothing can be stored.
	if (null == departmentCode) {
		data.logMe(
				"Log Failure Provider ["+departmentCode+":"+departmentName+"] No Provider Identifier");
		return;
	}
	if (createGroups) {
		data.incrDepartmentSeen();
		if (updateDepartment(departmentCode, departmentName, departmentApproval,
				departmentApprovers)) {
			data.incrDepartmentCreated();
		} else {
			data.incrDepartmentUpdated();
		}
		for (Map.Entry<String, String> entry : subunits.entrySet()) {
			data.incrSubunitSeen();
			if (updateSubUnit(entry.getKey(), entry.getValue(), departmentCode)) {
				data.incrSubunitCreated();
			} else {
				data.incrSubunitUpdated();
			}
		}
	}
	// Courses are processed on both passes; the flag is inverted so the
	// second pass creates the components.
	for (Course course : provider.getCourses()) {
		course(course, departmentCode, departmentName, divisionEmail, divisionSuperUsers, data, !createGroups);
	}
}
/**
 * Tells whether an identifier type attribute names a provider (department)
 * code that this populator recognises.
 *
 * @param type the identifier type attribute, e.g. "ns:department"
 * @return true for the recognised provider identifier types
 */
protected static boolean typeProviderId(String type) {
	return "ns:department".equals(type)
			|| "ns:twoThree".equals(type);
}
/**
 * Tells whether the identifier type is the fallback provider id; a fallback
 * id never overwrites a provider code that has already been found.
 *
 * @param type the identifier type attribute
 * @return true only for "ns:department"
 */
protected static boolean typeProviderFallbackId(String type) {
	return "ns:department".equals(type);
}
/**
 * Tells whether an identifier type attribute names a division code.
 *
 * @param type the identifier type attribute
 * @return true only for "ns:division"
 */
protected static boolean typeProviderDivision(String type) {
	return "ns:division".equals(type);
}
/**
* Process <course> tag
*
* @param course
* @param departmentCode
* @param departmentName
* @param divisionEmail
* @param divisionSuperUsers
* @param createComponents
* @throws IOException
*/
private void course(Course course,
String departmentCode, String departmentName,
String divisionEmail, Collection<String> divisionSuperUsers,
XcriOxcapPopulatorInstanceData data,
boolean createComponents)
throws IOException {
String title = course.getTitles()[0].getValue();
OxcapCourse oxCourse = (OxcapCourse)course;
String visibility = oxCourse.getVisibility().toString();
String regulations = null;
if (course.getRegulations().length > 0) {
Regulations xRegulations = course.getRegulations()[0];
if (!xRegulations.isXhtml()) {
regulations = parse(xRegulations.getValue());
} else {
regulations = xRegulations.getValue();
}
}
Collection<Subject> researchCategories = new HashSet<Subject>();
Collection<Subject> skillsCategories = new HashSet<Subject>();
Collection<Subject> jacsCategories = new HashSet<Subject>();
String id = null;
String teachingcomponentId = null;
boolean supervisorApproval = true;
boolean administratorApproval = true;
String subunitCode = null;
String subunitName = null;
Collection<String> administrators = new HashSet<String>();
Collection<String> otherDepartments = new HashSet<String>();
for (Extension extension : course.getExtensions()) {
if (extension instanceof Identifier) {
Identifier identifier = (Identifier) extension;
if (typeCourseId(identifier.getType())) {
id = identifier.getValue();
}
if ("teachingComponentId".equals(identifier.getType())) {
teachingcomponentId = identifier.getValue();
}
continue;
}
if (extension instanceof SupervisorApproval) {
supervisorApproval = parseBoolean(extension.getValue());
continue;
}
if (extension instanceof ModuleApproval) {
administratorApproval = parseBoolean(extension.getValue());
continue;
}
if (extension instanceof CourseSubUnit) {
CourseSubUnit subUnit = (CourseSubUnit)extension;
subunitCode = subUnit.getCode();
subunitName = subUnit.getValue();
continue;
}
if (extension instanceof WebAuthCode) {
WebAuthCode webAuthCode = (WebAuthCode) extension;
if (webAuthCode.getWebAuthCodeType() == WebAuthCode.WebAuthCodeType.administrator) {
administrators.add(getUser(webAuthCode.getValue()));
}
continue;
}
if (extension instanceof OtherDepartment) {
if (!extension.getValue().isEmpty()) {
otherDepartments.add(extension.getValue());
}
continue;
}
if (extension instanceof Subject) {
Subject subject = (Subject) extension;
if (subject.isRDFCategory()) {
skillsCategories.add(subject);
}
if (subject.isRMCategory()) {
researchCategories.add(subject);
}
if (subject.isJACSCategory()) {
jacsCategories.add(subject);
}
continue;
}
}
if (null == id) {
data.logMe(
"Log Failure Course ["+id+":"+title+"] No Course Identifier");
return;
}
String description = null;
if (course.getDescriptions().length > 0) {
Description xDescription = course.getDescriptions()[0];
if (!xDescription.isXhtml()) {
description = parse(xDescription.getValue());
} else {
description = xDescription.getValue();
}
} else {
data.logMe(
"Log Warning Course ["+id+":"+title+"] has no description");
}
if (createComponents) {
Presentation[] presentations = course.getPresentations();
for (int i=0; i<presentations.length; i++) {
presentation(presentations[i], id, teachingcomponentId, data);
}
} else {
if (!id.equals(data.getLastGroup())) {
data.incrGroupSeen();
data.setLastGroup(id);
if (validCourse(data, id, title, departmentCode, subunitCode, description,
departmentName, subunitName, visibility,
supervisorApproval, administratorApproval,
divisionEmail, regulations,
(Set<String>) administrators,
(Set<String>) divisionSuperUsers,
(Set<String>) otherDepartments,
(Set<Subject>) researchCategories,
(Set<Subject>) skillsCategories,
(Set<Subject>) jacsCategories)) {
if (updateCourse(data, id, title, departmentCode, subunitCode, description,
departmentName, subunitName, visibility,
supervisorApproval, administratorApproval,
divisionEmail, regulations, data.getFeed(),
(Set<String>) administrators,
(Set<String>) divisionSuperUsers,
(Set<String>) otherDepartments,
(Set<Subject>) researchCategories,
(Set<Subject>) skillsCategories,
(Set<Subject>) jacsCategories)) {
data.incrGroupCreated();
} else {
data.incrGroupUpdated();
}
}
}
}
}
/**
*
* @param type
* @return
*/
protected static boolean typeCourseId(String type) {
if ("ns:daisy-course".equals(type) ||
"ns:itlp-course".equals(type) ||
"ns:careers-course".equals(type) ||
"ns:language-centre-course".equals(type) ||
"ns:medsci-course".equals(type) ||
"ns:sharepoint-course".equals(type)) {
return true;
}
return false;
}
/**
*
* @param presentation
* @param teachingcomponentId
* @param groups
* @throws IOException
*/
private void presentation(Presentation presentation,
String assessmentunitCode, String teachingcomponentId, XcriOxcapPopulatorInstanceData data)
throws IOException {
String title = presentation.getTitles()[0].getValue();
String subject = null;
String slot = null;
String applyTo = null;
Date startDate = null;
String startText = null;
Date endDate = null;
String endText = null;
Date openDate = null;
String openText = null;
Date closeDate = null;
String closeText = null;
int capacity = 0;
String location = null;
String attendanceMode = null;
String attendanceModeText = null;
String attendancePattern = null;
String attendancePatternText = null;
if (null != presentation.getAttendanceMode()) {
attendanceMode = presentation.getAttendanceMode().getIdentifier();
attendanceModeText = presentation.getAttendanceMode().getValue();
}
if (null != presentation.getAttendancePattern()) {
attendancePattern = presentation.getAttendancePattern().getIdentifier();
attendancePatternText = presentation.getAttendancePattern().getValue();
}
if (null != presentation.getApplyTo()) {
applyTo = presentation.getApplyTo().getValue();
}
if (null != presentation.getStart()) {
startDate = presentation.getStart().getDtf();
startText = presentation.getStart().getValue();
}
if (null != presentation.getEnd()) {
endDate = presentation.getEnd().getDtf();
endText = presentation.getEnd().getValue();
}
if (null != presentation.getApplyFrom()) {
openDate = presentation.getApplyFrom().getDtf();
openText = presentation.getApplyFrom().getValue();
}
if (null != presentation.getApplyUntil()) {
closeDate = presentation.getApplyUntil().getDtf();
closeText = presentation.getApplyUntil().getValue();
}
if (0 != presentation.getVenues().length) {
Venue venue = presentation.getVenues()[0];
if (null != venue.getProvider() && venue.getProvider().getTitles().length > 0) {
location = venue.getProvider().getTitles()[0].getValue();
}
}
boolean bookable = false;
String id = null;
String uri = null;
String teacherId = null;
String teacherName = null;
String teacherEmail = null;
String sessionCount = null;
String termCode = null;
String sessionDates = null;
String memberApplyTo = null;
String teachingDetails = null;
Collection<Session> sessions = new HashSet<Session>();
for (Extension extension : presentation.getExtensions()) {
if (extension instanceof Identifier) {
Identifier identifier = (Identifier) extension;
if ("presentationURI".equals(identifier.getType())) {
uri = identifier.getValue();
continue;
}
if (typePresentationId(identifier.getType())) {
id = identifier.getValue();
continue;
}
}
if (extension instanceof Bookable) {
bookable = parseBoolean(extension.getValue());
continue;
}
if (extension instanceof EmployeeName) {
teacherName = extension.getValue();
continue;
}
if (extension instanceof EmployeeEmail) {
teacherEmail = extension.getValue();
continue;
}
if (extension instanceof MemberApplyTo) {
memberApplyTo = extension.getValue();
continue;
}
if (extension instanceof Sessions) {
sessionCount = extension.getValue();
continue;
}
if (extension instanceof TermCode) {
termCode = extension.getValue();
continue;
}
if (extension instanceof TermLabel) {
sessionDates = extension.getValue();
continue;
}
if (extension instanceof TeachingDetails) {
teachingDetails = extension.getValue();
continue;
}
if (extension instanceof WebAuthCode) {
WebAuthCode webAuthCode = (WebAuthCode) extension;
if (webAuthCode.getWebAuthCodeType() == WebAuthCode.WebAuthCodeType.presenter) {
teacherId = webAuthCode.getValue();
}
continue;
}
if (extension instanceof Session) {
Session session = (Session)extension;
if (session.getIdentifiers().length > 0) {
sessions.add(session);
continue;
}
}
}
if (null != presentation.getPlaces() &&
!presentation.getPlaces().getValue().isEmpty()) {
try {
capacity = Integer.parseInt(presentation.getPlaces().getValue());
} catch (Exception e) {
data.logMe(
"Log Warning Presentation ["+id+":"+title+"] value in places tag is not a number ["+presentation.getPlaces().getValue()+"]");
}
}
Set<String> groups = new HashSet<String>();
groups.add(assessmentunitCode);
Collection<CourseGroupDAO> courseGroups = getCourseGroups(groups);
data.incrComponentSeen();
if (validComponent(data, id, title, subject,
openDate, openText, closeDate, closeText, startDate, startText, endDate, endText,
bookable, capacity,
termCode, teachingcomponentId, sessionDates,
teacherId, teacherName, teacherEmail,
attendanceMode, attendanceModeText,
attendancePattern, attendancePatternText,
slot, sessionCount, location, applyTo, memberApplyTo,
teachingDetails,
(Set<Session>) sessions, (Set<CourseGroupDAO>) courseGroups)) {
if (updateComponent(data, id, title, subject,
openDate, openText, closeDate, closeText, startDate, startText, endDate, endText,
bookable, capacity,
termCode, teachingcomponentId, sessionDates,
teacherId, teacherName, teacherEmail,
attendanceMode, attendanceModeText,
attendancePattern, attendancePatternText,
slot, sessionCount, location, applyTo, memberApplyTo,
teachingDetails,data.getFeed(),
(Set<Session>) sessions, (Set<CourseGroupDAO>) courseGroups)) {
data.incrComponentCreated();
} else {
data.incrComponentUpdated();
}
}
}
/**
*
* @param type
* @return
*/
protected static boolean typePresentationId(String type) {
if ("ns:daisy-presentation".equals(type) ||
"ns:careers-presentation".equals(type) ||
"ns:itlp-presentation".equals(type) ||
"ns:language-centre-presentation".equals(type) ||
"ns:medsci-presentation".equals(type) ||
"ns:sharepoint-presentation".equals(type)) {
return true;
}
return false;
}
/**
*
* @param code
* @param name
* @param approve
* @param approvers
* @return
*/
private boolean updateDepartment(String code, String name, boolean approve, Set<String> approvers) {
log.debug("XcriPopulatorImpl.updateDepartment ["+code+":"+name+":"+
approve+":"+approvers.size()+"]");
boolean created = false;
if (null != dao) {
CourseDepartmentDAO departmentDao = dao.findDepartmentByCode(code);
if (null == departmentDao) {
departmentDao = new CourseDepartmentDAO(code);
created = true;
}
departmentDao.setName(name);
departmentDao.setApprove(approve);
departmentDao.setApprovers(approvers);
dao.save(departmentDao);
}
return created;
}
/**
*
* @param code
* @param name
* @param departmentCode
* @return
*/
private boolean updateSubUnit(String code, String name, String departmentCode) {
log.debug("XcriPopulatorImpl.updateSubUnit ["+
code+":"+name+":"+departmentCode+"]");
boolean created = false;
if (null != dao) {
CourseSubunitDAO subunitDao = dao.findSubunitByCode(code);
if (null == subunitDao) {
subunitDao = new CourseSubunitDAO(code);
created = true;
}
subunitDao.setSubunitName(name);
subunitDao.setDepartmentCode(departmentCode);
dao.save(subunitDao);
}
return created;
}
/**
*
* @param code
* @param administrators
* @return
*/
protected boolean validCourse(XcriOxcapPopulatorInstanceData data, String code, String title, String departmentCode, String subunitCode,
String description, String departmentName, String subunitName,
String visibility, boolean supervisorApproval, boolean administratorApproval,
String divisionEmail, String regulations,
Set<String> administrators, Set<String> superusers, Set<String> otherDepartments,
Set<Subject> researchCategories, Set<Subject> skillsCategories, Set<Subject> jacsCategories) {
log.debug("XcriPopulatorImpl.validCourse ["+code+":"+title+":"+departmentCode+":"+subunitCode+":"+
description+":"+departmentName+":"+subunitName+":"+
visibility+":"+supervisorApproval+":"+administratorApproval+":"+
divisionEmail+":"+
administrators.size()+":"+superusers.size()+":"+otherDepartments.size()+":"+
researchCategories.size()+":"+skillsCategories.size()+"]");
int i=0;
try {
if (null == code) {
logMe(data, "Log Failure Assessment Unit ["+code+":"+title+"] No AssessmentUnit code");
i++;
}
if (i == 0) {
return true;
}
} catch (IOException e) {
}
return false;
}
	/**
	 * Creates or updates the course group record for the given id.
	 *
	 * When no record exists a new one is created; otherwise the existing
	 * record's department/sub-unit/title are refreshed. All remaining fields
	 * are overwritten from the feed data and the record is undeleted. The
	 * group's categories are reconciled in place: stale ones removed, new
	 * ones added.
	 *
	 * @param data populator run state used for logging
	 * @param id course group id
	 * @param title course group title
	 * @param departmentCode owning department code
	 * @param subunitCode owning sub-unit code
	 * @param description course description (may be null)
	 * @param departmentName owning department name
	 * @param subunitName owning sub-unit name
	 * @param visibility course visibility flag
	 * @param supervisorApproval whether supervisor approval is required
	 * @param administratorApproval whether administrator approval is required
	 * @param divisionEmail division contact e-mail address
	 * @param regulations course regulations text (may be null)
	 * @param feed name of the source feed
	 * @param administrators course administrators
	 * @param superusers division super users (null treated as empty)
	 * @param otherDepartments other departments linked to the course (null treated as empty)
	 * @param researchCategories RM subject categories
	 * @param skillsCategories RDF subject categories
	 * @param jacsCategories JACS subject categories
	 * @return true when a new record was created, false when an existing one
	 *         was updated (always false when no DAO layer is available)
	 * @throws IOException if writing the outcome to the populator log fails
	 */
	private boolean updateCourse(XcriOxcapPopulatorInstanceData data, String id, String title, String departmentCode, String subunitCode,
			String description, String departmentName, String subunitName,
			String visibility, boolean supervisorApproval, boolean administratorApproval,
			String divisionEmail, String regulations, String feed,
			Set<String> administrators,
			Set<String> superusers,
			Set<String> otherDepartments,
			Set<Subject> researchCategories,
			Set<Subject> skillsCategories,
			Set<Subject> jacsCategories) throws IOException {

		boolean created = false;
		if (null != dao) {
			CourseGroupDAO groupDao = dao.findCourseGroupById(id);
			// Create the group when it doesn't exist yet, otherwise refresh its key fields
			if (groupDao == null) {
				groupDao = dao.newCourseGroup(id, title, departmentCode, subunitCode);
				created = true;
			} else {
				groupDao.setDept(departmentCode);
				groupDao.setSubunit(subunitCode);
				groupDao.setTitle(title);
			}
			// Overwrite the remaining fields from the feed data
			groupDao.setDescription(description);
			groupDao.setDepartmentName(departmentName);
			groupDao.setSubunitName(subunitName);
			groupDao.setVisibility(visibility);
			groupDao.setSource(feed);
			groupDao.setSupervisorApproval(supervisorApproval);
			groupDao.setAdministratorApproval(administratorApproval);
			groupDao.setContactEmail(divisionEmail);
			groupDao.setAdministrators(administrators);
			groupDao.setRegulations(regulations);
			// Re-importing a group resurrects it if it was previously deleted
			groupDao.setDeleted(false);
			if (null==superusers) {
				superusers = Collections.<String>emptySet();
			}
			groupDao.setSuperusers(superusers);
			if (null==otherDepartments) {
				otherDepartments = Collections.<String>emptySet();
			}
			groupDao.setOtherDepartments(otherDepartments);

			// Build the full target category set (RM + RDF + JACS)
			Set<CourseCategoryDAO> categories = new HashSet<CourseCategoryDAO>();
			for (Subject subject : researchCategories) {
				categories.add(new CourseCategoryDAO(
						CourseGroup.Category_Type.RM, subject.getIdentifier(), subject.getValue()));
			}
			for (Subject subject : skillsCategories) {
				categories.add(new CourseCategoryDAO(
						CourseGroup.Category_Type.RDF, subject.getIdentifier(), subject.getValue()));
			}
			for (Subject subject : jacsCategories) {
				categories.add(new CourseCategoryDAO(
						CourseGroup.Category_Type.JACS, subject.getIdentifier(), subject.getValue()));
			}

			//remove unwanted categories
			// done this way to avoid java.util.ConcurrentModificationException
			for (Iterator<CourseCategoryDAO> itr = groupDao.getCategories().iterator(); itr.hasNext();) {
				CourseCategoryDAO category = itr.next();
				if (!categories.contains(category)) {
					itr.remove();
				}
			}

			//add any new categories
			for (CourseCategoryDAO category : categories) {
				if (!groupDao.getCategories().contains(category)) {
					groupDao.getCategories().add(category);
				}
			}

			dao.save(groupDao);
		}
		if (created) {
			logMs(data, "Log Success Course Group created ["+id+":"+title+"]");
		} else {
			logMs(data, "Log Success Course Group updated ["+id+":"+title+"]");
		}
		return created;
	}
/**
*
* @param id
* @param title
* @param subject
* @param openDate
* @param closeDate
* @param expiryDate
* @param termCode
* @param teachingComponentId
* @param termName
* @param groups
* @return
*/
protected boolean validComponent(XcriOxcapPopulatorInstanceData data, String id, String title, String subject,
Date openDate, String openText, Date closeDate, String closeText, Date startDate, String startText, Date endDate, String endText,
boolean bookable, int capacity,
String termCode, String teachingComponentId, String termName,
String teacherId, String teacherName, String teacherEmail,
String attendanceMode, String attendanceModeText,
String attendancePattern, String attendancePatternText,
String sessionDates, String sessionCount, String location, String applyTo, String memberApplyTo,
String teachingDetails,
Set<Session> sessions, Set<CourseGroupDAO> groups) {
log.debug("XcriPopulatorImpl.validComponent ["+id+":"+title+":"+subject+":"+
viewDate(openDate, openText)+":"+viewDate(closeDate, closeText)+":"+viewDate(startDate, startText)+":"+viewDate(endDate, endText)+":"+
bookable+":"+capacity+":"+
termCode+":"+teachingComponentId+":"+termName+":"+
teacherId+":"+teacherName+":"+teacherEmail+":"+
attendanceMode+":"+attendanceModeText+":"+
attendancePattern+":"+attendancePatternText+":"+
sessionDates+":"+sessions+":"+location+":"+
applyTo+":"+memberApplyTo+":"+teachingDetails+":"+
groups.size()+"]");
int i=0;
try {
if (null != openDate && null != closeDate) {
if (openDate.after(closeDate)){
logMe(data, "Log Failure Teaching Instance ["+id+":"+title+"] Open date is after close date");
i++;
}
}
if (title == null || title.trim().length() == 0) {
logMe(data, "Log Failure Teaching Instance ["+id+":"+title+"] Title isn't set");
i++;
}
if (groups.isEmpty()) {
logMe(data, "Log Failure Teaching Instance ["+id+":"+title+"] No Assessment Unit codes");
i++;
}
if (i == 0) {
return true;
}
} catch (IOException e) {
}
return false;
}
	/**
	 * Creates or updates the course component record for the given id.
	 *
	 * When no record exists a new one is created. All fields are then
	 * overwritten from the feed data, the component's group links are reset
	 * to the supplied set, the record is undeleted, and the component's
	 * sessions are appended from the feed.
	 *
	 * Teacher details: when a teacher id is supplied, WebLearn (the user
	 * proxy) is queried first and its display name/e-mail override the
	 * feed-supplied values; otherwise the feed values are used as-is.
	 *
	 * @param data populator run state used for logging
	 * @param id course component id
	 * @param title component title
	 * @param subject component subject (may be null)
	 * @param openDate sign-up open date, with {@code openText} as its display text
	 * @param closeDate sign-up close date, with {@code closeText} as its display text
	 * @param startDate teaching start date, with {@code startText} as its display text
	 * @param endDate teaching end date, with {@code endText} as its display text
	 * @param bookable whether the component can be booked
	 * @param capacity number of places (0 when unknown)
	 * @param termCode term code, also used to build the composite component id
	 * @param teachingComponentId teaching component identifier
	 * @param termName term display name
	 * @param teacherId teacher WebAuth id (may be null)
	 * @param teacherName teacher name from the feed (fallback)
	 * @param teacherEmail teacher e-mail from the feed (fallback)
	 * @param attendanceMode attendance mode identifier and {@code attendanceModeText} display text
	 * @param attendancePattern attendance pattern identifier and {@code attendancePatternText} display text
	 * @param sessionDates session dates display text
	 * @param sessionCount number of sessions as text
	 * @param location component location
	 * @param applyTo application contact
	 * @param memberApplyTo member application contact
	 * @param teachingDetails teaching details text
	 * @param feed name of the source feed
	 * @param sessions individual sessions to attach to the component
	 * @param groups course groups this component belongs to
	 * @return true when a new record was created, false when an existing one
	 *         was updated (always false when no DAO layer is available)
	 * @throws IOException if writing the outcome to the populator log fails
	 */
	private boolean updateComponent(XcriOxcapPopulatorInstanceData data, String id, String title, String subject,
			Date openDate, String openText, Date closeDate, String closeText, Date startDate, String startText, Date endDate, String endText,
			boolean bookable, int capacity,
			String termCode, String teachingComponentId, String termName,
			String teacherId, String teacherName, String teacherEmail,
			String attendanceMode, String attendanceModeText,
			String attendancePattern, String attendancePatternText,
			String sessionDates, String sessionCount, String location,
			String applyTo, String memberApplyTo, String teachingDetails, String feed,
			Set<Session> sessions, Set<CourseGroupDAO> groups) throws IOException {

		boolean created = false;
		if (null != dao) {
			CourseComponentDAO componentDao = dao.findCourseComponent(id);
			// Create the component when it doesn't exist yet
			if (componentDao == null) {
				componentDao = dao.newCourseComponent(id);
				created = true;
			}
			componentDao.setTitle(title);
			componentDao.setSubject(subject);
			componentDao.setOpens(openDate);
			componentDao.setOpensText(openText);
			componentDao.setCloses(closeDate);
			componentDao.setClosesText(closeText);
			componentDao.setStarts(startDate);
			componentDao.setStartsText(startText);
			componentDao.setEnds(endDate);
			componentDao.setEndsText(endText);
			componentDao.setBookable(bookable);
			componentDao.setSize(capacity);
			componentDao.setTermcode(termCode);
			componentDao.setAttendanceMode(attendanceMode);
			componentDao.setAttendanceModeText(attendanceModeText);
			componentDao.setAttendancePattern(attendancePattern);
			componentDao.setAttendancePatternText(attendancePatternText);
			// Composite id combines the teaching component with the term
			componentDao.setComponentId(teachingComponentId+":"+termCode);

			componentDao.setBaseDate(baseDate(componentDao));
			componentDao.setSource(feed);

			// Cleanout existing groups.
			componentDao.setGroups(new HashSet<CourseGroupDAO>());

			// Populate teacher details.
			// Look for details in WebLearn first then fallback to details in DAISY.
			if (teacherId != null && teacherId.length() > 0) {
				UserProxy teacher = proxy.findUserByEid(teacherId);
				if (teacher != null) {
					teacherName = teacher.getDisplayName();
					teacherEmail = teacher.getEmail();
				}
			}

			componentDao.setTeacherName(teacherName);
			componentDao.setTeacherEmail(teacherEmail);
			componentDao.setWhen(termName);
			componentDao.setSlot(sessionDates);
			componentDao.setSessions(sessionCount);
			componentDao.setLocation(location);
			componentDao.setApplyTo(applyTo);
			componentDao.setMemberApplyTo(memberApplyTo);
			componentDao.setTeachingDetails(teachingDetails);
			componentDao.setGroups(groups);
			// Re-importing a component resurrects it if it was previously deleted
			componentDao.setDeleted(false);

			// Append the feed's sessions to the component's session records
			Collection<CourseComponentSessionDAO> componentSessions = componentDao.getComponentSessions();
			for (Session session : sessions) {
				componentSessions.add(
						new CourseComponentSessionDAO(session.getIdentifiers()[0].getValue(),
								session.getStart().getDtf(), session.getStart().getValue(),
								session.getEnd().getDtf(), session.getEnd().getValue()));
			}
			dao.save(componentDao);
		}
		if (created) {
			logMs(data, "Log Success Course Component created ["+id+":"+title+"]");
		} else {
			logMs(data, "Log Success Course Component updated ["+id+":"+title+"]");
		}
		return created;
	}
/**
* @throws IOException
*
*/
private void logMe(XcriOxcapPopulatorInstanceData data, String message) throws IOException {
log.warn(message);
if (null != data) {
data.logMe(message);
}
}
/**
* @throws IOException
*
*/
private void logMs(XcriOxcapPopulatorInstanceData data, String message) throws IOException {
log.warn(message);
if (null != data) {
data.logMs(message);
}
}
/**
*
* @param userCode
* @return
*/
private String getUser (String userCode) {
if (null == proxy) {
return userCode;
}
UserProxy user = proxy.findUserByEid(userCode);
if (null == user) {
log.warn("Failed to find User [" + userCode +"]");
return null;
}
return user.getId();
}
/**
*
* @param groups
* @return
*/
private Collection<CourseGroupDAO> getCourseGroups (Collection<String> groups) {
Set<CourseGroupDAO> courseGroups = new HashSet<CourseGroupDAO>();
for (String group : groups) {
CourseGroupDAO courseDao = null;
if (null == dao) {
courseDao = new CourseGroupDAO();
courseDao.setCourseId(group);
} else {
courseDao = dao.findCourseGroupById(group);
}
if (null == courseDao) {
System.out.println("Failed to find Group [" + group +"]");
} else {
courseGroups.add(courseDao);
}
}
return courseGroups;
}
protected static String viewDate(Date date, String text) {
if (null == date) {
return text+"[null]";
}
SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy");
return text+"["+sdf.format(date)+"]";
}
private static boolean parseBoolean(String data) {
if ("1".equals(data)) {
return true;
}
if ("0".equals(data)) {
return false;
}
return Boolean.parseBoolean(data);
}
/**
*
* @param component
* @return
*/
public static Date baseDate(CourseComponentDAO component) {
if (null != component.getStarts()) {
return component.getStarts();
}
if (null != component.getCloses()) {
return component.getCloses();
}
return null;
}
/**
*
* @param data
* @return
*/
protected static String parse(String data) {
data = data.replaceAll("<", "<");
data = data.replaceAll(">", ">");
data = FormattedText.convertPlaintextToFormattedText(data);
Pattern pattern = Pattern.compile("[A-Z0-9._%+-]+@[A-Z0-9.-]+\\.[A-Z]{2,4}", Pattern.CASE_INSENSITIVE);
Matcher matcher = pattern.matcher(data);
StringBuffer sb = new StringBuffer(data.length());
while (matcher.find()) {
String text = matcher.group(0);
matcher.appendReplacement(sb, "<a class=\"email\" href=\"mailto:"+text+"\">"+text+"</a>" );
}
matcher.appendTail(sb);
pattern = Pattern.compile("(https?|ftps?):\\/\\/[a-z_0-9\\\\\\-]+(\\.([\\w#!:?+=&%@!\\-\\/])+)+", Pattern.CASE_INSENSITIVE);
matcher = pattern.matcher(sb.toString());
sb = new StringBuffer(data.length());
while (matcher.find()) {
String text = matcher.group(0);
matcher.appendReplacement(sb, "<a class=\"url\" href=\""+text+"\" target=\"_blank\">"+text+"</a>" );
}
matcher.appendTail(sb);
return sb.toString();
}
}
| WL-2639 Use objects to reduce arguments
| impl/src/main/java/uk/ac/ox/oucs/vle/XcriOxCapPopulatorImpl.java | WL-2639 Use objects to reduce arguments |
|
Java | apache-2.0 | 9952b2630efa209764f5f07e876519658635c36f | 0 | Sylvain-Bugat/rundeck-slack-plugin,johnpaularthur/rundeck-slack-plugin | package com.github.sbugat.rundeck.plugins;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import com.dtolabs.rundeck.core.plugins.Plugin;
import com.dtolabs.rundeck.plugins.ServiceNameConstants;
import com.dtolabs.rundeck.plugins.descriptions.PluginDescription;
import com.dtolabs.rundeck.plugins.descriptions.PluginProperty;
import com.dtolabs.rundeck.plugins.notification.NotificationPlugin;
/**
 * Rundeck slack plugin class: posts job execution notifications to a Slack
 * incoming WebHook as a formatted attachment message.
 *
 * @author Sylvain Bugat
 *
 */
@Plugin(service = ServiceNameConstants.Notification, name = "SlackNotification")
@PluginDescription(title = "Slack")
public class SlackPlugin implements NotificationPlugin {

	/** Attachment color for successful or starting executions. */
	private static final String SLACK_SUCCESS_COLOR = "good";
	/** Attachment color for failed, aborted or timed-out executions. */
	private static final String SLACK_FAILED_COLOR = "danger";

	@PluginProperty(title = "Incoming WebHook URL", description = "Slack incoming WebHook URL", required = true)
	private String slackIncomingWebHookUrl;

	@PluginProperty(title = "WebHook channel", description = "Override default WebHook channel")
	private String slackOverrideDefaultWebHookChannel;

	@PluginProperty(title = "WebHook name", description = "Override default WebHook name")
	private String slackOverrideDefaultWebHookName;

	@PluginProperty(title = "WebHook emoji", description = "Override default WebHook icon (emoji)")
	private String slackOverrideDefaultWebHookEmoji;

	/**
	 * Posts the execution notification to the configured Slack incoming WebHook.
	 *
	 * @param trigger execution status ("start", "success", "failure", ...)
	 * @param executionData current execution state
	 * @param config plugin configuration
	 * @return true when Slack accepted the notification (2xx HTTP status), false otherwise
	 */
	@Override
	public boolean postNotification(final String trigger, @SuppressWarnings("rawtypes") final Map executionData, @SuppressWarnings("rawtypes") final Map config) {

		try {
			// Prepare the connection to Slack
			final HttpURLConnection connection = (HttpURLConnection) new URL(slackIncomingWebHookUrl).openConnection();
			connection.setRequestMethod("POST");
			connection.setRequestProperty("charset", StandardCharsets.UTF_8.name());
			connection.setUseCaches(false);
			connection.setDoInput(true);
			connection.setDoOutput(true);

			// Send the WebHook message; try-with-resources closes the stream even if writing fails
			try (final DataOutputStream wr = new DataOutputStream(connection.getOutputStream())) {
				wr.writeBytes("payload=" + URLEncoder.encode("{" + getMessageOptions() + getMessage(trigger, executionData, config) + "}", StandardCharsets.UTF_8.name()));
			}

			// Only a 2xx HTTP response means Slack accepted the notification
			final int responseCode = connection.getResponseCode();
			return responseCode >= 200 && responseCode < 300;
		} catch (final IOException e) {
			e.printStackTrace();
			return false;
		}
	}

	public String getSlackIncomingWebHookUrl() {
		return slackIncomingWebHookUrl;
	}

	public void setSlackIncomingWebHookUrl(final String slackIncomingWebHookUrl) {
		this.slackIncomingWebHookUrl = slackIncomingWebHookUrl;
	}

	/**
	 * Return a message with overrided options.
	 *
	 * @return optional message fragment with channel, username and emoji to use
	 */
	private String getMessageOptions() {

		final StringBuilder stringBuilder = new StringBuilder();
		if (null != slackOverrideDefaultWebHookChannel) {
			stringBuilder.append("\"channel\":");
			stringBuilder.append("\"" + slackOverrideDefaultWebHookChannel + "\", ");
		}
		if (null != slackOverrideDefaultWebHookName) {
			stringBuilder.append("\"username\":");
			stringBuilder.append("\"" + slackOverrideDefaultWebHookName + "\", ");
		}
		if (null != slackOverrideDefaultWebHookEmoji) {
			stringBuilder.append("\"icon_emoji\":");
			stringBuilder.append("\"" + slackOverrideDefaultWebHookEmoji + "\", ");
		}

		return stringBuilder.toString();
	}

	/**
	 * Return a Slack message with the job execution data.
	 *
	 * @param trigger execution status
	 * @param executionData current execution state
	 * @param config plugin configuration
	 *
	 * @return complete job execution message to send to Slack
	 */
	private String getMessage(final String trigger, @SuppressWarnings("rawtypes") final Map executionData, @SuppressWarnings("rawtypes") final Map config) {

		// Success and starting execution are good(green); String content must be
		// compared with equals(), not ==
		final String statusColor;
		if ("success".equals(trigger) || "start".equals(trigger)) {
			statusColor = SLACK_SUCCESS_COLOR;
		} else {
			statusColor = SLACK_FAILED_COLOR;
		}

		@SuppressWarnings("unchecked")
		final Map<String, String> jobMap = (Map<String, String>) executionData.get("job");

		final String status = (String) executionData.get("status");
		final String jobStatus;
		final String endStatus;
		if ("aborted".equals(status) && null != executionData.get("abortedby")) {
			jobStatus = status.toUpperCase() + " by " + executionData.get("abortedby");
			endStatus = status + " by " + executionData.get("abortedby");
		} else if ("timedout".equals(status)) {
			jobStatus = status.toUpperCase();
			endStatus = "timed-out";
		} else {
			jobStatus = status.toUpperCase();
			endStatus = "ended";
		}

		// Context map containing additional information
		@SuppressWarnings("unchecked")
		final Map<String, Map<String, String>> contextMap = (Map<String, Map<String, String>>) executionData.get("context");
		final Map<String, String> jobContextMap = contextMap.get("job");

		final String projectUrl = jobContextMap.get("serverUrl") + "/" + jobContextMap.get("project");

		// Build one link per job group level
		final StringBuilder formatedGroups = new StringBuilder();
		if (null != jobContextMap.get("group")) {
			String rootGroups = "";
			for (final String group : jobContextMap.get("group").split("/")) {
				formatedGroups.append("<" + projectUrl + "/jobs/" + rootGroups + group + "|" + group + ">/");
				rootGroups = rootGroups + group + "/";
			}
		}

		final String title = "\"<" + executionData.get("href") + "|#" + executionData.get("id") + " - " + jobStatus + " - " + jobMap.get("name") + "> - <" + projectUrl + "|" + (String) executionData.get("project") + "> - " + formatedGroups + "<" + jobMap.get("href") + "|" + jobMap.get("name") + ">\"";

		final Long startTime = (Long) executionData.get("dateStartedUnixtime");
		final Long endTime = (Long) executionData.get("dateEndedUnixtime");
		final DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, Locale.getDefault());
		final String duration;
		if ("start".equals(trigger)) {
			duration = "Launched by " + executionData.get("user") + " at " + dateFormat.format(new Date(startTime));
		} else {
			duration = "Launched by " + executionData.get("user") + " at " + dateFormat.format(new Date(startTime)) + ", " + endStatus + " at " + dateFormat.format(new Date(endTime)) + " (duration: " + (endTime - startTime) / 1000 + "s)";
		}

		// Download link if the job fails
		final String download;
		if (!"success".equals(trigger) && !"start".equals(trigger)) {
			download = "\n<" + projectUrl + "/execution/downloadOutput/" + executionData.get("id") + "|Download log ouput>";
		} else {
			download = "";
		}

		final Map<String, String> optionContextMap = contextMap.get("option");
		final Map<String, String> secureOptionContextMap = contextMap.get("secureOption");

		// Option header
		final String option;
		if (optionContextMap.isEmpty()) {
			option = "";
		} else if (download.isEmpty()) {
			option = "\nJob options:";
		} else {
			option = ", job options:";
		}

		// Attachment begin and title
		final StringBuilder stringBuilder = new StringBuilder();
		stringBuilder.append(" \"attachments\":[");
		stringBuilder.append("	{");
		stringBuilder.append("		\"title\": " + title + ",");
		stringBuilder.append("		\"text\": \"" + duration + download + option + "\",");
		stringBuilder.append("		\"color\": \"" + statusColor + "\",");
		stringBuilder.append("		\"fields\":[");

		// Options part, secure options values are not displayed
		boolean firstOption = true;
		for (final Map.Entry<String, String> mapEntry : optionContextMap.entrySet()) {

			if (!firstOption) {
				stringBuilder.append(',');
			}
			stringBuilder.append("{");
			stringBuilder.append("	\"title\":\"" + mapEntry.getKey() + "\",");

			final String value;
			if (null != secureOptionContextMap && null != secureOptionContextMap.get(mapEntry.getKey())) {
				value = "***********";
			} else {
				value = mapEntry.getValue();
			}

			stringBuilder.append("	\"value\":\"" + value + "\",");
			stringBuilder.append("	\"short\":true");
			stringBuilder.append("}");

			firstOption = false;
		}

		stringBuilder.append("		]");
		stringBuilder.append("	}");

		@SuppressWarnings("unchecked")
		final List<String> failedNodeList = (List<String>) executionData.get("failedNodeList");
		@SuppressWarnings("unchecked")
		final Map<String, Integer> nodeStatus = (Map<String, Integer>) executionData.get("nodestatus");

		// Failed node part if a node is failed and if it's not the only one node executed
		if (null != failedNodeList && !failedNodeList.isEmpty() && nodeStatus.get("total") > 1) {

			stringBuilder.append(",");
			stringBuilder.append("	{");
			stringBuilder.append("		\"fallback\": \"Failed nodes list\",");
			stringBuilder.append("		\"text\": \"Failed nodes:\",");
			stringBuilder.append("		\"color\": \"" + statusColor + "\",");
			stringBuilder.append("		\"fields\":[");

			// Format a list with all failed nodes
			boolean firstNode = true;
			for (final String failedNode : failedNodeList) {

				if (!firstNode) {
					stringBuilder.append(',');
				}
				stringBuilder.append("{");
				stringBuilder.append("	\"title\":\"" + failedNode + "\",");
				stringBuilder.append("	\"short\":true");
				stringBuilder.append("}");

				firstNode = false;
			}

			stringBuilder.append("		]");
			stringBuilder.append("	}");
		}

		stringBuilder.append(" ]");

		return stringBuilder.toString();
	}
}
| src/main/java/com/github/sbugat/rundeck/plugins/SlackPlugin.java | package com.github.sbugat.rundeck.plugins;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import com.dtolabs.rundeck.core.plugins.Plugin;
import com.dtolabs.rundeck.plugins.ServiceNameConstants;
import com.dtolabs.rundeck.plugins.descriptions.PluginDescription;
import com.dtolabs.rundeck.plugins.descriptions.PluginProperty;
import com.dtolabs.rundeck.plugins.notification.NotificationPlugin;
/**
* Rundeck slack plugin class.
*
* @author Sylvain Bugat
*
*/
@Plugin(service = ServiceNameConstants.Notification, name = "SlackNotification")
@PluginDescription(title = "Slack")
public class SlackPlugin implements NotificationPlugin {
private static final String SLACK_SUCCESS_COLOR = "good";
private static final String SLACK_FAILED_COLOR = "danger";
@PluginProperty(title = "Incoming WebHook URL", description = "Slack incoming WebHook URL", required = true)
private String slackIncomingWebHookUrl;
@PluginProperty(title = "WebHook channel", description = "Override default WebHook channel")
private String slackOverrideDefaultWebHookChannel;
@PluginProperty(title = "WebHook name", description = "Override default WebHook name")
private String slackOverrideDefaultWebHookName;
@PluginProperty(title = "WebHook emoji", description = "Override default WebHook icon (emoji)")
private String slackOverrideDefaultWebHookEmoji;
/**
 * Rundeck notification entry point: posts the execution summary to the
 * configured Slack incoming WebHook URL as an url-encoded "payload=" form body.
 *
 * @param trigger notification trigger name ("start", "success", "failure", ...)
 * @param executionData Rundeck execution data map
 * @param config plugin configuration map
 * @return true if the HTTP POST was sent, false on any I/O error
 */
@Override
public boolean postNotification(final String trigger, @SuppressWarnings("rawtypes") final Map executionData, @SuppressWarnings("rawtypes") final Map config) {

    // Debug traces of the received trigger and maps (kept verbatim).
    System.out.println(trigger);
    System.out.println(slackIncomingWebHookUrl);

    for (final Object entry : executionData.keySet()) {
        if (null != executionData.get(entry)) {
            System.out.println(entry + " -> " + executionData.get(entry) + executionData.get(entry).getClass().getName());
        } else {
            System.out.println(entry + " -> " + executionData.get(entry));
        }
    }
    System.out.println("");
    for (final Object entry : config.keySet()) {
        if (null != config.get(entry)) {
            System.out.println(entry + " -> " + config.get(entry) + config.get(entry).getClass().getName());
        } else {
            System.out.println(entry + " -> " + config.get(entry));
        }
    }

    try {
        final HttpURLConnection connection = (HttpURLConnection) new URL(slackIncomingWebHookUrl).openConnection();
        connection.setRequestMethod("POST");
        connection.setRequestProperty("charset", StandardCharsets.UTF_8.name());
        connection.setUseCaches(false);
        connection.setDoInput(true);
        connection.setDoOutput(true);

        // try-with-resources: the original leaked the stream when writeBytes threw.
        try (final DataOutputStream wr = new DataOutputStream(connection.getOutputStream())) {
            wr.writeBytes("payload=" + URLEncoder.encode("{" + getMessageOptions() + getMessage(trigger, executionData, config) + "}", StandardCharsets.UTF_8.name()));
        }

        System.out.println(connection.getResponseCode());
    } catch (final IOException e) {
        e.printStackTrace();
        return false;
    }

    return true;
}
public String getSlackIncomingWebHookUrl() {
return slackIncomingWebHookUrl;
}
public void setSlackIncomingWebHookUrl(final String slackIncomingWebHookUrl) {
this.slackIncomingWebHookUrl = slackIncomingWebHookUrl;
}
/**
 * Builds the optional WebHook override fragment of the Slack payload:
 * channel, username and emoji icon, each emitted only when configured.
 *
 * @return a JSON fragment with a trailing ", " per emitted option, possibly empty
 */
private String getMessageOptions() {

    final StringBuilder options = new StringBuilder();
    appendOverride(options, "channel", slackOverrideDefaultWebHookChannel);
    appendOverride(options, "username", slackOverrideDefaultWebHookName);
    appendOverride(options, "icon_emoji", slackOverrideDefaultWebHookEmoji);
    return options.toString();
}

/** Appends "name":"value", to the builder when the override value is set. */
private static void appendOverride(final StringBuilder builder, final String name, final String value) {

    if (null != value) {
        builder.append("\"" + name + "\":");
        builder.append("\"" + value + "\", ");
    }
}
/**
 * Builds the Slack "attachments" JSON fragment for an execution notification:
 * a linked title (execution, project, job groups, job), the launch/end times,
 * an optional log-download link on failure, the job options (secure option
 * values masked) and the failed-nodes list when more than one node ran.
 *
 * Fix: trigger and status values were compared with == / != (reference
 * comparison); values coming from Rundeck at runtime are not guaranteed to be
 * interned, so equals() is used instead.
 *
 * @param trigger notification trigger ("start", "success", "failure", ...)
 * @param executionData Rundeck execution data
 * @param config plugin configuration (currently unused)
 * @return the attachments JSON fragment
 */
private String getMessage(final String trigger, @SuppressWarnings("rawtypes") final Map executionData, @SuppressWarnings("rawtypes") final Map config) {

    // Success and starting execution are good(green)
    final String statusColor;
    if ("success".equals(trigger) || "start".equals(trigger)) {
        statusColor = SLACK_SUCCESS_COLOR;
    } else {
        statusColor = SLACK_FAILED_COLOR;
    }

    @SuppressWarnings("unchecked")
    final Map<String, String> jobMap = (Map<String, String>) executionData.get("job");

    // Context map containing additional information
    @SuppressWarnings("unchecked")
    final Map<String, Map<String, String>> contextMap = (Map<String, Map<String, String>>) executionData.get("context");
    final Map<String, String> jobContextMap = contextMap.get("job");
    final Map<String, String> optionContextMap = contextMap.get("option");
    final Map<String, String> secureOptionContextMap = contextMap.get("secureOption");

    @SuppressWarnings("unchecked")
    final List<String> failedNodeList = (List<String>) executionData.get("failedNodeList");
    @SuppressWarnings("unchecked")
    final Map<String, Integer> nodeStatus = (Map<String, Integer>) executionData.get("nodestatus");

    // Human-readable status for the title, and the wording of the end-time line.
    final String jobStatus;
    final String endStatus;
    if ("aborted".equals(executionData.get("status")) && null != executionData.get("abortedby")) {
        jobStatus = ((String) executionData.get("status")).toUpperCase() + " by " + executionData.get("abortedby");
        endStatus = executionData.get("status") + " by " + executionData.get("abortedby");
    } else if ("timedout".equals(executionData.get("status"))) {
        jobStatus = ((String) executionData.get("status")).toUpperCase();
        endStatus = "timed-out";
    } else {
        jobStatus = ((String) executionData.get("status")).toUpperCase();
        endStatus = "ended";
    }

    final String projectUrl = jobContextMap.get("serverUrl") + "/" + jobContextMap.get("project");

    // Each job group segment is rendered as a link to that group's job listing.
    final StringBuilder formatedGroups = new StringBuilder();
    if (null != jobContextMap.get("group")) {
        String rootGroups = "";
        for (final String group : jobContextMap.get("group").split("/")) {
            formatedGroups.append("<" + projectUrl + "/jobs/" + rootGroups + group + "|" + group + ">/");
            rootGroups = rootGroups + group + "/";
        }
    }

    final String title = "\"<" + executionData.get("href") + "|#" + executionData.get("id") + " - " + jobStatus + " - " + jobMap.get("name") + "> - <" + projectUrl + "|" + (String) executionData.get("project") + "> - " + formatedGroups + "<" + jobMap.get("href") + "|" + jobMap.get("name") + ">\"";

    final Long startTime = (Long) executionData.get("dateStartedUnixtime");
    final Long endTime = (Long) executionData.get("dateEndedUnixtime");
    final DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, Locale.getDefault());

    final String duration;
    if ("start".equals(trigger)) {
        duration = "Launched by " + executionData.get("user") + " at " + dateFormat.format(new Date(startTime));
    } else {
        duration = "Launched by " + executionData.get("user") + " at " + dateFormat.format(new Date(startTime)) + ", " + endStatus + " at " + dateFormat.format(new Date(endTime)) + " (duration: " + (endTime - startTime) / 1000 + "s)";
    }

    // Download link if the job fails
    final String download;
    if (!"success".equals(trigger) && !"start".equals(trigger)) {
        download = "\n<" + projectUrl + "/execution/downloadOutput/" + executionData.get("id") + "|Download log ouput>";
    } else {
        download = "";
    }

    // Option header
    final String option;
    if (optionContextMap.isEmpty()) {
        option = "";
    } else if (download.isEmpty()) {
        option = "\nJob options:";
    } else {
        option = ", job options:";
    }

    // Attachment begin and title
    final StringBuilder stringBuilder = new StringBuilder();
    stringBuilder.append(" \"attachments\":[");
    stringBuilder.append(" {");
    stringBuilder.append(" \"title\": " + title + ",");
    stringBuilder.append(" \"text\": \"" + duration + download + option + "\",");
    stringBuilder.append(" \"color\": \"" + statusColor + "\",");
    stringBuilder.append(" \"fields\":[");

    // Options part, secure options values are not displayed
    boolean firstOption = true;
    for (final Map.Entry<String, String> mapEntry : optionContextMap.entrySet()) {
        if (!firstOption) {
            stringBuilder.append(',');
        }
        stringBuilder.append(" {");
        stringBuilder.append(" \"title\":\"" + mapEntry.getKey() + "\",");
        final String value;
        if (null != secureOptionContextMap && null != secureOptionContextMap.get(mapEntry.getKey())) {
            value = "***********";
        } else {
            value = mapEntry.getValue();
        }
        stringBuilder.append(" \"value\":\"" + value + "\",");
        stringBuilder.append(" \"short\":true");
        stringBuilder.append(" }");
        firstOption = false;
    }

    stringBuilder.append(" ]");
    stringBuilder.append(" }");

    // Failed node part if a node is failed and if it's not the only one node executed
    if (null != failedNodeList && !failedNodeList.isEmpty() && nodeStatus.get("total") > 1) {
        stringBuilder.append(",");
        stringBuilder.append(" {");
        stringBuilder.append(" \"fallback\": \"Failed nodes\",");
        stringBuilder.append(" \"text\": \"Failed nodes:\",");
        stringBuilder.append(" \"color\": \"" + statusColor + "\",");
        stringBuilder.append(" \"fields\":[");

        // Format a list with all failed nodes
        boolean firstNode = true;
        for (final String failedNode : failedNodeList) {
            if (!firstNode) {
                stringBuilder.append(',');
            }
            stringBuilder.append(" {");
            stringBuilder.append(" \"title\":\"" + failedNode + "\",");
            stringBuilder.append(" \"short\":true");
            stringBuilder.append(" }");
            firstNode = false;
        }
        stringBuilder.append(" ]");
        stringBuilder.append(" }");
    }

    stringBuilder.append(" ]");

    return stringBuilder.toString();
}
}
| Add comments and code reorder
| src/main/java/com/github/sbugat/rundeck/plugins/SlackPlugin.java | Add comments and code reorder |
|
Java | apache-2.0 | 799e593d93280d3302f681174f4d7290af7c3d6c | 0 | BrianBreniser/PSUCS410AgileProject | /*
* How to use this module:
* Run this program after running make:
* java -cp .:./lib:./lib/commons-net-3.3.jar -ea test
* need the -ea options or else the assert statements WILL NOT RUN
* I recommend doing this from the command line
* in intellij IDEA it works in the included terminal, alt+f12
*/
import java.io.IOException;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPReply;
import java.net.InetAddress;
import java.lang.Process;
import java.lang.Runtime;
public class test {
private static Runtime rt = Runtime.getRuntime();
private static String thesystem = "linux";
/**
* @param args
* Assumes the running system is linux unless an argument of 'windows' or 'Windows' is given
* will run the test suite against a localhost ftp server with static username and password
*/
public static void main(String[] args) {
// Run the ftp server that we will test with
Process pr = null;
if (args.length > 0) {
thesystem = args[0];
}
try {
if (thesystem.equals("windows") || thesystem.equals("Windows")) {
pr = rt.exec("sfk-windows.exe ftpserv -port=2121 -user=testuser -pw=password");
} else {
pr = rt.exec("./sfk ftpserv -port=2121 -user=testuser -pw=password");
}
} catch (IOException ex) {
System.out.println("Oops! Something wrong happened");
ex.printStackTrace();
}
// build our ftp client
ftp_client ftp = new ftp_client();
ftp.directSetupArgs("localhost", "testuser", "password", 2121);
ftp.setupFtp();
/*
* Run your tests in here, please label what the test should do ------------------
*/
// Some formatting to make finding errors easier
System.out.println();
System.out.println();
System.out.println(" --------- Errors: -----------");
System.out.println();
assert (ftp.getRemoteAddress().equals("localhost"));
/*
* Done running tests here -------------------------------------------------------
*/
// end formatting errors
System.out.println();
System.out.println(" No Errors :) ");
System.out.println();
System.out.println();
// kill the ftp server if it was successfully made
if(pr != null) {
pr.destroy();
}
}
}
| test.java | import java.io.IOException;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPReply;
import java.net.InetAddress;
import java.lang.Process;
import java.lang.Runtime;
/**
 * Test driver: starts a local sfk FTP server (port 2121, user "testuser",
 * password "password"), exercises ftp_client against it, then kills the
 * server process. Run with the JVM flag -ea so the assert statements run.
 */
public class test {

    // Used to launch the external FTP server binary.
    private static Runtime rt = Runtime.getRuntime();
    // Target OS name; "linux" unless overridden by the first program argument.
    private static String thesystem = "linux";

    /**
     * @param args
     * Assumes the running system is linux unless an argument of 'windows' or 'Windows' is given
     * will run the test suite against a localhost ftp server with static username and password
     */
    public static void main(String[] args) {

        // Run the ftp server that we will test with
        Process pr = null;

        if (args.length > 0) {
            thesystem = args[0];
        }

        try {
            // Only the exact spellings "windows" / "Windows" select the Windows binary.
            if (thesystem.equals("windows") || thesystem.equals("Windows")) {
                pr = rt.exec("sfk-windows.exe ftpserv -port=2121 -user=testuser -pw=password");
            } else {
                pr = rt.exec("./sfk ftpserv -port=2121 -user=testuser -pw=password");
            }
        } catch (IOException ex) {
            System.out.println("Oops! Something wrong happened");
            ex.printStackTrace();
        }

        // build our ftp client
        ftp_client ftp = new ftp_client();
        ftp.directSetupArgs("localhost", "testuser", "password", 2121);
        ftp.setupFtp();

        /*
         * Run your tests in here, please label what the test should do ------------------
         */

        // Some formatting to make finding errors easier
        System.out.println();
        System.out.println();
        System.out.println(" --------- Errors: -----------");
        System.out.println();

        assert (ftp.getRemoteAddress().equals("localhost"));

        /*
         * Done running tests here -------------------------------------------------------
         */

        // end formatting errors
        System.out.println();
        System.out.println(" No Errors :) ");
        System.out.println();
        System.out.println();

        // kill the ftp server if it was successfully made
        // NOTE(review): this is skipped when anything above throws or an assert
        // fails, leaking the server process; consider a try/finally.
        if (pr != null) {
            pr.destroy();
        }
    }
}
| test.java | added some inline documentation to test.java |
|
Java | apache-2.0 | 3c5311855a99c8dc1d833d47fe44b5fd37019011 | 0 | kairosdb/kairosdb,ppbizapps/kairosdb,ppbizapps/kairosdb,kairosdb/kairosdb,kairosdb/kairosdb,ppbizapps/kairosdb,kairosdb/kairosdb,kairosdb/kairosdb,ppbizapps/kairosdb,kairosdb/kairosdb,ppbizapps/kairosdb | /*
* Copyright 2016 KairosDB Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kairosdb.core.datastore;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ListMultimap;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import org.kairosdb.core.DataPoint;
import org.kairosdb.core.DataPointListener;
import org.kairosdb.core.KairosDataPointFactory;
import org.kairosdb.core.aggregator.Aggregator;
import org.kairosdb.core.aggregator.LimitAggregator;
import org.kairosdb.core.exception.DatastoreException;
import org.kairosdb.core.groupby.*;
import org.kairosdb.core.reporting.ThreadReporter;
import org.kairosdb.util.MemoryMonitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
public class KairosDatastore
{
public static final Logger logger = LoggerFactory.getLogger(KairosDatastore.class);
public static final String QUERY_CACHE_DIR = "kairosdb.query_cache.cache_dir";
public static final String KEEP_CACHE_FILES = "kairosdb.query_cache.keep_cache_files";
public static final String QUERY_METRIC_TIME = "kairosdb.datastore.query_time";
public static final String QUERIES_WAITING_METRIC_NAME = "kairosdb.datastore.queries_waiting";
public static final String QUERY_SAMPLE_SIZE = "kairosdb.datastore.query_sample_size";
public static final String QUERY_ROW_COUNT = "kairosdb.datastore.query_row_count";
private final Datastore m_datastore;
private final QueryQueuingManager m_queuingManager;
private final List<DataPointListener> m_dataPointListeners;
private final KairosDataPointFactory m_dataPointFactory;
private String m_baseCacheDir;
private volatile String m_cacheDir;
private final boolean m_keepCacheFiles;
@SuppressWarnings("ResultOfMethodCallIgnored")
@Inject
public KairosDatastore(Datastore datastore, QueryQueuingManager queuingManager,
List<DataPointListener> dataPointListeners, KairosDataPointFactory dataPointFactory,
@Named(KEEP_CACHE_FILES) boolean keepCacheFiles)
throws DatastoreException
{
m_datastore = checkNotNull(datastore);
m_dataPointListeners = checkNotNull(dataPointListeners);
m_queuingManager = checkNotNull(queuingManager);
m_dataPointFactory = dataPointFactory;
m_baseCacheDir = System.getProperty("java.io.tmpdir") + "/kairos_cache/";
m_keepCacheFiles = keepCacheFiles;
setupCacheDirectory();
}
@SuppressWarnings("UnusedDeclaration")
@Inject(optional = true)
public void setBaseCacheDir(@Named(QUERY_CACHE_DIR) String cacheTempDir)
{
if (cacheTempDir != null && !cacheTempDir.equals(""))
{
m_baseCacheDir = cacheTempDir;
setupCacheDirectory();
}
}
@SuppressWarnings("ResultOfMethodCallIgnored")
private void setupCacheDirectory()
{
cleanDirectory(new File(m_baseCacheDir));
newCacheDirectory();
File cacheDirectory = new File(m_cacheDir);
cacheDirectory.mkdirs();
checkState(cacheDirectory.exists(), "Unable to create Cache directory '" + m_cacheDir + "'");
}
/**
Make sure the folder exists
@param path
*/
private static void ensureFolder(String path)
{
File fPath = new File(path);
if (!fPath.exists())
fPath.mkdirs();
}
public String getCacheDir()
{
ensureFolder(m_cacheDir);
return (m_cacheDir);
}
@SuppressWarnings("ResultOfMethodCallIgnored")
private void newCacheDirectory()
{
String newCacheDir = m_baseCacheDir + "/" + System.currentTimeMillis() + "/";
ensureFolder(newCacheDir);
m_cacheDir = newCacheDir;
}
@SuppressWarnings("ResultOfMethodCallIgnored")
private void cleanDirectory(File directory)
{
	// Nothing to remove for a non-existent directory.
	if (!directory.exists())
		return;

	// Depth-first removal: recurse into subdirectories, then delete each entry.
	final File[] children = directory.listFiles();
	if (children != null)
	{
		for (final File child : children)
		{
			if (child.isDirectory())
				cleanDirectory(child);
			child.delete();
		}
	}

	// Finally remove the (now empty) directory itself; failures are ignored.
	directory.delete();
}
/**
 Swaps in a fresh query-cache directory and deletes the old one.

 @param wait when true, sleep 60 seconds before deleting so queries still
        reading from the old directory can finish
 */
public void cleanCacheDir(boolean wait)
{
	String oldCacheDir = m_cacheDir;
	newCacheDirectory();

	if (wait)
	{
		try
		{
			Thread.sleep(60000);
		}
		catch (InterruptedException e)
		{
			logger.error("Sleep interrupted:", e);
			// Restore the interrupt flag so callers can observe the interruption;
			// the original swallowed it.
			Thread.currentThread().interrupt();
		}
	}

	logger.debug("Executing job...");
	File dir = new File(oldCacheDir);
	logger.debug("Deleting cache files in " + dir.getAbsolutePath());

	cleanDirectory(dir);
}
public Datastore getDatastore()
{
return m_datastore;
}
/**
* Close the datastore
*/
public void close() throws InterruptedException, DatastoreException
{
m_datastore.close();
}
public void putDataPoint(String metricName,
ImmutableSortedMap<String, String> tags,
DataPoint dataPoint) throws DatastoreException
{
putDataPoint(metricName, tags, dataPoint, 0);
}
/**
 Stores a data point in the underlying datastore and then notifies every
 registered {@link DataPointListener}. The write happens first, so listeners
 only see points that were accepted by the datastore.

 @param metricName metric to write to
 @param tags tag set identifying the series
 @param dataPoint value to store
 @param ttl time-to-live forwarded to the datastore (0 appears to mean
        no expiration — confirm with the datastore implementation)
 @throws DatastoreException if the datastore write fails
 */
public void putDataPoint(String metricName,
		ImmutableSortedMap<String, String> tags,
		DataPoint dataPoint, int ttl) throws DatastoreException
{
	//Add to datastore first.
	m_datastore.putDataPoint(metricName, tags, dataPoint, ttl);

	for (DataPointListener dataPointListener : m_dataPointListeners)
	{
		dataPointListener.dataPoint(metricName, tags, dataPoint);
	}
}
public Iterable<String> getMetricNames() throws DatastoreException
{
return (m_datastore.getMetricNames());
}
public Iterable<String> getTagNames() throws DatastoreException
{
return (m_datastore.getTagNames());
}
public Iterable<String> getTagValues() throws DatastoreException
{
return (m_datastore.getTagValues());
}
/**
 * Exports the data for a metric query without doing any aggregation or sorting;
 * rows are streamed straight from the datastore into the callback.
 *
 * @param metric metric query to export, must not be null
 * @param callback receives the raw query results
 * @throws DatastoreException if the underlying datastore query fails
 */
public void export(QueryMetric metric, QueryCallback callback) throws DatastoreException
{
	checkNotNull(metric);

	m_datastore.queryDatabase(metric, callback);
}
public List<DataPointGroup> queryTags(QueryMetric metric) throws DatastoreException
{
TagSet tagSet = m_datastore.queryMetricTags(metric);
return Collections.<DataPointGroup>singletonList(new EmptyDataPointGroup(metric.getName(), tagSet));
}
/**
 Creates a query for the given metric. The constructed query waits for a
 slot from the query queuing manager, so callers must invoke
 {@link DatastoreQuery#close()} to release it when done.

 @param metric query definition, must not be null
 @return a query ready to be executed
 @throws DatastoreException wrapping encoding, hashing or interruption
         failures raised while registering the query
 */
public DatastoreQuery createQuery(QueryMetric metric) throws DatastoreException
{
	checkNotNull(metric);

	DatastoreQuery dq;

	try
	{
		dq = new DatastoreQueryImpl(metric);
	}
	catch (UnsupportedEncodingException e)
	{
		throw new DatastoreException(e);
	}
	catch (NoSuchAlgorithmException e)
	{
		throw new DatastoreException(e);
	}
	catch (InterruptedException e)
	{
		throw new DatastoreException(e);
	}

	return (dq);
}
public void delete(QueryMetric metric) throws DatastoreException
{
checkNotNull(metric);
try
{
m_datastore.deleteDataPoints(metric);
}
catch (Exception e)
{
throw new DatastoreException(e);
}
}
/** Returns a copy of the group-by list with every {@link TagGroupBy} removed. */
private static List<GroupBy> removeTagGroupBy(List<GroupBy> groupBys)
{
	List<GroupBy> remaining = new ArrayList<GroupBy>();

	for (GroupBy candidate : groupBys)
	{
		if (candidate instanceof TagGroupBy)
			continue;

		remaining.add(candidate);
	}

	return remaining;
}
/** Returns the first {@link TagGroupBy} in the list, or null when none exists. */
private static TagGroupBy getTagGroupBy(List<GroupBy> groupBys)
{
	for (GroupBy candidate : groupBys)
	{
		if (candidate instanceof TagGroupBy)
		{
			return (TagGroupBy) candidate;
		}
	}

	return null;
}
/**
 Wraps the raw datastore rows into sorted {@link DataPointGroup}s, first
 grouped by data point group type and then, when a tag group-by is present,
 by the values of the group-by tags. Types and tag-key groups are iterated
 in sorted order so results are predictable.

 @param metricName metric the rows belong to
 @param rows raw rows returned by the datastore
 @param tagGroupBy tag group-by from the query, or null for none
 @param order sort order applied inside each resulting group
 @return one sorting group per (type, tag-combination); a single empty group
         when there are no rows
 */
protected List<DataPointGroup> groupByTypeAndTag(String metricName,
		List<DataPointRow> rows, TagGroupBy tagGroupBy, Order order)
{
	List<DataPointGroup> ret = new ArrayList<DataPointGroup>();
	// Guards against running out of heap while accumulating groups.
	MemoryMonitor mm = new MemoryMonitor(20);

	if (rows.isEmpty())
	{
		// No data: still return a single empty group for the metric.
		ret.add(new SortingDataPointGroup(metricName, order));
	}
	else
	{
		ListMultimap<String, DataPointGroup> typeGroups = ArrayListMultimap.create();

		//Go through each row grouping them by type
		for (DataPointRow row : rows)
		{
			String groupType = m_dataPointFactory.getGroupType(row.getDatastoreType());

			typeGroups.put(groupType, new DataPointGroupRowWrapper(row));
			mm.checkMemoryAndThrowException();
		}

		//Sort the types for predictable results
		TreeSet<String> sortedTypes = new TreeSet<String>(typeGroups.keySet());

		//Now go through each type group and group by tag if needed.
		for (String type : sortedTypes)
		{
			if (tagGroupBy != null)
			{
				// Key each sub-group by the concatenated group-by tag values.
				ListMultimap<String, DataPointGroup> groups = ArrayListMultimap.create();
				Map<String, TagGroupByResult> groupByResults = new HashMap<String, TagGroupByResult>();

				for (DataPointGroup dataPointGroup : typeGroups.get(type))
				{
					//Todo: Add code to datastore implementations to filter by the group by tag

					LinkedHashMap<String, String> matchingTags = getMatchingTags(dataPointGroup, tagGroupBy.getTagNames());
					String tagsKey = getTagsKey(matchingTags);
					groups.put(tagsKey, dataPointGroup);
					groupByResults.put(tagsKey, new TagGroupByResult(tagGroupBy, matchingTags));
					mm.checkMemoryAndThrowException();
				}

				//Sort groups by tags
				TreeSet<String> sortedGroups = new TreeSet<String>(groups.keySet());

				for (String key : sortedGroups)
				{
					SortingDataPointGroup sdpGroup = new SortingDataPointGroup(groups.get(key), groupByResults.get(key), order);
					sdpGroup.addGroupByResult(new TypeGroupByResult(type));
					ret.add(sdpGroup);
				}
			}
			else
			{
				// No tag group-by: one sorted group per type.
				ret.add(new SortingDataPointGroup(typeGroups.get(type), new TypeGroupByResult(type), order));
			}
		}
	}

	return ret;
}
/**
 * Create a unique identifier for this combination of tags to be used as the
 * key of a hash map. Tag names and values are concatenated in map order.
 */
private static String getTagsKey(LinkedHashMap<String, String> tags)
{
	final StringBuilder key = new StringBuilder();

	for (String tagName : tags.keySet())
	{
		key.append(tagName);
		key.append(tags.get(tagName));
	}

	return (key.toString());
}
/**
 Tags are inserted in the order specified in tagNames which is the order
 from the query. We use a linked hashmap so that order is preserved and
 the group by responses are sorted in the order specified in the query.

 @param datapointGroup group to read tag values from
 @param tagNames tag names to extract, in query order
 @return tag name mapped to its first tag value (empty string when the value
         is null), in the same order as tagNames; names with no values in
         the group are omitted
 */
private static LinkedHashMap<String, String> getMatchingTags(DataPointGroup datapointGroup, List<String> tagNames)
{
	LinkedHashMap<String, String> matchingTags = new LinkedHashMap<String, String>();
	for (String tagName : tagNames)
	{
		Set<String> tagValues = datapointGroup.getTagValues(tagName);
		if (tagValues != null)
		{
			// Only the first value is used when a tag has several values.
			String tagValue = tagValues.iterator().next();
			matchingTags.put(tagName, tagValue != null ? tagValue : "");
		}
	}

	return matchingTags;
}
/**
 Builds an MD5 hex digest of the query's cache string, used as the query
 cache file name. When the metric has no cache string the current time is
 hashed instead, yielding a name that will not be re-used for cache hits.

 @param metric query whose cache string is hashed
 @return lower-case hexadecimal MD5 digest
 @throws NoSuchAlgorithmException if MD5 is unavailable
 @throws UnsupportedEncodingException if UTF-8 is unavailable
 */
private static String calculateFilenameHash(QueryMetric metric) throws NoSuchAlgorithmException, UnsupportedEncodingException
{
	String hashString = metric.getCacheString();
	if (hashString == null)
		hashString = String.valueOf(System.currentTimeMillis());

	MessageDigest messageDigest = MessageDigest.getInstance("MD5");
	byte[] digest = messageDigest.digest(hashString.getBytes("UTF-8"));

	return new BigInteger(1, digest).toString(16);
}
private class DatastoreQueryImpl implements DatastoreQuery
{
private String m_cacheFilename;
private QueryMetric m_metric;
private List<DataPointGroup> m_results;
private int m_dataPointCount;
private int m_rowCount;
public DatastoreQueryImpl(QueryMetric metric)
throws UnsupportedEncodingException, NoSuchAlgorithmException,
InterruptedException, DatastoreException
{
//Report number of queries waiting
int waitingCount = m_queuingManager.getQueryWaitingCount();
if (waitingCount != 0)
{
ThreadReporter.addDataPoint(QUERIES_WAITING_METRIC_NAME, waitingCount);
}
m_metric = metric;
m_cacheFilename = calculateFilenameHash(metric);
m_queuingManager.waitForTimeToRun(m_cacheFilename);
}
public int getSampleSize()
{
return m_dataPointCount;
}
public int getRowCount() { return m_rowCount; }
@Override
public List<DataPointGroup> execute() throws DatastoreException
{
long queryStartTime = System.currentTimeMillis();
CachedSearchResult cachedResults = null;
List<DataPointRow> returnedRows = null;
try
{
String tempFile = m_cacheDir + m_cacheFilename;
if (m_metric.getCacheTime() > 0)
{
cachedResults = CachedSearchResult.openCachedSearchResult(m_metric.getName(),
tempFile, m_metric.getCacheTime(), m_dataPointFactory, m_keepCacheFiles);
if (cachedResults != null)
{
returnedRows = cachedResults.getRows();
logger.debug("Cache HIT!");
}
}
if (cachedResults == null)
{
logger.debug("Cache MISS!");
cachedResults = CachedSearchResult.createCachedSearchResult(m_metric.getName(),
tempFile, m_dataPointFactory, m_keepCacheFiles);
m_datastore.queryDatabase(m_metric, cachedResults);
returnedRows = cachedResults.getRows();
}
}
catch (Exception e)
{
throw new DatastoreException(e);
}
//Get data point count
for (DataPointRow returnedRow : returnedRows)
{
m_dataPointCount += returnedRow.getDataPointCount();
}
m_rowCount = returnedRows.size();
ThreadReporter.addDataPoint(QUERY_SAMPLE_SIZE, m_dataPointCount);
ThreadReporter.addDataPoint(QUERY_ROW_COUNT, m_rowCount);
List<DataPointGroup> queryResults = groupByTypeAndTag(m_metric.getName(),
returnedRows, getTagGroupBy(m_metric.getGroupBys()), m_metric.getOrder());
// Now group for all other types of group bys.
Grouper grouper = new Grouper(m_dataPointFactory);
try
{
queryResults = grouper.group(removeTagGroupBy(m_metric.getGroupBys()), queryResults);
}
catch (IOException e)
{
throw new DatastoreException(e);
}
m_results = new ArrayList<DataPointGroup>();
for (DataPointGroup queryResult : queryResults)
{
String groupType = DataPoint.GROUP_NUMBER;
//todo May want to make group type a first class citizen in DataPointGroup
for (GroupByResult groupByResult : queryResult.getGroupByResult())
{
if (groupByResult instanceof TypeGroupByResult)
{
groupType = ((TypeGroupByResult)groupByResult).getType();
}
}
DataPointGroup aggregatedGroup = queryResult;
List<Aggregator> aggregators = m_metric.getAggregators();
if (m_metric.getLimit() != 0)
{
aggregatedGroup = new LimitAggregator(m_metric.getLimit()).aggregate(aggregatedGroup);
}
//This will pipe the aggregators together.
for (Aggregator aggregator : aggregators)
{
//Make sure the aggregator can handle this type of data.
if (aggregator.canAggregate(groupType)) {
aggregatedGroup = aggregator.aggregate(aggregatedGroup);
groupType = aggregator.getAggregatedGroupType(groupType);
}
}
m_results.add(aggregatedGroup);
}
//Report how long query took
ThreadReporter.addDataPoint(QUERY_METRIC_TIME, System.currentTimeMillis() - queryStartTime);
return (m_results);
}
@Override
public void close()
{
try
{
if (m_results != null)
{
for (DataPointGroup result : m_results)
{
result.close();
}
}
}
finally
{ //This must get done
m_queuingManager.done(m_cacheFilename);
}
}
}
}
| src/main/java/org/kairosdb/core/datastore/KairosDatastore.java | /*
* Copyright 2016 KairosDB Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kairosdb.core.datastore;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ListMultimap;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import org.kairosdb.core.DataPoint;
import org.kairosdb.core.DataPointListener;
import org.kairosdb.core.KairosDataPointFactory;
import org.kairosdb.core.aggregator.Aggregator;
import org.kairosdb.core.aggregator.LimitAggregator;
import org.kairosdb.core.exception.DatastoreException;
import org.kairosdb.core.groupby.*;
import org.kairosdb.core.reporting.ThreadReporter;
import org.kairosdb.util.MemoryMonitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
 * Central datastore facade for KairosDB.
 *
 * <p>Wraps the underlying {@link Datastore} implementation and adds:
 * data-point listener notification on writes, a file-based query cache
 * (under a per-startup directory in {@code m_baseCacheDir}), query queuing
 * via {@link QueryQueuingManager}, and post-query grouping/aggregation.
 */
public class KairosDatastore
{
	public static final Logger logger = LoggerFactory.getLogger(KairosDatastore.class);

	// Property keys for cache configuration.
	public static final String QUERY_CACHE_DIR = "kairosdb.query_cache.cache_dir";
	public static final String KEEP_CACHE_FILES = "kairosdb.query_cache.keep_cache_files";

	// Internal metric names reported through ThreadReporter.
	public static final String QUERY_METRIC_TIME = "kairosdb.datastore.query_time";
	public static final String QUERIES_WAITING_METRIC_NAME = "kairosdb.datastore.queries_waiting";
	public static final String QUERY_SAMPLE_SIZE = "kairosdb.datastore.query_sample_size";
	public static final String QUERY_ROW_COUNT = "kairosdb.datastore.query_row_count";

	private final Datastore m_datastore;
	private final QueryQueuingManager m_queuingManager;
	private final List<DataPointListener> m_dataPointListeners;
	private final KairosDataPointFactory m_dataPointFactory;

	private String m_baseCacheDir;
	// Written by setupCacheDirectory()/newCacheDirectory(), read by query threads.
	private volatile String m_cacheDir;
	private final boolean m_keepCacheFiles;

	/**
	 * @param datastore          backing datastore implementation (required)
	 * @param queuingManager     serializes concurrent identical queries (required)
	 * @param dataPointListeners notified after every successful write (required)
	 * @param dataPointFactory   maps datastore types to group types
	 * @param keepCacheFiles     when true, cache files are kept after use
	 */
	@SuppressWarnings("ResultOfMethodCallIgnored")
	@Inject
	public KairosDatastore(Datastore datastore, QueryQueuingManager queuingManager,
			List<DataPointListener> dataPointListeners, KairosDataPointFactory dataPointFactory,
			@Named(KEEP_CACHE_FILES) boolean keepCacheFiles)
			throws DatastoreException
	{
		m_datastore = checkNotNull(datastore);
		m_dataPointListeners = checkNotNull(dataPointListeners);
		m_queuingManager = checkNotNull(queuingManager);
		m_dataPointFactory = dataPointFactory;

		m_baseCacheDir = System.getProperty("java.io.tmpdir") + "/kairos_cache/";
		m_keepCacheFiles = keepCacheFiles;

		setupCacheDirectory();
	}

	/**
	 * Overrides the default (tmpdir-based) cache directory; ignored when the
	 * configured value is null or empty.
	 */
	@SuppressWarnings("UnusedDeclaration")
	@Inject(optional = true)
	public void setBaseCacheDir(@Named(QUERY_CACHE_DIR) String cacheTempDir)
	{
		if (cacheTempDir != null && !cacheTempDir.equals(""))
		{
			m_baseCacheDir = cacheTempDir;
			setupCacheDirectory();
		}
	}

	/**
	 * Wipes any stale cache files from a previous run and creates a fresh
	 * timestamped cache directory.
	 */
	@SuppressWarnings("ResultOfMethodCallIgnored")
	private void setupCacheDirectory()
	{
		cleanDirectory(new File(m_baseCacheDir));
		newCacheDirectory();
		File cacheDirectory = new File(m_cacheDir);
		cacheDirectory.mkdirs();
		// Include the path so a failure message is actionable.
		checkState(cacheDirectory.exists(),
				"Cache directory not created: " + cacheDirectory.getAbsolutePath());
	}

	/**
	 * Creates the given folder (and any missing parents) if it does not exist.
	 *
	 * @param path directory path to create
	 */
	private static void ensureFolder(String path)
	{
		File fPath = new File(path);
		if (!fPath.exists())
			fPath.mkdirs();
	}

	/**
	 * Returns the current cache directory, recreating it on disk if something
	 * external removed it.
	 */
	public String getCacheDir()
	{
		ensureFolder(m_cacheDir);
		return (m_cacheDir);
	}

	/** Switches m_cacheDir to a new timestamp-named directory under the base dir. */
	@SuppressWarnings("ResultOfMethodCallIgnored")
	private void newCacheDirectory()
	{
		String newCacheDir = m_baseCacheDir + "/" + System.currentTimeMillis() + "/";
		ensureFolder(newCacheDir);
		m_cacheDir = newCacheDir;
	}

	/** Recursively deletes a directory tree; a no-op if it does not exist. */
	@SuppressWarnings("ResultOfMethodCallIgnored")
	private void cleanDirectory(File directory)
	{
		if (!directory.exists())
			return;
		File[] list = directory.listFiles();

		if (list != null && list.length > 0)
		{
			for (File aList : list)
			{
				if (aList.isDirectory())
					cleanDirectory(aList);

				aList.delete();
			}
		}

		directory.delete();
	}

	/**
	 * Rolls over to a new cache directory and deletes the old one.
	 *
	 * @param wait when true, sleeps one minute before deleting so that
	 *             in-flight queries can finish reading their cache files
	 */
	public void cleanCacheDir(boolean wait)
	{
		String oldCacheDir = m_cacheDir;
		newCacheDirectory();

		if (wait)
		{
			try
			{
				Thread.sleep(60000);
			}
			catch (InterruptedException e)
			{
				logger.error("Sleep interrupted:", e);
				// Restore the interrupt status so callers can observe it.
				Thread.currentThread().interrupt();
			}
		}

		logger.debug("Executing job...");
		File dir = new File(oldCacheDir);
		logger.debug("Deleting cache files in " + dir.getAbsolutePath());

		cleanDirectory(dir);
	}

	/** Returns the wrapped datastore implementation. */
	public Datastore getDatastore()
	{
		return m_datastore;
	}

	/**
	 * Close the datastore
	 */
	public void close() throws InterruptedException, DatastoreException
	{
		m_datastore.close();
	}

	/** Writes a data point with no TTL (never expires). */
	public void putDataPoint(String metricName,
			ImmutableSortedMap<String, String> tags,
			DataPoint dataPoint) throws DatastoreException
	{
		putDataPoint(metricName, tags, dataPoint, 0);
	}

	/**
	 * Writes a data point to the datastore, then notifies all registered
	 * listeners. Listeners are only called after a successful write.
	 */
	public void putDataPoint(String metricName,
			ImmutableSortedMap<String, String> tags,
			DataPoint dataPoint, int ttl) throws DatastoreException
	{
		//Add to datastore first.
		m_datastore.putDataPoint(metricName, tags, dataPoint, ttl);

		for (DataPointListener dataPointListener : m_dataPointListeners)
		{
			dataPointListener.dataPoint(metricName, tags, dataPoint);
		}
	}

	public Iterable<String> getMetricNames() throws DatastoreException
	{
		return (m_datastore.getMetricNames());
	}

	public Iterable<String> getTagNames() throws DatastoreException
	{
		return (m_datastore.getTagNames());
	}

	public Iterable<String> getTagValues() throws DatastoreException
	{
		return (m_datastore.getTagValues());
	}

	/**
	 * Exports the data for a metric query without doing any aggregation or sorting
	 *
	 * @param metric metric
	 * @throws DatastoreException
	 */
	public void export(QueryMetric metric, QueryCallback callback) throws DatastoreException
	{
		checkNotNull(metric);

		m_datastore.queryDatabase(metric, callback);
	}

	/**
	 * Returns only the tag set matching the query, wrapped in a single empty
	 * (data-point-free) group.
	 */
	public List<DataPointGroup> queryTags(QueryMetric metric) throws DatastoreException
	{
		TagSet tagSet = m_datastore.queryMetricTags(metric);

		return Collections.<DataPointGroup>singletonList(new EmptyDataPointGroup(metric.getName(), tagSet));
	}

	/**
	 * Creates a query handle for the given metric. This blocks (via the
	 * queuing manager) until it is this query's turn to run.
	 */
	public DatastoreQuery createQuery(QueryMetric metric) throws DatastoreException
	{
		checkNotNull(metric);

		DatastoreQuery dq;

		try
		{
			dq = new DatastoreQueryImpl(metric);
		}
		catch (UnsupportedEncodingException e)
		{
			throw new DatastoreException(e);
		}
		catch (NoSuchAlgorithmException e)
		{
			throw new DatastoreException(e);
		}
		catch (InterruptedException e)
		{
			throw new DatastoreException(e);
		}

		return (dq);
	}

	/** Deletes the data points matching the query, wrapping any failure. */
	public void delete(QueryMetric metric) throws DatastoreException
	{
		checkNotNull(metric);
		try
		{
			m_datastore.deleteDataPoints(metric);
		}
		catch (Exception e)
		{
			throw new DatastoreException(e);
		}
	}

	/** Returns a copy of the list with all TagGroupBy entries removed. */
	private static List<GroupBy> removeTagGroupBy(List<GroupBy> groupBys)
	{
		List<GroupBy> modifiedGroupBys = new ArrayList<GroupBy>();
		for (GroupBy groupBy : groupBys)
		{
			if (!(groupBy instanceof TagGroupBy))
				modifiedGroupBys.add(groupBy);
		}
		return modifiedGroupBys;
	}

	/** Returns the first TagGroupBy in the list, or null if there is none. */
	private static TagGroupBy getTagGroupBy(List<GroupBy> groupBys)
	{
		for (GroupBy groupBy : groupBys)
		{
			if (groupBy instanceof TagGroupBy)
				return (TagGroupBy) groupBy;
		}
		return null;
	}

	/**
	 * Groups the raw rows first by data-point group type and then, when a
	 * tag group-by is present, by the values of the requested tags. Types and
	 * tag keys are iterated in sorted order so results are predictable.
	 * Memory use is checked as rows accumulate.
	 */
	protected List<DataPointGroup> groupByTypeAndTag(String metricName,
			List<DataPointRow> rows, TagGroupBy tagGroupBy, Order order)
	{
		List<DataPointGroup> ret = new ArrayList<DataPointGroup>();
		MemoryMonitor mm = new MemoryMonitor(20);

		if (rows.isEmpty())
		{
			ret.add(new SortingDataPointGroup(metricName, order));
		}
		else
		{
			ListMultimap<String, DataPointGroup> typeGroups = ArrayListMultimap.create();

			//Go through each row grouping them by type
			for (DataPointRow row : rows)
			{
				String groupType = m_dataPointFactory.getGroupType(row.getDatastoreType());
				typeGroups.put(groupType, new DataPointGroupRowWrapper(row));
				mm.checkMemoryAndThrowException();
			}

			//Sort the types for predictable results
			TreeSet<String> sortedTypes = new TreeSet<String>(typeGroups.keySet());

			//Now go through each type group and group by tag if needed.
			for (String type : sortedTypes)
			{
				if (tagGroupBy != null)
				{
					ListMultimap<String, DataPointGroup> groups = ArrayListMultimap.create();
					Map<String, TagGroupByResult> groupByResults = new HashMap<String, TagGroupByResult>();

					for (DataPointGroup dataPointGroup : typeGroups.get(type))
					{
						//Todo: Add code to datastore implementations to filter by the group by tag
						LinkedHashMap<String, String> matchingTags = getMatchingTags(dataPointGroup, tagGroupBy.getTagNames());
						String tagsKey = getTagsKey(matchingTags);
						groups.put(tagsKey, dataPointGroup);
						groupByResults.put(tagsKey, new TagGroupByResult(tagGroupBy, matchingTags));
						mm.checkMemoryAndThrowException();
					}

					//Sort groups by tags
					TreeSet<String> sortedGroups = new TreeSet<String>(groups.keySet());

					for (String key : sortedGroups)
					{
						SortingDataPointGroup sdpGroup = new SortingDataPointGroup(groups.get(key), groupByResults.get(key), order);
						sdpGroup.addGroupByResult(new TypeGroupByResult(type));
						ret.add(sdpGroup);
					}
				}
				else
				{
					ret.add(new SortingDataPointGroup(typeGroups.get(type), new TypeGroupByResult(type), order));
				}
			}
		}

		return ret;
	}

	/**
	 * Create a unique identifier for this combination of tags to be used as the key of a hash map.
	 */
	private static String getTagsKey(LinkedHashMap<String, String> tags)
	{
		StringBuilder builder = new StringBuilder();
		for (Map.Entry<String, String> entry : tags.entrySet())
		{
			builder.append(entry.getKey()).append(entry.getValue());
		}

		return builder.toString();
	}

	/**
	 Tags are inserted in the order specified in tagNames which is the order
	 from the query.  We use a linked hashmap so that order is preserved and
	 the group by responses are sorted in the order specified in the query.
	 @param datapointGroup
	 @param tagNames
	 @return
	 */
	private static LinkedHashMap<String, String> getMatchingTags(DataPointGroup datapointGroup, List<String> tagNames)
	{
		LinkedHashMap<String, String> matchingTags = new LinkedHashMap<String, String>();
		for (String tagName : tagNames)
		{
			Set<String> tagValues = datapointGroup.getTagValues(tagName);
			if (tagValues != null)
			{
				// Only the first value of a multi-valued tag is used for grouping.
				String tagValue = tagValues.iterator().next();
				matchingTags.put(tagName, tagValue != null ? tagValue : "");
			}
		}

		return matchingTags;
	}

	/**
	 * Builds the cache file name as the hex MD5 of the query's cache string
	 * (falling back to the current time when no cache string exists, which
	 * effectively disables cache hits for that query).
	 */
	private static String calculateFilenameHash(QueryMetric metric) throws NoSuchAlgorithmException, UnsupportedEncodingException
	{
		String hashString = metric.getCacheString();
		if (hashString == null)
			hashString = String.valueOf(System.currentTimeMillis());

		MessageDigest messageDigest = MessageDigest.getInstance("MD5");
		byte[] digest = messageDigest.digest(hashString.getBytes("UTF-8"));

		return new BigInteger(1, digest).toString(16);
	}

	/**
	 * Per-query handle: runs the query (with file-backed caching), applies
	 * group-bys and aggregators, and releases its queue slot on close().
	 */
	private class DatastoreQueryImpl implements DatastoreQuery
	{
		private String m_cacheFilename;
		private QueryMetric m_metric;
		private List<DataPointGroup> m_results;
		private int m_dataPointCount;
		private int m_rowCount;

		public DatastoreQueryImpl(QueryMetric metric)
				throws UnsupportedEncodingException, NoSuchAlgorithmException,
				InterruptedException, DatastoreException
		{
			//Report number of queries waiting
			int waitingCount = m_queuingManager.getQueryWaitingCount();
			if (waitingCount != 0)
			{
				ThreadReporter.addDataPoint(QUERIES_WAITING_METRIC_NAME, waitingCount);
			}

			m_metric = metric;
			m_cacheFilename = calculateFilenameHash(metric);
			// Blocks until no other query with the same cache file is running.
			m_queuingManager.waitForTimeToRun(m_cacheFilename);
		}

		/** Total number of raw data points read by execute(). */
		public int getSampleSize()
		{
			return m_dataPointCount;
		}

		/** Number of raw rows returned by execute(). */
		public int getRowCount() { return m_rowCount; }

		@Override
		public List<DataPointGroup> execute() throws DatastoreException
		{
			long queryStartTime = System.currentTimeMillis();

			CachedSearchResult cachedResults = null;

			List<DataPointRow> returnedRows = null;

			try
			{
				String tempFile = m_cacheDir + m_cacheFilename;

				// Try the cache first when the query allows it.
				if (m_metric.getCacheTime() > 0)
				{
					cachedResults = CachedSearchResult.openCachedSearchResult(m_metric.getName(),
							tempFile, m_metric.getCacheTime(), m_dataPointFactory, m_keepCacheFiles);
					if (cachedResults != null)
					{
						returnedRows = cachedResults.getRows();
						logger.debug("Cache HIT!");
					}
				}

				// Cache miss (or caching disabled): query the datastore.
				if (cachedResults == null)
				{
					logger.debug("Cache MISS!");
					cachedResults = CachedSearchResult.createCachedSearchResult(m_metric.getName(),
							tempFile, m_dataPointFactory, m_keepCacheFiles);
					m_datastore.queryDatabase(m_metric, cachedResults);
					returnedRows = cachedResults.getRows();
				}
			}
			catch (Exception e)
			{
				throw new DatastoreException(e);
			}

			//Get data point count
			for (DataPointRow returnedRow : returnedRows)
			{
				m_dataPointCount += returnedRow.getDataPointCount();
			}

			m_rowCount = returnedRows.size();

			ThreadReporter.addDataPoint(QUERY_SAMPLE_SIZE, m_dataPointCount);
			ThreadReporter.addDataPoint(QUERY_ROW_COUNT, m_rowCount);

			List<DataPointGroup> queryResults = groupByTypeAndTag(m_metric.getName(),
					returnedRows, getTagGroupBy(m_metric.getGroupBys()), m_metric.getOrder());

			// Now group for all other types of group bys.
			Grouper grouper = new Grouper(m_dataPointFactory);
			try
			{
				queryResults = grouper.group(removeTagGroupBy(m_metric.getGroupBys()), queryResults);
			}
			catch (IOException e)
			{
				throw new DatastoreException(e);
			}

			m_results = new ArrayList<DataPointGroup>();
			for (DataPointGroup queryResult : queryResults)
			{
				String groupType = DataPoint.GROUP_NUMBER;
				//todo May want to make group type a first class citizen in DataPointGroup
				for (GroupByResult groupByResult : queryResult.getGroupByResult())
				{
					if (groupByResult instanceof TypeGroupByResult)
					{
						groupType = ((TypeGroupByResult) groupByResult).getType();
					}
				}

				DataPointGroup aggregatedGroup = queryResult;

				List<Aggregator> aggregators = m_metric.getAggregators();

				if (m_metric.getLimit() != 0)
				{
					aggregatedGroup = new LimitAggregator(m_metric.getLimit()).aggregate(aggregatedGroup);
				}

				//This will pipe the aggregators together.
				for (Aggregator aggregator : aggregators)
				{
					//Make sure the aggregator can handle this type of data.
					if (aggregator.canAggregate(groupType)) {
						aggregatedGroup = aggregator.aggregate(aggregatedGroup);
						groupType = aggregator.getAggregatedGroupType(groupType);
					}
				}

				m_results.add(aggregatedGroup);
			}

			//Report how long query took
			ThreadReporter.addDataPoint(QUERY_METRIC_TIME, System.currentTimeMillis() - queryStartTime);

			return (m_results);
		}

		@Override
		public void close()
		{
			try
			{
				if (m_results != null)
				{
					for (DataPointGroup result : m_results)
					{
						result.close();
					}
				}
			}
			finally
			{ //This must get done
				m_queuingManager.done(m_cacheFilename);
			}
		}
	}
}
// Commit note: Add simple exception message for cache directory creation
// (src/main/java/org/kairosdb/core/datastore/KairosDatastore.java)
package com.seleniumtests.reporter;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Method;
import java.net.URISyntaxException;
import java.net.URL;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.testng.IInvokedMethod;
import org.testng.IInvokedMethodListener;
import org.testng.IReporter;
import org.testng.IResultMap;
import org.testng.ISuite;
import org.testng.ISuiteResult;
import org.testng.ITestContext;
import org.testng.ITestListener;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.Reporter;
import org.testng.internal.ResultMap;
import org.testng.internal.TestResult;
import org.testng.internal.Utils;
import org.testng.xml.XmlSuite;
import com.seleniumtests.controller.AbstractPageListener;
import com.seleniumtests.controller.Assertion;
import com.seleniumtests.controller.Context;
import com.seleniumtests.controller.ContextManager;
import com.seleniumtests.controller.Logging;
import com.seleniumtests.controller.TestRetryAnalyzer;
import com.seleniumtests.driver.web.ScreenShot;
import com.seleniumtests.helper.StringHelper;
import com.thoughtworks.qdox.JavaDocBuilder;
import com.thoughtworks.qdox.model.JavaClass;
import com.thoughtworks.qdox.model.JavaMethod;
import com.thoughtworks.qdox.model.Type;
@SuppressWarnings("deprecation")
public class HTMLReporter implements IReporter, ITestListener,IInvokedMethodListener {
// ~ Inner Classes --------------------------------------------------------
/** Arranges methods by classname and method name */
protected class TestMethodSorter<T extends ITestNGMethod> implements Comparator<T> {// KEEPME
// ~ Methods
// -------------------------------------------------------------
/** Arranges methods by classname and method name */
public int compare(T o1, T o2) {
int r = ((T) o1).getTestClass().getName().compareTo(o2.getTestClass().getName());
if (r == 0) {
r = ((T) o1).getMethodName().compareTo(o2.getMethodName());
}
return r;
}
}
/** Arranges methods by classname and method name */
protected class TestResultSorter<T extends ITestResult> implements Comparator<T> {// KEEPME
// ~ Methods
// -------------------------------------------------------------
/** Arranges methods by classname and method name */
public int compare(T o1, T o2) {
String sig1 = StringHelper.constructMethodSignature(o1.getMethod().getMethod(), o1.getParameters());
String sig2 = StringHelper.constructMethodSignature(o2.getMethod().getMethod(), o2.getParameters());
return sig1.compareTo(sig2);
}
}
private static Logger logger = Logging.getLogger(HTMLReporter.class);
protected static String escape(String string) {
if (null == string)
return string;
//return string.replaceAll("<", "<").replaceAll(">", ">").replaceAll("\n", "<br/>");
return string.replaceAll("\n", "<br/>");
}
public static void main(String[] args)
{
String osName = System.getProperty("os.name");
System.out.println(osName);
}
public static void writeResourceToFile(File file, String resourceName, Class<?> clasz) throws IOException {
InputStream inputStream = clasz.getResourceAsStream("/" + resourceName);
if (inputStream == null) {
logger.error("Couldn't find resource on the class path: " + resourceName);
}
else {
try {
FileOutputStream outputStream = new FileOutputStream(file);
try {
int nread;
byte[] buffer = new byte[4096];
while (0 < (nread = inputStream.read(buffer))) {
outputStream.write(buffer, 0, nread);
}
} finally {
outputStream.close();
}
} finally {
inputStream.close();
}
}
}
	// Per-suite flag: whether retry handling must be applied when rendering results.
	private Map<String, Boolean> isRetryHandleNeeded = new HashMap<String, Boolean>();
	// Per-suite failed/skipped result maps, keyed by suite/context name.
	private Map<String, IResultMap> failedTests = new HashMap<String, IResultMap>();
	private Map<String, IResultMap> skippedTests = new HashMap<String, IResultMap>();
	// ~ Instance fields ------------------------------------------------------
	// Base path for the mktree image resources referenced by the report HTML.
	private String m_root = "resources/images/mktree/";
	protected PrintWriter m_out;
	// Timestamp string used to make the report file name unique; sanitized in createWriter().
	private String uuid = new GregorianCalendar().getTime().toString();
	private int m_treeId = 0;
	// Directory the report and its resources are written to.
	private String outputDirectory;
	private String resources;
	// QDox builder used for source/javadoc introspection; lazily initialized.
	private JavaDocBuilder builder = null;
	// The generated report file (set by createWriter()).
	private File report;
	// static{
	// soaReporter = new SOAReporter();
	// }
	// ~ Methods --------------------------------------------------------------
	// Remove enforced hook. TO enable SOAReport, use <listener> in testng.xml
	// private static SOAReporter soaReporter = null;
	// private HashMap<String, List<CALEvent>> calEvent = new HashMap<String,
	// List<CALEvent>>();
	Map<String, ITestResult> methodsByGroup = null;
private void addAllTestResults(Set<ITestResult> testResults, IResultMap resultMap) {
if (resultMap != null) {
testResults.addAll(resultMap.getAllResults());
}
}
public void afterInvocation(IInvokedMethod method, ITestResult result) {
Reporter.setCurrentTestResult(result);
ScreenShot screenShot = ContextManager.getThreadContext().getExceptionScreenShot();
//Handle Last Exception only for failed test cases
if(!result.isSuccess() && ContextManager.getThreadContext() != null && screenShot!=null)
{
Logging.log("<div><table><tr bgcolor=\"yellow\"><td><b>-- Screenshot of current web page with webdriver exception --</b><td></tr></table></div>");
Logging.logWebOutput(screenShot.getTitle(), Logging.buildScreenshotLog(screenShot), true);
}
//Handle Soft Assertion
if (method.isTestMethod()) {
List<Throwable> verificationFailures = Assertion.getVerificationFailures();
int size = verificationFailures.size();
//Assertion.fail("Test case faield with "+Assertion.getVerificationFailures().size()+" errors!");
if(size==0)return;
else
if(result.getStatus()==TestResult.FAILURE) return;
result.setStatus(TestResult.FAILURE);
if (size == 1) {
result.setThrowable(verificationFailures.get(0));
} else {
//create a failure message with all failures and stack traces (except last failure)
StringBuffer failureMessage = new StringBuffer("Multiple failures (").append(size).append("):nn");
for (int i = 0; i < size-1; i++) {
failureMessage.append("Failure ").append(i+1).append(" of ").append(size).append(":n");
Throwable t = verificationFailures.get(i);
String fullStackTrace = Utils.stackTrace(t, false)[1];
failureMessage.append(fullStackTrace).append("nn");
}
//final failure
Throwable last = verificationFailures.get(size-1);
failureMessage.append("Failure ").append(size).append(" of ").append(size).append(":n");
failureMessage.append(last.toString());
//set merged throwable
Throwable merged = new Throwable(failureMessage.toString());
merged.setStackTrace(last.getStackTrace());
result.setThrowable(merged);
}
}
}
	/** No-op: this reporter only needs the after-invocation hook. */
	public void beforeInvocation(IInvokedMethod arg0, ITestResult arg1) {}
	/**
	 * Creates the report's "resources" directory tree under the output
	 * directory and copies every CSS/JS/image asset the generated HTML
	 * references from the classpath ("reporter/...") into it.
	 *
	 * @throws Exception if a resource cannot be written to disk
	 */
	protected void copyResources() throws Exception {
		// Create the target directory layout first.
		new File(outputDirectory + File.separator + "resources").mkdir();
		new File(outputDirectory + File.separator + "resources" + File.separator + "css").mkdir();
		new File(outputDirectory + File.separator + "resources" + File.separator + "images").mkdir();
		new File(outputDirectory + File.separator + "resources" + File.separator + "images" + File.separator + "lightbox").mkdir();
		new File(outputDirectory + File.separator + "resources" + File.separator + "images" + File.separator + "mktree").mkdir();
		new File(outputDirectory + File.separator + "resources" + File.separator + "images" + File.separator + "yukontoolbox").mkdir();
		new File(outputDirectory + File.separator + "resources" + File.separator + "js").mkdir();
		// Classpath locations of every asset; "reporter" is mapped to the
		// on-disk "resources" directory in the loop below.
		List<String> resources = new ArrayList<String>();
		resources.add("reporter" + File.separator + "css" + File.separator + "report.css");
		resources.add("reporter" + File.separator + "css" + File.separator + "jquery.lightbox-0.5.css");
		resources.add("reporter" + File.separator + "css" + File.separator + "mktree.css");
		resources.add("reporter" + File.separator + "images" + File.separator + "lightbox" + File.separator + "seleniumtests_lightbox-blank.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "lightbox" + File.separator + "seleniumtests_lightbox-btn-close.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "lightbox" + File.separator + "seleniumtests_lightbox-btn-next.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "lightbox" + File.separator + "seleniumtests_lightbox-btn-prev.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "lightbox" + File.separator + "seleniumtests_lightbox-ico-loading.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "mktree" + File.separator + "seleniumtests_bullet.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "mktree" + File.separator + "seleniumtests_minus.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "mktree" + File.separator + "seleniumtests_plus.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "mktree" + File.separator + "seleniumtests_test1.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "mktree" + File.separator + "seleniumtests_test2.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "mktree" + File.separator + "seleniumtests_test3.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "mktree" + File.separator + "seleniumtests_test3.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_footer_grad.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_mid.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_grey_bl.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_grey_br.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_hovertab_l.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_hovertab_r.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_tabbed_nav_goldgradbg.png");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_table_sep_left.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_table_sep_right.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_table_zebrastripe_left.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_table_zebrastripe_right.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_yellow_tl.gif");
		resources.add("reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator + "seleniumtests_yellow_tr.gif");
		resources.add("reporter" + File.separator + "js" + File.separator + "jquery-1.3.min.js");
		resources.add("reporter" + File.separator + "js" + File.separator + "jquery.lightbox-0.5.min.js");
		resources.add("reporter" + File.separator + "js" + File.separator + "mktree.js");
		resources.add("reporter" + File.separator + "js" + File.separator + "report.js");
		resources.add("reporter" + File.separator + "js" + File.separator + "browserdetect.js");
		for (String resourceName : resources) {
			// Map the classpath prefix "reporter" to the output "resources" dir,
			// and normalize separators for the classpath lookup.
			File f = new File(outputDirectory, resourceName.replace("reporter", "resources"));
			resourceName = resourceName.replaceAll("\\\\", "/");
			logger.debug("about to write resource " + resourceName + " to the file " + f.getAbsolutePath());
			writeResourceToFile(f, resourceName, HTMLReporter.class);
		}
	}
protected PrintWriter createWriter(String outdir) throws IOException {
System.setProperty("file.encoding", "UTF8");
uuid = uuid.replaceAll(" ", "-").replaceAll(":", "-");
File f = new File(outdir, "SeleniumFrameworkTestReport-" + uuid + ".html");
// ConfigLoader.getInstance().setProperty("report",
// f.getAbsolutePath());
logger.info("generating report " + f.getAbsolutePath());
report = f;
//return new PrintWriter(new BufferedWriter(new FileWriter(f)));
// handle garbled code issue
OutputStream out = new FileOutputStream(f);
Writer writer = new BufferedWriter(new OutputStreamWriter(out,"utf-8"));
return new PrintWriter(writer);
}
	/** Finishes the HTML stream by writing the closing body/html tags. */
	protected void endHtml(PrintWriter out) {
		out.println("</body></html>");
	}
public void executeCmd(String browserPath,String theUrl) {
String cmdLine = null;
String osName = System.getProperty("os.name");
if (osName.startsWith("Windows")) {
cmdLine = "start " + theUrl;
// on NT, you need to start cmd.exe because start is not
// an external command but internal, you need to start the
// command interpreter
// cmdLine = "cmd.exe /c " + cmdLine;
cmdLine = "rundll32 SHELL32.DLL,ShellExec_RunDLL " + browserPath + " " + theUrl;
} else if(osName.startsWith("Mac"))
{
cmdLine = "open " + theUrl;
}
else {
// Linux
cmdLine = "open " + browserPath + " " + theUrl;
}
try {
Runtime.getRuntime().exec(cmdLine);
} catch (Exception e) {
logger.info(e);
}
}
protected void generateCalErrorReport(String exception, StringBuffer contentBuffer) {
contentBuffer.append(" <div class='stContainer' ><a href='javascript:void(0);' class='exceptionlnk'>Detail</a>");
contentBuffer.append("<div class='exception' style='display:none'>");
contentBuffer.append(exception);
contentBuffer.append("</div></div>");
}
/*
* protected String generateCALErrorHTML(ITestContext tc, ISuite suite,
* StringBuffer calcount) {
*
* StringBuffer res = new StringBuffer(); try { VelocityEngine ve = new
* VelocityEngine(); ve.setProperty("resource.loader", "class");
* ve.setProperty("class.resource.loader.class",
* "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
* ve.init(); generateCALErrorPanel(ve, res, "failed", suite, tc, calcount);
* } catch (Exception e) { logger.error(e.getMessage()); }
*
* return res.toString(); }
*/
/*
* private void generateCALErrorPanel(VelocityEngine ve, StringBuffer res,
* String style, ISuite suite, ITestContext ctx, StringBuffer sbCalcount) {
*
* Set<ITestResult> testResults = new HashSet<ITestResult>();
*
* addAllTestResults(testResults, ctx.getPassedTests());
* addAllTestResults(testResults, ctx.getFailedTests());
* addAllTestResults(testResults, ctx.getSkippedTests());
* addAllTestResults(testResults, ctx.getPassedConfigurations());
* addAllTestResults(testResults, ctx.getSkippedConfigurations());
* addAllTestResults(testResults, ctx.getFailedConfigurations());
* addAllTestResults(testResults,
* ctx.getFailedButWithinSuccessPercentageTests());
*
* int cal = 0;
*
* if (calEvent.isEmpty()) { res.append(
* "<div class='method passed'><div class='yuk_goldgrad_tl'><div class='yuk_goldgrad_tr'>"
* + "<div class='yuk_goldgrad_m'></div></div></div>" +
* "<h3 class='yuk_grad_ltitle_passed'>No CAL Errors found.</h3>" +
* "<div class='yuk_pnl_footerbar'></div>" +
* "<div class='yuk_grey_bm_footer'><div class='yuk_grey_br'>" +
* "<div class='yuk_grey_bl'></div></div></div></div>"); } else { try {
* StringBuffer contentBuffer = null;
*
* Iterator<Entry<String, List<CALEvent>>> i =
* calEvent.entrySet().iterator(); while (i.hasNext()) {
*
* Entry<String, List<CALEvent>> e = (Entry<String, List<CALEvent>>)
* i.next(); if (e.getKey() == null) continue;
*
* contentBuffer = new StringBuffer(); contentBuffer.append(
* "<div class='leftContent' style='float: left; width: 100%;'>");
*
* String cmd = (String) e.getKey(); List<CALEvent> events =
* (List<CALEvent>) e.getValue(); contentBuffer .append(
* "<table cellspacing=0 style='border-style:solid;border-width:1px'><thead><tr><th style='border-spacing:0px;border-style:solid;border-width:1px' >Event Type</th><th style='border-spacing:0px;border-style:solid;border-width:1px'>Event Name</th><th style='border-spacing:0px;border-style:solid;border-width:1px'>Test Case</th></tr></thead><tbody>"
* );
*
* Set<String> eventNameSet = new TreeSet<String>(); for (CALEvent event :
* events) { if (eventNameSet.contains(event.getName())) continue;
*
* eventNameSet.add(event.getName());
*
* contentBuffer.append(
* "<tr><td style='border-spacing:0px;border-style:none;border-width:1px' valign='top'><a href='"
* + event.getCALLinkURL() + "' target=cal>");
* contentBuffer.append(event.getType()); contentBuffer.append("</a>");
* contentBuffer.append(
* "</td><td style='border-spacing:0px;border-style:none;border-width:1px' valign='top'>"
* ); contentBuffer.append(event.getName());
* generateCalErrorReport(event.getPayload().replaceAll("(\\\\r|\\\\n)+",
* "\n").replaceAll("(\\\\t)+", " "), contentBuffer);
*
* contentBuffer.append(
* "</td><td style='border-spacing:0px;border-style:solid;border-width:1px' valign='top'>"
* );
*
* if (event.getTestCaseId() != null &&
* !"null".equalsIgnoreCase(event.getTestCaseId())) {
* contentBuffer.append(Context
* .getSignatureFromTestCaseId(event.getTestCaseId()));
* contentBuffer.append(" - "); contentBuffer.append(event.getTestCaseId());
* contentBuffer.append("</td></tr>"); } else
* contentBuffer.append(" </td></tr>");
*
* cal++; } contentBuffer.append("</tbody></table></div>"); // end of //
* leftContent contentBuffer.append("<div class='clear_both'></div>");
*
* Template t = ve.getTemplate("/templates/report.part.singleTest.html");
* VelocityContext context = new VelocityContext(); context.put("status",
* style); context.put("desc", ""); context.put("time", "");
* context.put("methodName", cmd); context.put("content",
* contentBuffer.toString()); StringWriter writer = new StringWriter();
* t.merge(context, writer); res.append(writer.toString()); }
*
* List<CALEvent> evt = calEvent.get(null); if (evt != null && evt.size() >
* 0) { contentBuffer = new StringBuffer(); contentBuffer.append(
* "<div class='leftContent' style='float: left; width: 100%;'>");
* contentBuffer .append(
* "<table cellspacing=0 style='border-style:solid;border-width:1px'><thead><tr><th style='border-spacing:0px;border-style:solid;border-width:1px' >Event Type</th><th style='border-spacing:0px;border-style:solid;border-width:1px'>Event Name</th><th style='border-spacing:0px;border-style:solid;border-width:1px'>Test Case</th></tr></thead><tbody>"
* );
*
* Set<String> eventNameSet = new TreeSet<String>(); for (CALEvent event :
* evt) { if (eventNameSet.contains(event.getName())) continue;
*
* eventNameSet.add(event.getName());
*
* contentBuffer.append(
* "<tr><td style='border-spacing:0px;border-style:solid;border-width:1px' valign='top'><a href='"
* + event.getCALLinkURL() + "' target=cal>");
* contentBuffer.append(event.getType()); contentBuffer.append("</a>");
* contentBuffer.append(
* "</td><td style='border-spacing:0px;border-style:solid;border-width:1px' valign='top'>"
* ); contentBuffer.append(event.getName());
* generateCalErrorReport(event.getPayload().replaceAll("(\\\\r|\\\\n)+",
* "\n").replaceAll("(\\\\t)+", " "), contentBuffer);
*
* contentBuffer.append(
* "</td><td style='border-spacing:0px;border-style:solid;border-width:1px' valign='top'>"
* );
*
* if (event.getTestCaseId() != null &&
* !"null".equalsIgnoreCase(event.getTestCaseId())) {
* contentBuffer.append(Context
* .getSignatureFromTestCaseId(event.getTestCaseId()));
* contentBuffer.append(" - "); contentBuffer.append(event.getTestCaseId());
* contentBuffer.append("</td></tr>"); } else
* contentBuffer.append(" </td></tr>");
*
* cal++; } contentBuffer.append("</tbody></table></div>"); // end of //
* leftContent contentBuffer.append("<div class='clear_both'></div>");
*
* Template t = ve.getTemplate("/templates/report.part.singleTest.html");
* VelocityContext context = new VelocityContext(); context.put("status",
* style); context.put("desc", ""); context.put("time", "");
* context.put("methodName", ""); context.put("content",
* contentBuffer.toString()); StringWriter writer = new StringWriter();
* t.merge(context, writer); res.append(writer.toString()); }
*
* } catch (Exception e) { e.printStackTrace();
* logger.error("error creating a cal log report for null test case id." +
* e.getMessage()); } } sbCalcount.append(cal); }
*/
/**
 * Appends an exception section (title plus collapsible stack trace) for a
 * test method to the report buffer.
 *
 * @param exception     throwable to render
 * @param method        TestNG method the throwable belongs to
 * @param title         section title shown next to the exception class
 * @param contentBuffer buffer receiving the HTML fragment
 * @param lastline      last reporter log line (screenshot linkage)
 */
protected void generateExceptionReport(Throwable exception, ITestNGMethod method, String title, StringBuffer contentBuffer, String lastline) {
    // Thin delegation: all of the formatting lives in generateTheStackTrace.
    generateTheStackTrace(exception, method, title, contentBuffer, lastline);
}
/**
 * Renders an exception section for a test result, deriving the section title
 * from the exception's message and falling back to the cause's message when
 * the exception itself carries none.
 *
 * @param exception     throwable raised by the test
 * @param method        TestNG method the throwable belongs to
 * @param contentBuffer buffer receiving the HTML fragment
 * @param lastline      last reporter log line (screenshot linkage)
 */
protected void generateExceptionReport(Throwable exception, ITestNGMethod method, StringBuffer contentBuffer, String lastline) {
    Throwable fortile = exception;
    /*
     * if (exception instanceof VerificationException) { fortile =
     * ((VerificationException)exception).getMaster(); }
     */
    String title = fortile.getMessage();
    if (title == null) {
        // BUGFIX: the old code dereferenced getCause() blindly, caught the
        // resulting NPE with catch(Throwable), and used the NPE's message as
        // the title (usually null/meaningless). A plain null check on the
        // cause is both safer and clearer; with no cause the title stays null.
        Throwable cause = fortile.getCause();
        if (cause != null) {
            title = cause.getMessage();
        }
    }
    generateExceptionReport(exception, method, title, contentBuffer, lastline);
}
/**
 * Builds the per-page-listener "global error" tab headers and content panels
 * for one test context. Listeners whose results do not affect the test
 * outcome are skipped entirely.
 *
 * @param tc              test context whose results are scanned
 * @param suite           owning suite
 * @param errorCountTabs  buffer receiving the tab-bar markup
 * @param errorCountHtmls buffer receiving the panel markup
 */
protected void generateGlobalErrorHTML(ITestContext tc, ISuite suite, StringBuffer errorCountTabs, StringBuffer errorCountHtmls) {
    try {
        VelocityEngine engine = new VelocityEngine();
        engine.setProperty("resource.loader", "class");
        engine.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        engine.init();
        for (AbstractPageListener listener : PluginsUtil.getInstance().getPageListeners()) {
            // Only listeners flagged as affecting the test result get a tab.
            if (!listener.isTestResultEffected()) {
                continue;
            }
            String id = listener.getClass().getSimpleName();
            String tabTitle = (listener.getTitle() != null) ? listener.getTitle() : id;
            errorCountTabs.append("<li class='tab' id='" + id + "'><a href='#'><span>")
                    .append(tabTitle)
                    .append(" ( <font color='red'>");
            errorCountHtmls.append("<div class='" + id + "' style='width: 98%;margin-left:15px;'>");
            // The panel writer also appends the per-listener error count into
            // the tab buffer (between the <font> tags opened above).
            generateGlobalErrorsPanel(listener, engine, errorCountHtmls, "failed", tc, errorCountTabs);
            errorCountHtmls.append("</div>");
            errorCountTabs.append("</font> )</span></a></li>");
        }
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
}
/**
 * Renders the error panel for one page listener: for every test method that
 * produced entries in the listener's log map, a table row of messages is
 * emitted (via report.part.singlePageError.html). The number of pages with
 * errors is appended to {@code sbCalcount} so the caller can show it in the
 * tab header.
 *
 * @param abstractPageListener listener whose log entries are rendered
 * @param ve                   initialized Velocity engine
 * @param res                  buffer receiving the panel HTML
 * @param style                CSS status class passed to the template
 * @param tc                   context supplying the candidate test results
 * @param sbCalcount           buffer receiving the error-page count
 */
private void generateGlobalErrorsPanel(AbstractPageListener abstractPageListener, VelocityEngine ve, StringBuffer res, String style, ITestContext tc,
StringBuffer sbCalcount) {
int pageCount = 0;
// Candidate results: passed, (curated) failed, and within-success-percentage.
Set<ITestResult> testResults = new HashSet<ITestResult>();
addAllTestResults(testResults, tc.getPassedTests());
addAllTestResults(testResults, failedTests.get(tc.getName()));
addAllTestResults(testResults, tc.getFailedButWithinSuccessPercentageTests());
// pageName -> (method signature -> messages) for this listener.
Map<String, Map<String, List<String>>> pageListenerLogMap = Logging.getPageListenerLog(abstractPageListener.getClass().getCanonicalName());
if (pageListenerLogMap == null || pageListenerLogMap.isEmpty()) {
// Nothing logged for this listener: render the "No Errors" placeholder.
res.append("<div class='method passed'><div class='yuk_goldgrad_tl'><div class='yuk_goldgrad_tr'>"
+ "<div class='yuk_goldgrad_m'></div></div></div>" + "<h3 class='yuk_grad_ltitle_passed'>No Errors found.</h3>"
+ "<div class='yuk_pnl_footerbar'></div>" + "<div class='yuk_grey_bm_footer'><div class='yuk_grey_br'>"
+ "<div class='yuk_grey_bl'></div></div></div></div>");
} else {
for (Entry<String, Map<String, List<String>>> pageEntry : pageListenerLogMap.entrySet()) {
StringBuffer contentBuffer = new StringBuffer();
contentBuffer.append("<table class='ex' width='90%'><thead><tr><th>TestMethod</th><th>Errors</th></thead><tbody>");
Map<String, List<String>> errorMap = pageEntry.getValue();
boolean found = false;
// Match log entries to results by reconstructed method signature.
for (ITestResult testResult : testResults) {
Method method = testResult.getMethod().getMethod();
String methodInstance = StringHelper.constructMethodSignature(method, testResult.getParameters());
if (errorMap.containsKey(methodInstance)) {
found = true;
contentBuffer.append("<tr><td>" + methodInstance + "</td><td>");
for (String message : errorMap.get(methodInstance)) {
contentBuffer.append(message);
contentBuffer.append("<br>");
}
contentBuffer.append("</td><tr>");
}
}
if (found) {
// At least one method matched: merge the page's table into the report.
contentBuffer.append("</tbody></table>");
try {
Template t = ve.getTemplate("/templates/report.part.singlePageError.html");
VelocityContext context = new VelocityContext();
context.put("status", style);
context.put("pageName", pageEntry.getKey());
context.put("content", contentBuffer.toString());
StringWriter writer = new StringWriter();
t.merge(context, writer);
res.append(writer.toString());
} catch (Exception e) {
logger.error("error creating a singlePageError." + e.getMessage());
}
pageCount++;
}
}
}
sbCalcount.append(pageCount);
}
/**
 * Writes the "Tags" filter area: one pre-checked checkbox per distinct group
 * found on the given methods. Checkbox values are made id-safe by replacing
 * spaces and parentheses with underscores.
 *
 * @param methods methods whose groups populate the filter
 */
public void generateGroupsArea(Collection<ITestNGMethod> methods) {
Set<String> allGroups = new HashSet<String>();
for (ITestNGMethod method : methods) {
for (int j = 0; j < method.getGroups().length; j++) {
allGroups.add(method.getGroups()[j]);
}
}
m_out.print("Tags :<br/>");
for (String group : allGroups) {
m_out.print("<input type=\"checkbox\" value=\"" + group.replace(' ', '_').replace('(', '_').replace(')', '_') + "\" checked='checked'> " + group
+ "&nbsp;&nbsp;&nbsp;");
}
}
/**
 * Renders the per-test HTML for one test context: failed, skipped and passed
 * panels, either for environment ("envt" group) methods or for regular ones.
 *
 * @param tc    test context whose results are rendered
 * @param envt  true to render only "envt"-group methods, false for the rest
 * @param suite owning suite (used for dependency trees on skips)
 * @param ctx   context used for naming/anchors
 * @return the generated HTML fragment
 */
protected String generateHTML(ITestContext tc, boolean envt, ISuite suite, ITestContext ctx) {
    StringBuffer res = new StringBuffer();
    try {
        VelocityEngine ve = new VelocityEngine();
        ve.setProperty("resource.loader", "class");
        ve.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        ve.init();
        if (envt) {
            if (tc.getFailedConfigurations().getAllResults().size() > 0)
                generatePanel(ve, tc.getFailedConfigurations(), res, "failed", suite, ctx, envt);
            generatePanel(ve, failedTests.get(tc.getName()), res, "failed", suite, ctx, envt);
            // BUGFIX: this guard previously re-tested getFailedConfigurations(),
            // so skipped configurations were rendered only when failed ones
            // existed. Test the skipped-configurations map itself.
            if (tc.getSkippedConfigurations().getAllResults().size() > 0)
                generatePanel(ve, tc.getSkippedConfigurations(), res, "skipped", suite, ctx, envt);
            generatePanel(ve, skippedTests.get(tc.getName()), res, "skipped", suite, ctx, envt);
            generatePanel(ve, tc.getPassedTests(), res, "passed", suite, ctx, envt);
            // generatePanel(ve, tc.getPassedConfigurations(), res,
            // "passed", suite, ctx, envt);
        } else {
            generatePanel(ve, failedTests.get(tc.getName()), res, "failed", suite, ctx, envt);
            generatePanel(ve, skippedTests.get(tc.getName()), res, "skipped", suite, ctx, envt);
            generatePanel(ve, tc.getPassedTests(), res, "passed", suite, ctx, envt);
        }
    } catch (Exception e) {
        logger.error(e.getMessage());
        e.printStackTrace();
    }
    return res.toString();
}
/**
 * Renders one result panel (failed/skipped/passed) for each method in the
 * result map that matches the envt filter. For every result instance it
 * emits: the test-level context line (app URL/browser), the collapsible
 * "Test Steps" log, an exception report when a throwable is present, a
 * dependency tree for status-3 (skipped) results, and finally merges
 * report.part.singleTest.html with the assembled content.
 *
 * @param ve    initialized Velocity engine
 * @param map   results to render (may be a test or configuration map)
 * @param res   buffer receiving the generated HTML
 * @param style CSS status class ("failed"/"skipped"/"passed")
 * @param suite owning suite (used for dependency trees)
 * @param ctx   test context (used for anchors and method lookup)
 * @param envt  true to keep only "envt"-group methods, false for the rest
 */
protected void generatePanel(VelocityEngine ve, IResultMap map, StringBuffer res, String style, ISuite suite, ITestContext ctx, boolean envt) {
Collection<ITestNGMethod> methodSet = getMethodSet(map);
for (ITestNGMethod method : methodSet) {
// Keep the method only when its envt-group membership matches the filter.
boolean methodIsValid = true;
if (envt) {
methodIsValid = Arrays.asList(method.getGroups()).contains("envt");
} else {
methodIsValid = !Arrays.asList(method.getGroups()).contains("envt");
}
if (methodIsValid) {
Collection<ITestResult> resultSet = getResultSet(map, method);
//System.out.println(method.getMethodName()+":"+resultSet.size());
String content = ctx.getName().replace(' ', '_').replace('(', '_').replace(')', '_');
for (ITestResult ans : resultSet) {
StringBuffer contentBuffer = new StringBuffer();
// Resolve the xml test name; skip the result when it cannot be found.
String testName="";
if(ans.getMethod().getXmlTest()!=null)
testName = ans.getMethod().getXmlTest().getName();
else
{
try{
testName = ans.getTestContext().getCurrentXmlTest().getName();
}catch(Exception ex)
{
ex.printStackTrace();
continue;
}catch(Error e)
{
e.printStackTrace();
continue;
}
}
// Emit the run environment (app URL, browser + version) when known.
Context testLevelContext = ContextManager.getTestLevelContext(testName);
if (testLevelContext != null ) {
/*String pool = testLevelContext.getPool();*/
String site = testLevelContext.getSite();
String appURL = testLevelContext.getAppURL();
String browser = (String)testLevelContext.getAttribute("browser");
if (browser != null)
browser = browser.replace("*", "");
String browserVersion = (String)testLevelContext.getAttribute("browserVersion");
if (browserVersion != null)
browser = browser + browserVersion;
// contentBuffer.append("<div><i>Pool: "+ pool + ", Site: " + site + ", Browser: " + browser + "</i></div>");
contentBuffer.append("<div><i>App URL: "+appURL+ ", Browser: " + browser + "</i></div>");
}
Object[] parameters = ans.getParameters();
List<String> msgs = Reporter.getOutput(ans);
boolean hasReporterOutput = msgs.size() > 0;
Throwable exception = ans.getThrowable();
boolean hasThrowable = exception != null;
if (hasReporterOutput || hasThrowable) {
// Test-steps log: collapsed by default for passed results.
contentBuffer.append("<div class='leftContent' style='float: left; width: 100%;'>");
contentBuffer.append("<h4><a href='javascript:void(0);' class='testloglnk'>Test Steps " + (style.equals("passed") ? "[+]" : "[ - ]")
+ "</a></h4>");
contentBuffer.append("<div class='testlog' " + (style.equals("passed") ? "style='display:none'" : "") + ">");
contentBuffer.append("<ol>");
for (String line : msgs) {
DetailedLog logLine = new DetailedLog(line, outputDirectory);
String htmllog;
if (logLine.getHref() != null) {
htmllog = "<a href='" + logLine.getHref() + "' title='" + logLine.getLocation() + "' >" + logLine.getMsg() + "</a>";
} else {
htmllog = logLine.getMsg();
}
htmllog = htmllog.replaceAll("@@lt@@", "<").replace("^^gt^^", ">");//fix for testng 6.7
contentBuffer.append(htmllog);
if(!htmllog.contains("<br>"))contentBuffer.append("<br/>");//handle different in testng6.7
}
contentBuffer.append("</ol>");
// Find the last log line carrying a screenshot link, for the exception report.
String lastLine = "";
for (int lastIdx = msgs.size() - 1; lastIdx >= 0; lastIdx--) {
lastLine = msgs.get(lastIdx).replaceAll("@@lt@@", "<").replace("^^gt^^", ">"); //fix for testng 6.7
if (lastLine.indexOf(">screenshot</a>") != -1) {
break;
}
}
if (hasThrowable) {
generateExceptionReport(exception, method, contentBuffer, lastLine);
}
contentBuffer.append("</div></div>"); // end of
}
//int rq = 0;
/* freynaud */
// Status 3 == skipped: show the dependency tree explaining the skip.
String treeId = "tree" + m_treeId;
m_treeId++;
if (ans.getStatus() == 3) {
contentBuffer.append("<br>This method has been skipped, because of its dependencies :<br>");
takeCareOfDirectDependencies(suite, method, 0, ctx, treeId, contentBuffer);
}
//rq += 1;
contentBuffer.append("<div class='clear_both'></div>");
content = contentBuffer.toString();
// Merge the per-result template: javadoc (preferred) or @Test description.
try {
Template t = ve.getTemplate("/templates/report.part.singleTest.html");
VelocityContext context = new VelocityContext();
context.put("status", style);
String javadoc = getJavadocComments(method);
String desc = method.getDescription();
String toDisplay = "no javadoc nor description for this method.";
if (!"".equals(javadoc) && javadoc != null) {
toDisplay = javadoc;
} else if (!"".equals(desc) && desc != null) {
toDisplay = desc;
}
String methodSignature = StringHelper.constructMethodSignature(method.getMethod(), parameters);
if(methodSignature.length()>500)
context.put("methodName", methodSignature.substring(0, 500)+"...");
else
context.put("methodName", methodSignature);
context.put("desc", toDisplay.replaceAll("\r\n\r\n", "\r\n").replaceAll("\n\n", "\n"));
context.put("content", content);
context.put("time", "Time: " + ((ans.getEndMillis() - ans.getStartMillis()) / 1000) + "sec.");
StringWriter writer = new StringWriter();
t.merge(context, writer);
res.append(writer.toString());
} catch (Exception e) {
logger.error("error creating a singleTest." + e.getMessage());
e.printStackTrace();
}
}
}
}
}
/**
 * TestNG entry point: writes the complete HTML report (header, suite summary,
 * per-test sections, footer), copies static resources next to it, and
 * optionally opens the result in a browser when the OPEN_REPORT_IN_BROWSER
 * attribute is configured.
 *
 * @param xml    xml suites (first suite's name titles the summary)
 * @param suites executed suites
 * @param outdir TestNG-provided output dir (unused; the context's dir wins)
 */
public void generateReport(List<XmlSuite> xml, List<ISuite> suites, String outdir) {
    ITestContext testCtx = ContextManager.getGlobalContext().getTestNGContext();
    if (testCtx == null) {
        logger.error("Please check if your class extends from TestPlan!");
        return;
    }
    File f = new File(ContextManager.getGlobalContext().getOutputDirectory());
    setOutputDirectory(f.getParentFile().getAbsolutePath());
    // BUGFIX: use the platform file separator instead of a hard-coded
    // Windows backslash so the resources path is valid on all OSes.
    setResources(getOutputDirectory() + File.separator + "resources");
    try {
        m_out = createWriter(getOutputDirectory());
        startHtml(testCtx, m_out);
        generateSuiteSummaryReport(suites, xml.get(0).getName());
        generateReportsSection(suites);
        endHtml(m_out);
        m_out.flush();
        m_out.close();
        copyResources();
        logger.info("report generated.");
        // Optionally launch the configured browser on the finished report.
        String browserPath = (String) ContextManager.getGlobalContext().getAttribute(Context.OPEN_REPORT_IN_BROWSER);
        if (browserPath != null && browserPath.trim().length() > 0) {
            executeCmd(browserPath, getReportLocation().getAbsolutePath());
        }
    } catch (Exception e) {
        logger.error("output file", e);
        return;
    }
}
/**
 * Writes one "test detail" container (report.part.testDetail.html) to the
 * report stream — variant without CAL/global-error data.
 *
 * @param name     test context name (also used to derive the anchor id)
 * @param envtp    passed envt-group instances
 * @param envtf    failed envt-group instances
 * @param envts    skipped envt-group instances
 * @param testp    passed regular instances
 * @param testf    failed regular instances
 * @param tests    skipped regular instances
 * @param envthtml rendered HTML for the envt section
 * @param testhtml rendered HTML for the test section
 */
protected void generateReportDetailsContainer(String name, int envtp, int envtf, int envts, int testp, int testf, int tests, String envthtml,
String testhtml) {
    try {
        VelocityEngine engine = new VelocityEngine();
        engine.setProperty("resource.loader", "class");
        engine.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        engine.init();
        Template template = engine.getTemplate("/templates/report.part.testDetail.html");
        VelocityContext velocityContext = new VelocityContext();
        // Anchor id must be id-safe: lower-case, spaces/parens -> underscores.
        velocityContext.put("testId", name.toLowerCase().replace(' ', '_').replace('(', '_').replace(')', '_'));
        velocityContext.put("testName", name);
        velocityContext.put("envtp", envtp);
        velocityContext.put("envtf", envtf);
        velocityContext.put("envts", envts);
        velocityContext.put("testp", testp);
        velocityContext.put("testf", testf);
        velocityContext.put("tests", tests);
        velocityContext.put("envthtml", envthtml);
        velocityContext.put("testhtml", testhtml);
        StringWriter rendered = new StringWriter();
        template.merge(velocityContext, rendered);
        m_out.write(rendered.toString());
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
}
/**
 * Writes one "test detail" container (report.part.testDetail.html) to the
 * report stream — variant carrying CAL count and global-error tab/panel data.
 *
 * @param name            test context name (also used to derive the anchor id)
 * @param envtp           passed envt-group instances
 * @param envtf           failed envt-group instances
 * @param envts           skipped envt-group instances
 * @param testp           passed regular instances
 * @param testf           failed regular instances
 * @param tests           skipped regular instances
 * @param envthtml        rendered HTML for the envt section
 * @param testhtml        rendered HTML for the test section
 * @param calCount        CAL event count (rendered via toString())
 * @param globalErrorTabs tab-bar markup for page-listener errors
 * @param globalErrorHtmls panel markup for page-listener errors
 */
protected void generateReportDetailsContainer(String name, int envtp, int envtf, int envts, int testp, int testf, int tests, String envthtml,
String testhtml, StringBuffer calCount, String globalErrorTabs, String globalErrorHtmls) {
    try {
        VelocityEngine engine = new VelocityEngine();
        engine.setProperty("resource.loader", "class");
        engine.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        engine.init();
        Template template = engine.getTemplate("/templates/report.part.testDetail.html");
        VelocityContext velocityContext = new VelocityContext();
        // Anchor id must be id-safe: lower-case, spaces/parens -> underscores.
        velocityContext.put("testId", name.toLowerCase().replace(' ', '_').replace('(', '_').replace(')', '_'));
        velocityContext.put("testName", name);
        velocityContext.put("envtp", envtp);
        velocityContext.put("envtf", envtf);
        velocityContext.put("envts", envts);
        velocityContext.put("testp", testp);
        velocityContext.put("testf", testf);
        velocityContext.put("tests", tests);
        velocityContext.put("envthtml", envthtml);
        velocityContext.put("testhtml", testhtml);
        velocityContext.put("calcount", calCount.toString());
        velocityContext.put("globalerrortabs", globalErrorTabs);
        velocityContext.put("globalerrorhtmls", globalErrorHtmls);
        StringWriter rendered = new StringWriter();
        template.merge(velocityContext, rendered);
        m_out.write(rendered.toString());
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
}
/**
 * Emits the &lt;div id='reports'&gt; section: one detail container per test
 * context across all suites, with separate counts for environment ("envt")
 * and regular test instances.
 *
 * @param suites executed suites to report on
 */
protected void generateReportsSection(List<ISuite> suites) {
    m_out.println("<div id='reports'>");
    for (ISuite suite : suites) {
        for (ISuiteResult suiteResult : suite.getResults().values()) {
            ITestContext tc = suiteResult.getTestContext();
            // Environment-group counts; configuration failures/skips count too.
            int envtPassed = getNbInstanceForGroup(true, tc.getPassedTests());
            int envtFailed = getNbInstanceForGroup(true, failedTests.get(tc.getName()));
            int envtSkipped = getNbInstanceForGroup(true, skippedTests.get(tc.getName()));
            envtFailed += getNbInstanceForGroup(true, tc.getFailedConfigurations());
            envtSkipped += getNbInstanceForGroup(true, tc.getSkippedConfigurations());
            // Regular test counts.
            int testPassed = getNbInstanceForGroup(false, tc.getPassedTests());
            int testFailed = getNbInstanceForGroup(false, failedTests.get(tc.getName()));
            int testSkipped = getNbInstanceForGroup(false, skippedTests.get(tc.getName()));
            String envtHtml = generateHTML(tc, true, suite, tc);
            String testHtml = generateHTML(tc, false, suite, tc);
            // CAL collection is disabled; the count stays empty.
            StringBuffer calCount = new StringBuffer();
            StringBuffer globalErrorTabs = new StringBuffer();
            StringBuffer globalErrorHtmls = new StringBuffer();
            generateGlobalErrorHTML(tc, suite, globalErrorTabs, globalErrorHtmls);
            generateReportDetailsContainer(tc.getName(), envtPassed, envtFailed, envtSkipped, testPassed, testFailed, testSkipped, envtHtml,
                    testHtml, calCount, globalErrorTabs.toString(), globalErrorHtmls.toString());
        }
    }
    m_out.println("</div>");
}
/**
 * Writes the summary table (report.part.summary.html): per-context method and
 * instance counts plus a grand total and the total run time.
 *
 * NOTE(review): failed-instance counting depends on whether retry handling
 * ran for the context (only the non-retry branch adds envt configuration
 * failures) — confirm this asymmetry is intentional.
 *
 * @param suites    executed suites
 * @param suiteName display name for the summary header
 */
public void generateSuiteSummaryReport(List<ISuite> suites, String suiteName) {
NumberFormat formatter = new DecimalFormat("#,##0.0");
// int qty_tests = 0;
int qty_method = 0;
//int qty_pass_m = 0;
int qty_pass_s = 0;
int qty_skip = 0;
int qty_fail = 0;
long time_start = Long.MAX_VALUE;
long time_end = Long.MIN_VALUE;
List<MiniTestResult> tests2 = new ArrayList<MiniTestResult>();
for (ISuite suite : suites) {
Map<String, ISuiteResult> tests = suite.getResults();
for (ISuiteResult r : tests.values()) {
// qty_tests += 1;
ITestContext overview = r.getTestContext();
// Context name doubles as anchor id: make it id-safe.
MiniTestResult mini = new MiniTestResult(overview.getName().replace(' ', '_').replace('(', '_').replace(')', '_'));
int q = getMethodSet(overview.getPassedTests()).size();
//qty_pass_m += q;
q = overview.getAllTestMethods().length;
qty_method += q;
mini.setTotalMethod(q);
q = overview.getPassedTests().size();
qty_pass_s += q;
mini.setInstancesPassed(q);
q = skippedTests.get(overview.getName()).size();//getMethodSet(skippedTests.get(overview.getName())).size();
qty_skip += q;
mini.setInstancesSkipped(q);
// With retry handling the curated failed map is authoritative;
// otherwise envt configuration failures are added on top.
if(isRetryHandleNeeded.get(overview.getName()))
q = failedTests.get(overview.getName()).size() ;
else
q = failedTests.get(overview.getName()).size() + getNbInstanceForGroup(true, overview.getFailedConfigurations());
qty_fail += q;
mini.setInstancesFailed(q);
// Track the overall wall-clock window across all contexts.
time_start = Math.min(overview.getStartDate().getTime(), time_start);
time_end = Math.max(overview.getEndDate().getTime(), time_end);
tests2.add(mini);
}
}
MiniTestResult total = new MiniTestResult("total");
total.setTotalMethod(qty_method);
total.setInstancesPassed(qty_pass_s);
total.setInstancesFailed(qty_fail);
total.setInstancesSkipped(qty_skip);
try {
// BufferedWriter out = new BufferedWriter(new FileWriter(path));
/* first, get and initialize an engine */
VelocityEngine ve = new VelocityEngine();
ve.setProperty("resource.loader", "class");
ve.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
ve.init();
Template t = ve.getTemplate("/templates/report.part.summary.html");
VelocityContext context = new VelocityContext();
context.put("suiteName", suiteName);
context.put("totalRunTime", formatter.format((time_end - time_start) / 1000.) + " sec");
context.put("tests", tests2);
context.put("total", total);
StringWriter writer = new StringWriter();
t.merge(context, writer);
m_out.write(writer.toString());
} catch (Exception e) {
logger.error(e.getMessage());
}
}
/**
 * Appends a collapsible stack-trace section for the throwable, then recurses
 * into the cause chain via generateExceptionReport.
 *
 * @param exception     throwable to render
 * @param method        TestNG method the throwable belongs to
 * @param title         escaped and shown next to the exception class name
 * @param contentBuffer buffer receiving the HTML fragment
 * @param lastline      unused here; kept for signature symmetry
 */
protected void generateTheStackTrace(Throwable exception, ITestNGMethod method, String title, StringBuffer contentBuffer, String lastline) {
    //contentBuffer.append("<div><table><tr bgcolor=\"yellow\"><td>Last Step Screencapture " + lastline + "<td></tr></table></div>");// Jerry
    contentBuffer.append(" <div class='stContainer' >" + exception.getClass() + ":" + escape(title)
            + "(<a href='javascript:void(0);' class='exceptionlnk'>stacktrace</a>)");
    contentBuffer.append("<div class='exception' style='display:none'>");
    // Guard against a throwable that reports itself as its own cause.
    Throwable cause = exception.getCause();
    if (cause == exception) {
        cause = null;
    }
    StackTraceElement[] frames = exception.getStackTrace();
    for (int i = 0; i < frames.length; i++) {
        contentBuffer.append((i > 0 ? "<br/>at " : "") + escape(frames[i].toString()));
    }
    if (cause != null) {
        generateExceptionReport(cause, method, "Caused by " + cause.getLocalizedMessage(), contentBuffer, "");// jerry
    }
    contentBuffer.append("</div></div>");
}
/**
 * Collects every method from every group of the suite into one set,
 * preserving first-seen iteration order and dropping duplicates.
 *
 * @param suite suite to inspect
 * @return all methods declared across the suite's groups
 */
protected Collection<ITestNGMethod> getAllMethods(ISuite suite) {
    Set<ITestNGMethod> all = new LinkedHashSet<ITestNGMethod>();
    // Iterate values directly: the old entrySet()+map.get(key) pattern did a
    // redundant lookup per group.
    for (Collection<ITestNGMethod> groupMethods : suite.getMethodsByGroups().values()) {
        all.addAll(groupMethods);
    }
    return all;
}
/**
 * Returns the array dimensionality of a class (0 for non-array types).
 *
 * @param cls class to inspect
 * @return number of array levels wrapping the component type
 */
protected int getDim(Class<?> cls) {
    int depth = 0;
    Class<?> current = cls;
    while (current.isArray()) {
        current = current.getComponentType();
        depth++;
    }
    return depth;
}
/**
 * Counts the methods of the result map that belong to the "envt" group
 * (case-insensitive); each method is counted at most once.
 *
 * @param map result map to scan
 * @return number of envt-group methods
 */
public int getEnvConfigTestsCount(IResultMap map) {
    int count = 0;
    for (ITestNGMethod tm : map.getAllMethods()) {
        String[] groups = tm.getGroups();
        if (groups == null) {
            continue;
        }
        for (String group : groups) {
            if ("envt".equalsIgnoreCase(group)) {
                count++;
                break; // count each method once, no matter how many groups match
            }
        }
    }
    return count;
}
/**
 * Looks up the first recorded result for a method, searching the failed,
 * then passed, then skipped result maps of the given context.
 *
 * @param ctx    context whose maps are searched
 * @param method method to look up
 * @return the first matching result, or null when none exists
 */
protected ITestResult getFailedOrSkippedResult(ITestContext ctx, ITestNGMethod method) {
    // Search order matters: failed wins over passed, which wins over skipped.
    List<ITestResult> matches = new LinkedList<ITestResult>();
    matches.addAll(failedTests.get(ctx.getName()).getResults(method));
    if (!matches.isEmpty()) {
        return matches.get(0);
    }
    matches.addAll(ctx.getPassedTests().getResults(method));
    if (!matches.isEmpty()) {
        return matches.get(0);
    }
    matches.addAll(skippedTests.get(ctx.getName()).getResults(method));
    if (!matches.isEmpty()) {
        return matches.get(0);
    }
    return null;
}
/**
 * Lazily creates (and caches) the QDox JavaDocBuilder used for javadoc
 * extraction, seeding it with the package's classpath resource directory and,
 * when present, the project's src/main/java source tree.
 *
 * NOTE(review): the cached builder is seeded from the first class requested
 * and lazy init is not synchronized — confirm the reporter runs
 * single-threaded.
 *
 * @param clz class whose package locates the source trees
 * @return the shared builder instance
 * @throws URISyntaxException when the classpath resource URL is malformed
 */
@SuppressWarnings("rawtypes")
protected JavaDocBuilder getJavaDocBuilder(Class clz) throws URISyntaxException {
    String projectPath = new File("").getAbsolutePath();
    String packagePath = clz.getPackage().getName().replaceAll("\\.", "/");
    if (builder == null) {
        builder = new JavaDocBuilder();
        URL resource = Thread.currentThread().getContextClassLoader().getResource(packagePath);
        // ROBUSTNESS: getResource() can return null (e.g. classes loaded from
        // a jar without directory entries); previously that caused an NPE.
        if (resource != null) {
            builder.addSourceTree(new File(resource.toURI()));
        }
        // Also pick up the project source folder when running from a checkout.
        File realFolder = new File(projectPath + "/src/main/java/" + packagePath);
        if (realFolder.exists()) {
            builder.addSourceTree(realFolder);
        }
    }
    return builder;
}
/**
 * Extracts the javadoc comment of a test method via QDox, matching the
 * method by name plus its parameter signature.
 *
 * @param method the TestNG method to document
 * @return the javadoc comment, or null when it cannot be resolved
 */
protected String getJavadocComments(ITestNGMethod method) {
    try {
        Method m = method.getMethod();
        String javaClass = m.getDeclaringClass().getName();
        String javaMethod = m.getName();
        JavaClass jc = getJavaDocBuilder(m.getDeclaringClass()).getClassByName(javaClass);
        // Translate reflective parameter types into QDox Type descriptors
        // (component type name + array dimensionality).
        Class<?>[] types = method.getMethod().getParameterTypes();
        Type[] qdoxTypes = new Type[types.length];
        for (int i = 0; i < types.length; i++) {
            qdoxTypes[i] = new Type(getType(types[i]), getDim(types[i]));
        }
        JavaMethod jm = jc.getMethodBySignature(javaMethod, qdoxTypes);
        return jm.getComment();
    } catch (Throwable e) {
        // BUGFIX: the old message concatenated the throwable straight onto the
        // method name with no separator; log the throwable explicitly instead
        // (same two-arg logger form used elsewhere in this class).
        logger.error("error loading the javadoc comments for : " + method.getMethodName(), e);
        return null;
    }
}
/**
 * Returns the methods of a result map, deduplicated and ordered by
 * {@code TestMethodSorter}.
 *
 * @param tests result map to read
 * @return sorted set of the map's methods
 */
protected Collection<ITestNGMethod> getMethodSet(IResultMap tests) {
    Set<ITestNGMethod> sorted = new TreeSet<ITestNGMethod>(new TestMethodSorter<ITestNGMethod>());
    sorted.addAll(tests.getAllMethods());
    return sorted;
}
/**
 * Counts result instances whose "envt" group membership matches the flag.
 *
 * @param envt  true to count environment-group results, false for the rest
 * @param tests result map to scan
 * @return number of matching result instances
 */
protected int getNbInstanceForGroup(boolean envt, IResultMap tests) {
    int count = 0;
    for (ITestResult result : tests.getAllResults()) {
        boolean isEnvtResult = Arrays.asList(result.getMethod().getGroups()).contains("envt");
        // Count the result when its membership agrees with the requested side.
        if (isEnvtResult == envt) {
            count++;
        }
    }
    return count;
}
/** @return the root directory report artifacts are written under */
public String getOutputDirectory() {
return outputDirectory;
}
/** @return the generated report file location */
public File getReportLocation() {
return report;
}
/** @return the directory static report resources are copied to */
public String getResources() {
return resources;
}
/**
 * Collects every result whose method name matches the given method's name
 * (name-based so parameterized instances are grouped together), ordered by
 * {@code TestResultSorter}.
 *
 * @param tests  result map to scan
 * @param method method whose results are wanted
 * @return sorted set of matching results
 */
protected Collection<ITestResult> getResultSet(IResultMap tests, ITestNGMethod method) {
    Set<ITestResult> sorted = new TreeSet<ITestResult>(new TestResultSorter<ITestResult>());
    String wanted = method.getMethodName();
    for (ITestResult candidate : tests.getAllResults()) {
        if (candidate.getMethod().getMethodName().equals(wanted)) {
            sorted.add(candidate);
        }
    }
    return sorted;
}
/**
 * Resolves a TestNG method from its (possibly fully-qualified) string form.
 * TestNG 6.3 reports skipped-dependency methods without the package prefix,
 * so matching is done on the trailing "Class.method" segment only.
 *
 * @param ctx    context whose methods are searched
 * @param method method reference string as reported by TestNG
 * @return the matching method
 * @throws RuntimeException when no method of the context matches
 */
protected ITestNGMethod getTestNGMethod(ITestContext ctx, String method) {
    // Keep only the last two dotted segments: "Class.method...".
    int index = method.substring(0, method.lastIndexOf(".")).lastIndexOf(".");
    String localMethod = method.substring(index + 1);
    for (ITestNGMethod candidate : ctx.getAllTestMethods()) {
        if (candidate.toString().startsWith(localMethod)) {
            return candidate;
        }
    }
    throw new RuntimeException("method " + method + " not found. Should not happen. Suite " + ctx.getName());
}
/**
 * Returns the fully-qualified name of a class's ultimate component type,
 * unwrapping any array nesting.
 *
 * @param cls class to inspect
 * @return name of the non-array component type
 */
protected String getType(Class<?> cls) {
    Class<?> component = cls;
    while (component.isArray()) {
        component = component.getComponentType();
    }
    return component.getName();
}
/** @return true when the method depends on at least one group or method */
protected boolean hasDependencies(ITestNGMethod method) {
    return method.getGroupsDependedUpon().length > 0 || method.getMethodsDependedUpon().length > 0;
}
/**
 * Resets the methods-by-group cache as a side effect.
 *
 * NOTE(review): the return value is always null and the declared Map return
 * type is misleading -- consider making this void; confirm no caller relies
 * on the null return before changing the signature.
 */
protected Map<String, ITestResult> initMethodsByGroup() {
methodsByGroup = new HashMap<String, ITestResult>();
return null;
}
/**
 * Invoked by TestNG when a test context finishes. When retry handling was
 * triggered for this context, reconciles the skipped/failed maps (retry
 * attempts are reported by TestNG as skips) and prunes TestNG's own failed
 * map; otherwise simply snapshots the context's failed and skipped tests.
 * The large commented block below is disabled CAL event collection.
 */
public void onFinish(final ITestContext arg0) {
// runEnd = Calendar.getInstance().getTime();
//
// // Thread soaThread = new Thread(){@Override
// // public void run() {
// // if(Context.isCalCollectionEnabled()){
// // soaReporter.onFinish(arg0);
// // }
// // }};
// // soaThread.start();
//
// if (Context.isCalCollectionEnabled()) {
// logger.info("Collecting CAL Events...");
//
// Calendar counter = Calendar.getInstance();
// counter.setTime(runBegin);
// CalEventCollector cal = new CalEventCollector(Context.getRunId());
// ArrayList<CALEvent> events = new ArrayList<CALEvent>();
//
// try {
// Thread.sleep(10000);
// } catch (InterruptedException e1) {
//
// }
//
// while (counter.before(runEnd)) {
//
// events.addAll(cal.collect(Context.getPool().toLowerCase(), counter));
// counter.add(Calendar.HOUR, 1);
// }
//
// events.addAll(cal.collect(Context.getPool().toLowerCase(), counter));
//
// // Sort events into HashMap using cmd id as key
// if (!events.isEmpty()) {
// for (CALEvent e : events) {
// if (calEvent.containsKey(e.getCmd())) {
// calEvent.get(e.getCmd()).add(e);
// } else {
// ArrayList<CALEvent> a = new ArrayList<CALEvent>();
// a.add(e);
// calEvent.put(e.getCmd(), a);
// }
// }
// }
// logger.info("Completed Collecting CAL Events.");
// }
// // if(Context.isCalCollectionEnabled()){
// // try {
// // soaThread.join();
// // } catch (InterruptedException e) {
// // }
// //
// // }
if(isRetryHandleNeeded.get(arg0.getName()))
{
// Retries occurred: drop retry-generated skips, then sync TestNG's failures.
removeIncorrectlySkippedTests(arg0, failedTests.get(arg0.getName()));
removeFailedTestsInTestNG(arg0);
}else
{
// No retries: cache the context's raw failed/skipped maps as-is.
failedTests.put(arg0.getName(), arg0.getFailedTests());
skippedTests.put(arg0.getName(), arg0.getSkippedTests());
}
}
/**
 * Initializes per-context bookkeeping before a test context runs: retry
 * handling is off and the failed/skipped result maps start empty.
 */
public void onStart(ITestContext arg0) {
    String contextName = arg0.getName();
    isRetryHandleNeeded.put(contextName, false);
    failedTests.put(contextName, new ResultMap());
    skippedTests.put(contextName, new ResultMap());
}
/**
 * Intentionally a no-op: failed-but-within-success-percentage results are
 * read later from the context, not tracked per event.
 */
public void onTestFailedButWithinSuccessPercentage(ITestResult arg0) {
// if(Context.isCalCollectionEnabled()){
// soaReporter.onTestFailedButWithinSuccessPercentage(arg0);
// }
}
/**
 * Handles a test failure. When the method carries a TestRetryAnalyzer,
 * failures within the retry budget are downgraded to SKIP so TestNG retries
 * them; only the final failure is recorded in the curated failedTests map.
 * Synchronized because TestNG may deliver failures from multiple threads.
 */
public synchronized void onTestFailure(ITestResult arg0) {
    if (arg0.getMethod().getRetryAnalyzer() != null) {
        TestRetryAnalyzer testRetryAnalyzer = (TestRetryAnalyzer) arg0.getMethod().getRetryAnalyzer();
        if (testRetryAnalyzer.getCount() <= testRetryAnalyzer.getMaxCount()) {
            // Retry budget left: mark as skipped so the retry can run.
            arg0.setStatus(ITestResult.SKIP);
            Reporter.setCurrentTestResult(null);
        } else {
            // Out of retries: record the genuine failure for this context.
            IResultMap rMap = failedTests.get(arg0.getTestContext().getName());
            rMap.addResult(arg0, arg0.getMethod());
            failedTests.put(arg0.getTestContext().getName(), rMap);
        }
        // CONSISTENCY: log through the class logger instead of System.out,
        // matching the rest of this reporter.
        logger.info(arg0.getMethod() + " Failed in " + testRetryAnalyzer.getCount() + " times");
        isRetryHandleNeeded.put(arg0.getTestContext().getName(), true);
    }
    // if(Context.isCalCollectionEnabled()){
    // soaReporter.onTestFailure(arg0);
    // }
}
/** Intentionally a no-op: skipped tests are reconciled later in onFinish. */
public void onTestSkipped(ITestResult arg0) {
// if(Context.isCalCollectionEnabled()){
// soaReporter.onTestSkipped(arg0);
// }
}
/** Intentionally a no-op: no per-test-start bookkeeping is required. */
public void onTestStart(ITestResult arg0) {
// if(Context.isCalCollectionEnabled()){
// soaReporter.onTestStart(arg0);
// }
}
/**
 * Intentionally a no-op: successful results are read from the context in
 * onFinish; the commented block below is disabled retry diagnostics.
 */
public void onTestSuccess(ITestResult arg0) {
// if(Context.isCalCollectionEnabled()){
// soaReporter.onTestSuccess(arg0);
// }
/* if (arg0.getMethod().getRetryAnalyzer() != null) {
TestRetryAnalyzer testRetryAnalyzer = (TestRetryAnalyzer) arg0.getMethod().getRetryAnalyzer();
System.out.println(arg0.getMethod()+" Passed in "+testRetryAnalyzer.getCount()+" times");
isRetryHandleNeeded = true;
}*/
}
/**
 * Removes from TestNG's own failed-test map any result that this reporter's
 * retry handling no longer considers a failure, so TestNG's totals agree
 * with the report. Results are matched by method plus end timestamp.
 *
 * @param tc the test context whose failed map is pruned
 */
private void removeFailedTestsInTestNG(ITestContext tc)
{
    IResultMap returnValue = tc.getFailedTests();
    ResultMap removeMap = new ResultMap();
    // Pass 1: collect every TestNG-failed result that is absent from our
    // curated failedTests map (i.e. it was superseded by a retry).
    for (ITestResult result : returnValue.getAllResults())
    {
        boolean isFailed = false;
        for (ITestResult resultToCheck : failedTests.get(tc.getName()).getAllResults())
        {
            if (result.getMethod().equals(resultToCheck.getMethod()) && result.getEndMillis() == resultToCheck.getEndMillis())
            {
                isFailed = true;
                break;
            }
        }
        if (!isFailed)
        {
            // CONSISTENCY: log through the class logger instead of System.out
            // (note the commented-out logger.info the println had replaced).
            logger.info("Removed failed cases:" + result.getMethod().getMethodName());
            removeMap.addResult(result, result.getMethod());
        }
    }
    // Pass 2: drop the collected results from TestNG's failed map, matching
    // again by method + end timestamp.
    for (ITestResult result : removeMap.getAllResults())
    {
        ITestResult removeResult = null;
        for (ITestResult resultToCheck : returnValue.getAllResults())
        {
            if (result.getMethod().equals(resultToCheck.getMethod()) && result.getEndMillis() == resultToCheck.getEndMillis())
            {
                removeResult = resultToCheck;
                break;
            }
        }
        if (removeResult != null)
        {
            returnValue.getAllResults().remove(removeResult);
        }
    }
}
/**
 * Removes from the context's skipped-test map those results that correspond
 * to retried methods which ultimately failed (present in {@code map}) or
 * passed — TestNG reports retry attempts as skips. The cleaned skipped map
 * is then cached under the context name.
 *
 * @param tc  the test context whose skipped tests are cleaned
 * @param map the curated failed-test results for this context
 */
private void removeIncorrectlySkippedTests(ITestContext tc, IResultMap map)
{
List<ITestNGMethod> failsToRemove = new ArrayList<ITestNGMethod>();
IResultMap returnValue = tc.getSkippedTests();
for(ITestResult result : returnValue.getAllResults())
{
// A "skip" matching a recorded failure is really a retry attempt.
for(ITestResult resultToCheck : map.getAllResults())
{
if(resultToCheck.getMethod().equals(result.getMethod()))
{
failsToRemove.add(resultToCheck.getMethod());
break;
}
}
// Likewise a "skip" for a method that eventually passed.
for(ITestResult resultToCheck : tc.getPassedTests().getAllResults())
{
if(resultToCheck.getMethod().equals(result.getMethod()))
{
failsToRemove.add(resultToCheck.getMethod());
break;
}
}
}
for(ITestNGMethod method : failsToRemove)
{
returnValue.removeResult(method);
}
// Cache the now-cleaned skipped map for this context.
skippedTests.put(tc.getName(), tc.getSkippedTests());
}
/** @param outtimestamped timestamped root directory for report artifacts */
public void setOutputDirectory(String outtimestamped) {
this.outputDirectory = outtimestamped;
}
/** @param uuid unique identifier associated with this report run */
public void setReportId(String uuid) {
this.uuid = uuid;
}
/** @param resources directory static report resources are copied to */
public void setResources(String resources) {
this.resources = resources;
}
/**
 * Writes the HTML report header (report.part.header.html) to the writer:
 * user/date stamp, grid hub link, run mode, and the tag-group list
 * ("envt,test,cal" plus one entry per result-affecting page listener).
 *
 * @param ctx test context (currently unused by the template fill)
 * @param out report output stream
 */
protected void startHtml(ITestContext ctx, PrintWriter out) {
try {
// BufferedWriter out = new BufferedWriter(new FileWriter(path));
/* first, get and initialize an engine */
VelocityEngine ve = new VelocityEngine();
ve.setProperty("resource.loader", "class");
ve.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
ve.init();
Template t = ve.getTemplate("/templates/report.part.header.html");
VelocityContext context = new VelocityContext();
String userName = System.getProperty("user.name");
context.put("userName", userName);
context.put("currentDate", new Date().toString());
// context.put("runId", (String)
// ContextManager.getGlobalContext().getAttribute(Context.RUN_ID));
// context.put("pool", poolInfo);
// context.put("pool", (String) ContextManager.getGlobalContext().getPool());
// context.put("apipool", (String)
// ContextManager.getGlobalContext().getAttribute(Context.API_POOL));
// context.put("buildTag", build);
String mode = ContextManager.getGlobalContext().getWebRunMode();
String hubUrl = ContextManager.getGlobalContext().getWebDriverGrid();
//context.put("gridHub", "<a href='" + hubUrl + "' target=hub>" + (null == hubUrl? null : new URL(hubUrl).getHost()) + "</a>");
context.put("gridHub", "<a href='" + hubUrl + "' target=hub>" + hubUrl + "</a>");
context.put("mode",mode);
// Tag filter groups: the fixed three plus one per page listener that
// affects test results.
StringBuffer sbGroups = new StringBuffer();
sbGroups.append("envt,test,cal");
List<AbstractPageListener> pageListenerList = PluginsUtil.getInstance().getPageListeners();
if (pageListenerList != null && !pageListenerList.isEmpty()) {
for (AbstractPageListener abstractPageListener : pageListenerList) {
sbGroups.append(",").append(abstractPageListener.getClass().getSimpleName());
}
}
context.put("groups", sbGroups.toString());
StringWriter writer = new StringWriter();
t.merge(context, writer);
out.write(writer.toString());
} catch (Exception e) {
logger.error(e.getMessage());
}
}
/**
 * Renders, as a nested mktree list, everything the given method directly depends on:
 * first the methods named in dependsOnMethods, then every method of each group named
 * in dependsOnGroups. Recurses for dependencies that themselves have dependencies.
 *
 * @param suite  suite used to resolve group members
 * @param method method whose dependencies are rendered
 * @param indent current nesting depth (0 = root; emits the expand/collapse links and tree container)
 * @param ctx    test context used to resolve each dependency's result/status
 * @param treeId DOM id for the tree container (unique per rendered tree)
 * @param res    buffer the HTML is appended to
 */
protected void takeCareOfDirectDependencies(ISuite suite, ITestNGMethod method, int indent, ITestContext ctx, String treeId, StringBuffer res) {
    if (indent == 0) {
        res.append("<a href=\"#\" onclick=\"expandTree('" + treeId + "'); return false;\">Expand All</a> ");
        res.append("<a href=\"#\" onclick=\"collapseTree('" + treeId + "'); return false;\">Collapse All</a>");
        res.append("<ul class=\"mktree\" id=\"" + treeId + "\">");
    }
    // Direct method dependencies.
    String[] dependedMethods = method.getMethodsDependedUpon();
    for (int i = 0; i < dependedMethods.length; i++) {
        ITestNGMethod m = getTestNGMethod(ctx, dependedMethods[i]);
        appendDependencyNode(suite, m, indent, ctx, treeId, res);
    }
    // Group dependencies: render every method belonging to each depended-upon group.
    for (int i = 0; i < method.getGroupsDependedUpon().length; i++) {
        if (methodsByGroup == null) {
            methodsByGroup = initMethodsByGroup(); // lazy init, done once
        }
        String dependentGroup = method.getGroupsDependedUpon()[i];
        Set<ITestNGMethod> methods = new LinkedHashSet<ITestNGMethod>();
        Collection<ITestNGMethod> c = suite.getMethodsByGroups().get(dependentGroup);
        if (c != null) {
            methods.addAll(c);
        }
        res.append("<li><u>Group " + dependentGroup + "</u>");
        res.append("<ul>");
        for (ITestNGMethod m : methods) {
            appendDependencyNode(suite, m, indent, ctx, treeId, res);
        }
        res.append("</ul>");
        res.append("</li>");
    }
    if (indent == 0) {
        res.append("</ul>");
    }
}

/**
 * Appends one {@code <li>} tree node for a dependency method (status icon + method name),
 * recursing into its own dependencies when it has any. Extracted from the two
 * identical copies that used to live inline in takeCareOfDirectDependencies.
 */
private void appendDependencyNode(ISuite suite, ITestNGMethod m, int indent, ITestContext ctx, String treeId, StringBuffer res) {
    StringBuilder padding = new StringBuilder();
    for (int j = 0; j < indent; j++) {
        padding.append('\t');
    }
    String pad = padding.toString();
    String img = "<img src=\"" + m_root + "/test" + getFailedOrSkippedResult(ctx, m).getStatus() + ".gif" + "\"/>";
    res.append(pad + "<li>" + img + m);
    if (hasDependencies(m)) {
        res.append(pad + "<ul>");
        takeCareOfDirectDependencies(suite, m, indent + 1, ctx, treeId, res);
        res.append(pad + "</ul>");
    }
    res.append("</li>");
}
} | src/main/java/com/seleniumtests/reporter/HTMLReporter.java | package com.seleniumtests.reporter;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Method;
import java.net.URISyntaxException;
import java.net.URL;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.testng.IInvokedMethod;
import org.testng.IInvokedMethodListener;
import org.testng.IReporter;
import org.testng.IResultMap;
import org.testng.ISuite;
import org.testng.ISuiteResult;
import org.testng.ITestContext;
import org.testng.ITestListener;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.Reporter;
import org.testng.internal.ResultMap;
import org.testng.internal.TestResult;
import org.testng.internal.Utils;
import org.testng.xml.XmlSuite;
import com.seleniumtests.controller.AbstractPageListener;
import com.seleniumtests.controller.Assertion;
import com.seleniumtests.controller.Context;
import com.seleniumtests.controller.ContextManager;
import com.seleniumtests.controller.Logging;
import com.seleniumtests.controller.TestRetryAnalyzer;
import com.seleniumtests.driver.web.ScreenShot;
import com.seleniumtests.helper.StringHelper;
import com.thoughtworks.qdox.JavaDocBuilder;
import com.thoughtworks.qdox.model.JavaClass;
import com.thoughtworks.qdox.model.JavaMethod;
import com.thoughtworks.qdox.model.Type;
@SuppressWarnings("deprecation")
public class HTMLReporter implements IReporter, ITestListener,IInvokedMethodListener {
// ~ Inner Classes --------------------------------------------------------
/** Orders test methods by declaring class name, then by method name. */
protected class TestMethodSorter<T extends ITestNGMethod> implements Comparator<T> {
    /** Compares by class name first, breaking ties with the method name. */
    public int compare(T o1, T o2) {
        // The (T) casts that used to appear here were redundant: o1/o2 already have type T.
        int r = o1.getTestClass().getName().compareTo(o2.getTestClass().getName());
        if (r == 0) {
            r = o1.getMethodName().compareTo(o2.getMethodName());
        }
        return r;
    }
}
/** Orders test results by the full signature of their underlying test method. */
protected class TestResultSorter<T extends ITestResult> implements Comparator<T> {
    /** Compares the constructed method signatures lexicographically. */
    public int compare(T first, T second) {
        String firstSignature = StringHelper.constructMethodSignature(first.getMethod().getMethod(), first.getParameters());
        String secondSignature = StringHelper.constructMethodSignature(second.getMethod().getMethod(), second.getParameters());
        return firstSignature.compareTo(secondSignature);
    }
}
// Shared log4j logger obtained through the project's Logging wrapper.
private static Logger logger = Logging.getLogger(HTMLReporter.class);
/**
 * Prepares a message for HTML output by turning newlines into {@code <br/>} tags.
 * Angle brackets are intentionally left untouched; only line breaks are converted.
 *
 * @param string raw text, may be null
 * @return the text with every "\n" replaced by "<br/>", or null when the input is null
 */
protected static String escape(String string) {
    if (string == null) {
        return null;
    }
    return string.replaceAll("\n", "<br/>");
}
/** Simple manual check: prints the current operating system name. */
public static void main(String[] args) {
    System.out.println(System.getProperty("os.name"));
}
/**
 * Copies a classpath resource to the given file.
 * When the resource cannot be found, an error is logged and nothing is written.
 *
 * @param file destination file, overwritten if it already exists
 * @param resourceName classpath-relative resource name (without a leading slash)
 * @param clasz class whose loader is used to locate the resource
 * @throws IOException if reading the resource or writing the file fails
 */
public static void writeResourceToFile(File file, String resourceName, Class<?> clasz) throws IOException {
    InputStream source = clasz.getResourceAsStream("/" + resourceName);
    if (source == null) {
        logger.error("Couldn't find resource on the class path: " + resourceName);
        return;
    }
    try {
        FileOutputStream sink = new FileOutputStream(file);
        try {
            byte[] chunk = new byte[4096];
            int count = source.read(chunk);
            while (count > 0) {
                sink.write(chunk, 0, count);
                count = source.read(chunk);
            }
        } finally {
            // Close the destination before the source, mirroring the original nesting.
            sink.close();
        }
    } finally {
        source.close();
    }
}
// Whether retry handling applies; keys match the entries of failedTests/skippedTests.
private Map<String, Boolean> isRetryHandleNeeded = new HashMap<String, Boolean>();
// Result maps keyed by test-context name (see failedTests.get(tc.getName()) in generateHTML).
private Map<String, IResultMap> failedTests = new HashMap<String, IResultMap>();
private Map<String, IResultMap> skippedTests = new HashMap<String, IResultMap>();
// ~ Instance fields ------------------------------------------------------
// Relative path to the mktree status images referenced from the generated HTML.
private String m_root = "resources/images/mktree/";
protected PrintWriter m_out;
// Report id derived from the generation timestamp; sanitized in createWriter().
private String uuid = new GregorianCalendar().getTime().toString();
// Counter used to give each rendered dependency tree a unique DOM id.
private int m_treeId = 0;
// Directory the report and its resources are written to (see setOutputDirectory).
private String outputDirectory;
private String resources;
private JavaDocBuilder builder = null;
// The generated report file; set by createWriter().
private File report;
// static{
// soaReporter = new SOAReporter();
// }
// ~ Methods --------------------------------------------------------------
// Remove enforced hook. TO enable SOAReport, use <listener> in testng.xml
// private static SOAReporter soaReporter = null;
// private HashMap<String, List<CALEvent>> calEvent = new HashMap<String,
// List<CALEvent>>();
// Lazily initialized in takeCareOfDirectDependencies via initMethodsByGroup().
Map<String, ITestResult> methodsByGroup = null;
/** Adds every result from the given map into the accumulating set; tolerates a null map. */
private void addAllTestResults(Set<ITestResult> testResults, IResultMap resultMap) {
    if (resultMap == null) {
        return;
    }
    testResults.addAll(resultMap.getAllResults());
}
/**
 * TestNG hook run after each invocation. Two responsibilities:
 * 1) for failed tests, attaches the screenshot taken at exception time to the report log;
 * 2) for test methods, promotes accumulated soft-assertion (verification) failures
 *    into a real test failure with a merged Throwable.
 */
public void afterInvocation(IInvokedMethod method, ITestResult result) {
    Reporter.setCurrentTestResult(result);
    // Guard getThreadContext() before dereferencing it; previously
    // getExceptionScreenShot() was called before the null check.
    ScreenShot screenShot = null;
    if (ContextManager.getThreadContext() != null) {
        screenShot = ContextManager.getThreadContext().getExceptionScreenShot();
    }
    // Attach the exception-time screenshot, but only for failed test cases.
    if (!result.isSuccess() && screenShot != null) {
        Logging.log("<div><table><tr bgcolor=\"yellow\"><td><b>-- Screenshot of current web page with webdriver exception --</b><td></tr></table></div>");
        Logging.logWebOutput(screenShot.getTitle(), Logging.buildScreenshotLog(screenShot), true);
    }
    // Soft assertions: fold collected verification failures into the result.
    if (method.isTestMethod()) {
        List<Throwable> verificationFailures = Assertion.getVerificationFailures();
        int size = verificationFailures.size();
        if (size == 0) {
            return;
        }
        if (result.getStatus() == TestResult.FAILURE) {
            // Already failed with a hard assertion; keep the original throwable.
            return;
        }
        result.setStatus(TestResult.FAILURE);
        if (size == 1) {
            result.setThrowable(verificationFailures.get(0));
        } else {
            // Merge all failures (full stack traces for all but the last) into one message.
            // The separators used to be the literal characters "nn"/"n" -- lost "\n"
            // escapes, restored here to real newlines.
            StringBuffer failureMessage = new StringBuffer("Multiple failures (").append(size).append("):\n\n");
            for (int i = 0; i < size - 1; i++) {
                failureMessage.append("Failure ").append(i + 1).append(" of ").append(size).append(":\n");
                Throwable t = verificationFailures.get(i);
                String fullStackTrace = Utils.stackTrace(t, false)[1];
                failureMessage.append(fullStackTrace).append("\n\n");
            }
            Throwable last = verificationFailures.get(size - 1);
            failureMessage.append("Failure ").append(size).append(" of ").append(size).append(":\n");
            failureMessage.append(last.toString());
            // Preserve the last failure's stack trace on the merged throwable.
            Throwable merged = new Throwable(failureMessage.toString());
            merged.setStackTrace(last.getStackTrace());
            result.setThrowable(merged);
        }
    }
}
/** No-op: this listener only acts after invocation. */
public void beforeInvocation(IInvokedMethod arg0, ITestResult arg1) {}
/**
 * Creates the "resources" directory tree under the output directory and copies every
 * static report asset (css, images, js) into it from the classpath.
 *
 * @throws Exception if a resource cannot be copied
 */
protected void copyResources() throws Exception {
    String base = outputDirectory + File.separator + "resources";
    // Create the directory tree first; parents before children.
    new File(base).mkdir();
    new File(base + File.separator + "css").mkdir();
    new File(base + File.separator + "images").mkdir();
    new File(base + File.separator + "images" + File.separator + "lightbox").mkdir();
    new File(base + File.separator + "images" + File.separator + "mktree").mkdir();
    new File(base + File.separator + "images" + File.separator + "yukontoolbox").mkdir();
    new File(base + File.separator + "js").mkdir();
    // Common source-path prefixes (the on-classpath layout lives under "reporter/").
    String css = "reporter" + File.separator + "css" + File.separator;
    String lightbox = "reporter" + File.separator + "images" + File.separator + "lightbox" + File.separator;
    String mktree = "reporter" + File.separator + "images" + File.separator + "mktree" + File.separator;
    String yukon = "reporter" + File.separator + "images" + File.separator + "yukontoolbox" + File.separator;
    String js = "reporter" + File.separator + "js" + File.separator;
    // Renamed from "resources" to stop shadowing the field of the same name.
    List<String> resourceNames = new ArrayList<String>();
    resourceNames.add(css + "report.css");
    resourceNames.add(css + "jquery.lightbox-0.5.css");
    resourceNames.add(css + "mktree.css");
    resourceNames.add(lightbox + "lightbox-blank.gif");
    resourceNames.add(lightbox + "lightbox-btn-close.gif");
    resourceNames.add(lightbox + "lightbox-btn-next.gif");
    resourceNames.add(lightbox + "lightbox-btn-prev.gif");
    resourceNames.add(lightbox + "lightbox-ico-loading.gif");
    resourceNames.add(mktree + "bullet.gif");
    resourceNames.add(mktree + "minus.gif");
    resourceNames.add(mktree + "plus.gif");
    resourceNames.add(mktree + "test1.gif");
    resourceNames.add(mktree + "test2.gif");
    // test3.gif used to be listed twice; copying it once is sufficient.
    resourceNames.add(mktree + "test3.gif");
    resourceNames.add(yukon + "seleniumtests_footer_grad.gif");
    resourceNames.add(yukon + "seleniumtests_mid.gif");
    resourceNames.add(yukon + "seleniumtests_grey_bl.gif");
    resourceNames.add(yukon + "seleniumtests_grey_br.gif");
    resourceNames.add(yukon + "seleniumtests_hovertab_l.gif");
    resourceNames.add(yukon + "seleniumtests_hovertab_r.gif");
    resourceNames.add(yukon + "seleniumtests_tabbed_nav_goldgradbg.png");
    resourceNames.add(yukon + "seleniumtests_table_sep_left.gif");
    resourceNames.add(yukon + "seleniumtests_table_sep_right.gif");
    resourceNames.add(yukon + "seleniumtests_table_zebrastripe_left.gif");
    resourceNames.add(yukon + "seleniumtests_table_zebrastripe_right.gif");
    resourceNames.add(yukon + "seleniumtests_yellow_tl.gif");
    resourceNames.add(yukon + "seleniumtests_yellow_tr.gif");
    resourceNames.add(js + "jquery-1.3.min.js");
    resourceNames.add(js + "jquery.lightbox-0.5.min.js");
    resourceNames.add(js + "mktree.js");
    resourceNames.add(js + "report.js");
    resourceNames.add(js + "browserdetect.js");
    for (String resourceName : resourceNames) {
        // Destination mirrors the source layout, with "reporter" mapped to "resources".
        File f = new File(outputDirectory, resourceName.replace("reporter", "resources"));
        // Classpath lookups always use forward slashes, regardless of OS.
        resourceName = resourceName.replaceAll("\\\\", "/");
        logger.debug("about to write resource " + resourceName + " to the file " + f.getAbsolutePath());
        writeResourceToFile(f, resourceName, HTMLReporter.class);
    }
}
/**
 * Creates the UTF-8 writer for the HTML report file and remembers the file
 * in the {@code report} field.
 *
 * @param outdir directory the report file is created in
 * @return a PrintWriter over a buffered, explicitly UTF-8 encoded stream
 * @throws IOException if the file cannot be opened
 */
protected PrintWriter createWriter(String outdir) throws IOException {
    // NOTE(review): setting file.encoding at runtime does not change the default
    // charset of an already-running JVM; kept as-is to preserve existing behavior.
    System.setProperty("file.encoding", "UTF8");
    uuid = uuid.replaceAll(" ", "-").replaceAll(":", "-");
    File reportFile = new File(outdir, "SeleniumFrameworkTestReport-" + uuid + ".html");
    logger.info("generating report " + reportFile.getAbsolutePath());
    report = reportFile;
    // Use an explicit UTF-8 encoder to avoid garbled characters in the report.
    OutputStream stream = new FileOutputStream(reportFile);
    Writer buffered = new BufferedWriter(new OutputStreamWriter(stream, "utf-8"));
    return new PrintWriter(buffered);
}
/** Finishes the HTML stream by emitting the closing body/html tags. */
protected void endHtml(PrintWriter out) {
out.println("</body></html>");
}
/**
 * Opens the given URL in a browser using an OS-specific shell command.
 * Failures are logged and swallowed (opening the report is best-effort).
 *
 * @param browserPath path to the browser executable (used on Windows and non-Mac Unix)
 * @param theUrl URL to open
 */
public void executeCmd(String browserPath, String theUrl) {
    String cmdLine;
    String osName = System.getProperty("os.name");
    if (osName.startsWith("Windows")) {
        // ShellExec_RunDLL lets us launch the URL with a specific browser executable.
        // (A dead "start <url>" assignment that was immediately overwritten has been removed.)
        cmdLine = "rundll32 SHELL32.DLL,ShellExec_RunDLL " + browserPath + " " + theUrl;
    } else if (osName.startsWith("Mac")) {
        cmdLine = "open " + theUrl;
    } else {
        // Linux / other Unix.
        // NOTE(review): "open" is not a standard Linux command (xdg-open is the usual
        // equivalent) -- confirm before changing; deployments may provide an alias.
        cmdLine = "open " + browserPath + " " + theUrl;
    }
    try {
        // SECURITY: cmdLine is built by string concatenation; browserPath/theUrl must
        // come from trusted configuration, never from user input.
        Runtime.getRuntime().exec(cmdLine);
    } catch (Exception e) {
        logger.info(e);
    }
}
/** Appends a collapsible "Detail" section containing the given exception text. */
protected void generateCalErrorReport(String exception, StringBuffer contentBuffer) {
    contentBuffer.append(" <div class='stContainer' ><a href='javascript:void(0);' class='exceptionlnk'>Detail</a>")
                 .append("<div class='exception' style='display:none'>")
                 .append(exception)
                 .append("</div></div>");
}
/*
* protected String generateCALErrorHTML(ITestContext tc, ISuite suite,
* StringBuffer calcount) {
*
* StringBuffer res = new StringBuffer(); try { VelocityEngine ve = new
* VelocityEngine(); ve.setProperty("resource.loader", "class");
* ve.setProperty("class.resource.loader.class",
* "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
* ve.init(); generateCALErrorPanel(ve, res, "failed", suite, tc, calcount);
* } catch (Exception e) { logger.error(e.getMessage()); }
*
* return res.toString(); }
*/
/*
* private void generateCALErrorPanel(VelocityEngine ve, StringBuffer res,
* String style, ISuite suite, ITestContext ctx, StringBuffer sbCalcount) {
*
* Set<ITestResult> testResults = new HashSet<ITestResult>();
*
* addAllTestResults(testResults, ctx.getPassedTests());
* addAllTestResults(testResults, ctx.getFailedTests());
* addAllTestResults(testResults, ctx.getSkippedTests());
* addAllTestResults(testResults, ctx.getPassedConfigurations());
* addAllTestResults(testResults, ctx.getSkippedConfigurations());
* addAllTestResults(testResults, ctx.getFailedConfigurations());
* addAllTestResults(testResults,
* ctx.getFailedButWithinSuccessPercentageTests());
*
* int cal = 0;
*
* if (calEvent.isEmpty()) { res.append(
* "<div class='method passed'><div class='yuk_goldgrad_tl'><div class='yuk_goldgrad_tr'>"
* + "<div class='yuk_goldgrad_m'></div></div></div>" +
* "<h3 class='yuk_grad_ltitle_passed'>No CAL Errors found.</h3>" +
* "<div class='yuk_pnl_footerbar'></div>" +
* "<div class='yuk_grey_bm_footer'><div class='yuk_grey_br'>" +
* "<div class='yuk_grey_bl'></div></div></div></div>"); } else { try {
* StringBuffer contentBuffer = null;
*
* Iterator<Entry<String, List<CALEvent>>> i =
* calEvent.entrySet().iterator(); while (i.hasNext()) {
*
* Entry<String, List<CALEvent>> e = (Entry<String, List<CALEvent>>)
* i.next(); if (e.getKey() == null) continue;
*
* contentBuffer = new StringBuffer(); contentBuffer.append(
* "<div class='leftContent' style='float: left; width: 100%;'>");
*
* String cmd = (String) e.getKey(); List<CALEvent> events =
* (List<CALEvent>) e.getValue(); contentBuffer .append(
* "<table cellspacing=0 style='border-style:solid;border-width:1px'><thead><tr><th style='border-spacing:0px;border-style:solid;border-width:1px' >Event Type</th><th style='border-spacing:0px;border-style:solid;border-width:1px'>Event Name</th><th style='border-spacing:0px;border-style:solid;border-width:1px'>Test Case</th></tr></thead><tbody>"
* );
*
* Set<String> eventNameSet = new TreeSet<String>(); for (CALEvent event :
* events) { if (eventNameSet.contains(event.getName())) continue;
*
* eventNameSet.add(event.getName());
*
* contentBuffer.append(
* "<tr><td style='border-spacing:0px;border-style:none;border-width:1px' valign='top'><a href='"
* + event.getCALLinkURL() + "' target=cal>");
* contentBuffer.append(event.getType()); contentBuffer.append("</a>");
* contentBuffer.append(
* "</td><td style='border-spacing:0px;border-style:none;border-width:1px' valign='top'>"
* ); contentBuffer.append(event.getName());
* generateCalErrorReport(event.getPayload().replaceAll("(\\\\r|\\\\n)+",
* "\n").replaceAll("(\\\\t)+", " "), contentBuffer);
*
* contentBuffer.append(
* "</td><td style='border-spacing:0px;border-style:solid;border-width:1px' valign='top'>"
* );
*
* if (event.getTestCaseId() != null &&
* !"null".equalsIgnoreCase(event.getTestCaseId())) {
* contentBuffer.append(Context
* .getSignatureFromTestCaseId(event.getTestCaseId()));
* contentBuffer.append(" - "); contentBuffer.append(event.getTestCaseId());
* contentBuffer.append("</td></tr>"); } else
* contentBuffer.append(" </td></tr>");
*
* cal++; } contentBuffer.append("</tbody></table></div>"); // end of //
* leftContent contentBuffer.append("<div class='clear_both'></div>");
*
* Template t = ve.getTemplate("/templates/report.part.singleTest.html");
* VelocityContext context = new VelocityContext(); context.put("status",
* style); context.put("desc", ""); context.put("time", "");
* context.put("methodName", cmd); context.put("content",
* contentBuffer.toString()); StringWriter writer = new StringWriter();
* t.merge(context, writer); res.append(writer.toString()); }
*
* List<CALEvent> evt = calEvent.get(null); if (evt != null && evt.size() >
* 0) { contentBuffer = new StringBuffer(); contentBuffer.append(
* "<div class='leftContent' style='float: left; width: 100%;'>");
* contentBuffer .append(
* "<table cellspacing=0 style='border-style:solid;border-width:1px'><thead><tr><th style='border-spacing:0px;border-style:solid;border-width:1px' >Event Type</th><th style='border-spacing:0px;border-style:solid;border-width:1px'>Event Name</th><th style='border-spacing:0px;border-style:solid;border-width:1px'>Test Case</th></tr></thead><tbody>"
* );
*
* Set<String> eventNameSet = new TreeSet<String>(); for (CALEvent event :
* evt) { if (eventNameSet.contains(event.getName())) continue;
*
* eventNameSet.add(event.getName());
*
* contentBuffer.append(
* "<tr><td style='border-spacing:0px;border-style:solid;border-width:1px' valign='top'><a href='"
* + event.getCALLinkURL() + "' target=cal>");
* contentBuffer.append(event.getType()); contentBuffer.append("</a>");
* contentBuffer.append(
* "</td><td style='border-spacing:0px;border-style:solid;border-width:1px' valign='top'>"
* ); contentBuffer.append(event.getName());
* generateCalErrorReport(event.getPayload().replaceAll("(\\\\r|\\\\n)+",
* "\n").replaceAll("(\\\\t)+", " "), contentBuffer);
*
* contentBuffer.append(
* "</td><td style='border-spacing:0px;border-style:solid;border-width:1px' valign='top'>"
* );
*
* if (event.getTestCaseId() != null &&
* !"null".equalsIgnoreCase(event.getTestCaseId())) {
* contentBuffer.append(Context
* .getSignatureFromTestCaseId(event.getTestCaseId()));
* contentBuffer.append(" - "); contentBuffer.append(event.getTestCaseId());
* contentBuffer.append("</td></tr>"); } else
* contentBuffer.append(" </td></tr>");
*
* cal++; } contentBuffer.append("</tbody></table></div>"); // end of //
* leftContent contentBuffer.append("<div class='clear_both'></div>");
*
* Template t = ve.getTemplate("/templates/report.part.singleTest.html");
* VelocityContext context = new VelocityContext(); context.put("status",
* style); context.put("desc", ""); context.put("time", "");
* context.put("methodName", ""); context.put("content",
* contentBuffer.toString()); StringWriter writer = new StringWriter();
* t.merge(context, writer); res.append(writer.toString()); }
*
* } catch (Exception e) { e.printStackTrace();
* logger.error("error creating a cal log report for null test case id." +
* e.getMessage()); } } sbCalcount.append(cal); }
*/
/**
 * Renders an exception block with an explicit title by delegating to
 * generateTheStackTrace. {@code lastline} is the last relevant log line
 * (used downstream with the screenshot link -- see generatePanel).
 */
protected void generateExceptionReport(Throwable exception, ITestNGMethod method, String title, StringBuffer contentBuffer, String lastline) {
generateTheStackTrace(exception, method, title, contentBuffer, lastline);
}
/**
 * Renders an exception block, deriving the title from the exception's message and
 * falling back to the cause's message when the direct message is null.
 * Previously the null-cause case was handled by dereferencing getCause()
 * unconditionally and catching the resulting NullPointerException; replaced
 * with explicit null checks.
 */
protected void generateExceptionReport(Throwable exception, ITestNGMethod method, StringBuffer contentBuffer, String lastline) {
    String title = exception.getMessage();
    if (title == null) {
        Throwable cause = exception.getCause();
        title = (cause == null) ? null : cause.getMessage();
    }
    generateExceptionReport(exception, method, title, contentBuffer, lastline);
}
/**
 * Appends, for each registered page listener that affects test results, a report tab
 * (into errorCountTabs) and its matching error panel (into errorCountHtmls).
 * The error count shown in red inside the tab label is written by
 * generateGlobalErrorsPanel, which appends the page count into errorCountTabs
 * between the opening "<font color='red'>" and the closing "</font> )".
 */
protected void generateGlobalErrorHTML(ITestContext tc, ISuite suite, StringBuffer errorCountTabs, StringBuffer errorCountHtmls) {
try {
VelocityEngine ve = new VelocityEngine();
ve.setProperty("resource.loader", "class");
ve.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
ve.init();
List<AbstractPageListener> pageListenersList = PluginsUtil.getInstance().getPageListeners();
for (AbstractPageListener abstractPageListener : pageListenersList) {
// Listeners that do not affect test results get no tab of their own.
if (!abstractPageListener.isTestResultEffected()) continue;
errorCountTabs.append("<li class='tab' id='" + abstractPageListener.getClass().getSimpleName() + "'><a href='#'><span>")
.append(abstractPageListener.getTitle() != null ? abstractPageListener.getTitle() : abstractPageListener.getClass().getSimpleName())
.append(" ( <font color='red'>");
errorCountHtmls.append("<div class='" + abstractPageListener.getClass().getSimpleName() + "' style='width: 98%;margin-left:15px;'>");
// Appends the panel HTML into errorCountHtmls and the error count into errorCountTabs.
generateGlobalErrorsPanel(abstractPageListener, ve, errorCountHtmls, "failed", tc, errorCountTabs);
errorCountHtmls.append("</div>");
errorCountTabs.append("</font> )</span></a></li>");
}
} catch (Exception e) {
logger.error(e.getMessage());
}
}
/**
 * Renders the error panel for one page listener: for every page with logged errors,
 * a table mapping test-method signatures to their error messages, rendered through
 * the singlePageError template. The number of pages that actually had errors is
 * appended to sbCalcount (used as the tab's error count by the caller).
 */
private void generateGlobalErrorsPanel(AbstractPageListener abstractPageListener, VelocityEngine ve, StringBuffer res, String style, ITestContext tc,
StringBuffer sbCalcount) {
int pageCount = 0;
// Collect passed, failed and failed-within-success-percentage results for this context.
Set<ITestResult> testResults = new HashSet<ITestResult>();
addAllTestResults(testResults, tc.getPassedTests());
addAllTestResults(testResults, failedTests.get(tc.getName()));
addAllTestResults(testResults, tc.getFailedButWithinSuccessPercentageTests());
// Page name -> (method signature -> error messages) for this listener.
Map<String, Map<String, List<String>>> pageListenerLogMap = Logging.getPageListenerLog(abstractPageListener.getClass().getCanonicalName());
if (pageListenerLogMap == null || pageListenerLogMap.isEmpty()) {
res.append("<div class='method passed'><div class='yuk_goldgrad_tl'><div class='yuk_goldgrad_tr'>"
+ "<div class='yuk_goldgrad_m'></div></div></div>" + "<h3 class='yuk_grad_ltitle_passed'>No Errors found.</h3>"
+ "<div class='yuk_pnl_footerbar'></div>" + "<div class='yuk_grey_bm_footer'><div class='yuk_grey_br'>"
+ "<div class='yuk_grey_bl'></div></div></div></div>");
} else {
for (Entry<String, Map<String, List<String>>> pageEntry : pageListenerLogMap.entrySet()) {
StringBuffer contentBuffer = new StringBuffer();
contentBuffer.append("<table class='ex' width='90%'><thead><tr><th>TestMethod</th><th>Errors</th></thead><tbody>");
Map<String, List<String>> errorMap = pageEntry.getValue();
boolean found = false;
for (ITestResult testResult : testResults) {
Method method = testResult.getMethod().getMethod();
String methodInstance = StringHelper.constructMethodSignature(method, testResult.getParameters());
if (errorMap.containsKey(methodInstance)) {
found = true;
contentBuffer.append("<tr><td>" + methodInstance + "</td><td>");
for (String message : errorMap.get(methodInstance)) {
contentBuffer.append(message);
contentBuffer.append("<br>");
}
// NOTE(review): "</td><tr>" looks like a typo for "</td></tr>" -- left
// unchanged here; browsers tolerate it but confirm before fixing.
contentBuffer.append("</td><tr>");
}
}
if (found) {
contentBuffer.append("</tbody></table>");
try {
Template t = ve.getTemplate("/templates/report.part.singlePageError.html");
VelocityContext context = new VelocityContext();
context.put("status", style);
context.put("pageName", pageEntry.getKey());
context.put("content", contentBuffer.toString());
StringWriter writer = new StringWriter();
t.merge(context, writer);
res.append(writer.toString());
} catch (Exception e) {
logger.error("error creating a singlePageError." + e.getMessage());
}
pageCount++;
}
}
}
// Report how many pages had errors (feeds the red count in the tab label).
sbCalcount.append(pageCount);
}
/** Prints the tag-filter checkboxes, one per distinct TestNG group found in the methods. */
public void generateGroupsArea(Collection<ITestNGMethod> methods) {
    Set<String> distinctGroups = new HashSet<String>();
    for (ITestNGMethod method : methods) {
        for (String group : method.getGroups()) {
            distinctGroups.add(group);
        }
    }
    m_out.print("Tags :<br/>");
    for (String group : distinctGroups) {
        // Checkbox value must be a safe identifier: spaces and parens become underscores.
        String value = group.replace(' ', '_').replace('(', '_').replace(')', '_');
        m_out.print("<input type=\"checkbox\" value=\"" + value + "\" checked='checked'> " + group + " ");
    }
}
/**
 * Builds the per-context HTML body: failed, skipped and passed panels, with
 * configuration panels included only in environment ("envt") mode.
 * Failed/skipped test panels use the locally-maintained maps (failedTests/skippedTests)
 * rather than the raw context, so retry handling is reflected.
 *
 * @param tc   context whose results are rendered
 * @param envt true to render environment-group methods (and config panels), false for the rest
 * @param suite suite passed through to generatePanel
 * @param ctx  context passed through to generatePanel (used for the panel anchor name)
 * @return the rendered HTML fragment
 */
protected String generateHTML(ITestContext tc, boolean envt, ISuite suite, ITestContext ctx) {
StringBuffer res = new StringBuffer();
try {
VelocityEngine ve = new VelocityEngine();
ve.setProperty("resource.loader", "class");
ve.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
ve.init();
if (envt) {
if (tc.getFailedConfigurations().getAllResults().size() > 0)
generatePanel(ve, tc.getFailedConfigurations(), res, "failed", suite, ctx, envt);
generatePanel(ve, failedTests.get(tc.getName()), res, "failed", suite, ctx, envt);
// NOTE(review): this guard checks FAILED configurations before rendering the
// SKIPPED-configurations panel. Possibly intentional (config skips stem from
// config failures) but it reads like a copy-paste -- confirm before changing.
if (tc.getFailedConfigurations().getAllResults().size() > 0)
generatePanel(ve, tc.getSkippedConfigurations(), res, "skipped", suite, ctx, envt);
generatePanel(ve, skippedTests.get(tc.getName()), res, "skipped", suite, ctx, envt);
generatePanel(ve, tc.getPassedTests(), res, "passed", suite, ctx, envt);
// generatePanel(ve, tc.getPassedConfigurations(), res,
// "passed", suite, ctx, envt);
} else {
generatePanel(ve, failedTests.get(tc.getName()), res, "failed", suite, ctx, envt);
generatePanel(ve, skippedTests.get(tc.getName()), res, "skipped", suite, ctx, envt);
generatePanel(ve, tc.getPassedTests(), res, "passed", suite, ctx, envt);
}
} catch (Exception e) {
logger.error(e.getMessage());
e.printStackTrace();
}
return res.toString();
}
/**
 * Renders one result panel (one per status: "passed", "failed" or "skipped") for a test
 * context into {@code res}, using the Velocity template {@code report.part.singleTest.html}.
 * <p>
 * For each method in {@code map} that matches the requested group filter ({@code envt}),
 * every individual {@link ITestResult} is rendered with: an environment banner, the
 * Reporter log lines, the exception/stack trace (if any) and, for skipped results, the
 * dependency tree explaining the skip.
 *
 * @param ve    initialized Velocity engine used to load the per-test template
 * @param map   result map to render (passed, failed or skipped results)
 * @param res   output buffer the generated HTML is appended to
 * @param style CSS/status name, one of "passed", "failed", "skipped"; also controls
 *              whether the "Test Steps" section is initially collapsed
 * @param suite suite the results belong to (used for dependency resolution)
 * @param ctx   test context of the results
 * @param envt  true to render only methods in the "envt" group, false for all others
 */
protected void generatePanel(VelocityEngine ve, IResultMap map, StringBuffer res, String style, ISuite suite, ITestContext ctx, boolean envt) {
    Collection<ITestNGMethod> methodSet = getMethodSet(map);
    for (ITestNGMethod method : methodSet) {
        // Filter methods by group: envt==true keeps only "envt"-tagged methods,
        // envt==false keeps everything else.
        boolean methodIsValid = true;
        if (envt) {
            methodIsValid = Arrays.asList(method.getGroups()).contains("envt");
        } else {
            methodIsValid = !Arrays.asList(method.getGroups()).contains("envt");
        }
        if (methodIsValid) {
            Collection<ITestResult> resultSet = getResultSet(map, method);
            //System.out.println(method.getMethodName()+":"+resultSet.size());
            // Sanitized context name; reused as fallback panel content.
            String content = ctx.getName().replace(' ', '_').replace('(', '_').replace(')', '_');
            for (ITestResult ans : resultSet) {
                StringBuffer contentBuffer = new StringBuffer();
                // Resolve the XML test name; fall back to the current test of the
                // result's context, skipping the result entirely if that fails.
                String testName="";
                if(ans.getMethod().getXmlTest()!=null)
                    testName = ans.getMethod().getXmlTest().getName();
                else
                {
                    try{
                        testName = ans.getTestContext().getCurrentXmlTest().getName();
                    }catch(Exception ex)
                    {
                        ex.printStackTrace();
                        continue;
                    }catch(Error e)
                    {
                        e.printStackTrace();
                        continue;
                    }
                }
                // Environment banner (app URL + browser) if a per-test context exists.
                Context testLevelContext = ContextManager.getTestLevelContext(testName);
                if (testLevelContext != null ) {
                    /*String pool = testLevelContext.getPool();*/
                    String site = testLevelContext.getSite();
                    String appURL = testLevelContext.getAppURL();
                    String browser = (String)testLevelContext.getAttribute("browser");
                    if (browser != null)
                        browser = browser.replace("*", "");
                    String browserVersion = (String)testLevelContext.getAttribute("browserVersion");
                    if (browserVersion != null)
                        browser = browser + browserVersion;
                    // contentBuffer.append("<div><i>Pool: "+ pool + ", Site: " + site + ", Browser: " + browser + "</i></div>");
                    contentBuffer.append("<div><i>App URL: "+appURL+ ", Browser: " + browser + "</i></div>");
                }
                Object[] parameters = ans.getParameters();
                List<String> msgs = Reporter.getOutput(ans);
                boolean hasReporterOutput = msgs.size() > 0;
                Throwable exception = ans.getThrowable();
                boolean hasThrowable = exception != null;
                if (hasReporterOutput || hasThrowable) {
                    // Collapsible "Test Steps" section; collapsed by default for passed tests.
                    contentBuffer.append("<div class='leftContent' style='float: left; width: 100%;'>");
                    contentBuffer.append("<h4><a href='javascript:void(0);' class='testloglnk'>Test Steps " + (style.equals("passed") ? "[+]" : "[ - ]")
                            + "</a></h4>");
                    contentBuffer.append("<div class='testlog' " + (style.equals("passed") ? "style='display:none'" : "") + ">");
                    contentBuffer.append("<ol>");
                    for (String line : msgs) {
                        DetailedLog logLine = new DetailedLog(line, outputDirectory);
                        String htmllog;
                        if (logLine.getHref() != null) {
                            htmllog = "<a href='" + logLine.getHref() + "' title='" + logLine.getLocation() + "' >" + logLine.getMsg() + "</a>";
                        } else {
                            htmllog = logLine.getMsg();
                        }
                        htmllog = htmllog.replaceAll("@@lt@@", "<").replace("^^gt^^", ">");//fix for testng 6.7
                        contentBuffer.append(htmllog);
                        if(!htmllog.contains("<br>"))contentBuffer.append("<br/>");//handle different in testng6.7
                    }
                    contentBuffer.append("</ol>");
                    // Jerry
                    // Walk the log backwards to find the last screenshot link, which is
                    // passed to the exception report below.
                    String lastLine = "";
                    for (int lastIdx = msgs.size() - 1; lastIdx >= 0; lastIdx--) {
                        lastLine = msgs.get(lastIdx).replaceAll("@@lt@@", "<").replace("^^gt^^", ">"); //fix for testng 6.7
                        if (lastLine.indexOf(">screenshot</a>") != -1) {
                            break;
                        }
                    }
                    if (hasThrowable) {
                        generateExceptionReport(exception, method, contentBuffer, lastLine);
                    }
                    contentBuffer.append("</div></div>"); // end of
                }
                //int rq = 0;
                /* freynaud */
                // Skipped results (status 3) get a dependency tree explaining why.
                String treeId = "tree" + m_treeId;
                m_treeId++;
                if (ans.getStatus() == 3) {
                    contentBuffer.append("<br>This method has been skipped, because of its dependencies :<br>");
                    takeCareOfDirectDependencies(suite, method, 0, ctx, treeId, contentBuffer);
                }
                //rq += 1;
                contentBuffer.append("<div class='clear_both'></div>");
                content = contentBuffer.toString();
                // Merge the per-test template: status, method signature (truncated at
                // 500 chars), javadoc/description and duration.
                try {
                    Template t = ve.getTemplate("/templates/report.part.singleTest.html");
                    VelocityContext context = new VelocityContext();
                    context.put("status", style);
                    String javadoc = getJavadocComments(method);
                    String desc = method.getDescription();
                    String toDisplay = "no javadoc nor description for this method.";
                    if (!"".equals(javadoc) && javadoc != null) {
                        toDisplay = javadoc;
                    } else if (!"".equals(desc) && desc != null) {
                        toDisplay = desc;
                    }
                    String methodSignature = StringHelper.constructMethodSignature(method.getMethod(), parameters);
                    if(methodSignature.length()>500)
                        context.put("methodName", methodSignature.substring(0, 500)+"...");
                    else
                        context.put("methodName", methodSignature);
                    context.put("desc", toDisplay.replaceAll("\r\n\r\n", "\r\n").replaceAll("\n\n", "\n"));
                    context.put("content", content);
                    context.put("time", "Time: " + ((ans.getEndMillis() - ans.getStartMillis()) / 1000) + "sec.");
                    StringWriter writer = new StringWriter();
                    t.merge(context, writer);
                    res.append(writer.toString());
                } catch (Exception e) {
                    logger.error("error creating a singleTest." + e.getMessage());
                    e.printStackTrace();
                }
            }
        }
    }
}
/**
 * TestNG {@code IReporter} entry point: writes the full HTML report (header, suite
 * summary, per-test detail sections) plus its static resources, then optionally opens
 * the report in a browser.
 *
 * @param xml    the XML suites that were run; the first suite's name titles the summary
 * @param suites the executed suites with their results
 * @param outdir TestNG's output directory (unused; the directory is taken from the
 *               global context instead)
 */
public void generateReport(List<XmlSuite> xml, List<ISuite> suites, String outdir) {
    ITestContext testCtx = ContextManager.getGlobalContext().getTestNGContext();
    if(testCtx == null) {
        logger.error("Please check if your class extends from TestPlan!");
        return;
    }
    // The report goes to the parent of the context's output directory.
    File f = new File(ContextManager.getGlobalContext().getOutputDirectory());
    setOutputDirectory(f.getParentFile().getAbsolutePath());
    // FIX: use a platform-independent separator instead of a hard-coded "\\",
    // which produced a broken resources path on non-Windows systems.
    setResources(new File(getOutputDirectory(), "resources").getPath());
    try {
        m_out = createWriter(getOutputDirectory());
        startHtml(testCtx, m_out);
        generateSuiteSummaryReport(suites, xml.get(0).getName());
        generateReportsSection(suites);
        endHtml(m_out);
        m_out.flush();
        m_out.close();
        copyResources();
        logger.info("report generated.");
        //String browserPath = (String) testCtx.getSuite().getParameter(Context.OPEN_REPORT_IN_BROWSER);
        // Optionally launch a browser on the generated report.
        String browserPath = (String)ContextManager.getGlobalContext().getAttribute(Context.OPEN_REPORT_IN_BROWSER);
        if (browserPath != null && browserPath.trim().length() > 0) {
            executeCmd(browserPath , getReportLocation().getAbsolutePath());
        }
    } catch (Exception e) {
        logger.error("output file", e);
        return;
    }
}
/**
 * Writes one test-detail container (tabs with envt/test pass/fail/skip counts and
 * the corresponding HTML panels) to the report stream, using the Velocity template
 * {@code report.part.testDetail.html}.
 *
 * @param name     display name of the test context (also sanitized into a DOM id)
 * @param envtp    passed count for the "envt" group
 * @param envtf    failed count for the "envt" group
 * @param envts    skipped count for the "envt" group
 * @param testp    passed count for non-envt tests
 * @param testf    failed count for non-envt tests
 * @param tests    skipped count for non-envt tests
 * @param envthtml pre-rendered HTML for the envt panel
 * @param testhtml pre-rendered HTML for the test panel
 */
protected void generateReportDetailsContainer(String name, int envtp, int envtf, int envts, int testp, int testf, int tests, String envthtml,
        String testhtml) {
    try {
        VelocityEngine engine = new VelocityEngine();
        engine.setProperty("resource.loader", "class");
        engine.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        engine.init();
        Template template = engine.getTemplate("/templates/report.part.testDetail.html");
        VelocityContext velocityContext = new VelocityContext();
        // The DOM id must not contain spaces or parentheses.
        velocityContext.put("testId", name.toLowerCase().replace(' ', '_').replace('(', '_').replace(')', '_'));
        velocityContext.put("testName", name);
        velocityContext.put("envtp", envtp);
        velocityContext.put("envtf", envtf);
        velocityContext.put("envts", envts);
        velocityContext.put("testp", testp);
        velocityContext.put("testf", testf);
        velocityContext.put("tests", tests);
        velocityContext.put("envthtml", envthtml);
        velocityContext.put("testhtml", testhtml);
        StringWriter rendered = new StringWriter();
        template.merge(velocityContext, rendered);
        m_out.write(rendered.toString());
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
}
/**
 * Extended variant of the test-detail container that additionally feeds call counts
 * and the global-error tabs/panels into {@code report.part.testDetail.html}.
 *
 * @param name             display name of the test context (also sanitized into a DOM id)
 * @param envtp            passed count for the "envt" group
 * @param envtf            failed count for the "envt" group
 * @param envts            skipped count for the "envt" group
 * @param testp            passed count for non-envt tests
 * @param testf            failed count for non-envt tests
 * @param tests            skipped count for non-envt tests
 * @param envthtml         pre-rendered HTML for the envt panel
 * @param testhtml         pre-rendered HTML for the test panel
 * @param calCount         call-count HTML fragment
 * @param globalErrorTabs  pre-rendered tab headers for global errors
 * @param globalErrorHtmls pre-rendered tab bodies for global errors
 */
protected void generateReportDetailsContainer(String name, int envtp, int envtf, int envts, int testp, int testf, int tests, String envthtml,
        String testhtml, StringBuffer calCount, String globalErrorTabs, String globalErrorHtmls) {
    try {
        VelocityEngine engine = new VelocityEngine();
        engine.setProperty("resource.loader", "class");
        engine.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        engine.init();
        Template template = engine.getTemplate("/templates/report.part.testDetail.html");
        VelocityContext velocityContext = new VelocityContext();
        // The DOM id must not contain spaces or parentheses.
        velocityContext.put("testId", name.toLowerCase().replace(' ', '_').replace('(', '_').replace(')', '_'));
        velocityContext.put("testName", name);
        velocityContext.put("envtp", envtp);
        velocityContext.put("envtf", envtf);
        velocityContext.put("envts", envts);
        velocityContext.put("testp", testp);
        velocityContext.put("testf", testf);
        velocityContext.put("tests", tests);
        velocityContext.put("envthtml", envthtml);
        velocityContext.put("testhtml", testhtml);
        velocityContext.put("calcount", calCount.toString());
        velocityContext.put("globalerrortabs", globalErrorTabs);
        velocityContext.put("globalerrorhtmls", globalErrorHtmls);
        StringWriter rendered = new StringWriter();
        template.merge(velocityContext, rendered);
        m_out.write(rendered.toString());
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
}
/**
 * Emits the {@code <div id='reports'>} section: for every test context of every suite,
 * computes envt/test pass, fail and skip counts, renders the envt/test panels and the
 * global-error panels, and writes one detail container per context.
 *
 * @param suites executed suites whose results are rendered
 */
protected void generateReportsSection(List<ISuite> suites) {
    m_out.println("<div id='reports'>");
    for (ISuite suite : suites) {
        Map<String, ISuiteResult> resultsByName = suite.getResults();
        for (ISuiteResult suiteResult : resultsByName.values()) {
            ITestContext testContext = suiteResult.getTestContext();
            String contextName = testContext.getName();
            // Counts for the "envt" group; failed/skipped configurations count as envt too.
            int envtPassed = getNbInstanceForGroup(true, testContext.getPassedTests());
            int envtFailed = getNbInstanceForGroup(true, failedTests.get(contextName));
            int envtSkipped = getNbInstanceForGroup(true, skippedTests.get(contextName));
            envtFailed += getNbInstanceForGroup(true, testContext.getFailedConfigurations());
            envtSkipped += getNbInstanceForGroup(true, testContext.getSkippedConfigurations());
            // Counts for regular (non-envt) tests.
            int testPassed = getNbInstanceForGroup(false, testContext.getPassedTests());
            int testFailed = getNbInstanceForGroup(false, failedTests.get(contextName));
            int testSkipped = getNbInstanceForGroup(false, skippedTests.get(contextName));
            String envtPanel = generateHTML(testContext, true, suite, testContext);
            String testPanel = generateHTML(testContext, false, suite, testContext);
            StringBuffer callCounts = new StringBuffer();
            StringBuffer globalErrorTabs = new StringBuffer();
            StringBuffer globalErrorHtmls = new StringBuffer();
            generateGlobalErrorHTML(testContext, suite, globalErrorTabs, globalErrorHtmls);
            generateReportDetailsContainer(contextName, envtPassed, envtFailed, envtSkipped, testPassed, testFailed, testSkipped,
                    envtPanel, testPanel, callCounts, globalErrorTabs.toString(), globalErrorHtmls.toString());
        }
    }
    m_out.println("</div>");
}
/**
 * Writes the suite summary table: one {@code MiniTestResult} row per test context
 * (method totals, passed/failed/skipped instance counts) plus a grand-total row,
 * rendered through the Velocity template {@code report.part.summary.html}.
 *
 * @param suites    executed suites to aggregate
 * @param suiteName title displayed above the summary
 */
public void generateSuiteSummaryReport(List<ISuite> suites, String suiteName) {
    NumberFormat formatter = new DecimalFormat("#,##0.0");
    // int qty_tests = 0;
    // Aggregated totals across all contexts.
    int qty_method = 0;
    //int qty_pass_m = 0;
    int qty_pass_s = 0;
    int qty_skip = 0;
    int qty_fail = 0;
    // Overall run window: min start / max end over all contexts.
    long time_start = Long.MAX_VALUE;
    long time_end = Long.MIN_VALUE;
    List<MiniTestResult> tests2 = new ArrayList<MiniTestResult>();
    for (ISuite suite : suites) {
        Map<String, ISuiteResult> tests = suite.getResults();
        for (ISuiteResult r : tests.values()) {
            // qty_tests += 1;
            ITestContext overview = r.getTestContext();
            // Row keyed by the sanitized context name.
            MiniTestResult mini = new MiniTestResult(overview.getName().replace(' ', '_').replace('(', '_').replace(')', '_'));
            int q = getMethodSet(overview.getPassedTests()).size();
            //qty_pass_m += q;
            q = overview.getAllTestMethods().length;
            qty_method += q;
            mini.setTotalMethod(q);
            q = overview.getPassedTests().size();
            qty_pass_s += q;
            mini.setInstancesPassed(q);
            // Skipped/failed counts come from this reporter's own maps, which were
            // corrected for retries in onFinish, not from TestNG's raw context.
            q = skippedTests.get(overview.getName()).size();//getMethodSet(skippedTests.get(overview.getName())).size();
            qty_skip += q;
            mini.setInstancesSkipped(q);
            // When retry handling ran, failed configurations were already folded in.
            if(isRetryHandleNeeded.get(overview.getName()))
                q = failedTests.get(overview.getName()).size() ;
            else
                q = failedTests.get(overview.getName()).size() + getNbInstanceForGroup(true, overview.getFailedConfigurations());
            qty_fail += q;
            mini.setInstancesFailed(q);
            time_start = Math.min(overview.getStartDate().getTime(), time_start);
            time_end = Math.max(overview.getEndDate().getTime(), time_end);
            tests2.add(mini);
        }
    }
    // Grand-total row.
    MiniTestResult total = new MiniTestResult("total");
    total.setTotalMethod(qty_method);
    total.setInstancesPassed(qty_pass_s);
    total.setInstancesFailed(qty_fail);
    total.setInstancesSkipped(qty_skip);
    try {
        // BufferedWriter out = new BufferedWriter(new FileWriter(path));
        /* first, get and initialize an engine */
        VelocityEngine ve = new VelocityEngine();
        ve.setProperty("resource.loader", "class");
        ve.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        ve.init();
        Template t = ve.getTemplate("/templates/report.part.summary.html");
        VelocityContext context = new VelocityContext();
        context.put("suiteName", suiteName);
        context.put("totalRunTime", formatter.format((time_end - time_start) / 1000.) + " sec");
        context.put("tests", tests2);
        context.put("total", total);
        StringWriter writer = new StringWriter();
        t.merge(context, writer);
        m_out.write(writer.toString());
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
}
/**
 * Appends a collapsible stack-trace block for {@code exception} to {@code contentBuffer},
 * recursing into the cause chain via {@code generateExceptionReport}.
 *
 * @param exception     throwable to render
 * @param method        test method the exception came from (forwarded to the cause report)
 * @param title         headline text, HTML-escaped before display
 * @param contentBuffer output buffer the HTML is appended to
 * @param lastline      NOTE(review): currently unused by this method; the commented-out
 *                      screenshot table below was its only consumer
 */
protected void generateTheStackTrace(Throwable exception, ITestNGMethod method, String title, StringBuffer contentBuffer, String lastline) {
    //contentBuffer.append("<div><table><tr bgcolor=\"yellow\"><td>Last Step Screencapture " + lastline + "<td></tr></table></div>");// Jerry
    contentBuffer.append(" <div class='stContainer' >" + exception.getClass() + ":" + escape(title)
            + "(<a href='javascript:void(0);' class='exceptionlnk'>stacktrace</a>)");
    // Stack trace is hidden until the "stacktrace" link toggles it.
    contentBuffer.append("<div class='exception' style='display:none'>");
    StackTraceElement[] s1 = exception.getStackTrace();
    Throwable t2 = exception.getCause();
    // Guard against self-referential cause chains to avoid infinite recursion.
    if (t2 == exception) {
        t2 = null;
    }
    for (int x = 0; x < s1.length; x++) {
        contentBuffer.append((x > 0 ? "<br/>at " : "") + escape(s1[x].toString()));
    }
    if (t2 != null) {
        generateExceptionReport(t2, method, "Caused by " + t2.getLocalizedMessage(), contentBuffer, "");// jerry
    }
    contentBuffer.append("</div></div>");
}
/**
 * Collects every method of the suite across all groups, de-duplicated while keeping
 * first-seen order.
 *
 * @param suite suite whose grouped methods are flattened
 * @return de-duplicated methods from all groups, in encounter order
 */
protected Collection<ITestNGMethod> getAllMethods(ISuite suite) {
    Set<ITestNGMethod> all = new LinkedHashSet<ITestNGMethod>();
    Map<String, Collection<ITestNGMethod>> methods = suite.getMethodsByGroups();
    for (Entry<String, Collection<ITestNGMethod>> group : methods.entrySet()) {
        // FIX: use the entry's value directly instead of the redundant
        // methods.get(group.getKey()) lookup.
        all.addAll(group.getValue());
    }
    return all;
}
/**
 * Returns the array depth of a class: 0 for a non-array type, 1 for {@code T[]},
 * 2 for {@code T[][]}, and so on.
 *
 * @param cls class to inspect
 * @return number of array dimensions
 */
protected int getDim(Class<?> cls) {
    int depth = 0;
    for (Class<?> current = cls; current.isArray(); current = current.getComponentType()) {
        depth++;
    }
    return depth;
}
/**
 * Counts how many methods in the result map belong to the "envt" group
 * (case-insensitive). Each method is counted at most once.
 *
 * @param map result map to scan
 * @return number of envt-tagged methods
 */
public int getEnvConfigTestsCount(IResultMap map) {
    int count = 0;
    for (ITestNGMethod tm : map.getAllMethods()) {
        String[] groups = tm.getGroups();
        if (groups == null) {
            continue;
        }
        for (String group : groups) {
            if ("envt".equalsIgnoreCase(group)) {
                count++;
                break;
            }
        }
    }
    return count;
}
/**
 * Returns the first recorded result for a method, checked in priority order:
 * failed, then passed, then skipped. Returns null when the method has no result
 * in any of the three maps.
 *
 * @param ctx    context the method ran in
 * @param method method to look up
 * @return first matching result, or null if none
 */
protected ITestResult getFailedOrSkippedResult(ITestContext ctx, ITestNGMethod method) {
    List<ITestResult> candidates = new LinkedList<ITestResult>();
    // 1. failures take precedence
    candidates.addAll(failedTests.get(ctx.getName()).getResults(method));
    if (!candidates.isEmpty()) {
        return candidates.get(0);
    }
    // 2. then passes
    candidates.addAll(ctx.getPassedTests().getResults(method));
    if (!candidates.isEmpty()) {
        return candidates.get(0);
    }
    // 3. finally skips
    candidates.addAll(skippedTests.get(ctx.getName()).getResults(method));
    if (!candidates.isEmpty()) {
        return candidates.get(0);
    }
    return null;
}
/**
 * Lazily creates and caches the qdox {@code JavaDocBuilder}, registering the source
 * tree of the given class's package (from the classpath) plus the conventional
 * {@code src/main/java} folder of the current project when it exists.
 *
 * @param clz class whose package locates the sources to index
 * @return the shared builder instance
 * @throws URISyntaxException if the classpath resource URL cannot be converted to a URI
 */
@SuppressWarnings("rawtypes")
protected JavaDocBuilder getJavaDocBuilder(Class clz) throws URISyntaxException {
    String projectPath = new File("").getAbsolutePath();
    String packagePath = clz.getPackage().getName().replaceAll("\\.", "/");
    if (builder == null) {
        builder = new JavaDocBuilder();
        URL resource = Thread.currentThread().getContextClassLoader().getResource(packagePath);
        // FIX: getResource returns null when the package is not on the classpath;
        // previously this caused an NPE on resource.toURI(). Now the classpath
        // source tree is simply skipped.
        if (resource != null) {
            File src = new File(resource.toURI());
            builder.addSourceTree(src);
        }
        // project source folder
        File realFolder = new File(projectPath + "/src/main/java/" + packagePath);
        if (realFolder.exists())
            builder.addSourceTree(realFolder);
    }
    return builder;
}
/**
 * Looks up the javadoc comment of a test method via qdox, matching the method by
 * name and exact parameter signature (including array dimensions).
 *
 * @param method test method to document
 * @return the javadoc comment, or null when sources are unavailable, the method
 *         cannot be resolved, or any error occurs during the lookup
 */
protected String getJavadocComments(ITestNGMethod method) {
    try {
        Method m = method.getMethod();
        String javaClass = m.getDeclaringClass().getName();
        String javaMethod = m.getName();
        JavaClass jc = getJavaDocBuilder(m.getDeclaringClass()).getClassByName(javaClass);
        // Translate the reflective parameter types into qdox Type objects,
        // preserving array dimensions so overloads resolve correctly.
        Class<?>[] types = method.getMethod().getParameterTypes();
        Type[] qdoxTypes = new Type[types.length];
        for (int i = 0; i < types.length; i++) {
            String type = getType(types[i]);
            int dim = getDim(types[i]);
            qdoxTypes[i] = new Type(type, dim);
        }
        JavaMethod jm = jc.getMethodBySignature(javaMethod, qdoxTypes);
        return jm.getComment();
    } catch (Throwable e) {
        // FIX: pass the throwable as the second logger argument so the stack
        // trace is preserved, instead of concatenating it into the message.
        logger.error("error loading the javadoc comments for : " + method.getMethodName(), e);
        return null;
    }
}
/**
 * Returns the distinct methods of a result map, ordered by the project's
 * {@code TestMethodSorter} comparator (which also de-duplicates equal methods).
 *
 * @param tests result map to extract methods from
 * @return sorted, de-duplicated methods
 */
protected Collection<ITestNGMethod> getMethodSet(IResultMap tests) {
    TreeSet<ITestNGMethod> sorted = new TreeSet<ITestNGMethod>(new TestMethodSorter<ITestNGMethod>());
    for (ITestNGMethod candidate : tests.getAllMethods()) {
        sorted.add(candidate);
    }
    return sorted;
}
/**
 * Counts result instances in the map, restricted to one side of the "envt" split:
 * with {@code envt == true} only results whose method is in the "envt" group are
 * counted, with {@code envt == false} only results whose method is not.
 *
 * @param envt  true to count envt results, false to count the rest
 * @param tests result map to scan
 * @return number of matching result instances
 */
protected int getNbInstanceForGroup(boolean envt, IResultMap tests) {
    int matching = 0;
    for (ITestResult result : tests.getAllResults()) {
        boolean isEnvtResult = Arrays.asList(result.getMethod().getGroups()).contains("envt");
        // Count the result only when its envt-ness matches the requested side.
        if (isEnvtResult == envt) {
            matching++;
        }
    }
    return matching;
}
/** @return the directory the HTML report is written to */
public String getOutputDirectory() {
    return outputDirectory;
}
/** @return the generated report file */
public File getReportLocation() {
    return report;
}
/** @return the path of the report's static resources directory */
public String getResources() {
    return resources;
}
/**
 * Returns every result in the map whose method name equals that of {@code method},
 * ordered by the project's {@code TestResultSorter} comparator.
 *
 * @param tests  result map to scan
 * @param method method whose name selects the results
 * @return sorted results matching the method name
 */
protected Collection<ITestResult> getResultSet(IResultMap tests, ITestNGMethod method) {
    Set<ITestResult> matching = new TreeSet<ITestResult>(new TestResultSorter<ITestResult>());
    for (ITestResult candidate : tests.getAllResults()) {
        // Match on the method name only, so all retry instances are included.
        if (candidate.getMethod().getMethodName().equals(method.getMethodName())) {
            matching.add(candidate);
        }
    }
    return matching;
}
/**
 * Resolves a method reference string (as reported by TestNG dependencies) to the
 * matching {@code ITestNGMethod} of the context.
 *
 * @param ctx    context whose methods are searched
 * @param method fully qualified method reference, e.g. {@code pkg.Class.method}
 * @return the first context method whose string form starts with {@code Class.method}
 * @throws RuntimeException when no method matches
 */
protected ITestNGMethod getTestNGMethod(ITestContext ctx, String method) {
    //jliang
    //TestNG 6.3. Skip method does not start with package name. So strip off the package name before comparing.
    int index = method.substring(0, method.lastIndexOf(".")).lastIndexOf(".");
    String localMethod = method.substring(index+1);
    // FIX: iterate the array directly; the previous copy into a HashSet was
    // redundant and made the match order nondeterministic when several methods
    // share a prefix.
    for (ITestNGMethod m : ctx.getAllTestMethods()) {
        if (m.toString().startsWith(localMethod)) {
            return m;
        }
    }
    throw new RuntimeException("method " + method + " not found. " + "Should not happen. Suite " + ctx.getName());
}
/**
 * Returns the element type name of a class, unwrapping any array dimensions
 * (e.g. {@code String[][]} yields {@code java.lang.String}).
 *
 * @param cls class to inspect
 * @return fully qualified name of the non-array element type
 */
protected String getType(Class<?> cls) {
    Class<?> element = cls;
    while (element.isArray()) {
        element = element.getComponentType();
    }
    return element.getName();
}
/**
 * @param method method to inspect
 * @return true when the method depends on at least one group or method
 */
protected boolean hasDependencies(ITestNGMethod method) {
    int dependencyCount = method.getGroupsDependedUpon().length + method.getMethodsDependedUpon().length;
    return dependencyCount > 0;
}
/**
 * Lazily initializes the {@code methodsByGroup} cache.
 *
 * @return the freshly created (empty) map, never null
 */
protected Map<String, ITestResult> initMethodsByGroup() {
    methodsByGroup = new HashMap<String, ITestResult>();
    // FIX: return the new map instead of null. The caller assigns the return
    // value back to the methodsByGroup field, so returning null immediately
    // wiped out the map created on the previous line.
    return methodsByGroup;
}
/**
 * TestNG listener callback invoked when a test context finishes. When retry
 * handling was triggered for this context (see {@code onTestFailure}), the
 * reporter's failed/skipped maps are reconciled: retried-then-passed methods are
 * removed from the skipped set and retry-corrected failures are removed from
 * TestNG's own failed map. Otherwise TestNG's raw failed/skipped maps are stored
 * as-is.
 * <p>
 * NOTE(review): a large block of commented-out CAL-event-collection code that was
 * dead for some time has been removed here; it never executed.
 *
 * @param arg0 the finished test context
 */
public void onFinish(final ITestContext arg0) {
    if(isRetryHandleNeeded.get(arg0.getName()))
    {
        removeIncorrectlySkippedTests(arg0, failedTests.get(arg0.getName()));
        removeFailedTestsInTestNG(arg0);
    }else
    {
        failedTests.put(arg0.getName(), arg0.getFailedTests());
        skippedTests.put(arg0.getName(), arg0.getSkippedTests());
    }
}
/**
 * TestNG listener callback invoked when a test context starts: initializes this
 * reporter's per-context bookkeeping (retry flag off, empty failed/skipped maps).
 *
 * @param arg0 the starting test context
 */
public void onStart(ITestContext arg0) {
    String contextName = arg0.getName();
    isRetryHandleNeeded.put(contextName, false);
    failedTests.put(contextName, new ResultMap());
    skippedTests.put(contextName, new ResultMap());
}
/** Intentionally a no-op: this reporter does not track partial-success results. */
public void onTestFailedButWithinSuccessPercentage(ITestResult arg0) {
    // if(Context.isCalCollectionEnabled()){
    // soaReporter.onTestFailedButWithinSuccessPercentage(arg0);
    // }
}
/**
 * TestNG listener callback for a failed test. When the method has a retry
 * analyzer: if retries remain the failure is downgraded to SKIP so TestNG will
 * re-run it; otherwise the result is recorded as a definitive failure in this
 * reporter's map. Either way the context is flagged so {@code onFinish} performs
 * retry reconciliation. Synchronized because TestNG may invoke listeners from
 * multiple threads.
 *
 * @param arg0 the failed test result
 */
public synchronized void onTestFailure(ITestResult arg0) {
    if (arg0.getMethod().getRetryAnalyzer() != null) {
        TestRetryAnalyzer testRetryAnalyzer = (TestRetryAnalyzer) arg0.getMethod().getRetryAnalyzer();
        if (testRetryAnalyzer.getCount() <= testRetryAnalyzer.getMaxCount()) {
            // Retries remain: mark as skipped so the retry run replaces it.
            arg0.setStatus(ITestResult.SKIP);
            Reporter.setCurrentTestResult(null);
        }
        else {
            // Retries exhausted: record the definitive failure.
            IResultMap rMap = failedTests.get(arg0.getTestContext().getName());
            rMap.addResult(arg0, arg0.getMethod());
            failedTests.put(arg0.getTestContext().getName(), rMap);
        }
        // FIX: use the class logger instead of System.out.println so the message
        // lands in the configured log output.
        logger.info(arg0.getMethod()+" Failed in "+testRetryAnalyzer.getCount()+" times");
        isRetryHandleNeeded.put(arg0.getTestContext().getName(), true);
    }
}
/** Intentionally a no-op: skipped results are reconciled later in onFinish. */
public void onTestSkipped(ITestResult arg0) {
    // if(Context.isCalCollectionEnabled()){
    // soaReporter.onTestSkipped(arg0);
    // }
}
/** Intentionally a no-op: no per-test-start bookkeeping is required. */
public void onTestStart(ITestResult arg0) {
    // if(Context.isCalCollectionEnabled()){
    // soaReporter.onTestStart(arg0);
    // }
}
/** Intentionally a no-op: passed results are read from TestNG's context directly. */
public void onTestSuccess(ITestResult arg0) {
    // if(Context.isCalCollectionEnabled()){
    // soaReporter.onTestSuccess(arg0);
    // }
    /* if (arg0.getMethod().getRetryAnalyzer() != null) {
        TestRetryAnalyzer testRetryAnalyzer = (TestRetryAnalyzer) arg0.getMethod().getRetryAnalyzer();
        System.out.println(arg0.getMethod()+" Passed in "+testRetryAnalyzer.getCount()+" times");
        isRetryHandleNeeded = true;
    }*/
}
/**
 * Remove failed test cases in TestNG's own failed-result map that this reporter
 * does not consider definitive failures (i.e. failures that were later retried).
 * A TestNG result is kept only when a result with the same method AND the same
 * end timestamp exists in this reporter's failed map; the timestamp comparison
 * distinguishes individual retry attempts of the same method.
 *
 * @param tc the finished test context whose failed map is pruned in place
 */
private void removeFailedTestsInTestNG(ITestContext tc)
{
    IResultMap returnValue = tc.getFailedTests();
    // First pass: collect the results to drop (cannot mutate while iterating).
    ResultMap removeMap = new ResultMap();
    for(ITestResult result : returnValue.getAllResults())
    {
        boolean isFailed = false;
        for(ITestResult resultToCheck : failedTests.get(tc.getName()).getAllResults())
        {
            // Same method + same end time identifies the exact retry attempt.
            if(result.getMethod().equals(resultToCheck.getMethod()) && result.getEndMillis()==resultToCheck.getEndMillis())
            {
                //logger.info("Keep failed cases:"+result.getMethod().getMethodName());
                isFailed = true;
                break;
            }
        }
        if(!isFailed)
        {
            //logger.info("Removed failed cases:"+result.getMethod().getMethodName());
            System.out.println("Removed failed cases:"+result.getMethod().getMethodName());
            //test.getFailedTests().getAllResults().remove(result);
            removeMap.addResult(result, result.getMethod());
            //test.getFailedTests().removeResult(result.getMethod());
        }
    }
    // Second pass: locate each result to drop in TestNG's live map and remove it.
    for(ITestResult result : removeMap.getAllResults())
    {
        ITestResult removeResult = null;
        for(ITestResult resultToCheck : returnValue.getAllResults())
        {
            if(result.getMethod().equals(resultToCheck.getMethod()) && result.getEndMillis()==resultToCheck.getEndMillis())
            {
                removeResult = resultToCheck;
                break;
            }
        }
        if(removeResult!=null) returnValue.getAllResults().remove(removeResult);
    }
}
/**
 * Remove retrying failed test cases from skipped test cases: a method that shows
 * up as skipped but also has a definitive failure (in {@code map}) or a pass (in
 * the context) was only "skipped" because the retry analyzer downgraded an
 * intermediate failure, so it must not be reported as skipped. Also refreshes
 * this reporter's skipped map for the context.
 *
 * @param tc  the finished test context whose skipped map is pruned in place
 * @param map this reporter's definitive failed results for the context
 */
private void removeIncorrectlySkippedTests(ITestContext tc, IResultMap map)
{
    List<ITestNGMethod> failsToRemove = new ArrayList<ITestNGMethod>();
    IResultMap returnValue = tc.getSkippedTests();
    for(ITestResult result : returnValue.getAllResults())
    {
        // Skipped method that definitively failed -> not really skipped.
        for(ITestResult resultToCheck : map.getAllResults())
        {
            if(resultToCheck.getMethod().equals(result.getMethod()))
            {
                failsToRemove.add(resultToCheck.getMethod());
                break;
            }
        }
        // Skipped method that eventually passed (retry succeeded) -> not skipped.
        for(ITestResult resultToCheck : tc.getPassedTests().getAllResults())
        {
            if(resultToCheck.getMethod().equals(result.getMethod()))
            {
                failsToRemove.add(resultToCheck.getMethod());
                break;
            }
        }
    }
    // Prune after iteration to avoid concurrent modification.
    for(ITestNGMethod method : failsToRemove)
    {
        returnValue.removeResult(method);
    }
    // Store the cleaned skipped map for this context.
    skippedTests.put(tc.getName(), tc.getSkippedTests());
}
/** @param outtimestamped directory (typically timestamped) the report is written to */
public void setOutputDirectory(String outtimestamped) {
    this.outputDirectory = outtimestamped;
}
/** @param uuid unique identifier for this report run */
public void setReportId(String uuid) {
    this.uuid = uuid;
}
/** @param resources path of the report's static resources directory */
public void setResources(String resources) {
    this.resources = resources;
}
/**
 * Starts the HTML stream: renders the report header (user, date, grid hub, run
 * mode, group tabs) through the Velocity template {@code report.part.header.html}
 * and writes it to {@code out}.
 *
 * @param ctx test context (currently unused by the template population below)
 * @param out writer the header HTML is written to
 */
protected void startHtml(ITestContext ctx, PrintWriter out) {
    try {
        // BufferedWriter out = new BufferedWriter(new FileWriter(path));
        /* first, get and initialize an engine */
        VelocityEngine ve = new VelocityEngine();
        ve.setProperty("resource.loader", "class");
        ve.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        ve.init();
        Template t = ve.getTemplate("/templates/report.part.header.html");
        VelocityContext context = new VelocityContext();
        String userName = System.getProperty("user.name");
        context.put("userName", userName);
        context.put("currentDate", new Date().toString());
        // context.put("runId", (String)
        // ContextManager.getGlobalContext().getAttribute(Context.RUN_ID));
        // context.put("pool", poolInfo);
        // context.put("pool", (String) ContextManager.getGlobalContext().getPool());
        // context.put("apipool", (String)
        // ContextManager.getGlobalContext().getAttribute(Context.API_POOL));
        // context.put("buildTag", build);
        String mode = ContextManager.getGlobalContext().getWebRunMode();
        String hubUrl = ContextManager.getGlobalContext().getWebDriverGrid();
        //context.put("gridHub", "<a href='" + hubUrl + "' target=hub>" + (null == hubUrl? null : new URL(hubUrl).getHost()) + "</a>");
        context.put("gridHub", "<a href='" + hubUrl + "' target=hub>" + hubUrl + "</a>");
        context.put("mode",mode);
        // Group-filter tabs: the fixed built-in groups plus one per page listener plugin.
        StringBuffer sbGroups = new StringBuffer();
        sbGroups.append("envt,test,cal");
        List<AbstractPageListener> pageListenerList = PluginsUtil.getInstance().getPageListeners();
        if (pageListenerList != null && !pageListenerList.isEmpty()) {
            for (AbstractPageListener abstractPageListener : pageListenerList) {
                sbGroups.append(",").append(abstractPageListener.getClass().getSimpleName());
            }
        }
        context.put("groups", sbGroups.toString());
        StringWriter writer = new StringWriter();
        t.merge(context, writer);
        out.write(writer.toString());
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
}
/**
 * Recursively renders the dependency tree of a (skipped) method as a nested HTML
 * list ("mktree"): direct method dependencies first, then every method of each
 * group dependency, each node decorated with a status icon. At recursion depth 0
 * the surrounding expand/collapse controls and the root {@code <ul>} are emitted.
 *
 * @param suite  suite used to resolve group membership
 * @param method method whose dependencies are rendered
 * @param indent current recursion depth (0 for the root call)
 * @param ctx    context used to resolve method results/status
 * @param treeId DOM id of the tree, for the expand/collapse controls
 * @param res    output buffer the HTML is appended to
 */
protected void takeCareOfDirectDependencies(ISuite suite, ITestNGMethod method, int indent, ITestContext ctx, String treeId, StringBuffer res) {
    // Root call: emit the tree controls and the opening list element.
    if (indent == 0) {
        res.append("<a href=\"#\" onclick=\"expandTree('" + treeId + "'); return false;\">Expand All</a> ");
        res.append("<a href=\"#\" onclick=\"collapseTree('" + treeId + "'); return false;\">Collapse All</a>");
        res.append("<ul class=\"mktree\" id=\"" + treeId + "\">");
    }
    // Direct method dependencies.
    String[] methStr = method.getMethodsDependedUpon();
    if (methStr.length != 0) {
        // Set<ITestNGMethod> methSet = new LinkedHashSet<ITestNGMethod>();
        for (int i = 0; i < methStr.length; i++) {
            ITestNGMethod m = getTestNGMethod(ctx, methStr[i]);
            String intendstr = "";
            for (int j = 0; j < indent; j++) {
                intendstr += "\t";
            }
            // Status icon: image name encodes the result status code.
            String img = "<img src=\"";
            img += m_root + "/test" + getFailedOrSkippedResult(ctx, m).getStatus() + ".gif";
            img += "\"/>";
            res.append(intendstr + "<li>" + img + m);
            // Recurse into the dependency's own dependencies.
            if (hasDependencies(m)) {
                res.append(intendstr + "<ul>");
                takeCareOfDirectDependencies(suite, m, indent + 1, ctx, treeId, res);
                res.append(intendstr + "</ul>");
            }
            res.append("</li>");
        }
    }
    // Group dependencies: render every method belonging to each depended-upon group.
    for (int i = 0; i < method.getGroupsDependedUpon().length; i++) {
        if (methodsByGroup == null) {
            // Collection<ITestNGMethod> all = suite.getInvokedMethods();
            // NOTE(review): initMethodsByGroup() historically returned null, so this
            // guard re-triggers on every iteration; the cache is never actually used.
            methodsByGroup = initMethodsByGroup();
            // System.out.println(all);
        }
        String dependentGroup = method.getGroupsDependedUpon()[i];
        Set<ITestNGMethod> methods = new LinkedHashSet<ITestNGMethod>();
        Collection<ITestNGMethod> c = suite.getMethodsByGroups().get(dependentGroup);
        if (c != null)
            methods.addAll(c);
        res.append("<li><u>Group " + dependentGroup + "</u>");
        res.append("<ul>");
        for (ITestNGMethod m : methods) {
            String intendstr = "";
            for (int j = 0; j < indent; j++) {
                intendstr += "\t";
            }
            String img = "<img src=\"";
            img += m_root + "/test" + getFailedOrSkippedResult(ctx, m).getStatus() + ".gif";
            img += "\"/>";
            res.append(intendstr + "<li>" + img + m);
            if (hasDependencies(m)) {
                res.append(intendstr + "<ul>");
                takeCareOfDirectDependencies(suite, m, indent + 1, ctx, treeId, res);
                res.append(intendstr + "</ul>");
            }
            res.append("</li>");
        }
        res.append("</ul>");
        res.append("</li>");
    }
    // Root call: close the tree.
    if (indent == 0) {
        res.append("</ul>");
    }
}
} | update file name
| src/main/java/com/seleniumtests/reporter/HTMLReporter.java | update file name |
|
Java | apache-2.0 | 8a1245d433b03900f48167abc5055aa3a0eca637 | 0 | DaanHoogland/cloudstack,argv0/cloudstack,cinderella/incubator-cloudstack,argv0/cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,argv0/cloudstack,resmo/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,cinderella/incubator-cloudstack,argv0/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,DaanHoogland/cloudstack,cinderella/incubator-cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,wido/cloudstack,wido/cloudstack,resmo/cloudstack,jcshen007/cloudstack,resmo/cloudstack,argv0/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,argv0/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,cinderella/incubator-cloudstack,jcshen007/cloudstack,wido/cloudstack,cinderella/incubator-cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,resmo/cloudstack | /**
* Copyright (C) 2010 Cloud.com, Inc. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.hypervisor.xen.resource;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import javax.ejb.Local;
import javax.naming.ConfigurationException;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.log4j.Logger;
import org.apache.xmlrpc.XmlRpcException;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import com.cloud.agent.IAgentControl;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.AttachIsoCommand;
import com.cloud.agent.api.AttachVolumeAnswer;
import com.cloud.agent.api.AttachVolumeCommand;
import com.cloud.agent.api.BackupSnapshotAnswer;
import com.cloud.agent.api.BackupSnapshotCommand;
import com.cloud.agent.api.CheckHealthAnswer;
import com.cloud.agent.api.CheckHealthCommand;
import com.cloud.agent.api.CheckOnHostAnswer;
import com.cloud.agent.api.CheckOnHostCommand;
import com.cloud.agent.api.CheckVirtualMachineAnswer;
import com.cloud.agent.api.CheckVirtualMachineCommand;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.CreatePrivateTemplateFromSnapshotCommand;
import com.cloud.agent.api.CreatePrivateTemplateFromVolumeCommand;
import com.cloud.agent.api.CreateVolumeFromSnapshotAnswer;
import com.cloud.agent.api.CreateVolumeFromSnapshotCommand;
import com.cloud.agent.api.DeleteSnapshotBackupAnswer;
import com.cloud.agent.api.DeleteSnapshotBackupCommand;
import com.cloud.agent.api.DeleteSnapshotsDirCommand;
import com.cloud.agent.api.DeleteStoragePoolCommand;
import com.cloud.agent.api.GetHostStatsAnswer;
import com.cloud.agent.api.GetHostStatsCommand;
import com.cloud.agent.api.GetStorageStatsAnswer;
import com.cloud.agent.api.GetStorageStatsCommand;
import com.cloud.agent.api.GetVmStatsAnswer;
import com.cloud.agent.api.GetVmStatsCommand;
import com.cloud.agent.api.GetVncPortAnswer;
import com.cloud.agent.api.GetVncPortCommand;
import com.cloud.agent.api.HostStatsEntry;
import com.cloud.agent.api.MaintainAnswer;
import com.cloud.agent.api.MaintainCommand;
import com.cloud.agent.api.ManageSnapshotAnswer;
import com.cloud.agent.api.ManageSnapshotCommand;
import com.cloud.agent.api.MigrateAnswer;
import com.cloud.agent.api.MigrateCommand;
import com.cloud.agent.api.ModifySshKeysCommand;
import com.cloud.agent.api.ModifyStoragePoolAnswer;
import com.cloud.agent.api.ModifyStoragePoolCommand;
import com.cloud.agent.api.PingCommand;
import com.cloud.agent.api.PingRoutingCommand;
import com.cloud.agent.api.PingRoutingWithNwGroupsCommand;
import com.cloud.agent.api.PingTestCommand;
import com.cloud.agent.api.PoolEjectCommand;
import com.cloud.agent.api.PrepareForMigrationAnswer;
import com.cloud.agent.api.PrepareForMigrationCommand;
import com.cloud.agent.api.ReadyAnswer;
import com.cloud.agent.api.ReadyCommand;
import com.cloud.agent.api.RebootAnswer;
import com.cloud.agent.api.RebootCommand;
import com.cloud.agent.api.RebootRouterCommand;
import com.cloud.agent.api.SetupAnswer;
import com.cloud.agent.api.SetupCommand;
import com.cloud.agent.api.Start2Answer;
import com.cloud.agent.api.Start2Command;
import com.cloud.agent.api.StartAnswer;
import com.cloud.agent.api.StartCommand;
import com.cloud.agent.api.StartConsoleProxyAnswer;
import com.cloud.agent.api.StartConsoleProxyCommand;
import com.cloud.agent.api.StartRouterAnswer;
import com.cloud.agent.api.StartRouterCommand;
import com.cloud.agent.api.StartSecStorageVmAnswer;
import com.cloud.agent.api.StartSecStorageVmCommand;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupRoutingCommand;
import com.cloud.agent.api.StartupStorageCommand;
import com.cloud.agent.api.StopAnswer;
import com.cloud.agent.api.StopCommand;
import com.cloud.agent.api.StoragePoolInfo;
import com.cloud.agent.api.VmStatsEntry;
import com.cloud.agent.api.proxy.CheckConsoleProxyLoadCommand;
import com.cloud.agent.api.proxy.ConsoleProxyLoadAnswer;
import com.cloud.agent.api.proxy.WatchConsoleProxyLoadCommand;
import com.cloud.agent.api.routing.DhcpEntryCommand;
import com.cloud.agent.api.routing.IPAssocCommand;
import com.cloud.agent.api.routing.LoadBalancerCfgCommand;
import com.cloud.agent.api.routing.SavePasswordCommand;
import com.cloud.agent.api.routing.SetFirewallRuleCommand;
import com.cloud.agent.api.routing.RemoteAccessVpnCfgCommand;
import com.cloud.agent.api.routing.VmDataCommand;
import com.cloud.agent.api.storage.CopyVolumeAnswer;
import com.cloud.agent.api.storage.CopyVolumeCommand;
import com.cloud.agent.api.storage.CreateAnswer;
import com.cloud.agent.api.storage.CreateCommand;
import com.cloud.agent.api.storage.CreatePrivateTemplateAnswer;
import com.cloud.agent.api.storage.DestroyCommand;
import com.cloud.agent.api.storage.DownloadAnswer;
import com.cloud.agent.api.storage.PrimaryStorageDownloadCommand;
import com.cloud.agent.api.storage.ShareAnswer;
import com.cloud.agent.api.storage.ShareCommand;
import com.cloud.agent.api.to.NicTO;
import com.cloud.agent.api.to.StorageFilerTO;
import com.cloud.agent.api.to.VirtualMachineTO;
import com.cloud.agent.api.to.VirtualMachineTO.Monitor;
import com.cloud.agent.api.to.VirtualMachineTO.SshMonitor;
import com.cloud.agent.api.to.VolumeTO;
import com.cloud.exception.InternalErrorException;
import com.cloud.host.Host.Type;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.network.Network.BroadcastDomainType;
import com.cloud.network.Network.TrafficType;
import com.cloud.resource.ServerResource;
import com.cloud.storage.Storage;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.storage.StorageLayer;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.Volume.VolumeType;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.resource.StoragePoolResource;
import com.cloud.storage.template.TemplateInfo;
import com.cloud.template.VirtualMachineTemplate.BootloaderType;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.Ternary;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.script.Script;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.DiskProfile;
import com.cloud.vm.DomainRouter;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.State;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachineName;
import com.trilead.ssh2.SCPClient;
import com.xensource.xenapi.APIVersion;
import com.xensource.xenapi.Bond;
import com.xensource.xenapi.Connection;
import com.xensource.xenapi.Console;
import com.xensource.xenapi.Host;
import com.xensource.xenapi.HostCpu;
import com.xensource.xenapi.HostMetrics;
import com.xensource.xenapi.Network;
import com.xensource.xenapi.PBD;
import com.xensource.xenapi.PIF;
import com.xensource.xenapi.Pool;
import com.xensource.xenapi.SR;
import com.xensource.xenapi.Session;
import com.xensource.xenapi.Types;
import com.xensource.xenapi.Types.BadServerResponse;
import com.xensource.xenapi.Types.IpConfigurationMode;
import com.xensource.xenapi.Types.VmPowerState;
import com.xensource.xenapi.Types.XenAPIException;
import com.xensource.xenapi.VBD;
import com.xensource.xenapi.VDI;
import com.xensource.xenapi.VIF;
import com.xensource.xenapi.VLAN;
import com.xensource.xenapi.VM;
import com.xensource.xenapi.VMGuestMetrics;
import com.xensource.xenapi.XenAPIObject;
/**
* Encapsulates the interface to the XenServer API.
*
*/
@Local(value = ServerResource.class)
public abstract class CitrixResourceBase implements StoragePoolResource, ServerResource {
// Class-wide logger for this resource.
private static final Logger s_logger = Logger.getLogger(CitrixResourceBase.class);
// Shared pool of XenServer connections (one per host/pool).
protected static final XenServerConnectionPool _connPool = XenServerConnectionPool.getInstance();
// Bytes per megabyte, used in size conversions.
protected static final int MB = 1024 * 1024;
protected String _name;
// Credentials used to log in to the XenServer host.
protected String _username;
protected String _password;
// Retry count and sleep interval for XenAPI operations
// (sleep presumably in milliseconds -- TODO confirm against callers).
protected final int _retry = 24;
protected final int _sleep = 10000;
// Data center (zone), pod and cluster this host belongs to.
protected long _dcId;
protected String _pod;
protected String _cluster;
// Cache of VM name -> last known state; all access is under synchronized(_vms).
protected HashMap<String, State> _vms = new HashMap<String, State>(71);
protected String _patchPath;
// Names of the XenServer networks backing each CloudStack traffic type.
protected String _privateNetworkName;
protected String _linkLocalPrivateNetworkName;
protected String _publicNetworkName;
protected String _storageNetworkName1;
protected String _storageNetworkName2;
protected String _guestNetworkName;
// Timeout used for long-running operations such as the pool master switch.
protected int _wait;
protected IAgentControl _agentControl;
// CPU cap for user VMs (0 = uncapped) and the maximum scheduler weight.
int _userVMCap = 0;
final int _maxWeight = 256;
// Cached details (uuid, ip, network/PIF uuids, ...) of the local host.
protected final XenServerHost _host = new XenServerHost();
// Guest and Host Performance Statistics
protected boolean _collectHostStats = false;
protected String _consolidationFunction = "AVERAGE";
protected int _pollingIntervalInSeconds = 60;
protected StorageLayer _storage;
// True when the host supports bridge firewalling; gates programming of
// default network rules at VM start.
protected boolean _canBridgeFirewall = false;
// Per-storage-type delegates for storage pool operations.
protected HashMap<StoragePoolType, StoragePoolResource> _pools = new HashMap<StoragePoolType, StoragePoolResource>(5);
/**
 * XenServer storage repository types; rendered lower-case so values can be
 * compared directly against the type strings found in SR.Record.type.
 */
public enum SRType {
NFS, LVM, ISCSI, ISO, LVMOISCSI;
@Override
public String toString() {
return super.toString().toLowerCase();
}
// NOTE(review): this OVERLOADS Object.equals(Object) rather than overriding
// it. That is intentional here (case-insensitive match against SR type
// strings), but SRType.equals(Object) still uses identity semantics.
public boolean equals(String type) {
return super.toString().equalsIgnoreCase(type);
}
}
// Maps XenAPI VM power states to the agent's State values.
// PAUSED and SUSPENDED are both reported as Running to the management server.
protected static HashMap<Types.VmPowerState, State> s_statesTable;
protected String _localGateway;
static {
s_statesTable = new HashMap<Types.VmPowerState, State>();
s_statesTable.put(Types.VmPowerState.HALTED, State.Stopped);
s_statesTable.put(Types.VmPowerState.PAUSED, State.Running);
s_statesTable.put(Types.VmPowerState.RUNNING, State.Running);
s_statesTable.put(Types.VmPowerState.SUSPENDED, State.Running);
s_statesTable.put(Types.VmPowerState.UNKNOWN, State.Unknown);
s_statesTable.put(Types.VmPowerState.UNRECOGNIZED, State.Unknown);
}
/**
 * Returns true when the given XenAPI object reference is absent: either a
 * Java null or XenServer's "OpaqueRef:NULL" sentinel reference.
 */
protected boolean isRefNull(XenAPIObject object) {
    if (object == null) {
        return true;
    }
    return "OpaqueRef:NULL".equals(object.toWireString());
}
/**
 * Called when the agent's session with this host ends; returns this host's
 * pooled connection to the shared connection pool, if one was established.
 */
@Override
public void disconnected() {
s_logger.debug("Logging out of " + _host.uuid);
if (_host.pool != null) {
_connPool.disconnect(_host.uuid, _host.pool);
}
}
/**
 * Copies the given VDI into the target storage repository using this
 * host's connection and returns the newly created VDI.
 */
protected VDI cloudVDIcopy(VDI vdi, SR sr) throws BadServerResponse, XenAPIException, XmlRpcException{
Connection conn = getConnection();
return vdi.copy(conn, sr);
}
/**
 * Destroys halted VMs that belong to this host (by affinity), skipping
 * dom0, snapshots and templates.  The VM record fetch is retried once,
 * with a one-second pause, because the XenAPI call can fail transiently.
 * Per-VM destroy failures are logged and do not stop the sweep.
 */
protected void destroyStoppedVm() {
    Map<VM, VM.Record> vmentries = null;
    Connection conn = getConnection();
    for (int i = 0; i < 2; i++) {
        try {
            vmentries = VM.getAllRecords(conn);
            break;
        } catch (final Throwable e) {
            s_logger.warn("Unable to get vms", e);
        }
        try {
            Thread.sleep(1000);
        } catch (final InterruptedException ex) {
            // Restore the interrupt status so callers up the stack can observe
            // it; the original code swallowed the interrupt silently.
            Thread.currentThread().interrupt();
        }
    }
    if (vmentries == null) {
        return;
    }
    for (Map.Entry<VM, VM.Record> vmentry : vmentries.entrySet()) {
        VM.Record record = vmentry.getValue();
        // Skip dom0, snapshots and templates -- only real guests are destroyed.
        if (record.isControlDomain || record.isASnapshot || record.isATemplate) {
            continue; // Skip DOM0
        }
        if (record.powerState != Types.VmPowerState.HALTED) {
            continue;
        }
        try {
            // Only touch VMs affined to this host.
            if (isRefNull(record.affinity) || !record.affinity.getUuid(conn).equals(_host.uuid)) {
                continue;
            }
            vmentry.getKey().destroy(conn);
        } catch (Exception e) {
            String msg = "VM destroy failed for " + record.nameLabel + " due to " + e.getMessage();
            s_logger.warn(msg, e);
        }
    }
}
/**
 * Re-plugs any detached PBDs on this host for NFS SRs and "iso"-labelled
 * ISO SRs, and fully cleans up (via cleanSR) SRs that have lost all of
 * their PBDs.  Per-PBD failures are logged and skipped.
 */
protected void cleanupDiskMounts() {
Connection conn = getConnection();
Map<SR, SR.Record> srs;
try {
srs = SR.getAllRecords(conn);
} catch (XenAPIException e) {
s_logger.warn("Unable to get the SRs " + e.toString(), e);
throw new CloudRuntimeException("Unable to get SRs " + e.toString(), e);
} catch (XmlRpcException e) {
throw new CloudRuntimeException("Unable to get SRs " + e.getMessage());
}
for (Map.Entry<SR, SR.Record> sr : srs.entrySet()) {
SR.Record rec = sr.getValue();
// Only NFS SRs, and ISO SRs whose label ends in "iso", are managed here.
if (SRType.NFS.equals(rec.type) || (SRType.ISO.equals(rec.type) && rec.nameLabel.endsWith("iso"))) {
// An SR with no PBDs at all is stale - tear it down entirely.
if (rec.PBDs == null || rec.PBDs.size() == 0) {
cleanSR(sr.getKey(), rec);
continue;
}
for (PBD pbd : rec.PBDs) {
if (isRefNull(pbd)) {
continue;
}
PBD.Record pbdr = null;
try {
pbdr = pbd.getRecord(conn);
} catch (XenAPIException e) {
s_logger.warn("Unable to get pbd record " + e.toString());
} catch (XmlRpcException e) {
s_logger.warn("Unable to get pbd record " + e.getMessage());
}
if (pbdr == null) {
continue;
}
try {
// Re-attach only PBDs that belong to this host and are detached.
if (pbdr.host.getUuid(conn).equals(_host.uuid)) {
if (!pbdr.currentlyAttached) {
pbdPlug(conn, pbd);
}
}
} catch (XenAPIException e) {
s_logger.warn("Catch XenAPIException due to" + e.toString(), e);
} catch (XmlRpcException e) {
s_logger.warn("Catch XmlRpcException due to" + e.getMessage(), e);
}
}
}
}
}
/**
 * Finds a VM resident on the given host by its name label.  When
 * {@code getRecord} is true the returned pair carries the full VM record;
 * otherwise the record slot is null.  Returns null if no VM matches.
 */
protected Pair<VM, VM.Record> getVmByNameLabel(Connection conn, Host host, String nameLabel, boolean getRecord) throws XmlRpcException, XenAPIException {
    for (VM candidate : host.getResidentVMs(conn)) {
        VM.Record record = null;
        final String label;
        if (getRecord) {
            record = candidate.getRecord(conn);
            label = record.nameLabel;
        } else {
            label = candidate.getNameLabel(conn);
        }
        if (label.equals(nameLabel)) {
            return new Pair<VM, VM.Record>(candidate, record);
        }
    }
    return null;
}
/**
 * Verifies that the given PBD is genuinely attached on this host by probing
 * through the vmops host plugin: NFS SRs are checked by mount point,
 * LVM-over-iSCSI SRs by their SCSI id.  All other SR types are assumed
 * attached.  The plugin returns "1" when the attachment is live.
 */
protected boolean currentlyAttached(SR sr, SR.Record rec, PBD pbd, PBD.Record pbdr) {
    String status = null;
    if (SRType.NFS.equals(rec.type)) {
        status = callHostPlugin("vmops", "checkMount", "mount", rec.uuid);
    } else if (SRType.LVMOISCSI.equals(rec.type) ) {
        String scsiid = pbdr.deviceConfig.get("SCSIid");
        // Guard against a missing "SCSIid" key as well as an empty value;
        // Map.get returns null for an absent key and the original code
        // dereferenced it unconditionally, risking an NPE.
        if (scsiid == null || scsiid.isEmpty()) {
            return false;
        }
        status = callHostPlugin("vmops", "checkIscsi", "scsiid", scsiid);
    } else {
        return true;
    }
    if (status != null && status.equalsIgnoreCase("1")) {
        s_logger.debug("currently attached " + pbdr.uuid);
        return true;
    } else {
        s_logger.debug("currently not attached " + pbdr.uuid);
        return false;
    }
}
/**
 * Pings a domR's command port via the vmops host plugin; true when the
 * plugin reports a non-empty result.
 */
protected boolean pingdomr(String host, String port) {
    final String status = callHostPlugin("vmops", "pingdomr", "host", host, "port", port);
    return status != null && !status.isEmpty();
}
/**
 * Checks that the XenServer host is reachable by opening a short slave-local
 * login session against it.  Returns true on successful login, false on any
 * failure.  The temporary session is logged out and the connection disposed
 * in all cases.
 */
protected boolean pingxenserver() {
    Session slaveSession = null;
    Connection slaveConn = null;
    try {
        URL slaveUrl = new URL("http://" + _host.ip);
        slaveConn = new Connection(slaveUrl, 100);
        slaveSession = Session.slaveLocalLoginWithPassword(slaveConn, _username, _password);
        return true;
    } catch (Exception e) {
        return false;
    } finally {
        if (slaveSession != null) {
            try {
                Session.localLogout(slaveConn);
            } catch (Exception ignored) {
                // best-effort logout; nothing more to do
            }
        }
        // Dispose the connection whenever it was created.  The original code
        // disposed it only when the login succeeded, leaking the connection
        // when slaveLocalLoginWithPassword threw.
        if (slaveConn != null) {
            slaveConn.dispose();
        }
    }
}
/**
 * Prefixes a log message with this host's IP and the XenAPI object's wire
 * reference, e.g. "Host 1.2.3.4 OpaqueRef:... : msg".
 */
protected String logX(XenAPIObject obj, String msg) {
    return "Host " + _host.ip + " " + obj.toWireString() + ": " + msg;
}
/**
 * Tears down an SR: for every VDI still referenced by a VBD of a VM on this
 * host, hard-shuts-down and destroys that VM and its VBD; then unplugs and
 * destroys all of the SR's PBDs; finally forgets the SR once it has no PBDs
 * left.  All failures are logged and the teardown continues best-effort.
 */
protected void cleanSR(SR sr, SR.Record rec) {
Connection conn = getConnection();
if (rec.VDIs != null) {
for (VDI vdi : rec.VDIs) {
VDI.Record vdir;
try {
vdir = vdi.getRecord(conn);
} catch (XenAPIException e) {
s_logger.debug("Unable to get VDI: " + e.toString());
continue;
} catch (XmlRpcException e) {
s_logger.debug("Unable to get VDI: " + e.getMessage());
continue;
}
if (vdir.VBDs == null)
continue;
for (VBD vbd : vdir.VBDs) {
try {
VBD.Record vbdr = vbd.getRecord(conn);
VM.Record vmr = vbdr.VM.getRecord(conn);
// Only act on VMs resident on this host, or (if not resident
// anywhere) affined to this host.
if ((!isRefNull(vmr.residentOn) && vmr.residentOn.getUuid(conn).equals(_host.uuid))
|| (isRefNull(vmr.residentOn) && !isRefNull(vmr.affinity) && vmr.affinity.getUuid(conn).equals(_host.uuid))) {
// Force the VM down before destroying it if it is still up.
if (vmr.powerState != VmPowerState.HALTED && vmr.powerState != VmPowerState.UNKNOWN && vmr.powerState != VmPowerState.UNRECOGNIZED) {
try {
vbdr.VM.hardShutdown(conn);
} catch (XenAPIException e) {
s_logger.debug("Shutdown hit error " + vmr.nameLabel + ": " + e.toString());
}
}
try {
vbdr.VM.destroy(conn);
} catch (XenAPIException e) {
s_logger.debug("Destroy hit error " + vmr.nameLabel + ": " + e.toString());
} catch (XmlRpcException e) {
s_logger.debug("Destroy hit error " + vmr.nameLabel + ": " + e.getMessage());
}
vbd.destroy(conn);
break;
}
} catch (XenAPIException e) {
s_logger.debug("Unable to get VBD: " + e.toString());
continue;
} catch (XmlRpcException e) {
s_logger.debug("Uanbel to get VBD: " + e.getMessage());
continue;
}
}
}
}
// Detach and remove every PBD of the SR.
for (PBD pbd : rec.PBDs) {
PBD.Record pbdr = null;
try {
pbdr = pbd.getRecord(conn);
pbd.unplug(conn);
pbd.destroy(conn);
} catch (XenAPIException e) {
s_logger.warn("PBD " + ((pbdr != null) ? "(uuid:" + pbdr.uuid + ")" : "") + "destroy failed due to " + e.toString());
} catch (XmlRpcException e) {
s_logger.warn("PBD " + ((pbdr != null) ? "(uuid:" + pbdr.uuid + ")" : "") + "destroy failed due to " + e.getMessage());
}
}
try {
// Re-read the SR; forget it only when no PBDs remain.
rec = sr.getRecord(conn);
if (rec.PBDs == null || rec.PBDs.size() == 0) {
sr.forget(conn);
return;
}
} catch (XenAPIException e) {
s_logger.warn("Unable to retrieve sr again: " + e.toString(), e);
} catch (XmlRpcException e) {
s_logger.warn("Unable to retrieve sr again: " + e.getMessage(), e);
}
}
/**
 * Dispatches an agent command to its typed execute() overload; unsupported
 * commands are answered with an UnsupportedCommandAnswer.
 * NOTE(review): this instanceof chain is order-sensitive when command types
 * subclass each other - e.g. RebootRouterCommand is tested before
 * RebootCommand here, while StartCommand is tested before StartRouterCommand;
 * verify the latter ordering is intended before reordering anything.
 */
@Override
public Answer executeRequest(Command cmd) {
if (cmd instanceof CreateCommand) {
return execute((CreateCommand) cmd);
} else if (cmd instanceof SetFirewallRuleCommand) {
return execute((SetFirewallRuleCommand) cmd);
} else if (cmd instanceof LoadBalancerCfgCommand) {
return execute((LoadBalancerCfgCommand) cmd);
} else if (cmd instanceof IPAssocCommand) {
return execute((IPAssocCommand) cmd);
} else if (cmd instanceof CheckConsoleProxyLoadCommand) {
return execute((CheckConsoleProxyLoadCommand) cmd);
} else if (cmd instanceof WatchConsoleProxyLoadCommand) {
return execute((WatchConsoleProxyLoadCommand) cmd);
} else if (cmd instanceof SavePasswordCommand) {
return execute((SavePasswordCommand) cmd);
} else if (cmd instanceof DhcpEntryCommand) {
return execute((DhcpEntryCommand) cmd);
} else if (cmd instanceof VmDataCommand) {
return execute((VmDataCommand) cmd);
} else if (cmd instanceof StartCommand) {
return execute((StartCommand) cmd);
} else if (cmd instanceof StartRouterCommand) {
return execute((StartRouterCommand) cmd);
} else if (cmd instanceof ReadyCommand) {
return execute((ReadyCommand) cmd);
} else if (cmd instanceof GetHostStatsCommand) {
return execute((GetHostStatsCommand) cmd);
} else if (cmd instanceof GetVmStatsCommand) {
return execute((GetVmStatsCommand) cmd);
} else if (cmd instanceof CheckHealthCommand) {
return execute((CheckHealthCommand) cmd);
} else if (cmd instanceof StopCommand) {
return execute((StopCommand) cmd);
} else if (cmd instanceof RebootRouterCommand) {
return execute((RebootRouterCommand) cmd);
} else if (cmd instanceof RebootCommand) {
return execute((RebootCommand) cmd);
} else if (cmd instanceof CheckVirtualMachineCommand) {
return execute((CheckVirtualMachineCommand) cmd);
} else if (cmd instanceof PrepareForMigrationCommand) {
return execute((PrepareForMigrationCommand) cmd);
} else if (cmd instanceof MigrateCommand) {
return execute((MigrateCommand) cmd);
} else if (cmd instanceof DestroyCommand) {
return execute((DestroyCommand) cmd);
} else if (cmd instanceof ShareCommand) {
return execute((ShareCommand) cmd);
} else if (cmd instanceof ModifyStoragePoolCommand) {
return execute((ModifyStoragePoolCommand) cmd);
} else if (cmd instanceof DeleteStoragePoolCommand) {
return execute((DeleteStoragePoolCommand) cmd);
} else if (cmd instanceof CopyVolumeCommand) {
return execute((CopyVolumeCommand) cmd);
} else if (cmd instanceof AttachVolumeCommand) {
return execute((AttachVolumeCommand) cmd);
} else if (cmd instanceof AttachIsoCommand) {
return execute((AttachIsoCommand) cmd);
} else if (cmd instanceof ManageSnapshotCommand) {
return execute((ManageSnapshotCommand) cmd);
} else if (cmd instanceof BackupSnapshotCommand) {
return execute((BackupSnapshotCommand) cmd);
} else if (cmd instanceof DeleteSnapshotBackupCommand) {
return execute((DeleteSnapshotBackupCommand) cmd);
} else if (cmd instanceof CreateVolumeFromSnapshotCommand) {
return execute((CreateVolumeFromSnapshotCommand) cmd);
} else if (cmd instanceof DeleteSnapshotsDirCommand) {
return execute((DeleteSnapshotsDirCommand) cmd);
} else if (cmd instanceof CreatePrivateTemplateFromVolumeCommand) {
return execute((CreatePrivateTemplateFromVolumeCommand) cmd);
} else if (cmd instanceof CreatePrivateTemplateFromSnapshotCommand) {
return execute((CreatePrivateTemplateFromSnapshotCommand) cmd);
} else if (cmd instanceof GetStorageStatsCommand) {
return execute((GetStorageStatsCommand) cmd);
} else if (cmd instanceof PrimaryStorageDownloadCommand) {
return execute((PrimaryStorageDownloadCommand) cmd);
} else if (cmd instanceof StartConsoleProxyCommand) {
return execute((StartConsoleProxyCommand) cmd);
} else if (cmd instanceof StartSecStorageVmCommand) {
return execute((StartSecStorageVmCommand) cmd);
} else if (cmd instanceof GetVncPortCommand) {
return execute((GetVncPortCommand) cmd);
} else if (cmd instanceof SetupCommand) {
return execute((SetupCommand) cmd);
} else if (cmd instanceof MaintainCommand) {
return execute((MaintainCommand) cmd);
} else if (cmd instanceof PingTestCommand) {
return execute((PingTestCommand) cmd);
} else if (cmd instanceof CheckOnHostCommand) {
return execute((CheckOnHostCommand) cmd);
} else if (cmd instanceof ModifySshKeysCommand) {
return execute((ModifySshKeysCommand) cmd);
} else if (cmd instanceof PoolEjectCommand) {
return execute((PoolEjectCommand) cmd);
} else if (cmd instanceof Start2Command) {
return execute((Start2Command)cmd);
} else if (cmd instanceof RemoteAccessVpnCfgCommand) {
return execute((RemoteAccessVpnCfgCommand)cmd);
} else {
return Answer.createUnsupportedCommandAnswer(cmd);
}
}
/**
 * Resolves the XenServer network (and the uuid of its PIF, when one is
 * tracked) that carries the given traffic type on this host.  Control
 * traffic additionally triggers link-local network setup and has no PIF.
 * Vpn traffic rides the public network.  Unknown types raise
 * CloudRuntimeException.
 */
Pair<Network, String> getNetworkForTraffic(Connection conn, TrafficType type) throws XenAPIException, XmlRpcException {
    // Preserve the original behavior for a null type: report it as
    // unsupported instead of letting the switch throw an NPE.
    if (type == null) {
        throw new CloudRuntimeException("Unsupported network type: " + type);
    }
    switch (type) {
    case Guest:
        return new Pair<Network, String>(Network.getByUuid(conn, _host.guestNetwork), _host.guestPif);
    case Control:
        setupLinkLocalNetwork();
        return new Pair<Network, String>(Network.getByUuid(conn, _host.linkLocalNetwork), null);
    case Management:
        return new Pair<Network, String>(Network.getByUuid(conn, _host.privateNetwork), _host.privatePif);
    case Public:
        return new Pair<Network, String>(Network.getByUuid(conn, _host.publicNetwork), _host.publicPif);
    case Storage:
        return new Pair<Network, String>(Network.getByUuid(conn, _host.storageNetwork1), _host.storagePif1);
    case Vpn:
        return new Pair<Network, String>(Network.getByUuid(conn, _host.publicNetwork), _host.publicPif);
    default:
        throw new CloudRuntimeException("Unsupported network type: " + type);
    }
}
/**
 * Creates (but does not plug) a VIF for the given nic on the VM: picks the
 * XenServer network for the nic's traffic type, creating a VLAN network on
 * top of it when the broadcast domain is a VLAN, and applies a "ratelimit"
 * QoS parameter when a network rate is configured.
 */
protected VIF createVif(Connection conn, String vmName, VM vm, NicTO nic) throws XmlRpcException, XenAPIException {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Creating VIF for " + vmName + " on nic " + nic);
}
VIF.Record vifr = new VIF.Record();
vifr.VM = vm;
vifr.device = Integer.toString(nic.getDeviceId());
vifr.MAC = nic.getMac();
Pair<Network, String> network = getNetworkForTraffic(conn, nic.getType());
if (nic.getBroadcastType() == BroadcastDomainType.Vlan) {
URI broadcastUri = nic.getBroadcastUri();
assert broadcastUri.getScheme().equals(BroadcastDomainType.Vlan.scheme());
// The VLAN id is carried in the host component of the broadcast URI.
long vlan = Long.parseLong(broadcastUri.getHost());
vifr.network = enableVlanNetwork(conn, vlan, network.first(), network.second());
} else if (nic.getBroadcastType() == BroadcastDomainType.Native || nic.getBroadcastType() == BroadcastDomainType.LinkLocal) {
vifr.network = network.first();
}
if (nic.getNetworkRateMbps() != null) {
vifr.qosAlgorithmType = "ratelimit";
vifr.qosAlgorithmParams = new HashMap<String, String>();
// convert mbs to kilobyte per second
vifr.qosAlgorithmParams.put("kbps", Integer.toString(nic.getNetworkRateMbps() * 128));
}
VIF vif = VIF.create(conn, vifr);
if (s_logger.isDebugEnabled()) {
vifr = vif.getRecord(conn);
s_logger.debug("Created a vif " + vifr.uuid + " on " + nic.getDeviceId());
}
return vif;
}
/**
 * Resolves a volume to its VDI.  Non-ISO volumes are looked up directly by
 * uuid.  For ISO volumes, the path's directory component is mounted as an
 * ISO SR (named after the VM) and the file component is located inside it;
 * a CloudRuntimeException is thrown if the mount point URI is malformed or
 * the ISO cannot be found.
 */
protected VDI mount(Connection conn, String vmName, VolumeTO volume) throws XmlRpcException, XenAPIException {
    if (volume.getType() != VolumeType.ISO) {
        return VDI.getByUuid(conn, volume.getPath());
    }
    String isoPath = volume.getPath();
    int slash = isoPath.lastIndexOf("/");
    String mountPoint = isoPath.substring(0, slash);
    URI uri;
    try {
        uri = new URI(mountPoint);
    } catch (URISyntaxException e) {
        throw new CloudRuntimeException("Incorrect uri " + mountPoint, e);
    }
    SR isoSr = createIsoSRbyURI(uri, vmName, false);
    String isoName = isoPath.substring(slash + 1);
    VDI isoVdi = getVDIbyLocationandSR(isoName, isoSr);
    if (isoVdi == null) {
        throw new CloudRuntimeException("Unable to find ISO " + volume.getPath());
    }
    return isoVdi;
}
/**
 * Creates a VBD attaching the given volume to the VM: ROOT volumes are
 * marked bootable, ISO volumes become read-only CD devices, everything
 * else a read-write disk.  The device number comes from the volume spec.
 */
protected VBD createVbd(Connection conn, VolumeTO volume, String vmName, VM vm) throws XmlRpcException, XenAPIException {
    VolumeType volType = volume.getType();
    VBD.Record record = new VBD.Record();
    record.VM = vm;
    record.VDI = mount(conn, vmName, volume);
    if (volType == VolumeType.ROOT) {
        record.bootable = true;
    }
    record.userdevice = Long.toString(volume.getDeviceId());
    if (volType == VolumeType.ISO) {
        record.mode = Types.VbdMode.RO;
        record.type = Types.VbdType.CD;
    } else {
        record.mode = Types.VbdMode.RW;
        record.type = Types.VbdType.DISK;
    }
    VBD vbd = VBD.create(conn, record);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("VBD " + vbd.getUuid(conn) + " created for " + volume);
    }
    return vbd;
}
/**
 * Clones a VM from the built-in template matching the guest OS type and
 * configures it from the spec: affinity to the given host, memory, vCPU
 * count, scheduler weight/cap, crash/shutdown behavior, PV boot arguments
 * and bootloader.  The VM is created but not started.  Asserts exactly one
 * template exists for the OS type.
 */
protected VM createVmFromTemplate(Connection conn, VirtualMachineTO vmSpec, Host host) throws XenAPIException, XmlRpcException {
String guestOsTypeName = getGuestOsType(vmSpec.getOs());
Set<VM> templates = VM.getByNameLabel(conn, guestOsTypeName);
assert templates.size() == 1 : "Should only have 1 template but found " + templates.size();
VM template = templates.iterator().next();
VM vm = template.createClone(conn, vmSpec.getName());
vm.setAffinity(conn, host);
VM.Record vmr = vm.getRecord(conn);
if (s_logger.isDebugEnabled()) {
s_logger.debug("Created VM " + vmr.uuid + " for " + vmSpec.getName());
}
// Drop consoles inherited from the template.
for (Console console : vmr.consoles) {
console.destroy(conn);
}
vm.setIsATemplate(conn, false);
vm.removeFromOtherConfig(conn, "disks");
vm.setNameLabel(conn, vmSpec.getName());
setMemory(conn, vm, vmSpec.getMinRam());
vm.setVCPUsAtStartup(conn, (long)vmSpec.getCpus());
vm.setVCPUsMax(conn, (long)vmSpec.getCpus());
Map<String, String> vcpuParams = new HashMap<String, String>();
Integer speed = vmSpec.getSpeed();
if (speed != null) {
int utilization = _userVMCap; //cpu_cap
//Configuration cpu.uservm.cap is not available in default installation. Using this parameter is not encouraged
int cpuWeight = _maxWeight; //cpu_weight
// weight based allocation
// Scale requested speed against host speed into the scheduler-weight
// range; the 0.99 factor keeps full-speed requests just under max.
cpuWeight = (int)((speed*0.99) / _host.speed * _maxWeight);
if (cpuWeight > _maxWeight) {
cpuWeight = _maxWeight;
}
vcpuParams.put("weight", Integer.toString(cpuWeight));
vcpuParams.put("cap", Integer.toString(utilization));
}
if (vcpuParams.size() > 0) {
vm.setVCPUsParams(conn, vcpuParams);
}
vm.setActionsAfterCrash(conn, Types.OnCrashBehaviour.DESTROY);
vm.setActionsAfterShutdown(conn, Types.OnNormalExit.DESTROY);
String bootArgs = vmSpec.getBootArgs();
if (bootArgs != null && bootArgs.length() > 0) {
String pvargs = vm.getPVArgs(conn);
pvargs = pvargs + vmSpec.getBootArgs();
if (s_logger.isDebugEnabled()) {
s_logger.debug("PV args are " + pvargs);
}
vm.setPVArgs(conn, pvargs);
}
// Windows/Citrix/Other OS types keep the template's boot settings
// (presumably HVM guests - confirm); all other guests need an explicit
// PV bootloader.
if (!(guestOsTypeName.startsWith("Windows") || guestOsTypeName.startsWith("Citrix") || guestOsTypeName.startsWith("Other"))) {
if (vmSpec.getBootloader() == BootloaderType.CD) {
vm.setPVBootloader(conn, "eliloader");
vm.addToOtherConfig(conn, "install-repository", "cdrom");
} else if (vmSpec.getBootloader() == BootloaderType.PyGrub ){
vm.setPVBootloader(conn, "pygrub");
} else {
vm.destroy(conn);
throw new CloudRuntimeException("Unable to handle boot loader type: " + vmSpec.getBootloader());
}
}
return vm;
}
/**
 * Logs a VM start failure and best-effort cleans up the partially created
 * VM: hard-shutdown if it is running, destroy it once halted, then unplug
 * and destroy its VBDs and VIFs.  Each cleanup step tolerates failure.
 * Returns the composed failure message for the caller's Answer.
 */
protected String handleVmStartFailure(String vmName, VM vm, String message, Throwable th) {
String msg = "Unable to start " + vmName + " due to " + message;
s_logger.warn(msg, th);
if (vm == null) {
return msg;
}
Connection conn = getConnection();
try {
VM.Record vmr = vm.getRecord(conn);
if (vmr.powerState == VmPowerState.RUNNING) {
try {
vm.hardShutdown(conn);
} catch (Exception e) {
s_logger.warn("VM hardshutdown failed due to ", e);
}
}
if (vm.getPowerState(conn) == VmPowerState.HALTED) {
try {
vm.destroy(conn);
} catch (Exception e) {
s_logger.warn("VM destroy failed due to ", e);
}
}
// Detach and remove the devices recorded before the shutdown.
for (VBD vbd : vmr.VBDs) {
try {
vbd.unplug(conn);
vbd.destroy(conn);
} catch (Exception e) {
s_logger.warn("Unable to clean up VBD due to ", e);
}
}
for (VIF vif : vmr.VIFs) {
try {
vif.unplug(conn);
vif.destroy(conn);
} catch (Exception e) {
s_logger.warn("Unable to cleanup VIF", e);
}
}
} catch (Exception e) {
s_logger.warn("VM getRecord failed due to ", e);
}
return msg;
}
/**
 * Builds an empty, non-bootable, read-only CD VBD on device 3 for the VM
 * and inserts the system VM patch ISO into it.
 */
protected VBD createPatchVbd(Connection conn, String vmName, VM vm) throws XmlRpcException, XenAPIException {
    VBD.Record isoVbdRecord = new VBD.Record();
    isoVbdRecord.VM = vm;
    isoVbdRecord.userdevice = "3";
    isoVbdRecord.mode = Types.VbdMode.RO;
    isoVbdRecord.type = Types.VbdType.CD;
    isoVbdRecord.empty = true;
    isoVbdRecord.bootable = false;
    VBD isoVbd = VBD.create(conn, isoVbdRecord);
    isoVbd.insert(conn, VDI.getByUuid(conn, _host.systemvmisouuid));
    return isoVbd;
}
/**
 * Starts a VM from its spec: clones it from the template, attaches its
 * disks (plus the systemvm patch ISO for non-user VMs), creates its VIFs,
 * starts it on this host, programs default network rules when bridge
 * firewalling is available, and optionally pings the VM's command port via
 * an SshMonitor before reporting success.  The _vms state cache is kept in
 * sync under its own lock; on failure the partially created VM is cleaned
 * up through handleVmStartFailure.
 */
protected Start2Answer execute(Start2Command cmd) {
VirtualMachineTO vmSpec = cmd.getVirtualMachine();
String vmName = vmSpec.getName();
Connection conn = getConnection();
State state = State.Stopped;
VM vm = null;
try {
Host host = Host.getByUuid(conn, _host.uuid);
synchronized (_vms) {
_vms.put(vmName, State.Starting);
}
vm = createVmFromTemplate(conn, vmSpec, host);
for (VolumeTO disk : vmSpec.getDisks()) {
createVbd(conn, disk, vmName, vm);
}
if (vmSpec.getType() != VirtualMachine.Type.User) {
createPatchVbd(conn, vmName, vm);
}
for (NicTO nic : vmSpec.getNics()) {
createVif(conn, vmName, vm, nic);
}
vm.startOn(conn, host, false, true);
if (_canBridgeFirewall) {
String result = null;
if (vmSpec.getType() != VirtualMachine.Type.User) {
result = callHostPlugin("vmops", "default_network_rules_systemvm", "vmName", vmName);
} else {
// NOTE(review): no rules are programmed for user VMs here, so
// result stays null and the failure warning below always fires
// for them - confirm this branch is intentionally empty.
}
if (result == null || result.isEmpty() || !Boolean.parseBoolean(result)) {
s_logger.warn("Failed to program default network rules for " + vmName);
} else {
s_logger.info("Programmed default network rules for " + vmName);
}
}
Monitor monitor = vmSpec.getMonitor();
if (monitor != null && monitor instanceof SshMonitor) {
SshMonitor sshMon = (SshMonitor)monitor;
String privateIp = sshMon.getIp();
int cmdPort = sshMon.getPort();
if (s_logger.isDebugEnabled()) {
s_logger.debug("Ping command port, " + privateIp + ":" + cmdPort);
}
String result = connect(vmName, privateIp, cmdPort);
if (result != null) {
throw new CloudRuntimeException("Can not ping System vm " + vmName + "due to:" + result);
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Ping command port succeeded for vm " + vmName);
}
}
state = State.Running;
return new Start2Answer(cmd);
} catch (XmlRpcException e) {
s_logger.warn("Exception ", e);
String msg = handleVmStartFailure(vmName, vm, "", e);
return new Start2Answer(cmd, msg);
} catch (XenAPIException e) {
s_logger.warn("Exception ", e);
String msg = handleVmStartFailure(vmName, vm, "", e);
return new Start2Answer(cmd, msg);
} catch (Exception e) {
s_logger.warn("Exception ", e);
String msg = handleVmStartFailure(vmName, vm, "", e);
return new Start2Answer(cmd, msg);
} finally {
// Record the final state: Running on success, otherwise drop the
// Starting entry so the cache does not hold a stale state.
synchronized (_vms) {
if (state != State.Stopped) {
_vms.put(vmName, state);
} else {
_vms.remove(vmName);
}
}
}
}
/**
 * No-op acknowledgement: this resource takes no action on SSH key changes
 * and simply reports success.
 */
protected Answer execute(ModifySshKeysCommand cmd) {
return new Answer(cmd);
}
/**
 * Pings a computing host through the vmops pingtest host plugin; true when
 * the plugin reports a non-empty result.
 */
private boolean doPingTest(final String computingHostIp) {
    final String output = callHostPlugin("vmops", "pingtest", "args", "-h " + computingHostIp);
    return output != null && !output.isEmpty();
}
/**
 * Investigating another host's state is not supported by this resource;
 * answers with a null state and an explanatory detail message.
 */
protected CheckOnHostAnswer execute(CheckOnHostCommand cmd) {
    // Fixed the misspelled detail message (was "Not Implmeneted").
    return new CheckOnHostAnswer(cmd, null, "Not Implemented");
}
/**
 * Pings a guest's private IP from inside its domR via the vmops pingtest
 * host plugin; true when the plugin reports a non-empty result.
 */
private boolean doPingTest(final String domRIp, final String vmIp) {
    final String output = callHostPlugin("vmops", "pingtest", "args", "-i " + domRIp + " -p " + vmIp);
    return output != null && !output.isEmpty();
}
/**
 * Runs a ping test: directly against a computing host when one is given,
 * otherwise from the router to the guest's private IP.  Answers with a
 * failure message when the ping does not succeed.
 */
private Answer execute(PingTestCommand cmd) {
    final String computingHostIp = cmd.getComputingHostIp(); // TODO, split the command into 2 types
    final boolean success = (computingHostIp != null)
            ? doPingTest(computingHostIp)
            : doPingTest(cmd.getRouterIp(), cmd.getPrivateIp());
    if (!success) {
        return new Answer(cmd, false, "PingTestCommand failed");
    }
    return new Answer(cmd);
}
/**
 * Prepares the pool for this host entering maintenance.  Slaves simply
 * acknowledge; if this host is the pool master, mastership is handed to
 * the first other host in the pool that accepts the switch.  Fails when
 * this host is the only member or no candidate accepts.
 */
protected MaintainAnswer execute(MaintainCommand cmd) {
Connection conn = getConnection();
try {
Pool pool = Pool.getByUuid(conn, _host.pool);
Pool.Record poolr = pool.getRecord(conn);
Host.Record hostr = poolr.master.getRecord(conn);
if (!_host.uuid.equals(hostr.uuid)) {
s_logger.debug("Not the master node so just return ok: " + _host.ip);
return new MaintainAnswer(cmd);
}
Map<Host, Host.Record> hostMap = Host.getAllRecords(conn);
if (hostMap.size() == 1) {
s_logger.debug("There's no one to take over as master");
return new MaintainAnswer(cmd,false, "Only master in the pool");
}
Host newMaster = null;
Host.Record newMasterRecord = null;
// Try each other host in turn until one accepts the master switch.
for (Map.Entry<Host, Host.Record> entry : hostMap.entrySet()) {
if (!_host.uuid.equals(entry.getValue().uuid)) {
newMaster = entry.getKey();
newMasterRecord = entry.getValue();
s_logger.debug("New master for the XenPool is " + newMasterRecord.uuid + " : " + newMasterRecord.address);
try {
_connPool.switchMaster(_host.ip, _host.pool, conn, newMaster, _username, _password, _wait);
return new MaintainAnswer(cmd, "New Master is " + newMasterRecord.address);
} catch (XenAPIException e) {
s_logger.warn("Unable to switch the new master to " + newMasterRecord.uuid + ": " + newMasterRecord.address + " Trying again...");
} catch (XmlRpcException e) {
s_logger.warn("Unable to switch the new master to " + newMasterRecord.uuid + ": " + newMasterRecord.address + " Trying again...");
}
}
}
return new MaintainAnswer(cmd, false, "Unable to find an appropriate host to set as the new master");
} catch (XenAPIException e) {
s_logger.warn("Unable to put server in maintainence mode", e);
return new MaintainAnswer(cmd, false, e.getMessage());
} catch (XmlRpcException e) {
s_logger.warn("Unable to put server in maintainence mode", e);
return new MaintainAnswer(cmd, false, e.getMessage());
}
}
/**
 * Host setup requires no work on this resource; simply acknowledge.
 */
protected SetupAnswer execute(SetupCommand cmd) {
return new SetupAnswer(cmd);
}
/**
 * Starts a secondary-storage system VM on this host, attached to the
 * private network, with zone/pod/local-gateway info appended to its boot
 * arguments.
 *
 * @param cmd carries the VM definition, volumes and boot arguments
 * @return success when startSystemVM returns null; otherwise the error text
 */
protected Answer execute(StartSecStorageVmCommand cmd) {
    final String vmName = cmd.getVmName();
    SecondaryStorageVmVO storage = cmd.getSecondaryStorageVmVO();
    try {
        Connection conn = getConnection();
        Network network = Network.getByUuid(conn, _host.privateNetwork);
        // Augment the boot line with this host's placement information.
        String bootArgs = cmd.getBootArgs();
        bootArgs += " zone=" + _dcId;
        bootArgs += " pod=" + _pod;
        bootArgs += " localgw=" + _localGateway;
        String result = startSystemVM(vmName, storage.getVlanId(), network, cmd.getVolumes(), bootArgs, storage.getGuestMacAddress(), storage.getGuestIpAddress(), storage
            .getPrivateMacAddress(), storage.getPublicMacAddress(), cmd.getProxyCmdPort(), storage.getRamSize(), cmd.getGuestOSDescription(), cmd.getNetworkRateMbps());
        // startSystemVM returns null on success, an error message on failure.
        if (result == null) {
            return new StartSecStorageVmAnswer(cmd);
        }
        return new StartSecStorageVmAnswer(cmd, result);
    } catch (Exception e) {
        String msg = "Exception caught while starting router vm " + vmName + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new StartSecStorageVmAnswer(cmd, msg);
    }
}
/**
 * Adds (-A) or deletes (-D) a port-forwarding firewall rule on the router
 * by invoking the "setFirewallRule" host plugin.
 *
 * @param cmd the rule: protocol, public/private endpoints, router identity,
 *            and optionally the previous private IP/port being replaced
 * @return success when the plugin produced output, failure otherwise
 */
protected Answer execute(final SetFirewallRuleCommand cmd) {
    final StringBuilder args = new StringBuilder(cmd.isEnable() ? "-A" : "-D");
    args.append(" -P ").append(cmd.getProtocol().toLowerCase());
    args.append(" -l ").append(cmd.getPublicIpAddress());
    args.append(" -p ").append(cmd.getPublicPort());
    args.append(" -n ").append(cmd.getRouterName());
    args.append(" -i ").append(cmd.getRouterIpAddress());
    args.append(" -r ").append(cmd.getPrivateIpAddress());
    args.append(" -d ").append(cmd.getPrivatePort());
    args.append(" -N ").append(cmd.getVlanNetmask());
    // Previous mapping, if this rule replaces an older one.
    final String oldPrivateIP = cmd.getOldPrivateIP();
    final String oldPrivatePort = cmd.getOldPrivatePort();
    if (oldPrivateIP != null) {
        args.append(" -w ").append(oldPrivateIP);
    }
    if (oldPrivatePort != null) {
        args.append(" -x ").append(oldPrivatePort);
    }
    final String result = callHostPlugin("vmops", "setFirewallRule", "args", args.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "SetFirewallRule failed");
    }
    return new Answer(cmd);
}
/**
 * Pushes a new HAProxy configuration to the router and updates the
 * associated load-balancer firewall rules via the "setLoadBalancerRule"
 * host plugin.
 *
 * Steps: write the cfg lines to a temp file on the host, call the plugin
 * with -a (rules to add) / -d (rules to remove), then best-effort delete
 * the temp file.
 *
 * Fix: the original built the cfg contents and rule lists with String +=
 * inside loops (O(n^2)); this version uses StringBuilder. The resulting
 * strings are byte-identical.
 *
 * @return failure when the cfg file cannot be created or the plugin fails
 */
protected Answer execute(final LoadBalancerCfgCommand cmd) {
    String routerIp = cmd.getRouterIp();
    if (routerIp == null) {
        return new Answer(cmd);
    }
    String tmpCfgFilePath = "/tmp/" + cmd.getRouterIp().replace('.', '_') + ".cfg";
    // Join the config lines, each terminated by '\n'.
    StringBuilder tmpCfgFileContents = new StringBuilder();
    for (String configLine : cmd.getConfig()) {
        tmpCfgFileContents.append(configLine).append('\n');
    }
    String result = callHostPlugin("vmops", "createFile", "filepath", tmpCfgFilePath, "filecontents", tmpCfgFileContents.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "LoadBalancerCfgCommand failed to create HA proxy cfg file.");
    }
    String[] addRules = cmd.getAddFwRules();
    String[] removeRules = cmd.getRemoveFwRules();
    StringBuilder args = new StringBuilder();
    args.append("-i ").append(routerIp);
    args.append(" -f ").append(tmpCfgFilePath);
    // Each rule list is passed comma-terminated; a flag is omitted entirely
    // when its list is empty (preserved from the original behavior).
    if (addRules.length > 0) {
        StringBuilder sb = new StringBuilder();
        for (String addRule : addRules) {
            sb.append(addRule).append(',');
        }
        args.append(" -a ").append(sb);
    }
    if (removeRules.length > 0) {
        StringBuilder sb = new StringBuilder();
        for (String removeRule : removeRules) {
            sb.append(removeRule).append(',');
        }
        args.append(" -d ").append(sb);
    }
    result = callHostPlugin("vmops", "setLoadBalancerRule", "args", args.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "LoadBalancerCfgCommand failed");
    }
    // Best-effort cleanup of the temporary cfg file; result is ignored.
    callHostPlugin("vmops", "deleteFile", "filepath", tmpCfgFilePath);
    return new Answer(cmd);
}
/**
 * Saves a DHCP lease entry (VM IP/MAC/name) on the router via the
 * "saveDhcpEntry" host plugin. Synchronized so concurrent DHCP edits on
 * this resource are serialized.
 */
protected synchronized Answer execute(final DhcpEntryCommand cmd) {
    final StringBuilder args = new StringBuilder();
    args.append("-r ").append(cmd.getRouterPrivateIpAddress());
    args.append(" -v ").append(cmd.getVmIpAddress());
    args.append(" -m ").append(cmd.getVmMac());
    args.append(" -n ").append(cmd.getVmName());
    final String result = callHostPlugin("vmops", "saveDhcpEntry", "args", args.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "DhcpEntry failed");
    }
    return new Answer(cmd);
}
/**
 * Creates (-c) or destroys (-d) the L2TP remote-access VPN configuration
 * on the router through the "lt2p_vpn" host plugin. Synchronized to keep
 * VPN reconfiguration on this resource sequential.
 */
protected synchronized Answer execute(final RemoteAccessVpnCfgCommand cmd) {
    final StringBuilder args = new StringBuilder(cmd.getRouterPrivateIpAddress());
    if (cmd.isCreate()) {
        args.append(" -r ").append(cmd.getIpRange());
        args.append(" -p ").append(cmd.getPresharedKey());
        args.append(" -s ").append(cmd.getVpnServerIp());
        args.append(" -l ").append(cmd.getLocalIp());
        args.append(" -c");
    } else {
        args.append(" -d");
    }
    final String result = callHostPlugin("vmops", "lt2p_vpn", "args", args.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "Configure VPN failed");
    }
    return new Answer(cmd);
}
/**
 * Pushes VM metadata (userdata-style folder/file/content triples) to the
 * router via the "vm_data" host plugin.
 *
 * The plugin argument array is laid out as:
 *   ["routerIP", ip, "vmIP", ip, "folder,file", contents, ...]
 */
protected Answer execute(final VmDataCommand cmd) {
    final List<String[]> vmData = cmd.getVmData();
    final String[] vmDataArgs = new String[vmData.size() * 2 + 4];
    vmDataArgs[0] = "routerIP";
    vmDataArgs[1] = cmd.getRouterPrivateIpAddress();
    vmDataArgs[2] = "vmIP";
    vmDataArgs[3] = cmd.getVmIpAddress();
    int idx = 4;
    for (final String[] entry : vmData) {
        // entry = { folder, file, contents }; null contents become "none".
        vmDataArgs[idx++] = entry[0] + "," + entry[1];
        vmDataArgs[idx++] = (entry[2] != null) ? entry[2] : "none";
    }
    final String result = callHostPlugin("vmops", "vm_data", vmDataArgs);
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "vm_data failed");
    } else {
        return new Answer(cmd);
    }
}
/**
 * Stores a VM's password on the router (save_password_to_domr.sh) via the
 * "savePassword" host plugin.
 */
protected Answer execute(final SavePasswordCommand cmd) {
    final String vmName = cmd.getVmName();
    // Build: -r <routerIp> -v <vmIp> -p <password> <vmName>
    final StringBuilder args = new StringBuilder();
    args.append("-r ").append(cmd.getRouterPrivateIpAddress());
    args.append(" -v ").append(cmd.getVmIpAddress());
    args.append(" -p ").append(cmd.getPassword());
    args.append(" ").append(vmName);
    final String result = callHostPlugin("vmops", "savePassword", "args", args.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "savePassword failed");
    }
    return new Answer(cmd);
}
/**
 * Associates (add=true) or disassociates (add=false) a public IP address
 * with the domain router VM, creating or removing the VLAN VIF as needed,
 * then invokes the "ipassoc" host plugin to apply the change inside DomR.
 *
 * @param vmName           name of the router VM
 * @param privateIpAddress DomR's private/control IP (used for plugin calls
 *                         and network-usage accounting)
 * @param publicIpAddress  the public IP being (dis)associated
 * @param add              true to associate, false to disassociate
 * @param firstIP          true when this is the first/last IP in the VLAN
 * @param sourceNat        true when this IP is the source-NAT IP
 * @param vlanId           VLAN tag of the public network
 * @param vlanGateway      gateway of the VLAN
 * @param vlanNetmask      netmask of the VLAN (converted to CIDR size)
 * @param vifMacAddress    MAC to use when a new VIF must be created
 * @param guestIp          optional guest IP passed through as -G
 * @throws InternalErrorException when VIF setup or the plugin call fails
 */
protected void assignPublicIpAddress(final String vmName, final String privateIpAddress, final String publicIpAddress, final boolean add, final boolean firstIP,
    final boolean sourceNat, final String vlanId, final String vlanGateway, final String vlanNetmask, final String vifMacAddress, String guestIp) throws InternalErrorException {
    try {
        Connection conn = getConnection();
        VM router = getVM(conn, vmName);
        // Determine the correct VIF on DomR to associate/disassociate the
        // IP address with
        VIF correctVif = getCorrectVif(router, vlanId);
        // If we are associating an IP address and DomR doesn't have a VIF
        // for the specified vlan ID, we need to add a VIF
        // If we are disassociating the last IP address in the VLAN, we need
        // to remove a VIF
        boolean addVif = false;
        boolean removeVif = false;
        if (add && correctVif == null) {
            addVif = true;
        } else if (!add && firstIP) {
            removeVif = true;
        }
        if (addVif) {
            // Add a new VIF to DomR
            String vifDeviceNum = getLowestAvailableVIFDeviceNum(router);
            if (vifDeviceNum == null) {
                throw new InternalErrorException("There were no more available slots for a new VIF on router: " + router.getNameLabel(conn));
            }
            correctVif = createVIF(conn, router, vifMacAddress, vlanId, 0, vifDeviceNum, true);
            correctVif.plug(conn);
            // Add iptables rule for network usage
            networkUsage(privateIpAddress, "addVif", "eth" + correctVif.getDevice(conn));
        }
        if (correctVif == null) {
            throw new InternalErrorException("Failed to find DomR VIF to associate/disassociate IP with.");
        }
        // Build the ipassoc plugin argument string: -A/-D, address (with
        // CIDR size for sourceNat/first IP), interface, and gateway.
        String args = null;
        if (add) {
            args = "-A";
        } else {
            args = "-D";
        }
        String cidrSize = Long.toString(NetUtils.getCidrSize(vlanNetmask));
        if (sourceNat) {
            args += " -f";
            args += " -l ";
            args += publicIpAddress + "/" + cidrSize;
        } else if (firstIP) {
            args += " -l ";
            args += publicIpAddress + "/" + cidrSize;
        } else {
            args += " -l ";
            args += publicIpAddress;
        }
        args += " -i ";
        args += privateIpAddress;
        args += " -c ";
        args += "eth" + correctVif.getDevice(conn);
        args += " -g ";
        args += vlanGateway;
        if(guestIp!=null){
            args += " -G ";
            args += guestIp;
        }
        String result = callHostPlugin("vmops", "ipassoc", "args", args);
        if (result == null || result.isEmpty()) {
            throw new InternalErrorException("Xen plugin \"ipassoc\" failed.");
        }
        if (removeVif) {
            Network network = correctVif.getNetwork(conn);
            // Mark this vif to be removed from network usage
            networkUsage(privateIpAddress, "deleteVif", "eth" + correctVif.getDevice(conn));
            // Remove the VIF from DomR
            correctVif.unplug(conn);
            correctVif.destroy(conn);
            // Disable the VLAN network if necessary
            disableVlanNetwork(network);
        }
    } catch (XenAPIException e) {
        String msg = "Unable to assign public IP address due to " + e.toString();
        s_logger.warn(msg, e);
        throw new InternalErrorException(msg);
    } catch (final XmlRpcException e) {
        String msg = "Unable to assign public IP address due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new InternalErrorException(msg);
    }
}
/**
 * Network-usage accounting hook. Only the "get" option produces a value,
 * and it always reports zero traffic as "rx:tx"; every other option
 * ("addVif", "deleteVif", ...) is a no-op returning null.
 */
protected String networkUsage(final String privateIpAddress, final String option, final String vif) {
    return option.equals("get") ? "0:0" : null;
}
/**
 * Thin wrapper translating an IPAssocCommand into an
 * {@link #assignPublicIpAddress} call and mapping any InternalErrorException
 * into a failed Answer.
 */
protected Answer execute(final IPAssocCommand cmd) {
    try {
        assignPublicIpAddress(cmd.getRouterName(), cmd.getRouterIp(), cmd.getPublicIp(), cmd.isAdd(), cmd.isFirstIP(), cmd.isSourceNat(), cmd.getVlanId(),
            cmd.getVlanGateway(), cmd.getVlanNetmask(), cmd.getVifMacAddress(), cmd.getGuestIp());
        return new Answer(cmd);
    } catch (InternalErrorException e) {
        return new Answer(cmd, false, e.getMessage());
    }
}
/**
 * Looks up the VNC console port of the named VM.
 *
 * NOTE(review): if several VMs share the name label, only the first one
 * returned by XenAPI is consulted — confirm names are unique per host.
 *
 * @return the port on success, or the error text on failure
 */
protected GetVncPortAnswer execute(GetVncPortCommand cmd) {
    Connection conn = getConnection();
    try {
        Set<VM> vms = VM.getByNameLabel(conn, cmd.getName());
        return new GetVncPortAnswer(cmd, getVncPort(vms.iterator().next()));
    } catch (XenAPIException e) {
        s_logger.warn("Unable to get vnc port " + e.toString(), e);
        return new GetVncPortAnswer(cmd, e.toString());
    } catch (Exception e) {
        s_logger.warn("Unable to get vnc port ", e);
        return new GetVncPortAnswer(cmd, e.getMessage());
    }
}
/**
 * This resource always reports itself as a storage pool.
 */
protected Storage.StorageResourceType getStorageResourceType() {
    return Storage.StorageResourceType.STORAGE_POOL;
}
/**
 * Health check: the host is healthy iff we can still ping the xenserver.
 */
protected CheckHealthAnswer execute(CheckHealthCommand cmd) {
    return new CheckHealthAnswer(cmd, pingxenserver());
}
/**
 * Reads the router's traffic counters via {@link #networkUsage} with the
 * "get" option and sums them into { received, sent }.
 *
 * The plugin result is a ':'-separated list of alternating rx,tx values;
 * pairs are accumulated (a trailing unpaired field is ignored).
 *
 * Fix: replaced the deprecated {@code new Long(String)} boxing with
 * {@code Long.parseLong} — same parsing, same NumberFormatException on
 * malformed fields, no needless allocation.
 *
 * @param privateIP the router's private IP address
 * @return a two-element array: [0] = bytes received, [1] = bytes sent
 */
protected long[] getNetworkStats(String privateIP) {
    final long[] stats = new long[2];
    final String result = networkUsage(privateIP, "get", null);
    if (result != null) {
        final String[] splitResult = result.split(":");
        int i = 0;
        while (i < splitResult.length - 1) {
            stats[0] += Long.parseLong(splitResult[i++]);
            stats[1] += Long.parseLong(splitResult[i++]);
        }
    }
    return stats;
}
/**
 * This is the method called for getting the HOST stats.
 *
 * @param cmd carries the host guid and id used to build the stats entry
 * @return answer wrapping a {@link HostStatsEntry}; on any failure the
 *         entry is null so the caller can detect the error
 */
protected GetHostStatsAnswer execute(GetHostStatsCommand cmd) {
    // Connection conn = getConnection();
    try {
        HostStatsEntry hostStats = getHostStats(cmd, cmd.getHostGuid(), cmd.getHostId());
        return new GetHostStatsAnswer(cmd, hostStats);
    } catch (Exception e) {
        String msg = "Unable to get Host stats" + e.toString();
        s_logger.warn(msg, e);
        // Null stats signal the failure to the caller.
        return new GetHostStatsAnswer(cmd, null);
    }
}
/**
 * Builds a {@link HostStatsEntry} from the host's RRD data: eth0 network
 * rx/tx, total/free memory, per-cpu utilization (averaged at the end) and
 * load average.
 *
 * The RRD legend entries have the form "cf:type:uuid:param"; only columns
 * whose type is "host" are consumed here.
 *
 * @return the populated entry, or null when RRD data could not be fetched
 */
protected HostStatsEntry getHostStats(GetHostStatsCommand cmd, String hostGuid, long hostId) {
    HostStatsEntry hostStats = new HostStatsEntry(hostId, 0, 0, 0, 0, "host", 0, 0, 0, 0);
    Object[] rrdData = getRRDData(1); // call rrd method with 1 for host
    if (rrdData == null) {
        return null;
    }
    Integer numRows = (Integer) rrdData[0];
    Integer numColumns = (Integer) rrdData[1];
    Node legend = (Node) rrdData[2];
    Node dataNode = (Node) rrdData[3];
    NodeList legendChildren = legend.getChildNodes();
    for (int col = 0; col < numColumns; col++) {
        if (legendChildren == null || legendChildren.item(col) == null) {
            continue;
        }
        String columnMetadata = getXMLNodeValue(legendChildren.item(col));
        if (columnMetadata == null) {
            continue;
        }
        // Legend format: "cf:type:uuid:param" — skip malformed entries.
        String[] columnMetadataList = columnMetadata.split(":");
        if (columnMetadataList.length != 4) {
            continue;
        }
        String type = columnMetadataList[1];
        String param = columnMetadataList[3];
        if (type.equalsIgnoreCase("host")) {
            if (param.contains("pif_eth0_rx")) {
                hostStats.setNetworkReadKBs(getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("pif_eth0_tx")) {
                hostStats.setNetworkWriteKBs(getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("memory_total_kib")) {
                hostStats.setTotalMemoryKBs(getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("memory_free_kib")) {
                hostStats.setFreeMemoryKBs(getDataAverage(dataNode, col, numRows));
            }
            // Each cpu column contributes to the running sum; divided by
            // the cpu count below to get the host average.
            if (param.contains("cpu")) {
                hostStats.setNumCpus(hostStats.getNumCpus() + 1);
                hostStats.setCpuUtilization(hostStats.getCpuUtilization() + getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("loadavg")) {
                hostStats.setAverageLoad((hostStats.getAverageLoad() + getDataAverage(dataNode, col, numRows)));
            }
        }
    }
    // add the host cpu utilization
    if (hostStats.getNumCpus() != 0) {
        hostStats.setCpuUtilization(hostStats.getCpuUtilization() / hostStats.getNumCpus());
        s_logger.debug("Host cpu utilization " + hostStats.getCpuUtilization());
    }
    return hostStats;
}
/**
 * Collects per-VM statistics for the named VMs. VM names are translated
 * to UUIDs (the RRD legend is keyed by UUID), stats are fetched, and the
 * result map is re-keyed back to VM names.
 *
 * On any failure an answer with the (possibly empty) name map collected so
 * far is returned rather than a failure answer.
 */
protected GetVmStatsAnswer execute(GetVmStatsCommand cmd) {
    List<String> vmNames = cmd.getVmNames();
    HashMap<String, VmStatsEntry> vmStatsNameMap = new HashMap<String, VmStatsEntry>();
    if( vmNames.size() == 0 ) {
        return new GetVmStatsAnswer(cmd, vmStatsNameMap);
    }
    Connection conn = getConnection();
    try {
        // Determine the UUIDs of the requested VMs
        List<String> vmUUIDs = new ArrayList<String>();
        for (String vmName : vmNames) {
            VM vm = getVM(conn, vmName);
            vmUUIDs.add(vm.getUuid(conn));
        }
        HashMap<String, VmStatsEntry> vmStatsUUIDMap = getVmStats(cmd, vmUUIDs, cmd.getHostGuid());
        if( vmStatsUUIDMap == null )
            return new GetVmStatsAnswer(cmd, vmStatsNameMap);
        // Re-key the UUID-indexed stats by the original VM names.
        for (String vmUUID : vmStatsUUIDMap.keySet()) {
            vmStatsNameMap.put(vmNames.get(vmUUIDs.indexOf(vmUUID)), vmStatsUUIDMap.get(vmUUID));
        }
        return new GetVmStatsAnswer(cmd, vmStatsNameMap);
    } catch (XenAPIException e) {
        String msg = "Unable to get VM stats" + e.toString();
        s_logger.warn(msg, e);
        return new GetVmStatsAnswer(cmd, vmStatsNameMap);
    } catch (XmlRpcException e) {
        String msg = "Unable to get VM stats" + e.getMessage();
        s_logger.warn(msg, e);
        return new GetVmStatsAnswer(cmd, vmStatsNameMap);
    }
}
/**
 * Builds per-VM stats (cpu, vif 0 rx/tx) from the host's RRD data for the
 * given VM UUIDs. Legend entries of the form "cf:vm:uuid:param" are matched
 * against the requested UUIDs; cpu columns are summed then averaged.
 *
 * @return map from VM uuid to its stats entry, or null when RRD data
 *         could not be fetched
 */
protected HashMap<String, VmStatsEntry> getVmStats(GetVmStatsCommand cmd, List<String> vmUUIDs, String hostGuid) {
    HashMap<String, VmStatsEntry> vmResponseMap = new HashMap<String, VmStatsEntry>();
    // Pre-seed every requested VM with a zeroed entry.
    for (String vmUUID : vmUUIDs) {
        vmResponseMap.put(vmUUID, new VmStatsEntry(0, 0, 0, 0, "vm"));
    }
    Object[] rrdData = getRRDData(2); // call rrddata with 2 for vm
    if (rrdData == null) {
        return null;
    }
    Integer numRows = (Integer) rrdData[0];
    Integer numColumns = (Integer) rrdData[1];
    Node legend = (Node) rrdData[2];
    Node dataNode = (Node) rrdData[3];
    NodeList legendChildren = legend.getChildNodes();
    for (int col = 0; col < numColumns; col++) {
        if (legendChildren == null || legendChildren.item(col) == null) {
            continue;
        }
        String columnMetadata = getXMLNodeValue(legendChildren.item(col));
        if (columnMetadata == null) {
            continue;
        }
        // Legend format: "cf:type:uuid:param" — skip malformed entries.
        String[] columnMetadataList = columnMetadata.split(":");
        if (columnMetadataList.length != 4) {
            continue;
        }
        String type = columnMetadataList[1];
        String uuid = columnMetadataList[2];
        String param = columnMetadataList[3];
        if (type.equals("vm") && vmResponseMap.keySet().contains(uuid)) {
            VmStatsEntry vmStatsAnswer = vmResponseMap.get(uuid);
            vmStatsAnswer.setEntityType("vm");
            if (param.contains("cpu")) {
                // Sum per-cpu utilization (scaled to percent); averaged below.
                vmStatsAnswer.setNumCPUs(vmStatsAnswer.getNumCPUs() + 1);
                vmStatsAnswer.setCPUUtilization((vmStatsAnswer.getCPUUtilization() + getDataAverage(dataNode, col, numRows))*100);
            } else if (param.equals("vif_0_rx")) {
                vmStatsAnswer.setNetworkReadKBs(getDataAverage(dataNode, col, numRows)/(8*2));
            } else if (param.equals("vif_0_tx")) {
                vmStatsAnswer.setNetworkWriteKBs(getDataAverage(dataNode, col, numRows)/(8*2));
            }
        }
    }
    // Average the accumulated cpu utilization over the number of cpus.
    for (String vmUUID : vmResponseMap.keySet()) {
        VmStatsEntry vmStatsAnswer = vmResponseMap.get(vmUUID);
        if (vmStatsAnswer.getNumCPUs() != 0) {
            vmStatsAnswer.setCPUUtilization(vmStatsAnswer.getCPUUtilization() / vmStatsAnswer.getNumCPUs());
            s_logger.debug("Vm cpu utilization " + vmStatsAnswer.getCPUUtilization());
        }
    }
    return vmResponseMap;
}
/**
 * Fetches the raw RRD statistics XML from the host plugin and extracts the
 * pieces the stats parsers need.
 *
 * Fixes: corrected the "tryinh" typo in the null-document log message and
 * added braces to the single-statement if bodies; logic is unchanged.
 *
 * @param flag 1 to collect host stats, 2 to collect VM stats
 * @return { numRows (Integer), numColumns (Integer), legend (Node),
 *         data (Node) }, or null when the plugin call or parsing fails
 */
protected Object[] getRRDData(int flag) {
    /*
     * Note: 1 => called from host, hence host stats 2 => called from vm, hence vm stats
     */
    String stats = "";
    try {
        if (flag == 1) {
            stats = getHostStatsRawXML();
        } else if (flag == 2) {
            stats = getVmStatsRawXML();
        }
    } catch (Exception e1) {
        s_logger.warn("Error whilst collecting raw stats from plugin:" + e1);
        return null;
    }
    // stats are null when the host plugin call fails (host down state)
    if (stats == null) {
        return null;
    }
    StringReader statsReader = new StringReader(stats);
    InputSource statsSource = new InputSource(statsReader);
    Document doc = null;
    try {
        doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(statsSource);
    } catch (Exception e) {
        s_logger.warn("Exception caught whilst processing the document via document factory:" + e);
        return null;
    }
    if (doc == null) {
        s_logger.warn("Null document found after trying to parse the stats source");
        return null;
    }
    // xport XML layout: root -> [meta, data]; meta holds rows/columns/legend.
    NodeList firstLevelChildren = doc.getChildNodes();
    NodeList secondLevelChildren = (firstLevelChildren.item(0)).getChildNodes();
    Node metaNode = secondLevelChildren.item(0);
    Node dataNode = secondLevelChildren.item(1);
    Integer numRows = 0;
    Integer numColumns = 0;
    Node legend = null;
    NodeList metaNodeChildren = metaNode.getChildNodes();
    for (int i = 0; i < metaNodeChildren.getLength(); i++) {
        Node n = metaNodeChildren.item(i);
        if (n.getNodeName().equals("rows")) {
            numRows = Integer.valueOf(getXMLNodeValue(n));
        } else if (n.getNodeName().equals("columns")) {
            numColumns = Integer.valueOf(getXMLNodeValue(n));
        } else if (n.getNodeName().equals("legend")) {
            legend = n;
        }
    }
    return new Object[] { numRows, numColumns, legend, dataNode };
}
/**
 * Returns the value of the node's first child (its text node).
 * NOTE(review): throws NPE when the node has no children — callers are
 * expected to pass element nodes with text content.
 */
protected String getXMLNodeValue(Node n) {
    final Node textChild = n.getChildNodes().item(0);
    return textChild.getNodeValue();
}
/**
 * Averages the values of one RRD column, skipping NaN samples.
 * Rows are stored newest-first; they are walked oldest-first. Returns 0
 * when there are no usable samples or the average is infinite/NaN.
 *
 * @param dataNode the RRD data node; each row's children are [t, v0, v1, ...]
 * @param col      zero-based column index (offset by one past the timestamp)
 * @param numRows  number of data rows
 */
protected double getDataAverage(Node dataNode, int col, int numRows) {
    double sum = 0;
    int sampleCount = 0;
    for (int row = 0; row < numRows; row++) {
        final Node data = dataNode.getChildNodes().item(numRows - 1 - row).getChildNodes().item(col + 1);
        final double sample = Double.valueOf(getXMLNodeValue(data));
        if (!Double.isNaN(sample)) {
            sampleCount += 1;
            sum += sample;
        }
    }
    if (sampleCount == 0) {
        // No usable samples: the sum is still 0 (finite), so report it.
        return sum;
    }
    final double average = sum / sampleCount;
    if (Double.isInfinite(average) || Double.isNaN(average)) {
        s_logger.warn("Found an invalid value (infinity/NaN) in getDataAverage(), numRows>0");
        return 0;
    }
    return average;
}
/**
 * Fetches the raw host-stats RRD XML via the "gethostvmstats" plugin,
 * covering a window starting 1000 seconds before now (epoch seconds).
 */
protected String getHostStatsRawXML() {
    final String startTime = String.valueOf(new Date().getTime() / 1000 - 1000);
    return callHostPlugin("vmops", "gethostvmstats",
        "collectHostStats", "true",
        "consolidationFunction", _consolidationFunction,
        "interval", String.valueOf(_pollingIntervalInSeconds),
        "startTime", startTime);
}
/**
 * Fetches the raw per-VM RRD XML via the "gethostvmstats" plugin
 * (collectHostStats=false), covering a window starting 1000 seconds ago.
 */
protected String getVmStatsRawXML() {
    final String startTime = String.valueOf(new Date().getTime() / 1000 - 1000);
    return callHostPlugin("vmops", "gethostvmstats",
        "collectHostStats", "false",
        "consolidationFunction", _consolidationFunction,
        "interval", String.valueOf(_pollingIntervalInSeconds),
        "startTime", startTime);
}
/**
 * Builds a warning string of the form "[domId] message" for the given VM.
 *
 * NOTE(review): the assembled message is only accumulated in the local
 * StringBuilder and never logged, stored, or returned — confirm whether
 * this is intentional (e.g. leftover from removed alerting code).
 */
protected void recordWarning(final VM vm, final String message, final Throwable e) {
    Connection conn = getConnection();
    final StringBuilder msg = new StringBuilder();
    try {
        final Long domId = vm.getDomid(conn);
        // Use -1 when the domain id is unavailable.
        msg.append("[").append(domId != null ? domId : -1l).append("] ");
    } catch (final BadServerResponse e1) {
    } catch (final XmlRpcException e1) {
    } catch (XenAPIException e1) {
    }
    msg.append(message);
}
/**
 * Maps a XenAPI power state to the cloud State via the static lookup
 * table, defaulting to Unknown for unmapped values.
 */
protected State convertToState(Types.VmPowerState ps) {
    final State mapped = s_statesTable.get(ps);
    if (mapped == null) {
        return State.Unknown;
    }
    return mapped;
}
/**
 * Snapshots the power state of every guest VM resident on this host.
 * Control domains, snapshots and templates are skipped. Both the
 * resident-VM query and each record fetch get one retry with a 1s pause.
 *
 * @return map of VM name label to State, or null when the host itself
 *         could not be queried
 */
protected HashMap<String, State> getAllVms() {
    final HashMap<String, State> vmStates = new HashMap<String, State>();
    Connection conn = getConnection();
    Set<VM> vms = null;
    // Query the resident VMs, retrying once after a short pause.
    for (int i = 0; i < 2; i++) {
        try {
            Host host = Host.getByUuid(conn, _host.uuid);
            vms = host.getResidentVMs(conn);
            break;
        } catch (final Throwable e) {
            s_logger.warn("Unable to get vms", e);
        }
        try {
            Thread.sleep(1000);
        } catch (final InterruptedException ex) {
        }
    }
    if (vms == null) {
        return null;
    }
    for (VM vm : vms) {
        VM.Record record = null;
        // Fetch each VM record, retrying once on transient failures.
        for (int i = 0; i < 2; i++) {
            try {
                record = vm.getRecord(conn);
                break;
            } catch (XenAPIException e1) {
                s_logger.debug("VM.getRecord failed on host:" + _host.uuid + " due to " + e1.toString());
            } catch (XmlRpcException e1) {
                s_logger.debug("VM.getRecord failed on host:" + _host.uuid + " due to " + e1.getMessage());
            }
            try {
                Thread.sleep(1000);
            } catch (final InterruptedException ex) {
            }
        }
        if (record == null) {
            continue;
        }
        if (record.isControlDomain || record.isASnapshot || record.isATemplate) {
            continue; // Skip DOM0
        }
        VmPowerState ps = record.powerState;
        final State state = convertToState(ps);
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("VM " + record.nameLabel + ": powerstate = " + ps + "; vm state=" + state.toString());
        }
        vmStates.put(record.nameLabel, state);
    }
    return vmStates;
}
/**
 * Resolves the power state of the named VM, retrying up to three times
 * around a known XenAPI race on recently-deleted VMs. Falls back to
 * Stopped when the VM cannot be found or all retries fail.
 *
 * @param vmName the VM's name label
 * @return the converted State, or State.Stopped as the fallback
 */
protected State getVmState(final String vmName) {
    Connection conn = getConnection();
    int retry = 3;
    while (retry-- > 0) {
        try {
            Set<VM> vms = VM.getByNameLabel(conn, vmName);
            // Only the first matching VM is consulted.
            for (final VM vm : vms) {
                return convertToState(vm.getPowerState(conn));
            }
        } catch (final BadServerResponse e) {
            // There is a race condition within xen such that if a vm is
            // deleted and we
            // happen to ask for it, it throws this stupid response. So
            // if this happens,
            // we take a nap and try again which then avoids the race
            // condition because
            // the vm's information is now cleaned up by xen. The error
            // is as follows
            // com.xensource.xenapi.Types$BadServerResponse
            // [HANDLE_INVALID, VM,
            // 3dde93f9-c1df-55a7-2cde-55e1dce431ab]
            s_logger.info("Unable to get a vm PowerState due to " + e.toString() + ". We are retrying. Count: " + retry);
            try {
                Thread.sleep(3000);
            } catch (final InterruptedException ex) {
            }
        } catch (XenAPIException e) {
            String msg = "Unable to get a vm PowerState due to " + e.toString();
            s_logger.warn(msg, e);
            break;
        } catch (final XmlRpcException e) {
            String msg = "Unable to get a vm PowerState due to " + e.getMessage();
            s_logger.warn(msg, e);
            break;
        }
    }
    return State.Stopped;
}
/**
 * Reports the current state of a VM, refreshing the local state cache
 * when the VM is found running. The vnc port is never looked up here and
 * is always reported as null.
 */
protected CheckVirtualMachineAnswer execute(final CheckVirtualMachineCommand cmd) {
    final String vmName = cmd.getVmName();
    final State state = getVmState(vmName);
    if (state == State.Running) {
        synchronized (_vms) {
            _vms.put(vmName, State.Running);
        }
    }
    final Integer vncPort = null; // not populated by this resource
    return new CheckVirtualMachineAnswer(cmd, state, vncPort);
}
/**
 * Prepares this host to receive a migrating VM: verifies the host is part
 * of a multi-host pool, shares any ISO SR the VM uses (creating a PBD on
 * this host if needed), enables the VLAN networks backing the VM's VIFs,
 * and marks the VM as Migrating in the local state cache.
 *
 * @return failure when the pool has a single host, the VM name is not
 *         unique, or any XenAPI step throws
 */
protected PrepareForMigrationAnswer execute(final PrepareForMigrationCommand cmd) {
    /*
     *
     * String result = null;
     *
     * List<VolumeVO> vols = cmd.getVolumes(); result = mountwithoutvdi(vols, cmd.getMappings()); if (result !=
     * null) { return new PrepareForMigrationAnswer(cmd, false, result); }
     */
    final String vmName = cmd.getVmName();
    try {
        Connection conn = getConnection();
        Set<Host> hosts = Host.getAll(conn);
        // workaround before implementing xenserver pool
        // no migration
        if (hosts.size() <= 1) {
            return new PrepareForMigrationAnswer(cmd, false, "not in a same xenserver pool");
        }
        // if the vm have CD
        // 1. make iosSR shared
        // 2. create pbd in target xenserver
        SR sr = getISOSRbyVmName(cmd.getVmName());
        if (sr != null) {
            Set<PBD> pbds = sr.getPBDs(conn);
            boolean found = false;
            // Check whether this host already has a PBD for the ISO SR.
            for (PBD pbd : pbds) {
                if (Host.getByUuid(conn, _host.uuid).equals(pbd.getHost(conn))) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                // Clone an existing PBD's device config onto this host.
                sr.setShared(conn, true);
                PBD pbd = pbds.iterator().next();
                PBD.Record pbdr = new PBD.Record();
                pbdr.deviceConfig = pbd.getDeviceConfig(conn);
                pbdr.host = Host.getByUuid(conn, _host.uuid);
                pbdr.SR = sr;
                PBD newpbd = PBD.create(conn, pbdr);
                newpbd.plug(conn);
            }
        }
        Set<VM> vms = VM.getByNameLabel(conn, vmName);
        if (vms.size() != 1) {
            String msg = "There are " + vms.size() + " " + vmName;
            s_logger.warn(msg);
            return new PrepareForMigrationAnswer(cmd, false, msg);
        }
        VM vm = vms.iterator().next();
        // check network
        Set<VIF> vifs = vm.getVIFs(conn);
        for (VIF vif : vifs) {
            Network network = vif.getNetwork(conn);
            Set<PIF> pifs = network.getPIFs(conn);
            long vlan = -1;
            PIF npif = null;
            // Find the tagged PIF carrying this network's VLAN, if any.
            for (PIF pif : pifs) {
                try {
                    vlan = pif.getVLAN(conn);
                    if (vlan != -1 ) {
                        VLAN vland = pif.getVLANMasterOf(conn);
                        npif = vland.getTaggedPIF(conn);
                    }
                    break;
                }catch (Exception e) {
                    continue;
                }
            }
            if (npif == null) {
                continue;
            }
            // Map the tagged PIF's network to this host's private/public PIF;
            // other networks are left alone.
            network = npif.getNetwork(conn);
            String nwuuid = network.getUuid(conn);
            String pifuuid = null;
            if(nwuuid.equalsIgnoreCase(_host.privateNetwork)) {
                pifuuid = _host.privatePif;
            } else if(nwuuid.equalsIgnoreCase(_host.publicNetwork)) {
                pifuuid = _host.publicPif;
            } else {
                continue;
            }
            Network vlanNetwork = enableVlanNetwork(vlan, pifuuid);
            if (vlanNetwork == null) {
                throw new InternalErrorException("Failed to enable VLAN network with tag: " + vlan);
            }
        }
        synchronized (_vms) {
            _vms.put(cmd.getVmName(), State.Migrating);
        }
        return new PrepareForMigrationAnswer(cmd, true, null);
    } catch (Exception e) {
        String msg = "catch exception " + e.getMessage();
        s_logger.warn(msg, e);
        return new PrepareForMigrationAnswer(cmd, false, msg);
    }
}
/**
 * Copies a template from secondary storage (NFS) into the primary storage
 * pool, unless a VDI named "Template &lt;name&gt;" already exists there.
 * The copied VDI is snapshotted and the snapshot kept (the copy itself is
 * destroyed) so later clones are thin. The temporary NFS SR is always
 * removed in the finally block.
 *
 * @return a DownloadAnswer carrying the VDI uuid and physical size, or a
 *         DOWNLOAD_ERROR answer on any failure
 */
@Override
public DownloadAnswer execute(final PrimaryStorageDownloadCommand cmd) {
    SR tmpltsr = null;
    String tmplturl = cmd.getUrl();
    // Split the URL into the NFS mountpoint and the template vhd name.
    int index = tmplturl.lastIndexOf("/");
    String mountpoint = tmplturl.substring(0, index);
    String tmpltname = null;
    if (index < tmplturl.length() - 1)
        tmpltname = tmplturl.substring(index + 1).replace(".vhd", "");
    try {
        Connection conn = getConnection();
        String pUuid = cmd.getPoolUuid();
        SR poolsr = null;
        // The pool SR is looked up by name label, which must be unique.
        Set<SR> srs = SR.getByNameLabel(conn, pUuid);
        if (srs.size() != 1) {
            String msg = "There are " + srs.size() + " SRs with same name: " + pUuid;
            s_logger.warn(msg);
            return new DownloadAnswer(null, 0, msg, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR, "", "", 0);
        } else {
            poolsr = srs.iterator().next();
        }
        /* Does the template exist in primary storage pool? If yes, no copy */
        VDI vmtmpltvdi = null;
        VDI snapshotvdi = null;
        Set<VDI> vdis = VDI.getByNameLabel(conn, "Template " + cmd.getName());
        for (VDI vdi : vdis) {
            VDI.Record vdir = vdi.getRecord(conn);
            if (vdir.SR.equals(poolsr)) {
                vmtmpltvdi = vdi;
                break;
            }
        }
        String uuid;
        if (vmtmpltvdi == null) {
            // Mount secondary storage, locate the source VDI (by uuid when
            // the url named one, else the first VDI in the SR).
            tmpltsr = createNfsSRbyURI(new URI(mountpoint), false);
            tmpltsr.scan(conn);
            VDI tmpltvdi = null;
            if (tmpltname != null) {
                tmpltvdi = getVDIbyUuid(tmpltname);
            }
            if (tmpltvdi == null) {
                vdis = tmpltsr.getVDIs(conn);
                for (VDI vdi : vdis) {
                    tmpltvdi = vdi;
                    break;
                }
            }
            if (tmpltvdi == null) {
                String msg = "Unable to find template vdi on secondary storage" + "host:" + _host.uuid + "pool: " + tmplturl;
                s_logger.warn(msg);
                return new DownloadAnswer(null, 0, msg, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR, "", "", 0);
            }
            // Copy into the pool SR, keep a snapshot, drop the full copy.
            vmtmpltvdi = cloudVDIcopy(tmpltvdi, poolsr);
            snapshotvdi = vmtmpltvdi.snapshot(conn, new HashMap<String, String>());
            vmtmpltvdi.destroy(conn);
            snapshotvdi.setNameLabel(conn, "Template " + cmd.getName());
            // vmtmpltvdi.setNameDescription(conn, cmd.getDescription());
            uuid = snapshotvdi.getUuid(conn);
            vmtmpltvdi = snapshotvdi;
        } else
            uuid = vmtmpltvdi.getUuid(conn);
        // Determine the size of the template
        long phySize = vmtmpltvdi.getPhysicalUtilisation(conn);
        DownloadAnswer answer = new DownloadAnswer(null, 100, cmd, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOADED, uuid, uuid);
        answer.setTemplateSize(phySize);
        return answer;
    } catch (XenAPIException e) {
        String msg = "XenAPIException:" + e.toString() + "host:" + _host.uuid + "pool: " + tmplturl;
        s_logger.warn(msg, e);
        return new DownloadAnswer(null, 0, msg, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR, "", "", 0);
    } catch (Exception e) {
        String msg = "XenAPIException:" + e.getMessage() + "host:" + _host.uuid + "pool: " + tmplturl;
        s_logger.warn(msg, e);
        return new DownloadAnswer(null, 0, msg, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR, "", "", 0);
    } finally {
        // Always unmount the temporary secondary-storage SR.
        removeSR(tmpltsr);
    }
}
/**
 * Removes an SR after waiting for all in-flight operations on its VDIs to
 * drain, polling every 30s for up to 30 minutes.
 *
 * @param sr the SR to remove; null is a no-op
 * @return null on success, otherwise an error message describing why the
 *         SR was not removed (still in use, or XenAPI failure)
 */
protected String removeSRSync(SR sr) {
    if (sr == null) {
        return null;
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug(logX(sr, "Removing SR"));
    }
    Connection conn = getConnection();
    long waittime = 0;
    try {
        Set<VDI> vdis = sr.getVDIs(conn);
        for (VDI vdi : vdis) {
            // Skip VDIs that are idle; wait (bounded) for busy ones.
            Map<java.lang.String, Types.VdiOperations> currentOperation = vdi.getCurrentOperations(conn);
            if (currentOperation == null || currentOperation.size() == 0) {
                continue;
            }
            // Give up after 30 minutes of waiting.
            if (waittime >= 1800000) {
                String msg = "This template is being used, try late time";
                s_logger.warn(msg);
                return msg;
            }
            waittime += 30000;
            try {
                Thread.sleep(30000);
            } catch (final InterruptedException ex) {
            }
        }
        removeSR(sr);
        return null;
    } catch (XenAPIException e) {
        s_logger.warn(logX(sr, "Unable to get current opertions " + e.toString()), e);
    } catch (XmlRpcException e) {
        s_logger.warn(logX(sr, "Unable to get current opertions " + e.getMessage()), e);
    }
    String msg = "Remove SR failed";
    s_logger.warn(msg);
    return msg;
}
/**
 * Best-effort removal of an SR: forgets its VDIs, unplugs and destroys its
 * PBDs, then forgets the SR itself. The whole sequence is attempted twice;
 * failures are logged but never thrown.
 *
 * @param sr the SR to remove; null is a no-op
 */
protected void removeSR(SR sr) {
    if (sr == null) {
        return;
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug(logX(sr, "Removing SR"));
    }
    // Two attempts; transient PBD detach failures often clear on retry.
    for (int i = 0; i < 2; i++) {
        Connection conn = getConnection();
        try {
            Set<VDI> vdis = sr.getVDIs(conn);
            for (VDI vdi : vdis) {
                vdi.forget(conn);
            }
            Set<PBD> pbds = sr.getPBDs(conn);
            for (PBD pbd : pbds) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug(logX(pbd, "Unplugging pbd"));
                }
                if (pbd.getCurrentlyAttached(conn)) {
                    pbd.unplug(conn);
                }
                pbd.destroy(conn);
            }
            // The SR can only be forgotten once no PBDs remain.
            pbds = sr.getPBDs(conn);
            if (pbds.size() == 0) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug(logX(sr, "Forgetting"));
                }
                sr.forget(conn);
                return;
            }
            if (s_logger.isDebugEnabled()) {
                s_logger.debug(logX(sr, "There are still pbd attached"));
                if (s_logger.isTraceEnabled()) {
                    for (PBD pbd : pbds) {
                        s_logger.trace(logX(pbd, " Still attached"));
                    }
                }
            }
        } catch (XenAPIException e) {
            s_logger.debug(logX(sr, "Catch XenAPIException: " + e.toString()));
        } catch (XmlRpcException e) {
            s_logger.debug(logX(sr, "Catch Exception: " + e.getMessage()));
        }
    }
    s_logger.warn(logX(sr, "Unable to remove SR"));
}
/**
 * Live-migrates the named VM to the destination host in the same pool.
 * For non-Windows guests the "preparemigration" plugin fakes the PV driver
 * version first (polling up to 20s for the guest metrics to report it),
 * since XenServer requires PV drivers for migration.
 *
 * The local state cache is set to Stopping during the operation and
 * restored (or left at Stopping after a successful migrate) in finally.
 */
protected MigrateAnswer execute(final MigrateCommand cmd) {
    final String vmName = cmd.getVmName();
    State state = null;
    synchronized (_vms) {
        state = _vms.get(vmName);
        _vms.put(vmName, State.Stopping);
    }
    try {
        Connection conn = getConnection();
        Set<VM> vms = VM.getByNameLabel(conn, vmName);
        // Resolve the destination host by its address.
        String ipaddr = cmd.getDestinationIp();
        Set<Host> hosts = Host.getAll(conn);
        Host dsthost = null;
        for (Host host : hosts) {
            if (host.getAddress(conn).equals(ipaddr)) {
                dsthost = host;
                break;
            }
        }
        // if it is windows, we will not fake it is migrateable,
        // windows requires PV driver to migrate
        for (VM vm : vms) {
            if (!cmd.isWindows()) {
                String uuid = vm.getUuid(conn);
                String result = callHostPlugin("vmops", "preparemigration", "uuid", uuid);
                if (result == null || result.isEmpty()) {
                    return new MigrateAnswer(cmd, false, "migration failed", null);
                }
                // check if pv version is successfully set up
                int i = 0;
                for (; i < 20; i++) {
                    try {
                        Thread.sleep(1000);
                    } catch (final InterruptedException ex) {
                    }
                    VMGuestMetrics vmmetric = vm.getGuestMetrics(conn);
                    if (isRefNull(vmmetric))
                        continue;
                    Map<String, String> PVversion = vmmetric.getPVDriversVersion(conn);
                    if (PVversion != null && PVversion.containsKey("major")) {
                        break;
                    }
                }
                if (i >= 20) {
                    String msg = "migration failed due to can not fake PV driver for " + vmName;
                    s_logger.warn(msg);
                    return new MigrateAnswer(cmd, false, msg, null);
                }
            }
            final Map<String, String> options = new HashMap<String, String>();
            vm.poolMigrate(conn, dsthost, options);
            state = State.Stopping;
        }
        return new MigrateAnswer(cmd, true, "migration succeeded", null);
    } catch (XenAPIException e) {
        String msg = "migration failed due to " + e.toString();
        s_logger.warn(msg, e);
        return new MigrateAnswer(cmd, false, msg, null);
    } catch (XmlRpcException e) {
        String msg = "migration failed due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new MigrateAnswer(cmd, false, msg, null);
    } finally {
        // Restore the pre-migration cached state (or Stopping on success).
        synchronized (_vms) {
            _vms.put(vmName, state);
        }
    }
}
/**
 * Polls XenServer for up to ~20 seconds to determine the real power state
 * of the VM with the given name label. Returns the converted state as soon
 * as a meaningful (non-HALTED, non-UNKNOWN, non-UNRECOGNIZED) power state
 * is observed; falls back to {@link State#Stopped} after the polling window.
 *
 * BUGFIX: the original loop executed {@code continue} when the VM lookup
 * returned nothing, which skipped the 1-second sleep at the bottom of the
 * loop and burned all 20 retries in a tight busy-spin. The sleep now runs
 * on every iteration regardless of the lookup result.
 *
 * @param label the VM name label to look up
 * @return the observed state, or State.Stopped if none could be determined
 */
protected State getRealPowerState(String label) {
    Connection conn = getConnection();
    s_logger.trace("Checking on the HALTED State");
    for (int i = 0; i < 20; i++) {
        try {
            Set<VM> vms = VM.getByNameLabel(conn, label);
            if (vms != null && vms.size() > 0) {
                VM vm = vms.iterator().next();
                VmPowerState vps = vm.getPowerState(conn);
                if (vps != null && vps != VmPowerState.HALTED && vps != VmPowerState.UNKNOWN && vps != VmPowerState.UNRECOGNIZED) {
                    return convertToState(vps);
                }
            }
        } catch (XenAPIException e) {
            String msg = "Unable to get real power state due to " + e.toString();
            s_logger.warn(msg, e);
        } catch (XmlRpcException e) {
            String msg = "Unable to get real power state due to " + e.getMessage();
            s_logger.warn(msg, e);
        }
        // Always pause between polls (previously skipped when the VM was
        // not found, causing a busy loop).
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // best-effort polling; ignore interruption
        }
    }
    return State.Stopped;
}
/**
 * Locates the control domain (dom0) resident on this host.
 *
 * @param conn the XAPI connection to use
 * @return the control domain VM together with its record
 * @throws CloudRuntimeException if no resident VM is a control domain
 */
protected Pair<VM, VM.Record> getControlDomain(Connection conn) throws XenAPIException, XmlRpcException {
    final Host thisHost = Host.getByUuid(conn, _host.uuid);
    for (final VM candidate : thisHost.getResidentVMs(conn)) {
        if (candidate.getIsControlDomain(conn)) {
            return new Pair<VM, VM.Record>(candidate, candidate.getRecord(conn));
        }
    }
    throw new CloudRuntimeException("Com'on no control domain? What the crap?!#@!##$@");
}
/**
 * Reconciles the VM states reported by XenServer with the states tracked
 * in {@code _vms}, updating the cache in place.
 *
 * @return map of VM name to new state for every VM whose state changed,
 *         or null if the hypervisor could not be queried
 */
protected HashMap<String, State> sync() {
    HashMap<String, State> newStates;
    HashMap<String, State> oldStates = null;
    final HashMap<String, State> changes = new HashMap<String, State>();
    synchronized (_vms) {
        newStates = getAllVms();
        if (newStates == null) {
            s_logger.debug("Unable to get the vm states so no state sync at this point.");
            return null;
        }
        // Work on a copy of the tracked states; whatever is left in
        // oldStates after the first pass has vanished from the hypervisor.
        oldStates = new HashMap<String, State>(_vms.size());
        oldStates.putAll(_vms);
        for (final Map.Entry<String, State> entry : newStates.entrySet()) {
            final String vm = entry.getKey();
            State newState = entry.getValue();
            final State oldState = oldStates.remove(vm);
            // A VM we believed active but xen reports Stopped may just be in
            // transition; double-check its real power state before trusting it.
            if (newState == State.Stopped && oldState != State.Stopping && oldState != null && oldState != State.Stopped) {
                newState = getRealPowerState(vm);
            }
            if (s_logger.isTraceEnabled()) {
                s_logger.trace("VM " + vm + ": xen has state " + newState + " and we have state " + (oldState != null ? oldState.toString() : "null"));
            }
            // Transient "migrating" artifacts are not real VM entries.
            if (vm.startsWith("migrating")) {
                s_logger.debug("Migrating from xen detected. Skipping");
                continue;
            }
            if (oldState == null) {
                // Never-seen VM: record it and report the change.
                _vms.put(vm, newState);
                s_logger.debug("Detecting a new state but couldn't find a old state so adding it to the changes: " + vm);
                changes.put(vm, newState);
            } else if (oldState == State.Starting) {
                // Starting -> Running completes normally; a lagging Stopped
                // report is ignored rather than treated as a change.
                if (newState == State.Running) {
                    _vms.put(vm, newState);
                } else if (newState == State.Stopped) {
                    s_logger.debug("Ignoring vm " + vm + " because of a lag in starting the vm.");
                }
            } else if (oldState == State.Migrating) {
                if (newState == State.Running) {
                    s_logger.debug("Detected that an migrating VM is now running: " + vm);
                    _vms.put(vm, newState);
                }
            } else if (oldState == State.Stopping) {
                if (newState == State.Stopped) {
                    _vms.put(vm, newState);
                } else if (newState == State.Running) {
                    s_logger.debug("Ignoring vm " + vm + " because of a lag in stopping the vm. ");
                }
            } else if (oldState != newState) {
                _vms.put(vm, newState);
                if (newState == State.Stopped) {
                    /*
                     * if (_vmsKilled.remove(vm)) { s_logger.debug("VM " + vm + " has been killed for storage. ");
                     * newState = State.Error; }
                     */
                }
                changes.put(vm, newState);
            }
        }
        // Second pass: VMs we tracked but xen no longer reports.
        for (final Map.Entry<String, State> entry : oldStates.entrySet()) {
            final String vm = entry.getKey();
            final State oldState = entry.getValue();
            if (s_logger.isTraceEnabled()) {
                s_logger.trace("VM " + vm + " is now missing from xen so reporting stopped");
            }
            if (oldState == State.Stopping) {
                s_logger.debug("Ignoring VM " + vm + " in transition state stopping.");
                _vms.remove(vm);
            } else if (oldState == State.Starting) {
                s_logger.debug("Ignoring VM " + vm + " in transition state starting.");
            } else if (oldState == State.Stopped) {
                _vms.remove(vm);
            } else if (oldState == State.Migrating) {
                s_logger.debug("Ignoring VM " + vm + " in migrating state.");
            } else {
                // Anything else that vanished is reported as Stopped.
                State state = State.Stopped;
                /*
                 * if (_vmsKilled.remove(entry.getKey())) { s_logger.debug("VM " + vm +
                 * " has been killed by storage monitor"); state = State.Error; }
                 */
                changes.put(entry.getKey(), state);
            }
        }
    }
    return changes;
}
/**
 * Handles the Ready command: best-effort unmount of the snapshots
 * directory for the data center, then reports the host as ready.
 */
protected ReadyAnswer execute(ReadyCommand cmd) {
    final Long dcId = cmd.getDataCenterId();
    // Ignore the result of the callHostPlugin. Even if unmounting the
    // snapshots dir fails, let Ready command succeed.
    callHostPlugin("vmopsSnapshot", "unmountSnapshotsDir", "dcId", dcId.toString());
    return new ReadyAnswer(cmd);
}
//
// Using synchronized on the VM name in the caller does not prevent multiple
// commands from being sent against the same VM: there would be a race
// condition between the finally clause and the main block if multiple
// requests are in flight.
//
// Therefore, a lazy solution is to add a synchronized guard here.
/**
 * Looks up the VNC console port of the given VM via the vmops host plugin.
 *
 * @param vm the VM whose console port is wanted
 * @return the VNC port, or -1 if the record or the port cannot be obtained
 */
protected int getVncPort(VM vm) {
    Connection conn = getConnection();
    VM.Record record;
    try {
        record = vm.getRecord(conn);
    } catch (XenAPIException e) {
        String msg = "Unable to get vnc-port due to " + e.toString();
        s_logger.warn(msg, e);
        return -1;
    } catch (XmlRpcException e) {
        String msg = "Unable to get vnc-port due to " + e.getMessage();
        s_logger.warn(msg, e);
        return -1;
    }
    // An empty HVM boot policy means the guest is paravirtualized.
    final String hvm = record.HVMBootPolicy.isEmpty() ? "false" : "true";
    String vncport = callHostPlugin("vmops", "getvncport", "domID", record.domid.toString(), "hvm", hvm);
    if (vncport == null || vncport.isEmpty()) {
        return -1;
    }
    return NumbersUtil.parseInt(vncport.replace("\n", ""), -1);
}
/**
 * Reboots a VM, preferring a clean (guest-cooperative) reboot and falling
 * back to a hard reboot when the guest does not support it.
 */
protected Answer execute(final RebootCommand cmd) {
    synchronized (_vms) {
        _vms.put(cmd.getVmName(), State.Starting);
    }
    try {
        Connection conn = getConnection();
        Set<VM> vms = null;
        try {
            vms = VM.getByNameLabel(conn, cmd.getVmName());
        } catch (XenAPIException e0) {
            s_logger.debug("getByNameLabel failed " + e0.toString());
            return new RebootAnswer(cmd, "getByNameLabel failed " + e0.toString());
        } catch (Exception e0) {
            s_logger.debug("getByNameLabel failed " + e0.getMessage());
            return new RebootAnswer(cmd, "getByNameLabel failed");
        }
        for (VM vm : vms) {
            try {
                vm.cleanReboot(conn);
            } catch (XenAPIException e) {
                // Guest without PV tools (or refusing): fall back to a hard reboot.
                s_logger.debug("Do Not support Clean Reboot, fall back to hard Reboot: " + e.toString());
                try {
                    vm.hardReboot(conn);
                } catch (XenAPIException e1) {
                    s_logger.debug("Caught exception on hard Reboot " + e1.toString());
                    return new RebootAnswer(cmd, "reboot failed: " + e1.toString());
                } catch (XmlRpcException e1) {
                    s_logger.debug("Caught exception on hard Reboot " + e1.getMessage());
                    return new RebootAnswer(cmd, "reboot failed");
                }
            } catch (XmlRpcException e) {
                String msg = "Clean Reboot failed due to " + e.getMessage();
                s_logger.warn(msg, e);
                return new RebootAnswer(cmd, msg);
            }
        }
        return new RebootAnswer(cmd, "reboot succeeded", null, null);
    } finally {
        // NOTE(review): the tracked state is set to Running even on the
        // failure return paths above -- presumably because the VM is still
        // up after a failed reboot attempt; confirm this is intentional.
        synchronized (_vms) {
            _vms.put(cmd.getVmName(), State.Running);
        }
    }
}
/**
 * Reboots a router VM: snapshots its traffic counters first (a reboot
 * resets them), reboots, then re-establishes connectivity and re-creates
 * the network-usage tracker.
 */
protected Answer execute(RebootRouterCommand cmd) {
    Long bytesSent = 0L;
    Long bytesRcvd = 0L;
    // Capture the traffic counters before the reboot wipes them.
    if (VirtualMachineName.isValidRouterName(cmd.getVmName())) {
        final long[] stats = getNetworkStats(cmd.getPrivateIpAddress());
        bytesSent = stats[0];
        bytesRcvd = stats[1];
    }
    final RebootAnswer answer = (RebootAnswer) execute((RebootCommand) cmd);
    answer.setBytesSent(bytesSent);
    answer.setBytesReceived(bytesRcvd);
    if (!answer.getResult()) {
        return answer;
    }
    final String cnct = connect(cmd.getVmName(), cmd.getPrivateIpAddress());
    networkUsage(cmd.getPrivateIpAddress(), "create", null);
    return cnct == null ? answer : new Answer(cmd, false, cnct);
}
/**
 * Clones a fresh VM from the built-in template that matches the requested
 * guest OS type. Paravirtualized guests additionally get a bootloader
 * (eliloader when installing from ISO, pygrub otherwise) and an install
 * repository configured.
 */
protected VM createVmFromTemplate(Connection conn, StartCommand cmd) throws XenAPIException, XmlRpcException {
    final String guestOsTypeName = getGuestOsType(cmd.getGuestOSDescription());
    final Set<VM> templates = VM.getByNameLabel(conn, guestOsTypeName);
    assert templates.size() == 1 : "Should only have 1 template but found " + templates.size();
    final VM template = templates.iterator().next();
    final VM clonedVm = template.createClone(conn, cmd.getVmName());
    clonedVm.removeFromOtherConfig(conn, "disks");
    final boolean hvmGuest = guestOsTypeName.startsWith("Windows")
            || guestOsTypeName.startsWith("Citrix")
            || guestOsTypeName.startsWith("Other");
    if (!hvmGuest) {
        clonedVm.setPVBootloader(conn, cmd.getBootFromISO() ? "eliloader" : "pygrub");
        clonedVm.addToOtherConfig(conn, "install-repository", "cdrom");
    }
    return clonedVm;
}
/**
 * Joins this host to the resource pool whose master is at {@code masterIp}.
 * No-op (returns true) if the host is already a member of that pool.
 * After issuing the join, blocks until the local xapi has converted this
 * host to a slave (polling up to ~30s after a fixed 10s restart wait).
 *
 * @return true on success
 * @throws CloudRuntimeException / RuntimeException on failure
 */
public boolean joinPool(String masterIp, String username, String password) {
    Connection hostConn = null;
    Connection poolConn = null;
    Session hostSession = null;
    URL hostUrl = null;
    try {
        // Connect and find out about the new connection to the new pool.
        poolConn = _connPool.masterConnect(masterIp, username, password);
        Set<Pool> pools = Pool.getAll(poolConn);
        Pool pool = pools.iterator().next();
        String poolUUID = pool.getUuid(poolConn);
        //check if this host is already in pool
        Set<Host> hosts = Host.getAll(poolConn);
        for( Host host : hosts ) {
            if(host.getAddress(poolConn).equals(_host.ip)) {
                _host.pool = poolUUID;
                return true;
            }
        }
        // Open a direct connection to this host to issue the join request.
        hostUrl = new URL("http://" + _host.ip);
        hostConn = new Connection(hostUrl, 100);
        hostSession = Session.loginWithPassword(hostConn, _username, _password, APIVersion.latest().toString());
        // Now join it.
        Pool.join(hostConn, masterIp, username, password);
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Joined the pool at " + masterIp);
        }
        try {
            // slave will restart xapi in 10 sec
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // best-effort wait for xapi restart
        }
        // check if the master of this host is set correctly.
        // A successful login means the host is still master; the expected
        // outcome is a HostIsSlave error, which signals conversion is done.
        Connection c = new Connection(hostUrl, 100);
        int i;
        for (i = 0 ; i < 15; i++) {
            try {
                Session.loginWithPassword(c, _username, _password, APIVersion.latest().toString());
                s_logger.debug(_host.ip + " is still master, waiting for the conversion to the slave");
                Session.logout(c);
                c.dispose();
            } catch (Types.HostIsSlave e) {
                // Conversion complete; clean up the probe connection.
                try {
                    Session.logout(c);
                    c.dispose();
                } catch (XmlRpcException e1) {
                    s_logger.debug("Unable to logout of test connection due to " + e1.getMessage());
                } catch (XenAPIException e1) {
                    s_logger.debug("Unable to logout of test connection due to " + e1.getMessage());
                }
                break;
            } catch (XmlRpcException e) {
                s_logger.debug("XmlRpcException: Still waiting for the conversion to the master");
            } catch (Exception e) {
                s_logger.debug("Exception: Still waiting for the conversion to the master");
            }
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                // keep polling
            }
        }
        if( i >= 15 ) {
            // NOTE(review): "30 secondary" in this message is presumably a
            // typo for "30 seconds" (15 polls x 2s).
            throw new CloudRuntimeException(_host.ip + " didn't change to slave after waiting 30 secondary");
        }
        _host.pool = poolUUID;
        return true;
    } catch (MalformedURLException e) {
        throw new CloudRuntimeException("Problem with url " + _host.ip);
    } catch (XenAPIException e) {
        String msg = "Unable to allow host " + _host.uuid
            + " to join pool " + masterIp + " due to " + e.toString();
        s_logger.warn(msg, e);
        throw new RuntimeException(msg);
    } catch (XmlRpcException e) {
        String msg = "Unable to allow host " + _host.uuid
            + " to join pool " + masterIp + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new RuntimeException(msg);
    } finally {
        // Always release the pool and host sessions.
        if (poolConn != null) {
            try {
                Session.logout(poolConn);
            } catch (Exception e) {
                // ignore logout failures during cleanup
            }
            poolConn.dispose();
        }
        if(hostSession != null) {
            try {
                Session.logout(hostConn);
            } catch (Exception e) {
                // ignore logout failures during cleanup
            }
        }
    }
}
/**
 * Best-effort cleanup after a failed VM start: force the VM down and
 * destroy it, then unplug and destroy the VBDs of every mounted volume.
 * All failures are logged and swallowed so cleanup continues as far as
 * possible.
 *
 * @param vm     the partially started VM, may be null
 * @param mounts the (SR, VDI, volume) mounts attached so far, may be null
 */
protected void startvmfailhandle(VM vm, List<Ternary<SR, VDI, VolumeVO>> mounts) {
    Connection conn = getConnection();
    if (vm != null) {
        try {
            // A running VM must be halted before it can be destroyed.
            if (vm.getPowerState(conn) == VmPowerState.RUNNING) {
                try {
                    vm.hardShutdown(conn);
                } catch (Exception e) {
                    String msg = "VM hardshutdown failed due to " + e.toString();
                    s_logger.warn(msg);
                }
            }
            if (vm.getPowerState(conn) == VmPowerState.HALTED) {
                try {
                    vm.destroy(conn);
                } catch (Exception e) {
                    String msg = "VM destroy failed due to " + e.toString();
                    s_logger.warn(msg);
                }
            }
        } catch (Exception e) {
            String msg = "VM getPowerState failed due to " + e.toString();
            s_logger.warn(msg);
        }
    }
    if (mounts != null) {
        // Detach and destroy the VBDs that were created for each volume.
        for (Ternary<SR, VDI, VolumeVO> mount : mounts) {
            VDI vdi = mount.second();
            Set<VBD> vbds = null;
            try {
                vbds = vdi.getVBDs(conn);
            } catch (Exception e) {
                String msg = "VDI getVBDS failed due to " + e.toString();
                s_logger.warn(msg);
                continue;
            }
            for (VBD vbd : vbds) {
                try {
                    vbd.unplug(conn);
                    vbd.destroy(conn);
                } catch (Exception e) {
                    String msg = "VBD destroy failed due to " + e.toString();
                    s_logger.warn(msg);
                }
            }
        }
    }
}
/**
 * Pins all four XenServer memory limits of a VM to the same fixed size
 * (i.e. no memory ballooning).
 * NOTE(review): the call order looks deliberate -- XAPI presumably
 * validates static-min <= dynamic-min <= dynamic-max <= static-max on each
 * setter -- confirm before reordering.
 */
protected void setMemory(Connection conn, VM vm, long memsize) throws XmlRpcException, XenAPIException {
    vm.setMemoryStaticMin(conn, memsize);
    vm.setMemoryDynamicMin(conn, memsize);
    vm.setMemoryDynamicMax(conn, memsize);
    vm.setMemoryStaticMax(conn, memsize);
}
/**
 * Starts a user VM: clones it from a template, sizes memory/CPU, attaches
 * root and data disks, an optional boot ISO, and guest/public VIFs, then
 * boots it and (optionally) programs default firewall rules.
 * If the PV boot path fails with a kernel/repository error, retries once
 * using the HVM template ("Other install media").
 */
protected StartAnswer execute(StartCommand cmd) {
    State state = State.Stopped;
    Connection conn = getConnection();
    VM vm = null;
    SR isosr = null;
    List<Ternary<SR, VDI, VolumeVO>> mounts = null;
    for (int retry = 0; retry < 2; retry++) {
        try {
            synchronized (_vms) {
                _vms.put(cmd.getVmName(), State.Starting);
            }
            List<VolumeVO> vols = cmd.getVolumes();
            mounts = mount(vols);
            if (retry == 1) {
                // at the second time, try hvm
                cmd.setGuestOSDescription("Other install media");
            }
            vm = createVmFromTemplate(conn, cmd);
            long memsize = cmd.getRamSize() * 1024L * 1024L;
            setMemory(conn, vm, memsize);
            vm.setIsATemplate(conn, false);
            vm.setVCPUsMax(conn, (long) cmd.getCpu());
            vm.setVCPUsAtStartup(conn, (long) cmd.getCpu());
            // Pin the VM to this host.
            Host host = Host.getByUuid(conn, _host.uuid);
            vm.setAffinity(conn, host);
            // CPU scheduler weight and utilization cap.
            Map<String, String> vcpuparam = new HashMap<String, String>();
            vcpuparam.put("weight", Integer.toString(cmd.getCpuWeight()));
            vcpuparam.put("cap", Integer.toString(cmd.getUtilization()));
            vm.setVCPUsParams(conn, vcpuparam);
            boolean bootFromISO = cmd.getBootFromISO();
            /* create root VBD */
            VBD.Record vbdr = new VBD.Record();
            Ternary<SR, VDI, VolumeVO> mount = mounts.get(0);
            vbdr.VM = vm;
            vbdr.VDI = mount.second();
            vbdr.bootable = !bootFromISO;
            vbdr.userdevice = "0";
            vbdr.mode = Types.VbdMode.RW;
            vbdr.type = Types.VbdType.DISK;
            VBD.create(conn, vbdr);
            /* create data VBDs */
            // NOTE(review): the same VBD.Record instance is reused and its
            // fields overwritten for each data disk -- presumably safe
            // because VBD.create copies the record; confirm.
            for (int i = 1; i < mounts.size(); i++) {
                mount = mounts.get(i);
                // vdi.setNameLabel(conn, cmd.getVmName() + "-DATA");
                vbdr.VM = vm;
                vbdr.VDI = mount.second();
                vbdr.bootable = false;
                vbdr.userdevice = Long.toString(mount.third().getDeviceId());
                vbdr.mode = Types.VbdMode.RW;
                vbdr.type = Types.VbdType.DISK;
                vbdr.unpluggable = true;
                VBD.create(conn, vbdr);
            }
            /* create CD-ROM VBD */
            VBD.Record cdromVBDR = new VBD.Record();
            cdromVBDR.VM = vm;
            cdromVBDR.empty = true;
            cdromVBDR.bootable = bootFromISO;
            cdromVBDR.userdevice = "3";
            cdromVBDR.mode = Types.VbdMode.RO;
            cdromVBDR.type = Types.VbdType.CD;
            VBD cdromVBD = VBD.create(conn, cdromVBDR);
            /* insert the ISO VDI if isoPath is not null */
            String isopath = cmd.getISOPath();
            if (isopath != null) {
                // Split the ISO path into its mount point and file name,
                // create/attach an ISO SR, and insert the ISO into the drive.
                int index = isopath.lastIndexOf("/");
                String mountpoint = isopath.substring(0, index);
                URI uri = new URI(mountpoint);
                isosr = createIsoSRbyURI(uri, cmd.getVmName(), false);
                String isoname = isopath.substring(index + 1);
                VDI isovdi = getVDIbyLocationandSR(isoname, isosr);
                if (isovdi == null) {
                    String msg = " can not find ISO " + cmd.getISOPath();
                    s_logger.warn(msg);
                    return new StartAnswer(cmd, msg);
                } else {
                    cdromVBD.insert(conn, isovdi);
                }
            }
            // Device 0: guest network VIF; device 1: optional public VIF.
            createVIF(conn, vm, cmd.getGuestMacAddress(), cmd.getGuestNetworkId(), cmd.getNetworkRateMbps(), "0", false);
            if (cmd.getExternalMacAddress() != null && cmd.getExternalVlan() != null) {
                createVIF(conn, vm, cmd.getExternalMacAddress(), cmd.getExternalVlan(), 0, "1", true);
            }
            /* set action after crash as destroy */
            vm.setActionsAfterCrash(conn, Types.OnCrashBehaviour.DESTROY);
            vm.start(conn, false, true);
            if (_canBridgeFirewall) {
                String result = callHostPlugin("vmops", "default_network_rules",
                        "vmName", cmd.getVmName(),
                        "vmIP", cmd.getGuestIpAddress(),
                        "vmMAC", cmd.getGuestMacAddress(),
                        "vmID", Long.toString(cmd.getId()));
                if (result == null || result.isEmpty() || !Boolean.parseBoolean(result)) {
                    s_logger.warn("Failed to program default network rules for vm " + cmd.getVmName());
                } else {
                    s_logger.info("Programmed default network rules for vm " + cmd.getVmName());
                }
            }
            state = State.Running;
            return new StartAnswer(cmd);
        } catch (XenAPIException e) {
            String errormsg = e.toString();
            String msg = "Exception caught while starting VM due to message:" + errormsg + " (" + e.getClass().getName() + ")";
            // Kernel/repository errors mean the PV boot path failed -- clean
            // up and retry the loop once with the HVM template; anything
            // else is fatal.
            if (!errormsg.contains("Unable to find partition containing kernel") && !errormsg.contains("Unable to access a required file in the specified repository")) {
                s_logger.warn(msg, e);
                startvmfailhandle(vm, mounts);
                removeSR(isosr);
            } else {
                startvmfailhandle(vm, mounts);
                removeSR(isosr);
                continue;
            }
            state = State.Stopped;
            return new StartAnswer(cmd, msg);
        } catch (Exception e) {
            String msg = "Exception caught while starting VM due to message:" + e.getMessage();
            s_logger.warn(msg, e);
            startvmfailhandle(vm, mounts);
            removeSR(isosr);
            state = State.Stopped;
            return new StartAnswer(cmd, msg);
        } finally {
            // Publish the final tracked state for this VM.
            synchronized (_vms) {
                _vms.put(cmd.getVmName(), state);
            }
        }
    }
    String msg = "Start VM failed";
    return new StartAnswer(cmd, msg);
}
/**
 * Creates a rate-limited VIF on the given network.
 * A rate of 0 is treated as the default of 200 Mbps.
 *
 * @param devNum the VIF device slot on the VM
 * @return the created VIF
 */
protected VIF createVIF(Connection conn, VM vm, String mac, int rate, String devNum, Network network) throws XenAPIException, XmlRpcException,
        InternalErrorException {
    if (rate == 0) {
        rate = 200;
    }
    final VIF.Record vifRecord = new VIF.Record();
    vifRecord.VM = vm;
    vifRecord.device = devNum;
    vifRecord.MAC = mac;
    vifRecord.network = network;
    vifRecord.qosAlgorithmType = "ratelimit";
    // convert mbs to kilobyte per second
    final Map<String, String> qosParams = new HashMap<String, String>();
    qosParams.put("kbps", Integer.toString(rate * 128));
    vifRecord.qosAlgorithmParams = qosParams;
    return VIF.create(conn, vifRecord);
}
/**
 * Creates a VIF on the public or guest network, enabling a VLAN network
 * for the given tag when it is not "untagged".
 *
 * @param isPub true for the public network/PIF, false for the guest ones
 * @throws InternalErrorException if the VLAN network cannot be enabled
 */
protected VIF createVIF(Connection conn, VM vm, String mac, String vlanTag, int rate, String devNum, boolean isPub) throws XenAPIException, XmlRpcException,
        InternalErrorException {
    final String nwUuid = isPub ? _host.publicNetwork : _host.guestNetwork;
    final String pifUuid = isPub ? _host.publicPif : _host.guestPif;
    final Network vlanNetwork;
    if ("untagged".equalsIgnoreCase(vlanTag)) {
        vlanNetwork = Network.getByUuid(conn, nwUuid);
    } else {
        vlanNetwork = enableVlanNetwork(Long.valueOf(vlanTag), pifUuid);
    }
    if (vlanNetwork == null) {
        throw new InternalErrorException("Failed to enable VLAN network with tag: " + vlanTag);
    }
    return createVIF(conn, vm, mac, rate, devNum, vlanNetwork);
}
/**
 * Stops the named VM on this host. For router VMs the traffic counters are
 * harvested before shutdown; bridge firewall rules are torn down; once
 * halted, the VM is destroyed, its VDIs unmounted, its ISO SR removed and
 * any now-unused VLAN networks disabled.
 *
 * BUGFIX: the original filtered the candidate set with vms.remove(vm)
 * inside a for-each loop over the same Set, which throws
 * ConcurrentModificationException whenever a VM running on another host is
 * dropped. The filter now collects the VMs to drop and removes them after
 * iteration.
 */
protected StopAnswer execute(final StopCommand cmd) {
    String vmName = cmd.getVmName();
    try {
        Connection conn = getConnection();
        Set<VM> vms = VM.getByNameLabel(conn, vmName);
        // stop vm which is running on this host or is in halted state:
        // drop VMs that are running but resident on some other host.
        List<VM> vmsOnOtherHosts = new ArrayList<VM>();
        for (VM vm : vms) {
            VM.Record vmr = vm.getRecord(conn);
            if (vmr.powerState != VmPowerState.RUNNING)
                continue;
            if (isRefNull(vmr.residentOn))
                continue;
            if (vmr.residentOn.getUuid(conn).equals(_host.uuid))
                continue;
            vmsOnOtherHosts.add(vm);
        }
        vms.removeAll(vmsOnOtherHosts);
        if (vms.size() == 0) {
            s_logger.warn("VM does not exist on XenServer" + _host.uuid);
            synchronized (_vms) {
                _vms.remove(vmName);
            }
            return new StopAnswer(cmd, "VM does not exist", 0, 0L, 0L);
        }
        Long bytesSent = 0L;
        Long bytesRcvd = 0L;
        for (VM vm : vms) {
            VM.Record vmr = vm.getRecord(conn);
            // Never shut down dom0.
            if (vmr.isControlDomain) {
                String msg = "Tring to Shutdown control domain";
                s_logger.warn(msg);
                return new StopAnswer(cmd, msg);
            }
            if (vmr.powerState == VmPowerState.RUNNING && !isRefNull(vmr.residentOn) && !vmr.residentOn.getUuid(conn).equals(_host.uuid)) {
                String msg = "Stop Vm " + vmName + " failed due to this vm is not running on this host: " + _host.uuid + " but host:" + vmr.residentOn.getUuid(conn);
                s_logger.warn(msg);
                return new StopAnswer(cmd, msg);
            }
            // Track the transition; the inner finally restores/updates it.
            State state = null;
            synchronized (_vms) {
                state = _vms.get(vmName);
                _vms.put(vmName, State.Stopping);
            }
            try {
                if (vmr.powerState == VmPowerState.RUNNING) {
                    /* when stop a vm, set affinity to current xenserver */
                    vm.setAffinity(conn, vm.getResidentOn(conn));
                    try {
                        // Routers: read traffic counters before shutdown wipes them.
                        if (VirtualMachineName.isValidRouterName(vmName)) {
                            if (cmd.getPrivateRouterIpAddress() != null) {
                                long[] stats = getNetworkStats(cmd.getPrivateRouterIpAddress());
                                bytesSent = stats[0];
                                bytesRcvd = stats[1];
                            }
                        }
                        if (_canBridgeFirewall) {
                            String result = callHostPlugin("vmops", "destroy_network_rules_for_vm", "vmName", cmd.getVmName());
                            if (result == null || result.isEmpty() || !Boolean.parseBoolean(result)) {
                                s_logger.warn("Failed to remove network rules for vm " + cmd.getVmName());
                            } else {
                                s_logger.info("Removed network rules for vm " + cmd.getVmName());
                            }
                        }
                        vm.cleanShutdown(conn);
                    } catch (XenAPIException e) {
                        // Guest without PV tools: fall back to hard shutdown.
                        s_logger.debug("Do Not support Clean Shutdown, fall back to hard Shutdown: " + e.toString());
                        try {
                            vm.hardShutdown(conn);
                        } catch (XenAPIException e1) {
                            String msg = "Hard Shutdown failed due to " + e1.toString();
                            s_logger.warn(msg, e1);
                            return new StopAnswer(cmd, msg);
                        } catch (XmlRpcException e1) {
                            String msg = "Hard Shutdown failed due to " + e1.getMessage();
                            s_logger.warn(msg, e1);
                            return new StopAnswer(cmd, msg);
                        }
                    } catch (XmlRpcException e) {
                        String msg = "Clean Shutdown failed due to " + e.getMessage();
                        s_logger.warn(msg, e);
                        return new StopAnswer(cmd, msg);
                    }
                }
            } catch (Exception e) {
                String msg = "Catch exception " + e.getClass().toString() + " when stop VM:" + cmd.getVmName();
                s_logger.debug(msg);
                return new StopAnswer(cmd, msg);
            } finally {
                // Whatever happened above: if the VM ended up halted, destroy
                // it and release its storage and network resources.
                try {
                    if (vm.getPowerState(conn) == VmPowerState.HALTED) {
                        Set<VIF> vifs = vm.getVIFs(conn);
                        List<Network> networks = new ArrayList<Network>();
                        for (VIF vif : vifs) {
                            networks.add(vif.getNetwork(conn));
                        }
                        List<VDI> vdis = getVdis(vm);
                        vm.destroy(conn);
                        for (VDI vdi : vdis) {
                            umount(vdi);
                        }
                        state = State.Stopped;
                        SR sr = getISOSRbyVmName(cmd.getVmName());
                        removeSR(sr);
                        // Disable any VLAN networks that aren't used anymore.
                        for (Network network : networks) {
                            if (network.getNameLabel(conn).startsWith("VLAN")) {
                                disableVlanNetwork(network);
                            }
                        }
                    }
                } catch (XenAPIException e) {
                    String msg = "VM destroy failed in Stop " + vmName + " Command due to " + e.toString();
                    s_logger.warn(msg, e);
                } catch (Exception e) {
                    String msg = "VM destroy failed in Stop " + vmName + " Command due to " + e.getMessage();
                    s_logger.warn(msg, e);
                } finally {
                    synchronized (_vms) {
                        _vms.put(vmName, state);
                    }
                }
            }
        }
        return new StopAnswer(cmd, "Stop VM " + vmName + " Succeed", 0, bytesSent, bytesRcvd);
    } catch (XenAPIException e) {
        String msg = "Stop Vm " + vmName + " fail due to " + e.toString();
        s_logger.warn(msg, e);
        return new StopAnswer(cmd, msg);
    } catch (XmlRpcException e) {
        String msg = "Stop Vm " + vmName + " fail due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new StopAnswer(cmd, msg);
    }
}
/**
 * Collects the VDIs attached to a VM by walking its VBDs.
 * Failures are logged and swallowed, in which case the returned list may
 * be empty or partial.
 *
 * @param vm the VM to inspect
 * @return the VDIs behind the VM's VBDs (possibly empty)
 */
private List<VDI> getVdis(VM vm) {
    List<VDI> vdis = new ArrayList<VDI>();
    try {
        Connection conn = getConnection();
        Set<VBD> vbds = vm.getVBDs(conn);
        for (VBD vbd : vbds) {
            vdis.add(vbd.getVDI(conn));
        }
    } catch (XenAPIException e) {
        // BUGFIX: message previously said "VPD"; this method reads VBDs.
        String msg = "getVdis can not get VBDs due to " + e.toString();
        s_logger.warn(msg, e);
    } catch (XmlRpcException e) {
        String msg = "getVdis can not get VBDs due to " + e.getMessage();
        s_logger.warn(msg, e);
    }
    return vdis;
}
/**
 * Waits for the command port of a VM to become reachable, retrying up to
 * {@code _retry} + 1 times with {@code _sleep} ms between attempts.
 *
 * @return null on success, otherwise a failure message
 */
protected String connect(final String vmName, final String ipAddress, final int port) {
    for (int attempt = 0; attempt <= _retry; attempt++) {
        // Bail out early if the VM itself no longer exists.
        try {
            Connection conn = getConnection();
            Set<VM> vms = VM.getByNameLabel(conn, vmName);
            if (vms.size() < 1) {
                String msg = "VM " + vmName + " is not running";
                s_logger.warn(msg);
                return msg;
            }
        } catch (Exception e) {
            String msg = "VM.getByNameLabel " + vmName + " failed due to " + e.toString();
            s_logger.warn(msg, e);
            return msg;
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Trying to connect to " + ipAddress);
        }
        if (pingdomr(ipAddress, Integer.toString(port))) {
            return null;
        }
        try {
            Thread.sleep(_sleep);
        } catch (final InterruptedException e) {
            // keep retrying
        }
    }
    final String msg = "Timeout, Unable to logon to " + ipAddress;
    s_logger.debug(msg);
    return msg;
}
/**
 * Convenience overload: waits for the default system-VM command port 3922.
 */
protected String connect(final String vmname, final String ipAddress) {
    return connect(vmname, ipAddress, 3922);
}
/**
 * Starts a domain-router system VM on the guest network (enabling a
 * VLAN-tagged network when needed) and initializes its network usage
 * tracking on success.
 */
protected StartRouterAnswer execute(StartRouterCommand cmd) {
    final String vmName = cmd.getVmName();
    final DomainRouter router = cmd.getRouter();
    try {
        String tag = router.getVnet();
        Network network = null;
        if ("untagged".equalsIgnoreCase(tag)) {
            Connection conn = getConnection();
            network = Network.getByUuid(conn, _host.guestNetwork);
        } else {
            network = enableVlanNetwork(Long.parseLong(tag), _host.guestPif);
        }
        if (network == null) {
            throw new InternalErrorException("Failed to enable VLAN network with tag: " + tag);
        }
        String bootArgs = cmd.getBootArgs();
        // 3922 is the system VM's management command port.
        String result = startSystemVM(vmName, router.getVlanId(), network, cmd.getVolumes(), bootArgs, router.getGuestMacAddress(), router.getPrivateIpAddress(), router
                .getPrivateMacAddress(), router.getPublicMacAddress(), 3922, router.getRamSize(), cmd.getGuestOSDescription(), cmd.getNetworkRateMbps());
        if (result == null) {
            // Start the traffic-usage tracker on the freshly booted router.
            networkUsage(router.getPrivateIpAddress(), "create", null);
            return new StartRouterAnswer(cmd);
        }
        return new StartRouterAnswer(cmd, result);
    } catch (Exception e) {
        String msg = "Exception caught while starting router vm " + vmName + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new StartRouterAnswer(cmd, msg);
    }
}
/**
 * Starts a system VM (router, console proxy, or secondary storage VM):
 * clones it from the system-VM template, attaches its single root disk and
 * the systemvm ISO, wires up to three VIFs (guest/link-local, private,
 * public), passes boot arguments via the PV command line, boots it, and
 * finally waits until its command port answers.
 *
 * @return null on success, otherwise an error message
 */
protected String startSystemVM(String vmName, String vlanId, Network nw0, List<VolumeVO> vols, String bootArgs, String guestMacAddr, String privateIp, String privateMacAddr,
        String publicMacAddr, int cmdPort, long ramSize, String getGuestOSDescription, int networkRateMbps) {
    setupLinkLocalNetwork();
    VM vm = null;
    List<Ternary<SR, VDI, VolumeVO>> mounts = null;
    Connection conn = getConnection();
    State state = State.Stopped;
    try {
        synchronized (_vms) {
            _vms.put(vmName, State.Starting);
        }
        mounts = mount(vols);
        assert mounts.size() == 1 : "System VMs should have only 1 partition but we actually have " + mounts.size();
        Ternary<SR, VDI, VolumeVO> mount = mounts.get(0);
        Set<VM> templates = VM.getByNameLabel(conn, getGuestOsType(getGuestOSDescription));
        if (templates.size() == 0) {
            String msg = " can not find systemvm template " + getGuestOsType(getGuestOSDescription) ;
            s_logger.warn(msg);
            return msg;
        }
        VM template = templates.iterator().next();
        vm = template.createClone(conn, vmName);
        vm.removeFromOtherConfig(conn, "disks");
        vm.setPVBootloader(conn, "pygrub");
        long memsize = ramSize * 1024L * 1024L;
        setMemory(conn, vm, memsize);
        vm.setIsATemplate(conn, false);
        vm.setVCPUsAtStartup(conn, 1L);
        // Pin the system VM to this host.
        Host host = Host.getByUuid(conn, _host.uuid);
        vm.setAffinity(conn, host);
        /* create VBD */
        VBD.Record vbdr = new VBD.Record();
        vbdr.VM = vm;
        vbdr.VDI = mount.second();
        vbdr.bootable = true;
        vbdr.userdevice = "0";
        vbdr.mode = Types.VbdMode.RW;
        vbdr.type = Types.VbdType.DISK;
        VBD.create(conn, vbdr);
        /* create CD-ROM VBD */
        VBD.Record cdromVBDR = new VBD.Record();
        cdromVBDR.VM = vm;
        cdromVBDR.empty = true;
        cdromVBDR.bootable = false;
        cdromVBDR.userdevice = "3";
        cdromVBDR.mode = Types.VbdMode.RO;
        cdromVBDR.type = Types.VbdType.CD;
        VBD cdromVBD = VBD.create(conn, cdromVBDR);
        // Insert the systemvm ISO into the CD drive.
        cdromVBD.insert(conn, VDI.getByUuid(conn, _host.systemvmisouuid));
        /* create VIF0 */
        // Console proxies and secondary storage VMs use the link-local
        // network for device 0; routers use the supplied guest network.
        Network network = null;
        if (VirtualMachineName.isValidConsoleProxyName(vmName) || VirtualMachineName.isValidSecStorageVmName(vmName, null)) {
            network = Network.getByUuid(conn, _host.linkLocalNetwork);
        } else {
            network = nw0;
        }
        createVIF(conn, vm, guestMacAddr, networkRateMbps, "0", network);
        /* create VIF1 */
        /* For routing vm, set its network as link local bridge */
        if (VirtualMachineName.isValidRouterName(vmName) && privateIp.startsWith("169.254")) {
            network = Network.getByUuid(conn, _host.linkLocalNetwork);
        } else {
            network = Network.getByUuid(conn, _host.privateNetwork);
        }
        createVIF(conn, vm, privateMacAddr, networkRateMbps, "1", network);
        /* create VIF2 */
        // The all-FE MAC is a sentinel for "no public interface".
        if( !publicMacAddr.equalsIgnoreCase("FE:FF:FF:FF:FF:FF") ) {
            network = null;
            if ("untagged".equalsIgnoreCase(vlanId)) {
                network = Network.getByUuid(conn, _host.publicNetwork);
            } else {
                network = enableVlanNetwork(Long.valueOf(vlanId), _host.publicPif);
                if (network == null) {
                    throw new InternalErrorException("Failed to enable VLAN network with tag: " + vlanId);
                }
            }
            createVIF(conn, vm, publicMacAddr, networkRateMbps, "2", network);
        }
        /* set up PV dom argument */
        String pvargs = vm.getPVArgs(conn);
        pvargs = pvargs + bootArgs;
        if (s_logger.isInfoEnabled())
            s_logger.info("PV args for system vm are " + pvargs);
        vm.setPVArgs(conn, pvargs);
        /* destroy console */
        Set<Console> consoles = vm.getRecord(conn).consoles;
        for (Console console : consoles) {
            console.destroy(conn);
        }
        /* set action after crash as destroy */
        vm.setActionsAfterCrash(conn, Types.OnCrashBehaviour.DESTROY);
        vm.start(conn, false, true);
        if (_canBridgeFirewall) {
            String result = callHostPlugin("vmops", "default_network_rules_systemvm", "vmName", vmName);
            if (result == null || result.isEmpty() || !Boolean.parseBoolean(result)) {
                s_logger.warn("Failed to program default system vm network rules for " + vmName);
            } else {
                s_logger.info("Programmed default system vm network rules for " + vmName);
            }
        }
        if (s_logger.isInfoEnabled())
            s_logger.info("Ping system vm command port, " + privateIp + ":" + cmdPort);
        state = State.Running;
        // Wait for the in-VM agent to start answering on its command port.
        String result = connect(vmName, privateIp, cmdPort);
        if (result != null) {
            // NOTE(review): message is missing a space before "due to:".
            String msg = "Can not ping System vm " + vmName + "due to:" + result;
            s_logger.warn(msg);
            throw new CloudRuntimeException(msg);
        } else {
            if (s_logger.isInfoEnabled())
                s_logger.info("Ping system vm command port succeeded for vm " + vmName);
        }
        return null;
    } catch (XenAPIException e) {
        String msg = "Exception caught while starting System vm " + vmName + " due to " + e.toString();
        s_logger.warn(msg, e);
        startvmfailhandle(vm, mounts);
        state = State.Stopped;
        return msg;
    } catch (Exception e) {
        String msg = "Exception caught while starting System vm " + vmName + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        startvmfailhandle(vm, mounts);
        state = State.Stopped;
        return msg;
    } finally {
        // Publish the final tracked state for this VM.
        synchronized (_vms) {
            _vms.put(vmName, state);
        }
    }
}
// TODO : need to refactor it to reuse code with StartRouter
/**
 * Starts a console proxy system VM on the private network, passing its
 * identity and the local gateway through the kernel boot arguments.
 */
protected Answer execute(final StartConsoleProxyCommand cmd) {
    final String vmName = cmd.getVmName();
    final ConsoleProxyVO proxy = cmd.getProxy();
    try {
        Connection conn = getConnection();
        Network network = Network.getByUuid(conn, _host.privateNetwork);
        String bootArgs = cmd.getBootArgs();
        bootArgs += " zone=" + _dcId;
        bootArgs += " pod=" + _pod;
        bootArgs += " guid=Proxy." + proxy.getId();
        bootArgs += " proxy_vm=" + proxy.getId();
        bootArgs += " localgw=" + _localGateway;
        String result = startSystemVM(vmName, proxy.getVlanId(), network, cmd.getVolumes(), bootArgs, proxy.getGuestMacAddress(), proxy.getGuestIpAddress(), proxy
                .getPrivateMacAddress(), proxy.getPublicMacAddress(), cmd.getProxyCmdPort(), proxy.getRamSize(), cmd.getGuestOSDescription(), cmd.getNetworkRateMbps());
        if (result == null) {
            return new StartConsoleProxyAnswer(cmd);
        }
        return new StartConsoleProxyAnswer(cmd, result);
    } catch (Exception e) {
        // BUGFIX: message previously said "router vm" -- copy-paste from
        // the StartRouterCommand handler; this starts a console proxy.
        String msg = "Exception caught while starting console proxy vm " + vmName + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new StartConsoleProxyAnswer(cmd, msg);
    }
}
/**
 * Checks whether the given VBD device slot is already taken on the VM,
 * i.e. it is not among the device numbers XAPI still allows.
 *
 * @throws CloudRuntimeException if the allowed-device list cannot be read
 */
protected boolean isDeviceUsed(VM vm, Long deviceId) {
    String msg = null;
    try {
        Connection conn = getConnection();
        Set<String> allowedVBDDevices = vm.getAllowedVBDDevices(conn);
        return !allowedVBDDevices.contains(deviceId.toString());
    } catch (XmlRpcException e) {
        msg = "Catch XmlRpcException due to: " + e.getMessage();
        s_logger.warn(msg, e);
    } catch (XenAPIException e) {
        msg = "Catch XenAPIException due to: " + e.toString();
        s_logger.warn(msg, e);
    }
    throw new CloudRuntimeException("When check deviceId " + msg);
}
/**
 * Picks a free VBD device number on the VM for attaching a new disk.
 *
 * @throws CloudRuntimeException if no slot is free or XAPI cannot be queried
 */
protected String getUnusedDeviceNum(VM vm) {
    try {
        Connection conn = getConnection();
        final Set<String> freeSlots = vm.getAllowedVBDDevices(conn);
        if (freeSlots.size() == 0) {
            throw new CloudRuntimeException("Could not find an available slot in VM with name: " + vm.getNameLabel(conn) + " to attach a new disk.");
        }
        return freeSlots.iterator().next();
    } catch (XmlRpcException e) {
        s_logger.warn("Catch XmlRpcException due to: " + e.getMessage(), e);
    } catch (XenAPIException e) {
        s_logger.warn("Catch XenAPIException due to: " + e.toString(), e);
    }
    throw new CloudRuntimeException("Could not find an available slot in VM with name to attach a new disk.");
}
/**
 * Invokes a host-plugin command with the default 300 second timeout.
 *
 * @see #callHostPluginWithTimeOut(String, String, int, String...)
 */
protected String callHostPlugin(String plugin, String cmd, String... params) {
    //default time out is 300 s
    return callHostPluginWithTimeOut(plugin, cmd, 300, params);
}
/**
 * Runs a plugin command on this host via Host.callPlugin and returns its output
 * with newlines stripped, or null when the call fails.
 *
 * NOTE(review): the {@code timeout} parameter is currently ignored — nothing in
 * this method applies it to the call. Confirm whether a timed variant of the
 * plugin invocation was intended.
 *
 * @param plugin  plugin name on the host
 * @param cmd     plugin command to run
 * @param timeout intended timeout in seconds (currently unused, see note)
 * @param params  alternating key/value argument pairs
 * @return the plugin's output without newlines, or null on failure
 */
protected String callHostPluginWithTimeOut(String plugin, String cmd, int timeout, String... params) {
    Map<String, String> args = new HashMap<String, String>();
    try {
        Connection conn = getConnection();
        // params is a flat key/value list: (k1, v1, k2, v2, ...)
        for (int i = 0; i < params.length; i += 2) {
            args.put(params[i], params[i + 1]);
        }
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("callHostPlugin executing for command " + cmd + " with " + getArgsString(args));
        }
        Host host = Host.getByUuid(conn, _host.uuid);
        String result = host.callPlugin(conn, plugin, cmd, args);
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("callHostPlugin Result: " + result);
        }
        return result.replace("\n", "");
    } catch (XenAPIException e) {
        s_logger.warn("callHostPlugin failed for cmd: " + cmd + " with args " + getArgsString(args) + " due to " + e.toString());
    } catch (XmlRpcException e) {
        s_logger.debug("callHostPlugin failed for cmd: " + cmd + " with args " + getArgsString(args) + " due to " + e.getMessage());
    }
    return null;
}
/**
 * Renders plugin arguments as a "key: value, " sequence for log messages.
 * Note the output intentionally keeps a trailing ", " — callers only use it
 * inside log lines.
 *
 * @param args argument map to render
 * @return the rendered string, empty for an empty map
 */
protected String getArgsString(Map<String, String> args) {
    StringBuilder rendered = new StringBuilder();
    for (Map.Entry<String, String> entry : args.entrySet()) {
        rendered.append(entry.getKey()).append(": ").append(entry.getValue()).append(", ");
    }
    return rendered.toString();
}
/**
 * Asks the vmops host plugin to (re)apply the host's iptables setup.
 *
 * @return true when the plugin returned a non-empty result, false otherwise
 */
protected boolean setIptables() {
    String result = callHostPlugin("vmops", "setIptables");
    return result != null && !result.isEmpty();
}
/**
 * Resolves the network with the given name label to the Nic (network record plus
 * this host's PIF) backing it on this host.
 *
 * If the host's PIF is a bond master and one of the bond slaves carries the
 * management interface, the management network is first transferred onto the
 * bond master; if that transfer fails, null is returned.
 *
 * @param conn XenServer connection
 * @param name network name label; null yields null
 * @return the Nic for this host, or null when not found or the bond transfer failed
 */
protected Nic getLocalNetwork(Connection conn, String name) throws XmlRpcException, XenAPIException {
    if( name == null) {
        return null;
    }
    Set<Network> networks = Network.getByNameLabel(conn, name);
    for (Network network : networks) {
        Network.Record nr = network.getRecord(conn);
        for (PIF pif : nr.PIFs) {
            PIF.Record pr = pif.getRecord(conn);
            // Only consider PIFs that live on this host.
            if (_host.uuid.equals(pr.host.getUuid(conn))) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Found a network called " + name + " on host=" + _host.ip + "; Network=" + nr.uuid + "; pif=" + pr.uuid);
                }
                if (pr.bondMasterOf != null && pr.bondMasterOf.size() > 0) {
                    // A PIF mastering more than one bond is unsupported here.
                    if (pr.bondMasterOf.size() > 1) {
                        String msg = new StringBuilder("Unsupported configuration. Network " + name + " has more than one bond. Network=").append(nr.uuid)
                        .append("; pif=").append(pr.uuid).toString();
                        s_logger.warn(msg);
                        return null;
                    }
                    Bond bond = pr.bondMasterOf.iterator().next();
                    Set<PIF> slaves = bond.getSlaves(conn);
                    for (PIF slave : slaves) {
                        PIF.Record spr = slave.getRecord(conn);
                        // If a bond slave carries the management interface, move
                        // management onto the bond master before using the bond.
                        if (spr.management) {
                            Host host = Host.getByUuid(conn, _host.uuid);
                            if (!transferManagementNetwork(conn, host, slave, spr, pif)) {
                                String msg = new StringBuilder("Unable to transfer management network. slave=" + spr.uuid + "; master=" + pr.uuid + "; host="
                                + _host.uuid).toString();
                                s_logger.warn(msg);
                                return null;
                            }
                            break;
                        }
                    }
                }
                return new Nic(network, nr, pif, pr);
            }
        }
    }
    return null;
}
/**
 * Finds the router VIF attached to the network matching the given VLAN id.
 * "untagged" selects the VIF on the host's public network; otherwise the VIF
 * whose network is labeled "VLAN&lt;vlanId&gt;" is returned.
 *
 * @param router the router VM whose VIFs are searched
 * @param vlanId VLAN id string, or "untagged"
 * @return the matching VIF, or null when none matches or the lookup fails
 */
protected VIF getCorrectVif(VM router, String vlanId) {
    try {
        Connection conn = getConnection();
        for (VIF candidate : router.getVIFs(conn)) {
            Network candidateNetwork = candidate.getNetwork(conn);
            if (vlanId.equals("untagged")) {
                if (candidateNetwork.getUuid(conn).equals(_host.publicNetwork)) {
                    return candidate;
                }
            } else if (candidateNetwork.getNameLabel(conn).equals("VLAN" + vlanId)) {
                return candidate;
            }
        }
    } catch (XmlRpcException e) {
        String msg = "Caught XmlRpcException: " + e.getMessage();
        s_logger.warn(msg, e);
    } catch (XenAPIException e) {
        String msg = "Caught XenAPIException: " + e.toString();
        s_logger.warn(msg, e);
    }
    return null;
}
/**
 * Returns the lowest free VIF device number for the VM, or null when none is
 * available, a reported value is not numeric, or the XenServer query fails.
 *
 * Fix: previously an empty allowed-device set caused
 * {@code sortedDeviceNums.get(0)} to throw IndexOutOfBoundsException; now this
 * case returns null like the other failure paths.
 *
 * @param vm the VM whose VIF slots are inspected
 * @return the lowest available device number as a string, or null
 */
protected String getLowestAvailableVIFDeviceNum(VM vm) {
    try {
        Connection conn = getConnection();
        Set<String> availableDeviceNums = vm.getAllowedVIFDevices(conn);
        List<Integer> sortedDeviceNums = new ArrayList<Integer>();
        for (String deviceNum : availableDeviceNums) {
            try {
                sortedDeviceNums.add(Integer.valueOf(deviceNum));
            } catch (NumberFormatException e) {
                s_logger.debug("Obtained an invalid value for an available VIF device number for VM: " + vm.getNameLabel(conn));
                return null;
            }
        }
        if (sortedDeviceNums.isEmpty()) {
            s_logger.debug("No available VIF device number for VM: " + vm.getNameLabel(conn));
            return null;
        }
        Collections.sort(sortedDeviceNums);
        return String.valueOf(sortedDeviceNums.get(0));
    } catch (XmlRpcException e) {
        String msg = "Caught XmlRpcException: " + e.getMessage();
        s_logger.warn(msg, e);
    } catch (XenAPIException e) {
        String msg = "Caught XenAPIException: " + e.toString();
        s_logger.warn(msg, e);
    }
    return null;
}
/**
 * Resolves a volume to its VDI by uuid. The pool type and volume folder are
 * unused by this implementation; only volumePath (the VDI uuid) matters.
 */
protected VDI mount(StoragePoolType pooltype, String volumeFolder, String volumePath) {
    return getVDIbyUuid(volumePath);
}
/**
 * Looks up the VDI for every volume and returns (SR, VDI, volume) triples.
 * ROOT volumes are inserted at the head of the list so they come first; the
 * SR slot is intentionally left null — only the VDI is resolved here.
 *
 * @param vos volumes to resolve
 * @return mount triples, ROOT volumes first
 */
protected List<Ternary<SR, VDI, VolumeVO>> mount(List<VolumeVO> vos) {
    ArrayList<Ternary<SR, VDI, VolumeVO>> mounts = new ArrayList<Ternary<SR, VDI, VolumeVO>>(vos.size());
    for (VolumeVO vol : vos) {
        VDI vdi = getVDIbyUuid(vol.getPath());
        Ternary<SR, VDI, VolumeVO> entry = new Ternary<SR, VDI, VolumeVO>(null, vdi, vol);
        if (vol.getVolumeType() == VolumeType.ROOT) {
            mounts.add(0, entry);
        } else {
            mounts.add(entry);
        }
    }
    return mounts;
}
/**
 * Finds the network with the given name label, or null when none exists.
 * Name labels are expected to be unique (asserted).
 *
 * @param name network name label
 * @return the unique matching network, or null
 */
protected Network getNetworkByName(String name) throws BadServerResponse, XenAPIException, XmlRpcException {
    Connection conn = getConnection();
    Set<Network> networks = Network.getByNameLabel(conn, name);
    if (networks.isEmpty()) {
        return null;
    }
    assert networks.size() == 1 : "How did we find more than one network with this name label" + name + "? Strange....";
    return networks.iterator().next(); // Found it.
}
/**
 * Finds a network by name label. When several networks share the label and
 * {@code lookForPif} is set, the one that has a PIF on this host is preferred;
 * otherwise the first candidate is returned.
 *
 * @param conn       XenServer connection
 * @param name       network name label
 * @param lookForPif when true, disambiguate duplicates by requiring a PIF on this host
 * @return the selected network, or null when none matched
 */
protected synchronized Network getNetworkByName(Connection conn, String name, boolean lookForPif) throws XenAPIException, XmlRpcException {
    Network found = null;
    Set<Network> networks = Network.getByNameLabel(conn, name);
    if (networks.size() == 1) {
        found = networks.iterator().next();
    } else if (networks.size() > 1) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Found more than one network with the name " + name);
        }
        for (Network network : networks) {
            // Without PIF disambiguation, take the first candidate.
            if (!lookForPif) {
                found = network;
                break;
            }
            Network.Record netr = network.getRecord(conn);
            s_logger.debug("Checking network " + netr.uuid);
            if (netr.PIFs.size() == 0) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Network " + netr.uuid + " has no pifs so skipping that.");
                }
            } else {
                // Prefer the network that has a PIF on this specific host.
                for (PIF pif : netr.PIFs) {
                    PIF.Record pifr = pif.getRecord(conn);
                    if (_host.uuid.equals(pifr.host.getUuid(conn))) {
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug("Network " + netr.uuid + " has a pif " + pifr.uuid + " for our host ");
                        }
                        found = network;
                        break;
                    }
                }
            }
        }
    }
    return found;
}
/**
 * Ensures a VLAN network named "VLAN&lt;tag&gt;" exists and is attached to the
 * PIF identified by pifUuid, creating the network and/or the VLAN as needed.
 *
 * Synchronizes on the interned network name so concurrent creation of the same
 * VLAN within this JVM is serialized.
 *
 * @param tag     VLAN tag (decimal)
 * @param pifUuid uuid of the PIF to attach the VLAN to
 * @return the (existing or newly created) VLAN network
 * @throws CloudRuntimeException when the VLAN already exists on a different device
 */
protected Network enableVlanNetwork(long tag, String pifUuid) throws XenAPIException, XmlRpcException {
    // In XenServer, vlan is added by
    // 1. creating a network.
    // 2. creating a vlan associating network with the pif.
    // We always create
    // 1. a network with VLAN[vlan id in decimal]
    // 2. a vlan associating the network created with the pif to private
    // network.
    Connection conn = getConnection();
    Network vlanNetwork = null;
    String name = "VLAN" + Long.toString(tag);
    synchronized (name.intern()) {
        vlanNetwork = getNetworkByName(name);
        if (vlanNetwork == null) { // Can't find it, then create it.
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Creating VLAN network for " + tag + " on host " + _host.ip);
            }
            Network.Record nwr = new Network.Record();
            nwr.nameLabel = name;
            nwr.bridge = name;
            vlanNetwork = Network.create(conn, nwr);
        }
        PIF nPif = PIF.getByUuid(conn, pifUuid);
        PIF.Record nPifr = nPif.getRecord(conn);
        Network.Record vlanNetworkr = vlanNetwork.getRecord(conn);
        if (vlanNetworkr.PIFs != null) {
            // If this host already has a PIF on the VLAN network, reuse it —
            // but only when it sits on the same device; otherwise it's a conflict.
            for (PIF pif : vlanNetworkr.PIFs) {
                PIF.Record pifr = pif.getRecord(conn);
                if(pifr.host.equals(nPifr.host)) {
                    if (pifr.device.equals(nPifr.device) ) {
                        pif.plug(conn);
                        return vlanNetwork;
                    } else {
                        throw new CloudRuntimeException("Creating VLAN " + tag + " on " + nPifr.device + " failed due to this VLAN is already created on " + pifr.device);
                    }
                }
            }
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Creating VLAN " + tag + " on host " + _host.ip + " on device " + nPifr.device);
        }
        VLAN vlan = VLAN.create(conn, nPif, tag, vlanNetwork);
        // Make sure the VLAN's untagged PIF is actually plugged in.
        PIF untaggedPif = vlan.getUntaggedPIF(conn);
        if (!untaggedPif.getCurrentlyAttached(conn)) {
            untaggedPif.plug(conn);
        }
    }
    return vlanNetwork;
}
/**
 * Ensures a VLAN network named "VLAN&lt;tag&gt;" exists and has a VLAN created on
 * the PIF identified by pifUuid, creating network and VLAN as needed.
 *
 * NOTE(review): the {@code network} parameter is never used in this body —
 * confirm whether it was meant to seed the lookup instead of the name-based search.
 *
 * @param conn    XenServer connection
 * @param tag     VLAN tag (decimal)
 * @param network unused (see note)
 * @param pifUuid uuid of the PIF to attach the VLAN to
 * @return the (existing or newly created) VLAN network
 */
protected Network enableVlanNetwork(Connection conn, long tag, Network network, String pifUuid) throws XenAPIException, XmlRpcException {
    // In XenServer, vlan is added by
    // 1. creating a network.
    // 2. creating a vlan associating network with the pif.
    // We always create
    // 1. a network with VLAN[vlan id in decimal]
    // 2. a vlan associating the network created with the pif to private
    // network.
    Network vlanNetwork = null;
    String name = "VLAN" + Long.toString(tag);
    vlanNetwork = getNetworkByName(conn, name, true);
    if (vlanNetwork == null) { // Can't find it, then create it.
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Creating VLAN network for " + tag + " on host " + _host.ip);
        }
        Network.Record nwr = new Network.Record();
        nwr.nameLabel = name;
        nwr.bridge = name;
        vlanNetwork = Network.create(conn, nwr);
    }
    PIF nPif = PIF.getByUuid(conn, pifUuid);
    PIF.Record nPifr = nPif.getRecord(conn);
    Network.Record vlanNetworkr = vlanNetwork.getRecord(conn);
    if (vlanNetworkr.PIFs != null) {
        // Already wired to the same device on the same host? Then we're done.
        for (PIF pif : vlanNetworkr.PIFs) {
            PIF.Record pifr = pif.getRecord(conn);
            if (pifr.device.equals(nPifr.device) && pifr.host.equals(nPifr.host)) {
                return vlanNetwork;
            }
        }
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Creating VLAN " + tag + " on host " + _host.ip + " on device " + nPifr.device);
    }
    VLAN vlan = VLAN.create(conn, nPif, tag, vlanNetwork);
    VLAN.Record vlanr = vlan.getRecord(conn);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("VLAN is created for " + tag + ". The uuid is " + vlanr.uuid);
    }
    return vlanNetwork;
}
/**
 * Unplugs every PIF of the network, but only when no VIF still uses the
 * network. Failures are logged and swallowed (best effort).
 *
 * @param network the VLAN network to tear down
 */
protected void disableVlanNetwork(Network network) throws InternalErrorException {
    try {
        Connection conn = getConnection();
        if (network.getVIFs(conn).isEmpty()) {
            for (PIF pif : network.getPIFs(conn)) {
                pif.unplug(conn);
            }
        }
    } catch (XenAPIException e) {
        String msg = "Unable to disable VLAN network due to " + e.toString();
        s_logger.warn(msg, e);
    } catch (Exception e) {
        String msg = "Unable to disable VLAN network due to " + e.getMessage();
        s_logger.warn(msg, e);
    }
}
/**
 * Finds the local LVM storage repository attached to this host, plugging its
 * PBD if necessary and rescanning it before returning.
 *
 * @return the local LVM SR, or null when none is found or the query fails
 */
protected SR getLocalLVMSR() {
    Connection conn = getConnection();
    try {
        Map<SR, SR.Record> map = SR.getAllRecords(conn);
        for (Map.Entry<SR, SR.Record> entry : map.entrySet()) {
            SR.Record srRec = entry.getValue();
            // Only LVM-typed SRs qualify.
            if (SRType.LVM.equals(srRec.type)) {
                Set<PBD> pbds = srRec.PBDs;
                if (pbds == null) {
                    continue;
                }
                for (PBD pbd : pbds) {
                    Host host = pbd.getHost(conn);
                    // The SR must be backed by a PBD on this very host.
                    if (!isRefNull(host) && host.getUuid(conn).equals(_host.uuid)) {
                        if (!pbd.getCurrentlyAttached(conn)) {
                            pbd.plug(conn);
                        }
                        SR sr = entry.getKey();
                        sr.scan(conn);
                        return sr;
                    }
                }
            }
        }
    } catch (XenAPIException e) {
        String msg = "Unable to get local LVMSR in host:" + _host.uuid + e.toString();
        s_logger.warn(msg);
    } catch (XmlRpcException e) {
        String msg = "Unable to get local LVMSR in host:" + _host.uuid + e.getCause();
        s_logger.warn(msg);
    }
    return null;
}
/**
 * Builds the startup storage command describing this host's local LVM SR:
 * relabels the SR after its uuid, attaches a descriptive name, and reports
 * capacity/available space as a StoragePoolInfo.
 *
 * @return the populated StartupStorageCommand, or null when there is no local
 *         LVM SR, its capacity is invalid, or the query fails
 */
protected StartupStorageCommand initializeLocalSR() {
    SR lvmsr = getLocalLVMSR();
    if (lvmsr == null) {
        return null;
    }
    try {
        Connection conn = getConnection();
        String lvmuuid = lvmsr.getUuid(conn);
        long cap = lvmsr.getPhysicalSize(conn);
        // A negative size means the SR is unusable.
        if (cap < 0)
            return null;
        long avail = cap - lvmsr.getPhysicalUtilisation(conn);
        lvmsr.setNameLabel(conn, lvmuuid);
        String name = "VMOps local storage pool in host : " + _host.uuid;
        lvmsr.setNameDescription(conn, name);
        Host host = Host.getByUuid(conn, _host.uuid);
        String address = host.getAddress(conn);
        StoragePoolInfo pInfo = new StoragePoolInfo(name, lvmuuid, address, SRType.LVM.toString(), SRType.LVM.toString(), StoragePoolType.LVM, cap, avail);
        StartupStorageCommand cmd = new StartupStorageCommand();
        cmd.setPoolInfo(pInfo);
        cmd.setGuid(_host.uuid);
        cmd.setResourceType(Storage.StorageResourceType.STORAGE_POOL);
        return cmd;
    } catch (XenAPIException e) {
        String msg = "build startupstoragecommand err in host:" + _host.uuid + e.toString();
        s_logger.warn(msg);
    } catch (XmlRpcException e) {
        String msg = "build startupstoragecommand err in host:" + _host.uuid + e.getMessage();
        s_logger.warn(msg);
    }
    return null;
}
/**
 * Builds the periodic ping/status report: verifies the XenServer responds
 * (with one retry after a second), syncs VM states, and — when bridge
 * firewalling is enabled — also attaches network-group rule state.
 *
 * Fix: InterruptedException from the retry sleep was previously swallowed by
 * the generic catch without restoring the thread's interrupt flag; it is now
 * handled explicitly and re-interrupts the thread.
 *
 * @param id the host id this status report is for
 * @return the ping command, or null when the host is unreachable or syncing fails
 */
@Override
public PingCommand getCurrentStatus(long id) {
    try {
        if (!pingxenserver()) {
            Thread.sleep(1000);
            if (!pingxenserver()) {
                s_logger.warn(" can not ping xenserver " + _host.uuid);
                return null;
            }
        }
        HashMap<String, State> newStates = sync();
        if (newStates == null) {
            newStates = new HashMap<String, State>();
        }
        if (!_canBridgeFirewall) {
            return new PingRoutingCommand(getType(), id, newStates);
        } else {
            HashMap<String, Pair<Long, Long>> nwGrpStates = syncNetworkGroups(id);
            return new PingRoutingWithNwGroupsCommand(getType(), id, newStates, nwGrpStates);
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
        s_logger.warn("Unable to get current status", e);
        return null;
    } catch (Exception e) {
        s_logger.warn("Unable to get current status", e);
        return null;
    }
}
/**
 * Pulls per-VM firewall rule logs from the vmops host plugin and parses them
 * into (vmId, seqno) pairs keyed by VM name. Malformed numeric fields map to
 * (-1, -1); malformed entries are skipped.
 *
 * @param id host id, used only for tracing
 * @return VM name → (vm id, rule sequence number)
 */
private HashMap<String, Pair<Long,Long>> syncNetworkGroups(long id) {
    HashMap<String, Pair<Long,Long>> states = new HashMap<String, Pair<Long,Long>>();
    String result = callHostPlugin("vmops", "get_rule_logs_for_vms", "host_uuid", _host.uuid);
    s_logger.trace("syncNetworkGroups: id=" + id + " got: " + result);
    String[] ruleLogs = (result != null) ? result.split(";") : new String[0];
    // Each entry is: vmName,vmID,vmIP,domID,signature,seqno
    for (String entry : ruleLogs) {
        String[] fields = entry.split(",");
        if (fields.length != 6) {
            continue;
        }
        try {
            states.put(fields[0], new Pair<Long,Long>(Long.parseLong(fields[1]), Long.parseLong(fields[5])));
        } catch (NumberFormatException nfe) {
            states.put(fields[0], new Pair<Long,Long>(-1L, -1L));
        }
    }
    return states;
}
/**
 * This resource represents a routing (computing) host.
 */
@Override
public Type getType() {
    return com.cloud.host.Host.Type.Routing;
}
/**
 * Advertises the XenServer tools ISO ("xs-tools.iso"), when present, as a
 * template on the startup storage command. Lookup failures are logged and
 * ignored.
 *
 * @param sscmd the startup storage command to attach the template info to
 */
protected void getPVISO(StartupStorageCommand sscmd) {
    Connection conn = getConnection();
    try {
        Set<VDI> vids = VDI.getByNameLabel(conn, "xs-tools.iso");
        if (vids.isEmpty()) {
            return;
        }
        VDI pvISO = vids.iterator().next();
        TemplateInfo tmplt = new TemplateInfo("xs-tools.iso", pvISO.getUuid(conn), pvISO.getVirtualSize(conn), true);
        Map<String, TemplateInfo> pvISOtmlt = new HashMap<String, TemplateInfo>();
        pvISOtmlt.put("xs-tools", tmplt);
        sscmd.setTemplateInfo(pvISOtmlt);
    } catch (XenAPIException e) {
        s_logger.debug("Can't get xs-tools.iso: " + e.toString());
    } catch (XmlRpcException e) {
        s_logger.debug("Can't get xs-tools.iso: " + e.toString());
    }
}
/**
 * Whether this resource can program firewall rules on the bridge.
 * This base implementation reports no support. (The snake_case name is kept
 * for compatibility with existing overrides/callers.)
 */
protected boolean can_bridge_firewall() {
    return false;
}
/**
 * Discovers and caches this host's facts: CPU count/speed, the systemvm ISO
 * uuid inside the "XenServer Tools" SR, the local gateway, and the private /
 * guest / public / storage networks (falling back to the private network when
 * a more specific one is not configured).
 *
 * @return true when all required information was resolved; false on
 *         recoverable lookup failures
 * @throws IllegalArgumentException  when a configured guest/public network cannot be found
 * @throws CloudRuntimeException     when the tools SR or systemvm ISO is missing
 */
protected boolean getHostInfo() throws IllegalArgumentException{
    Connection conn = getConnection();
    try {
        Host myself = Host.getByUuid(conn, _host.uuid);
        // CPU count; speed is taken from the first reported core.
        Set<HostCpu> hcs = myself.getHostCPUs(conn);
        _host.cpus = hcs.size();
        for (final HostCpu hc : hcs) {
            _host.speed = hc.getSpeed(conn).intValue();
            break;
        }
        // Locate the systemvm ISO inside the (unique) "XenServer Tools" SR.
        Set<SR> srs = SR.getByNameLabel(conn, "XenServer Tools");
        if( srs.size() != 1 ) {
            throw new CloudRuntimeException("There are " + srs.size() + " SRs with name XenServer Tools");
        }
        SR sr = srs.iterator().next();
        sr.scan(conn);
        SR.Record srr = sr.getRecord(conn);
        _host.systemvmisouuid = null;
        // Prefer the premium ISO; fall back to any ISO named "systemvm*".
        for( VDI vdi : srr.VDIs ) {
            VDI.Record vdir = vdi.getRecord(conn);
            if(vdir.nameLabel.contains("systemvm-premium")){
                _host.systemvmisouuid = vdir.uuid;
                break;
            }
        }
        if( _host.systemvmisouuid == null ) {
            for( VDI vdi : srr.VDIs ) {
                VDI.Record vdir = vdi.getRecord(conn);
                if(vdir.nameLabel.contains("systemvm")){
                    _host.systemvmisouuid = vdir.uuid;
                    break;
                }
            }
        }
        if( _host.systemvmisouuid == null ) {
            throw new CloudRuntimeException("can not find systemvmiso");
        }
        String name = "cloud-private";
        if (_privateNetworkName != null) {
            name = _privateNetworkName;
        }
        // Gateway for the management IP, resolved via the vmops plugin.
        _localGateway = callHostPlugin("vmops", "getgateway", "mgmtIP", myself.getAddress(conn));
        if (_localGateway == null || _localGateway.isEmpty()) {
            s_logger.warn("can not get gateway for host :" + _host.uuid);
            return false;
        }
        _canBridgeFirewall = can_bridge_firewall();
        // Private network: use the configured name, or discover it from the
        // management IP's route when that fails.
        Nic privateNic = getLocalNetwork(conn, name);
        if (privateNic == null) {
            s_logger.debug("Unable to find any private network. Trying to determine that by route for host " + _host.ip);
            name = callHostPlugin("vmops", "getnetwork", "mgmtIP", myself.getAddress(conn));
            if (name == null || name.isEmpty()) {
                s_logger.warn("Unable to determine the private network for host " + _host.ip);
                return false;
            }
            _privateNetworkName = name;
            privateNic = getLocalNetwork(conn, name);
            if (privateNic == null) {
                s_logger.warn("Unable to get private network " + name);
                return false;
            }
        } else {
            _privateNetworkName = name;
        }
        _host.privatePif = privateNic.pr.uuid;
        _host.privateNetwork = privateNic.nr.uuid;
        // Guest network defaults to the private network when not configured separately.
        Nic guestNic = null;
        if (_guestNetworkName != null && !_guestNetworkName.equals(_privateNetworkName)) {
            guestNic = getLocalNetwork(conn, _guestNetworkName);
            if (guestNic == null) {
                s_logger.warn("Unable to find guest network " + _guestNetworkName);
                throw new IllegalArgumentException("Unable to find guest network " + _guestNetworkName + " for host " + _host.ip);
            }
        } else {
            guestNic = privateNic;
            _guestNetworkName = _privateNetworkName;
        }
        _host.guestNetwork = guestNic.nr.uuid;
        _host.guestPif = guestNic.pr.uuid;
        // Public network defaults to the guest network when not configured separately.
        Nic publicNic = null;
        if (_publicNetworkName != null && !_publicNetworkName.equals(_guestNetworkName)) {
            publicNic = getLocalNetwork(conn, _publicNetworkName);
            if (publicNic == null) {
                s_logger.warn("Unable to find public network " + _publicNetworkName + " for host " + _host.ip);
                throw new IllegalArgumentException("Unable to find public network " + _publicNetworkName + " for host " + _host.ip);
            }
        } else {
            publicNic = guestNic;
            _publicNetworkName = _guestNetworkName;
        }
        _host.publicPif = publicNic.pr.uuid;
        _host.publicNetwork = publicNic.nr.uuid;
        // Storage networks fall back to the private network when unset/missing.
        Nic storageNic1 = getLocalNetwork(conn, _storageNetworkName1);
        if (storageNic1 == null) {
            storageNic1 = privateNic;
            _storageNetworkName1 = _privateNetworkName;
        }
        _host.storageNetwork1 = storageNic1.nr.uuid;
        _host.storagePif1 = storageNic1.pr.uuid;
        Nic storageNic2 = getLocalNetwork(conn, _storageNetworkName2);
        if (storageNic2 == null) {
            storageNic2 = privateNic;
            _storageNetworkName2 = _privateNetworkName;
        }
        _host.storageNetwork2 = storageNic2.nr.uuid;
        _host.storagePif2 = storageNic2.pr.uuid;
        s_logger.info("Private Network is " + _privateNetworkName + " for host " + _host.ip);
        s_logger.info("Guest Network is " + _guestNetworkName + " for host " + _host.ip);
        s_logger.info("Public Network is " + _publicNetworkName + " for host " + _host.ip);
        s_logger.info("Storage Network 1 is " + _storageNetworkName1 + " for host " + _host.ip);
        s_logger.info("Storage Network 2 is " + _storageNetworkName2 + " for host " + _host.ip);
        return true;
    } catch (XenAPIException e) {
        s_logger.warn("Unable to get host information for " + _host.ip, e);
        return false;
    } catch (XmlRpcException e) {
        s_logger.warn("Unable to get host information for " + _host.ip, e);
        return false;
    }
}
/**
 * Creates (or reuses) the link-local network used by system VMs and makes sure
 * dom0 has a plugged VIF on it, tagged via other-config "nameLabel" =
 * "link_local_network_vif" so it can be found again. Finally asks the vmops
 * plugin to assign the link-local IP to the bridge and records the network
 * uuid on the host. Failures are logged and swallowed.
 */
private void setupLinkLocalNetwork() {
    try {
        Network.Record rec = new Network.Record();
        Connection conn = getConnection();
        Set<Network> networks = Network.getByNameLabel(conn, _linkLocalPrivateNetworkName);
        Network linkLocal = null;
        if (networks.size() == 0) {
            // No link-local network yet: create one carrying its IP range in other-config.
            rec.nameDescription = "link local network used by system vms";
            rec.nameLabel = _linkLocalPrivateNetworkName;
            Map<String, String> configs = new HashMap<String, String>();
            configs.put("ip_begin", NetUtils.getLinkLocalGateway());
            configs.put("ip_end", NetUtils.getLinkLocalIpEnd());
            configs.put("netmask", NetUtils.getLinkLocalNetMask());
            rec.otherConfig = configs;
            linkLocal = Network.create(conn, rec);
        } else {
            linkLocal = networks.iterator().next();
        }
        /* Make sure there is a physical bridge on this network */
        VIF dom0vif = null;
        Pair<VM, VM.Record> vm = getControlDomain(conn);
        VM dom0 = vm.first();
        // Look for a previously created link-local VIF on dom0 (tagged via other-config).
        Set<VIF> vifs = dom0.getVIFs(conn);
        if (vifs.size() != 0) {
            for (VIF vif : vifs) {
                Map<String, String> otherConfig = vif.getOtherConfig(conn);
                if (otherConfig != null) {
                    String nameLabel = otherConfig.get("nameLabel");
                    if ((nameLabel != null) && nameLabel.equalsIgnoreCase("link_local_network_vif")) {
                        dom0vif = vif;
                    }
                }
            }
        }
        /* create temp VIF0 */
        if (dom0vif == null) {
            s_logger.debug("Can't find a vif on dom0 for link local, creating a new one");
            VIF.Record vifr = new VIF.Record();
            vifr.VM = dom0;
            vifr.device = getLowestAvailableVIFDeviceNum(dom0);
            if (vifr.device == null) {
                s_logger.debug("Failed to create link local network, no vif available");
                return;
            }
            Map<String, String> config = new HashMap<String, String>();
            config.put("nameLabel", "link_local_network_vif");
            vifr.otherConfig = config;
            vifr.MAC = "FE:FF:FF:FF:FF:FF";
            vifr.network = linkLocal;
            dom0vif = VIF.create(conn, vifr);
            dom0vif.plug(conn);
        } else {
            s_logger.debug("already have a vif on dom0 for link local network");
            if (!dom0vif.getCurrentlyAttached(conn)) {
                dom0vif.plug(conn);
            }
        }
        // Assign the link-local IP to the bridge and remember the network uuid.
        String brName = linkLocal.getBridge(conn);
        callHostPlugin("vmops", "setLinkLocalIP", "brName", brName);
        _host.linkLocalNetwork = linkLocal.getUuid(conn);
    } catch (XenAPIException e) {
        s_logger.warn("Unable to create local link network", e);
    } catch (XmlRpcException e) {
        // Best effort: log and continue without the link-local network.
        s_logger.warn("Unable to create local link network", e);
    }
}
/**
 * Moves the management interface from {@code src} to {@code dest}: configures
 * dest with src's IP settings, repoints management to it, waits (up to 10
 * attempts, 10s apart) for the host to respond again, then strips the IP
 * configuration from src.
 *
 * Fixes: the retry counter was never incremented, so the loop could spin
 * forever if the host never came back; and InterruptedException did not
 * restore the thread's interrupt flag.
 *
 * @return true on success; false when the host never responded or the wait
 *         was interrupted
 */
protected boolean transferManagementNetwork(Connection conn, Host host, PIF src, PIF.Record spr, PIF dest) throws XmlRpcException, XenAPIException {
    dest.reconfigureIp(conn, spr.ipConfigurationMode, spr.IP, spr.netmask, spr.gateway, spr.DNS);
    Host.managementReconfigure(conn, dest);
    String hostUuid = null;
    int count = 0;
    while (count < 10) {
        count++;
        try {
            Thread.sleep(10000);
            hostUuid = host.getUuid(conn);
            if (hostUuid != null) {
                break;
            }
        } catch (XmlRpcException e) {
            s_logger.debug("Waiting for host to come back: " + e.getMessage());
        } catch (XenAPIException e) {
            s_logger.debug("Waiting for host to come back: " + e.getMessage());
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            s_logger.debug("Gotta run");
            return false;
        }
    }
    if (hostUuid == null) {
        s_logger.warn("Unable to transfer the management network from " + spr.uuid);
        return false;
    }
    // Management now lives on dest; remove the IP configuration from src.
    src.reconfigureIp(conn, IpConfigurationMode.NONE, null, null, null, null);
    return true;
}
/**
 * Startup entry point for this resource: pushes setup scripts to the host,
 * gathers host facts, cleans up stale VMs/mounts, takes an initial VM state
 * snapshot, and reports routing (and, when present, local storage) startup
 * commands to the management server.
 *
 * @return the startup commands, or null when host information could not be read
 */
@Override
public StartupCommand[] initialize() throws IllegalArgumentException{
    disconnected();
    setupServer();
    if (!getHostInfo()) {
        s_logger.warn("Unable to get host information for " + _host.ip);
        return null;
    }
    destroyStoppedVm();
    StartupRoutingCommand cmd = new StartupRoutingCommand();
    fillHostInfo(cmd);
    cleanupDiskMounts();
    Map<String, State> changes = null;
    // Snapshot VM states atomically with clearing the cached map.
    synchronized (_vms) {
        _vms.clear();
        changes = sync();
    }
    cmd.setHypervisorType(HypervisorType.XenServer);
    cmd.setChanges(changes);
    cmd.setCluster(_cluster);
    StartupStorageCommand sscmd = initializeLocalSR();
    if (sscmd != null) {
        /* report pv driver iso */
        getPVISO(sscmd);
        return new StartupCommand[] { cmd, sscmd };
    }
    return new StartupCommand[] { cmd };
}
/**
 * Returns the uuid of the (single) XenServer resource pool this host belongs to.
 *
 * @return the pool uuid
 * @throws CloudRuntimeException when the pool cannot be queried
 */
protected String getPoolUuid() {
    Connection conn = getConnection();
    try {
        Map<Pool, Pool.Record> pools = Pool.getAllRecords(conn);
        assert (pools.size() == 1) : "Tell me how pool size can be " + pools.size();
        return pools.values().iterator().next().uuid;
    } catch (XenAPIException e) {
        throw new CloudRuntimeException("Unable to get pool ", e);
    } catch (XmlRpcException e) {
        throw new CloudRuntimeException("Unable to get pool ", e);
    }
}
/**
 * Pushes the management-server script/patch bundle to the XenServer host over
 * SCP and tags the host with the current software version so the push happens
 * only once per version. Each patch file is a properties file mapping a file
 * name to "relativeDir,permission,destination" (or shorter variants).
 *
 * Fix: the FileInputStream used to load each properties file was never closed
 * (resource leak); it is now closed in a finally block.
 *
 * @throws CloudRuntimeException on any setup failure
 */
protected void setupServer() {
    Connection conn = getConnection();
    String version = CitrixResourceBase.class.getPackage().getImplementationVersion();
    try {
        Host host = Host.getByUuid(conn, _host.uuid);
        /* enable host in case it is disabled somehow */
        host.enable(conn);
        /* push patches to XenServer */
        Host.Record hr = host.getRecord(conn);
        Iterator<String> it = hr.tags.iterator();
        while (it.hasNext()) {
            String tag = it.next();
            if (tag.startsWith("vmops-version-")) {
                if (tag.contains(version)) {
                    // Already set up for this version; nothing to push.
                    s_logger.info(logX(host, "Host " + hr.address + " is already setup."));
                    return;
                } else {
                    // Stale version tag: drop it so the fresh tag can be added below.
                    it.remove();
                }
            }
        }
        com.trilead.ssh2.Connection sshConnection = new com.trilead.ssh2.Connection(hr.address, 22);
        try {
            sshConnection.connect(null, 60000, 60000);
            if (!sshConnection.authenticateWithPassword(_username, _password)) {
                throw new CloudRuntimeException("Unable to authenticate");
            }
            SCPClient scp = new SCPClient(sshConnection);
            String path = _patchPath.substring(0, _patchPath.lastIndexOf(File.separator) + 1);
            List<File> files = getPatchFiles();
            if( files == null || files.isEmpty() ) {
                throw new CloudRuntimeException("Can not find patch file");
            }
            for( File file :files) {
                Properties props = new Properties();
                FileInputStream propsStream = new FileInputStream(file);
                try {
                    props.load(propsStream);
                } finally {
                    propsStream.close();
                }
                for (Map.Entry<Object, Object> entry : props.entrySet()) {
                    String k = (String) entry.getKey();
                    String v = (String) entry.getValue();
                    assert (k != null && k.length() > 0 && v != null && v.length() > 0) : "Problems with " + k + "=" + v;
                    String[] tokens = v.split(",");
                    String f = null;
                    // Resolve the local source path: absolute, home-relative, or patch-dir relative.
                    if (tokens.length == 3 && tokens[0].length() > 0) {
                        if (tokens[0].startsWith("/")) {
                            f = tokens[0];
                        } else if (tokens[0].startsWith("~")) {
                            String homedir = System.getenv("HOME");
                            f = homedir + tokens[0].substring(1) + k;
                        } else {
                            f = path + tokens[0] + '/' + k;
                        }
                    } else {
                        f = path + k;
                    }
                    String d = tokens[tokens.length - 1];
                    f = f.replace('/', File.separatorChar);
                    // Permission defaults to 0755 unless specified in the value.
                    String p = "0755";
                    if (tokens.length == 3) {
                        p = tokens[1];
                    } else if (tokens.length == 2) {
                        p = tokens[0];
                    }
                    if (!new File(f).exists()) {
                        s_logger.warn("We cannot locate " + f);
                        continue;
                    }
                    if (s_logger.isDebugEnabled()) {
                        s_logger.debug("Copying " + f + " to " + d + " on " + hr.address + " with permission " + p);
                    }
                    scp.put(f, d, p);
                }
            }
        } catch (IOException e) {
            throw new CloudRuntimeException("Unable to setup the server correctly", e);
        } finally {
            sshConnection.close();
        }
        if (!setIptables()) {
            s_logger.warn("set xenserver Iptable failed");
        }
        hr.tags.add("vmops-version-" + version);
        host.setTags(conn, hr.tags);
    } catch (XenAPIException e) {
        String msg = "Xen setup failed due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException("Unable to get host information " + e.toString(), e);
    } catch (XmlRpcException e) {
        String msg = "Xen setup failed due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException("Unable to get host information ", e);
    }
}
/**
 * Returns the patch property files to push to the host — here, the single
 * file at the configured patch path.
 *
 * @return a mutable list containing the patch file
 */
protected List<File> getPatchFiles() {
    List<File> files = new ArrayList<File>();
    files.add(new File(_patchPath));
    return files;
}
/**
 * Finds an SR with the given name label that has a PBD on this host, plugging
 * the PBD when it is not attached.
 *
 * NOTE(review): the outer loop keeps scanning after a match, so when several
 * SRs share the label the LAST matching one is returned — confirm whether a
 * first-match break was intended.
 *
 * @param name SR name label
 * @return a matching SR backed by this host, or null
 */
protected SR getSRByNameLabelandHost(String name) throws BadServerResponse, XenAPIException, XmlRpcException {
    Connection conn = getConnection();
    Set<SR> srs = SR.getByNameLabel(conn, name);
    SR ressr = null;
    for (SR sr : srs) {
        Set<PBD> pbds;
        pbds = sr.getPBDs(conn);
        for (PBD pbd : pbds) {
            PBD.Record pbdr = pbd.getRecord(conn);
            if (pbdr.host != null && pbdr.host.getUuid(conn).equals(_host.uuid)) {
                // Ensure the SR is actually attached on this host.
                if (!pbdr.currentlyAttached) {
                    pbd.plug(conn);
                }
                ressr = sr;
                break;
            }
        }
    }
    return ressr;
}
/**
 * Reports capacity and utilisation of the SR whose name label equals the
 * command's storage id. The SR is rescanned before reading the sizes.
 *
 * @param cmd the stats request
 * @return stats on success, or an error answer when the SR is missing,
 *         ambiguous, or unreachable
 */
protected GetStorageStatsAnswer execute(final GetStorageStatsCommand cmd) {
    try {
        Connection conn = getConnection();
        Set<SR> srs = SR.getByNameLabel(conn, cmd.getStorageId());
        if (srs.size() != 1) {
            String msg = "There are " + srs.size() + " storageid: " + cmd.getStorageId();
            s_logger.warn(msg);
            return new GetStorageStatsAnswer(cmd, msg);
        }
        SR sr = srs.iterator().next();
        // Refresh the SR before reading its sizes.
        sr.scan(conn);
        long capacity = sr.getPhysicalSize(conn);
        long used = sr.getPhysicalUtilisation(conn);
        return new GetStorageStatsAnswer(cmd, capacity, used);
    } catch (XenAPIException e) {
        String msg = "GetStorageStats Exception:" + e.toString() + "host:" + _host.uuid + "storageid: " + cmd.getStorageId();
        s_logger.warn(msg);
        return new GetStorageStatsAnswer(cmd, msg);
    } catch (XmlRpcException e) {
        String msg = "GetStorageStats Exception:" + e.getMessage() + "host:" + _host.uuid + "storageid: " + cmd.getStorageId();
        s_logger.warn(msg);
        return new GetStorageStatsAnswer(cmd, msg);
    }
}
/**
 * Best-effort plug of a PBD. The uuid and host address are fetched first so
 * the failure log carries as much context as could be gathered; any exception
 * (including from those lookups) is logged and swallowed.
 */
private void pbdPlug(Connection conn, PBD pbd) {
    String pbdUuid = "";
    String hostAddr = "";
    try {
        pbdUuid = pbd.getUuid(conn);
        hostAddr = pbd.getHost(conn).getAddress(conn);
        pbd.plug(conn);
    } catch (Exception e) {
        String msg = "PBD " + pbdUuid + " is not attached! and PBD plug failed due to "
        + e.toString() + ". Please check this PBD in host : " + hostAddr;
        s_logger.warn(msg, e);
    }
}
/**
 * Verifies an SR is usable: every relevant PBD must exist and be attached.
 * For a shared SR, each host in the pool must have an attached PBD — missing
 * ones are created (cloned from an existing PBD record) and plugged. For a
 * local SR, all of its PBDs are plugged if detached.
 *
 * @param sr the storage repository to check
 * @return true when the SR checks out, false on any failure
 */
protected boolean checkSR(SR sr) {
    try {
        Connection conn = getConnection();
        SR.Record srr = sr.getRecord(conn);
        Set<PBD> pbds = sr.getPBDs(conn);
        if (pbds.size() == 0) {
            String msg = "There is no PBDs for this SR: " + srr.nameLabel + " on host:" + _host.uuid;
            s_logger.warn(msg);
            return false;
        }
        Set<Host> hosts = null;
        if (srr.shared) {
            hosts = Host.getAll(conn);
            for (Host host : hosts) {
                boolean found = false;
                for (PBD pbd : pbds) {
                    if (host.equals(pbd.getHost(conn))) {
                        PBD.Record pbdr = pbd.getRecord(conn);
                        if (!pbdr.currentlyAttached) {
                            pbdPlug(conn, pbd);
                        }
                        // Matched PBDs are removed from the working set so later
                        // hosts don't re-check them (safe: we break right after).
                        pbds.remove(pbd);
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    // This host has no PBD yet: clone one from an existing record.
                    PBD.Record pbdr = srr.PBDs.iterator().next().getRecord(conn);
                    pbdr.host = host;
                    pbdr.uuid = "";
                    PBD pbd = PBD.create(conn, pbdr);
                    pbdPlug(conn, pbd);
                }
            }
        } else {
            for (PBD pbd : pbds) {
                PBD.Record pbdr = pbd.getRecord(conn);
                if (!pbdr.currentlyAttached) {
                    pbdPlug(conn, pbd);
                }
            }
        }
    } catch (Exception e) {
        String msg = "checkSR failed host:" + _host.uuid + " due to " + e.toString();
        s_logger.warn(msg);
        return false;
    }
    return true;
}
/**
 * Attaches/validates the storage pool's SR and answers with its capacity and
 * available space.
 *
 * Fixes: capacity is now validated before being used to derive the available
 * space (avoiding a pointless extra XenAPI call on invalid pools), and the
 * generic-exception message no longer mislabels arbitrary exceptions as
 * "XenAPIException".
 *
 * @param cmd the modify-pool request
 * @return an answer with capacity/available on success, or a failure answer
 */
protected Answer execute(ModifyStoragePoolCommand cmd) {
    StoragePoolVO pool = cmd.getPool();
    StorageFilerTO poolTO = new StorageFilerTO(pool);
    try {
        Connection conn = getConnection();
        SR sr = getStorageRepository(conn, poolTO);
        long capacity = sr.getPhysicalSize(conn);
        if (capacity == -1) {
            String msg = "Pool capacity is -1! pool: " + pool.getName() + pool.getHostAddress() + pool.getPath();
            s_logger.warn(msg);
            return new Answer(cmd, false, msg);
        }
        long available = capacity - sr.getPhysicalUtilisation(conn);
        Map<String, TemplateInfo> tInfo = new HashMap<String, TemplateInfo>();
        ModifyStoragePoolAnswer answer = new ModifyStoragePoolAnswer(cmd, capacity, available, tInfo);
        return answer;
    } catch (XenAPIException e) {
        String msg = "ModifyStoragePoolCommand XenAPIException:" + e.toString() + " host:" + _host.uuid + " pool: " + pool.getName() + pool.getHostAddress() + pool.getPath();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    } catch (Exception e) {
        String msg = "ModifyStoragePoolCommand Exception:" + e.getMessage() + " host:" + _host.uuid + " pool: " + pool.getName() + pool.getHostAddress() + pool.getPath();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
}
/**
 * Detaches/removes the SR backing the given storage pool.
 *
 * Fix: the failure message no longer mislabels arbitrary exceptions as
 * "XenAPIException" — the catch here is generic.
 *
 * @param cmd the delete-pool request
 * @return success answer, or a failure answer with the error context
 */
protected Answer execute(DeleteStoragePoolCommand cmd) {
    StoragePoolVO pool = cmd.getPool();
    StorageFilerTO poolTO = new StorageFilerTO(pool);
    try {
        Connection conn = getConnection();
        SR sr = getStorageRepository(conn, poolTO);
        removeSR(sr);
        Answer answer = new Answer(cmd, true, "success");
        return answer;
    } catch (Exception e) {
        String msg = "DeleteStoragePoolCommand Exception:" + e.getMessage() + " host:" + _host.uuid + " pool: " + pool.getName() + pool.getHostAddress() + pool.getPath();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
}
/**
 * Obtains a pooled XenAPI connection to this host, authenticated with the
 * configured credentials and request timeout.
 */
public Connection getConnection() {
    return _connPool.connect(_host.uuid, _host.pool, _host.ip, _username, _password, _wait);
}
/**
 * Populates the startup routing command with this host's details: network
 * device names, capabilities, CPU/memory figures (reserving dom0's memory and
 * a 1/64 virtualization overhead), and the IP/MAC/netmask of the private and
 * storage interfaces.
 *
 * @param cmd the startup command to fill in
 * @throws CloudRuntimeException when the XenAPI queries fail
 */
protected void fillHostInfo(StartupRoutingCommand cmd) {
    long speed = 0;
    int cpus = 0;
    long ram = 0;
    Connection conn = getConnection();
    long dom0Ram = 0;
    final StringBuilder caps = new StringBuilder();
    try {
        Host host = Host.getByUuid(conn, _host.uuid);
        Host.Record hr = host.getRecord(conn);
        Map<String, String> details = cmd.getHostDetails();
        if (details == null) {
            details = new HashMap<String, String>();
        }
        // Advertise the configured network device names, when set.
        if (_privateNetworkName != null) {
            details.put("private.network.device", _privateNetworkName);
        }
        if (_publicNetworkName != null) {
            details.put("public.network.device", _publicNetworkName);
        }
        if (_guestNetworkName != null) {
            details.put("guest.network.device", _guestNetworkName);
        }
        details.put("can_bridge_firewall", Boolean.toString(_canBridgeFirewall));
        cmd.setHostDetails(details);
        cmd.setName(hr.nameLabel);
        cmd.setGuid(_host.uuid);
        cmd.setDataCenter(Long.toString(_dcId));
        // Join host capabilities with " , " separators (trailing one trimmed below).
        for (final String cap : hr.capabilities) {
            if (cap.length() > 0) {
                caps.append(cap).append(" , ");
            }
        }
        if (caps.length() > 0) {
            caps.delete(caps.length() - 3, caps.length());
        }
        cmd.setCaps(caps.toString());
        cmd.setSpeed(_host.speed);
        cmd.setCpus(_host.cpus);
        long free = 0;
        HostMetrics hm = host.getMetrics(conn);
        ram = hm.getMemoryTotal(conn);
        free = hm.getMemoryFree(conn);
        // dom0's dynamic-max memory is reserved out of the reported total.
        Set<VM> vms = host.getResidentVMs(conn);
        for (VM vm : vms) {
            if (vm.getIsControlDomain(conn)) {
                dom0Ram = vm.getMemoryDynamicMax(conn);
                break;
            }
        }
        // assume the memory Virtualization overhead is 1/64
        ram = (ram - dom0Ram) * 63/64;
        cmd.setMemory(ram);
        cmd.setDom0MinMemory(dom0Ram);
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Total Ram: " + ram + " Free Ram: " + free + " dom0 Ram: " + dom0Ram);
        }
        // Private interface addressing.
        PIF pif = PIF.getByUuid(conn, _host.privatePif);
        PIF.Record pifr = pif.getRecord(conn);
        if (pifr.IP != null && pifr.IP.length() > 0) {
            cmd.setPrivateIpAddress(pifr.IP);
            cmd.setPrivateMacAddress(pifr.MAC);
            cmd.setPrivateNetmask(pifr.netmask);
        }
        // First storage interface addressing.
        pif = PIF.getByUuid(conn, _host.storagePif1);
        pifr = pif.getRecord(conn);
        if (pifr.IP != null && pifr.IP.length() > 0) {
            cmd.setStorageIpAddress(pifr.IP);
            cmd.setStorageMacAddress(pifr.MAC);
            cmd.setStorageNetmask(pifr.netmask);
        }
        // Optional second storage interface addressing.
        if (_host.storagePif2 != null) {
            pif = PIF.getByUuid(conn, _host.storagePif2);
            pifr = pif.getRecord(conn);
            if (pifr.IP != null && pifr.IP.length() > 0) {
                cmd.setStorageIpAddressDeux(pifr.IP);
                cmd.setStorageMacAddressDeux(pifr.MAC);
                cmd.setStorageNetmaskDeux(pifr.netmask);
            }
        }
        Map<String, String> configs = hr.otherConfig;
        cmd.setIqn(configs.get("iscsi_iqn"));
        cmd.setPod(_pod);
        cmd.setVersion(CitrixResourceBase.class.getPackage().getImplementationVersion());
    } catch (final XmlRpcException e) {
        throw new CloudRuntimeException("XML RPC Exception" + e.getMessage(), e);
    } catch (XenAPIException e) {
        throw new CloudRuntimeException("XenAPIException" + e.toString(), e);
    }
}
// No-arg constructor; all configuration happens later in configure(String, Map).
public CitrixResourceBase() {
}
protected String getPatchPath() {
    // Location (relative to the scripts root) of the patch files for this
    // hypervisor flavor; subclasses override to point at their own directory.
    final String patchDir = "scripts/vm/hypervisor/xenserver/xcpserver";
    return patchDir;
}
@Override
public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
    // Reads the agent parameter map and populates the host identity, credential
    // and network-device fields. Throws ConfigurationException when a mandatory
    // entry (pod, host address, username, password, uuid, patch files) is
    // missing or when the zone id is not numeric.
    _name = name;
    _host.uuid = (String) params.get("guid");
    try {
        _dcId = Long.parseLong((String) params.get("zone"));
    } catch (NumberFormatException e) {
        // ConfigurationException has no cause-taking constructor; the offending
        // value is echoed in the message instead.
        throw new ConfigurationException("Unable to get the zone " + params.get("zone"));
    }
    // The agent name mirrors the host uuid once it is known.
    _name = _host.uuid;
    _host.ip = (String) params.get("url");
    _host.pool = (String) params.get("pool");
    _username = (String) params.get("username");
    _password = (String) params.get("password");
    _pod = (String) params.get("pod");
    _cluster = (String) params.get("cluster");
    _privateNetworkName = (String) params.get("private.network.device");
    _publicNetworkName = (String) params.get("public.network.device");
    _guestNetworkName = (String) params.get("guest.network.device");
    _linkLocalPrivateNetworkName = (String) params.get("private.linkLocal.device");
    if (_linkLocalPrivateNetworkName == null) {
        _linkLocalPrivateNetworkName = "cloud_link_local_network";
    }
    _storageNetworkName1 = (String) params.get("storage.network.device1");
    if (_storageNetworkName1 == null) {
        _storageNetworkName1 = "cloud-stor1";
    }
    _storageNetworkName2 = (String) params.get("storage.network.device2");
    if (_storageNetworkName2 == null) {
        _storageNetworkName2 = "cloud-stor2";
    }
    String value = (String) params.get("wait");
    _wait = NumbersUtil.parseInt(value, 1800); // default operation timeout: 1800s
    // Validate mandatory settings up front so misconfiguration fails fast.
    if (_pod == null) {
        throw new ConfigurationException("Unable to get the pod");
    }
    if (_host.ip == null) {
        throw new ConfigurationException("Unable to get the host address");
    }
    if (_username == null) {
        throw new ConfigurationException("Unable to get the username");
    }
    if (_password == null) {
        throw new ConfigurationException("Unable to get the password");
    }
    if (_host.uuid == null) {
        throw new ConfigurationException("Unable to get the uuid");
    }
    String patchPath = getPatchPath();
    _patchPath = Script.findScript(patchPath, "patch");
    if (_patchPath == null) {
        throw new ConfigurationException("Unable to find all of patch files for xenserver");
    }
    // The storage layer may be injected directly, or instantiated from a
    // configured class name (defaulting to JavaStorageLayer).
    _storage = (StorageLayer) params.get(StorageLayer.InstanceConfigKey);
    if (_storage == null) {
        value = (String) params.get(StorageLayer.ClassConfigKey);
        if (value == null) {
            value = "com.cloud.storage.JavaStorageLayer";
        }
        try {
            Class<?> clazz = Class.forName(value);
            _storage = (StorageLayer) ComponentLocator.inject(clazz);
            _storage.configure("StorageLayer", params);
        } catch (ClassNotFoundException e) {
            throw new ConfigurationException("Unable to find class " + value);
        }
    }
    return true;
}
void destroyVDI(VDI vdi) {
    // Best-effort destroy of a VDI. Failures are logged and swallowed because
    // callers use this on cleanup paths where the VDI may already be gone.
    try {
        Connection conn = getConnection();
        vdi.destroy(conn);
    } catch (Exception e) {
        String msg = "destroy VDI failed due to " + e.toString();
        // Pass the exception so the stack trace is preserved in the log
        // (previously only the message was logged).
        s_logger.warn(msg, e);
    }
}
@Override
public CreateAnswer execute(CreateCommand cmd) {
    // Creates a data volume on the given storage pool: either clones a template
    // VDI (when a template URL is supplied) or creates a fresh, empty VDI sized
    // from the command (if non-zero) or from the disk profile.
    StorageFilerTO pool = cmd.getPool();
    DiskProfile dskch = cmd.getDiskCharacteristics();
    VDI vdi = null;
    Connection conn = getConnection();
    try {
        SR poolSr = getStorageRepository(conn, pool);
        if (cmd.getTemplateUrl() != null) {
            // Clone the template VDI and rename the clone after the new disk.
            VDI tmpltvdi = getVDIbyUuid(cmd.getTemplateUrl());
            vdi = tmpltvdi.createClone(conn, new HashMap<String, String>());
            vdi.setNameLabel(conn, dskch.getName());
        } else {
            VDI.Record newVdiRec = new VDI.Record();
            newVdiRec.nameLabel = dskch.getName();
            newVdiRec.SR = poolSr;
            newVdiRec.type = Types.VdiType.USER;
            // An explicit size on the command overrides the disk profile size.
            if (cmd.getSize() != 0) {
                newVdiRec.virtualSize = cmd.getSize();
            } else {
                newVdiRec.virtualSize = dskch.getSize();
            }
            vdi = VDI.create(conn, newVdiRec);
        }
        VDI.Record vdir = vdi.getRecord(conn);
        s_logger.debug("Successfully created VDI for " + cmd + ". Uuid = " + vdir.uuid);
        VolumeTO vol = new VolumeTO(cmd.getVolumeId(), dskch.getType(), Storage.StorageResourceType.STORAGE_POOL, pool.getType(), vdir.nameLabel, pool.getPath(), vdir.uuid,
                vdir.virtualSize);
        return new CreateAnswer(cmd, vol);
    } catch (Exception e) {
        s_logger.warn("Unable to create volume; Pool=" + pool + "; Disk: " + dskch, e);
        return new CreateAnswer(cmd, e);
    }
}
protected SR getISOSRbyVmName(String vmName) {
    // Locates the per-VM ISO SR labeled "<vmName>-ISO".
    // Returns null when the SR is absent, the label is ambiguous, or lookup fails.
    Connection conn = getConnection();
    try {
        Set<SR> srs = SR.getByNameLabel(conn, vmName + "-ISO");
        int count = srs.size();
        if (count == 1) {
            return srs.iterator().next();
        }
        if (count > 1) {
            String msg = "getIsoSRbyVmName failed due to there are more than 1 SR having same Label";
            s_logger.warn(msg);
        }
    } catch (XenAPIException e) {
        s_logger.warn("getIsoSRbyVmName failed due to " + e.toString(), e);
    } catch (Exception e) {
        s_logger.warn("getIsoSRbyVmName failed due to " + e.getMessage(), e);
    }
    return null;
}
protected SR createNfsSRbyURI(URI uri, boolean shared) {
    // Creates an NFS SR for the given nfs://host/path URI and scans it.
    // When 'shared' is false, any stale non-shared user NFS SR with the same
    // deterministic name label is removed first so the create cannot collide.
    // Throws CloudRuntimeException on any failure (including no attached PBD).
    try {
        Connection conn = getConnection();
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Creating a " + (shared ? "shared SR for " : "not shared SR for ") + uri);
        }
        Map<String, String> deviceConfig = new HashMap<String, String>();
        String path = uri.getPath();
        path = path.replace("//", "/");
        deviceConfig.put("server", uri.getHost());
        deviceConfig.put("serverpath", path);
        // Deterministic name derived from host+path so repeated mounts of the
        // same export reuse one label. NOTE(review): getBytes() uses the
        // platform charset; kept as-is since changing it would change the
        // generated names across hosts.
        String name = UUID.nameUUIDFromBytes((uri.getHost() + path).getBytes()).toString();
        if (!shared) {
            Set<SR> srs = SR.getByNameLabel(conn, name);
            for (SR sr : srs) {
                SR.Record record = sr.getRecord(conn);
                if (SRType.NFS.equals(record.type) && record.contentType.equals("user") && !record.shared) {
                    removeSRSync(sr);
                }
            }
        }
        Host host = Host.getByUuid(conn, _host.uuid);
        SR sr = SR.create(conn, host, deviceConfig, Long.valueOf(0), name, uri.getHost() + uri.getPath(), SRType.NFS.toString(), "user", shared, new HashMap<String, String>());
        if (!checkSR(sr)) {
            throw new Exception("no attached PBD");
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug(logX(sr, "Created a SR; UUID is " + sr.getUuid(conn) + " device config is " + deviceConfig));
        }
        sr.scan(conn);
        return sr;
    } catch (XenAPIException e) {
        String msg = "Can not create second storage SR mountpoint: " + uri.getHost() + uri.getPath() + " due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    } catch (Exception e) {
        String msg = "Can not create second storage SR mountpoint: " + uri.getHost() + uri.getPath() + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    }
}
protected SR createIsoSRbyURI(URI uri, String vmName, boolean shared) {
    // Creates an ISO-type SR mounted from the given NFS location, labels it
    // "<vmName>-ISO" and scans it. Throws CloudRuntimeException on failure.
    try {
        Connection conn = getConnection();
        Map<String, String> deviceConfig = new HashMap<String, String>();
        String path = uri.getPath();
        path = path.replace("//", "/");
        deviceConfig.put("location", uri.getHost() + ":" + uri.getPath());
        Host host = Host.getByUuid(conn, _host.uuid);
        // Long.valueOf replaces the deprecated new Long(0) constructor.
        SR sr = SR.create(conn, host, deviceConfig, Long.valueOf(0), uri.getHost() + uri.getPath(), "iso", "iso", "iso", shared, new HashMap<String, String>());
        sr.setNameLabel(conn, vmName + "-ISO");
        sr.setNameDescription(conn, deviceConfig.get("location"));
        sr.scan(conn);
        return sr;
    } catch (XenAPIException e) {
        String msg = "createIsoSRbyURI failed! mountpoint: " + uri.getHost() + uri.getPath() + " due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    } catch (Exception e) {
        String msg = "createIsoSRbyURI failed! mountpoint: " + uri.getHost() + uri.getPath() + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    }
}
protected VDI getVDIbyLocationandSR(String loc, SR sr) {
    // Scans the SR for a VDI whose location begins with the given prefix;
    // returns null when none matches, wraps lookup errors in CloudRuntimeException.
    Connection conn = getConnection();
    try {
        for (VDI candidate : sr.getVDIs(conn)) {
            String location = candidate.getLocation(conn);
            if (location.startsWith(loc)) {
                return candidate;
            }
        }
        String msg = "can not getVDIbyLocationandSR " + loc;
        s_logger.warn(msg);
        return null;
    } catch (XenAPIException e) {
        String msg = "getVDIbyLocationandSR exception " + loc + " due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    } catch (Exception e) {
        String msg = "getVDIbyLocationandSR exception " + loc + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    }
}
protected VDI getVDIbyUuid(String uuid) {
    // Resolves a VDI handle from its uuid; lookup failures are logged and
    // rethrown as CloudRuntimeException.
    try {
        return VDI.getByUuid(getConnection(), uuid);
    } catch (XenAPIException e) {
        String msg = "VDI getByUuid for uuid: " + uuid + " failed due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    } catch (Exception e) {
        String msg = "VDI getByUuid for uuid: " + uuid + " failed due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    }
}
protected SR getIscsiSR(StorageFilerTO pool) {
    // Finds or creates an LVM-over-iSCSI SR for the given pool. The pool path
    // must have the form /targetIQN/LUN. Work is serialized per pool by
    // locking on the interned uuid string so concurrent attaches cannot race.
    Connection conn = getConnection();
    synchronized (pool.getUuid().intern()) {
        Map<String, String> deviceConfig = new HashMap<String, String>();
        try {
            String target = pool.getHost();
            String path = pool.getPath();
            if (path.endsWith("/")) {
                path = path.substring(0, path.length() - 1);
            }
            // split("/") on "/IQN/LUN" yields ["", IQN, LUN] — exactly 3 parts.
            String tmp[] = path.split("/");
            if (tmp.length != 3) {
                String msg = "Wrong iscsi path " + pool.getPath() + " it should be /targetIQN/LUN";
                s_logger.warn(msg);
                throw new CloudRuntimeException(msg);
            }
            String targetiqn = tmp[1].trim();
            String lunid = tmp[2].trim();
            String scsiid = "";
            // Reuse an existing healthy SR whose PBD device-config matches
            // the same target / IQN / LUN triple.
            Set<SR> srs = SR.getByNameLabel(conn, pool.getUuid());
            for (SR sr : srs) {
                if (!SRType.LVMOISCSI.equals(sr.getType(conn)))
                    continue;
                Set<PBD> pbds = sr.getPBDs(conn);
                if (pbds.isEmpty())
                    continue;
                PBD pbd = pbds.iterator().next();
                Map<String, String> dc = pbd.getDeviceConfig(conn);
                if (dc == null)
                    continue;
                if (dc.get("target") == null)
                    continue;
                if (dc.get("targetIQN") == null)
                    continue;
                if (dc.get("lunid") == null)
                    continue;
                if (target.equals(dc.get("target")) && targetiqn.equals(dc.get("targetIQN")) && lunid.equals(dc.get("lunid"))) {
                    if (checkSR(sr)) {
                        return sr;
                    }
                    throw new CloudRuntimeException("SR check failed for storage pool: " + pool.getUuid() + "on host:" + _host.uuid);
                }
            }
            deviceConfig.put("target", target);
            deviceConfig.put("targetIQN", targetiqn);
            Host host = Host.getByUuid(conn, _host.uuid);
            SR sr = null;
            try {
                // Probe create: without an SCSIid this call is expected to fail
                // with SR_BACKEND_FAILURE_107, whose error text enumerates the
                // LUNs on the target. NOTE(review): if this create ever
                // succeeds, the second SR.create below still runs — confirm
                // whether that can happen and leak an SR.
                sr = SR.create(conn, host, deviceConfig, new Long(0), pool.getUuid(), Long.toString(pool.getId()), SRType.LVMOISCSI.toString(), "user", true,
                        new HashMap<String, String>());
            } catch (XenAPIException e) {
                String errmsg = e.toString();
                if (errmsg.contains("SR_BACKEND_FAILURE_107")) {
                    // Parse the <LUN>…<LUNid>…<SCSIid>… entries in the error
                    // text to find the SCSI id of the requested LUN.
                    String lun[] = errmsg.split("<LUN>");
                    boolean found = false;
                    for (int i = 1; i < lun.length; i++) {
                        // "<LUNid>" is 7 characters, "<SCSIid>" is 8.
                        int blunindex = lun[i].indexOf("<LUNid>") + 7;
                        int elunindex = lun[i].indexOf("</LUNid>");
                        String ilun = lun[i].substring(blunindex, elunindex);
                        ilun = ilun.trim();
                        if (ilun.equals(lunid)) {
                            int bscsiindex = lun[i].indexOf("<SCSIid>") + 8;
                            int escsiindex = lun[i].indexOf("</SCSIid>");
                            scsiid = lun[i].substring(bscsiindex, escsiindex);
                            scsiid = scsiid.trim();
                            found = true;
                            break;
                        }
                    }
                    if (!found) {
                        String msg = "can not find LUN " + lunid + " in " + errmsg;
                        s_logger.warn(msg);
                        throw new CloudRuntimeException(msg);
                    }
                } else {
                    String msg = "Unable to create Iscsi SR " + deviceConfig + " due to " + e.toString();
                    s_logger.warn(msg, e);
                    throw new CloudRuntimeException(msg, e);
                }
            }
            // Real create: with the discovered SCSIid this builds the SR.
            deviceConfig.put("SCSIid", scsiid);
            sr = SR.create(conn, host, deviceConfig, new Long(0), pool.getUuid(), Long.toString(pool.getId()), SRType.LVMOISCSI.toString(), "user", true,
                    new HashMap<String, String>());
            sr.scan(conn);
            return sr;
        } catch (XenAPIException e) {
            String msg = "Unable to create Iscsi SR " + deviceConfig + " due to " + e.toString();
            s_logger.warn(msg, e);
            throw new CloudRuntimeException(msg, e);
        } catch (Exception e) {
            String msg = "Unable to create Iscsi SR " + deviceConfig + " due to " + e.getMessage();
            s_logger.warn(msg, e);
            throw new CloudRuntimeException(msg, e);
        }
    }
}
protected SR getNfsSR(StorageFilerTO pool) {
    // Returns an attached NFS SR matching the pool's server/path: reuses an
    // existing healthy SR whose PBD device-config matches, otherwise creates
    // and scans a new shared SR.
    Connection conn = getConnection();
    Map<String, String> deviceConfig = new HashMap<String, String>();
    try {
        String server = pool.getHost();
        String serverpath = pool.getPath().replace("//", "/");
        for (SR candidate : SR.getAll(conn)) {
            if (!SRType.NFS.equals(candidate.getType(conn))) {
                continue;
            }
            Set<PBD> pbds = candidate.getPBDs(conn);
            if (pbds.isEmpty()) {
                continue;
            }
            Map<String, String> dc = pbds.iterator().next().getDeviceConfig(conn);
            if (dc == null || dc.get("server") == null || dc.get("serverpath") == null) {
                continue;
            }
            if (server.equals(dc.get("server")) && serverpath.equals(dc.get("serverpath"))) {
                if (checkSR(candidate)) {
                    return candidate;
                }
                throw new CloudRuntimeException("SR check failed for storage pool: " + pool.getUuid() + "on host:" + _host.uuid);
            }
        }
        deviceConfig.put("server", server);
        deviceConfig.put("serverpath", serverpath);
        Host host = Host.getByUuid(conn, _host.uuid);
        SR sr = SR.create(conn, host, deviceConfig, new Long(0), pool.getUuid(), Long.toString(pool.getId()), SRType.NFS.toString(), "user", true,
                new HashMap<String, String>());
        sr.scan(conn);
        return sr;
    } catch (XenAPIException e) {
        throw new CloudRuntimeException("Unable to create NFS SR " + pool.toString(), e);
    } catch (XmlRpcException e) {
        throw new CloudRuntimeException("Unable to create NFS SR " + pool.toString(), e);
    }
}
@Override
public Answer execute(DestroyCommand cmd) {
    // Destroys the VDI backing the given volume: first unplugs and destroys
    // every VBD attached to it, then destroys the VDI itself.
    VolumeTO vol = cmd.getVolume();
    Connection conn = getConnection();
    // Look up the VDI
    String volumeUUID = vol.getPath();
    VDI vdi = null;
    try {
        vdi = getVDIbyUuid(volumeUUID);
    } catch (Exception e) {
        // Lookup failure is reported as success — presumably the volume is
        // treated as already destroyed; TODO confirm this is intentional.
        String msg = "getVDIbyUuid for " + volumeUUID + " failed due to " + e.toString();
        s_logger.warn(msg);
        return new Answer(cmd, true, "Success");
    }
    Set<VBD> vbds = null;
    try {
        vbds = vdi.getVBDs(conn);
    } catch (Exception e) {
        String msg = "VDI getVBDS for " + volumeUUID + " failed due to " + e.toString();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
    // Detach and destroy every VBD before removing the VDI; a failure on any
    // VBD aborts the whole destroy.
    for (VBD vbd : vbds) {
        try {
            vbd.unplug(conn);
            vbd.destroy(conn);
        } catch (Exception e) {
            String msg = "VM destroy for " + volumeUUID + " failed due to " + e.toString();
            s_logger.warn(msg, e);
            return new Answer(cmd, false, msg);
        }
    }
    try {
        vdi.destroy(conn);
    } catch (Exception e) {
        String msg = "VDI destroy for " + volumeUUID + " failed due to " + e.toString();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
    return new Answer(cmd, true, "Success");
}
@Override
public ShareAnswer execute(final ShareCommand cmd) {
    // Only the "unshare" direction does real work here: when no VM with the
    // given name remains, the VM's private ISO SR is removed. Errors during
    // cleanup are logged and ignored; the answer is always a success with an
    // empty share map.
    if (!cmd.isShare()) {
        SR sr = getISOSRbyVmName(cmd.getVmName());
        Connection conn = getConnection();
        if (sr != null) {
            try {
                Set<VM> vms = VM.getByNameLabel(conn, cmd.getVmName());
                if (vms.size() == 0) {
                    removeSR(sr);
                }
            } catch (Exception e) {
                String msg = "SR.getNameLabel failed due to " + e.getMessage() + e.toString();
                s_logger.warn(msg);
            }
        }
    }
    return new ShareAnswer(cmd, new HashMap<String, Integer>());
}
@Override
public CopyVolumeAnswer execute(final CopyVolumeCommand cmd) {
    // Copies a volume between primary and secondary storage, in the direction
    // given by cmd.toSecondaryStorage(). Secondary storage is reached through
    // a temporary NFS SR that is always removed in the finally block.
    String volumeUUID = cmd.getVolumePath();
    StoragePoolVO pool = cmd.getPool();
    StorageFilerTO poolTO = new StorageFilerTO(pool);
    String secondaryStorageURL = cmd.getSecondaryStorageURL();
    URI uri = null;
    try {
        uri = new URI(secondaryStorageURL);
    } catch (URISyntaxException e) {
        return new CopyVolumeAnswer(cmd, false, "Invalid secondary storage URL specified.", null, null);
    }
    String remoteVolumesMountPath = uri.getHost() + ":" + uri.getPath() + "/volumes/";
    String volumeFolder = String.valueOf(cmd.getVolumeId()) + "/";
    boolean toSecondaryStorage = cmd.toSecondaryStorage();
    String errorMsg = "Failed to copy volume";
    SR primaryStoragePool = null;
    SR secondaryStorage = null;
    VDI srcVolume = null;
    VDI destVolume = null;
    Connection conn = getConnection();
    try {
        if (toSecondaryStorage) {
            // Create the volume folder
            if (!createSecondaryStorageFolder(remoteVolumesMountPath, volumeFolder)) {
                throw new InternalErrorException("Failed to create the volume folder.");
            }
            // Create a SR for the volume UUID folder
            secondaryStorage = createNfsSRbyURI(new URI(secondaryStorageURL + "/volumes/" + volumeFolder), false);
            // Look up the volume on the source primary storage pool
            srcVolume = getVDIbyUuid(volumeUUID);
            // Copy the volume to secondary storage
            destVolume = cloudVDIcopy(srcVolume, secondaryStorage);
        } else {
            // Mount the volume folder
            secondaryStorage = createNfsSRbyURI(new URI(secondaryStorageURL + "/volumes/" + volumeFolder), false);
            // Look up the volume on secondary storage
            Set<VDI> vdis = secondaryStorage.getVDIs(conn);
            for (VDI vdi : vdis) {
                if (vdi.getUuid(conn).equals(volumeUUID)) {
                    srcVolume = vdi;
                    break;
                }
            }
            if (srcVolume == null) {
                throw new InternalErrorException("Failed to find volume on secondary storage.");
            }
            // Copy the volume to the primary storage pool
            primaryStoragePool = getStorageRepository(conn, poolTO);
            destVolume = cloudVDIcopy(srcVolume, primaryStoragePool);
        }
        // Report the SR that now holds the copy (secondary when copying out,
        // primary when copying in) together with the new VDI uuid.
        String srUUID;
        if (primaryStoragePool == null) {
            srUUID = secondaryStorage.getUuid(conn);
        } else {
            srUUID = primaryStoragePool.getUuid(conn);
        }
        String destVolumeUUID = destVolume.getUuid(conn);
        return new CopyVolumeAnswer(cmd, true, null, srUUID, destVolumeUUID);
    } catch (XenAPIException e) {
        s_logger.warn(errorMsg + ": " + e.toString(), e);
        return new CopyVolumeAnswer(cmd, false, e.toString(), null, null);
    } catch (Exception e) {
        s_logger.warn(errorMsg + ": " + e.toString(), e);
        return new CopyVolumeAnswer(cmd, false, e.getMessage(), null, null);
    } finally {
        // Cleanup: when copying in from secondary storage, the source copy
        // there (and its folder) are no longer needed; the temporary SR is
        // always unmounted.
        if (!toSecondaryStorage && srcVolume != null) {
            // Delete the volume on secondary storage
            destroyVDI(srcVolume);
        }
        removeSR(secondaryStorage);
        if (!toSecondaryStorage) {
            // Delete the volume folder on secondary storage
            deleteSecondaryStorageFolder(remoteVolumesMountPath, volumeFolder);
        }
    }
}
protected AttachVolumeAnswer execute(final AttachVolumeCommand cmd) {
    // Attaches or detaches a data volume to/from a VM. HVM guests without
    // up-to-date PV drivers cannot hot-plug disks, so that case is rejected.
    boolean attach = cmd.getAttach();
    String vmName = cmd.getVmName();
    Long deviceId = cmd.getDeviceId();
    String errorMsg;
    if (attach) {
        errorMsg = "Failed to attach volume";
    } else {
        errorMsg = "Failed to detach volume";
    }
    Connection conn = getConnection();
    try {
        // Look up the VDI
        VDI vdi = mount(cmd.getPooltype(), cmd.getVolumeFolder(), cmd.getVolumePath());
        // Look up the VM
        VM vm = getVM(conn, vmName);
        /* For HVM guest, if no pv driver installed, no attach/detach */
        // An empty PV bootloader marks the guest as fully hardware-virtualized.
        boolean isHVM;
        if (vm.getPVBootloader(conn).equalsIgnoreCase(""))
            isHVM = true;
        else
            isHVM = false;
        VMGuestMetrics vgm = vm.getGuestMetrics(conn);
        boolean pvDrvInstalled = false;
        if (!isRefNull(vgm) && vgm.getPVDriversUpToDate(conn)) {
            pvDrvInstalled = true;
        }
        if (isHVM && !pvDrvInstalled) {
            s_logger.warn(errorMsg + ": You attempted an operation on a VM which requires PV drivers to be installed but the drivers were not detected");
            return new AttachVolumeAnswer(cmd, "You attempted an operation that requires PV drivers to be installed on the VM. Please install them by inserting xen-pv-drv.iso.");
        }
        if (attach) {
            // Figure out the disk number to attach the VM to
            String diskNumber = null;
            if (deviceId != null) {
                // Device 3 is always reserved for the CD-ROM VBD.
                if (deviceId.longValue() == 3) {
                    String msg = "Device 3 is reserved for CD-ROM, choose other device";
                    return new AttachVolumeAnswer(cmd, msg);
                }
                if (isDeviceUsed(vm, deviceId)) {
                    String msg = "Device " + deviceId + " is used in VM " + vmName;
                    return new AttachVolumeAnswer(cmd, msg);
                }
                diskNumber = deviceId.toString();
            } else {
                diskNumber = getUnusedDeviceNum(vm);
            }
            // Create a new VBD
            VBD.Record vbdr = new VBD.Record();
            vbdr.VM = vm;
            vbdr.VDI = vdi;
            vbdr.bootable = false;
            vbdr.userdevice = diskNumber;
            vbdr.mode = Types.VbdMode.RW;
            vbdr.type = Types.VbdType.DISK;
            vbdr.unpluggable = true;
            VBD vbd = VBD.create(conn, vbdr);
            // Attach the VBD to the VM
            vbd.plug(conn);
            // Update the VDI's label to include the VM name
            vdi.setNameLabel(conn, vmName + "-DATA");
            return new AttachVolumeAnswer(cmd, Long.parseLong(diskNumber));
        } else {
            // Look up all VBDs for this VDI
            Set<VBD> vbds = vdi.getVBDs(conn);
            // Detach each VBD from its VM, and then destroy it
            for (VBD vbd : vbds) {
                VBD.Record vbdr = vbd.getRecord(conn);
                if (vbdr.currentlyAttached) {
                    vbd.unplug(conn);
                }
                vbd.destroy(conn);
            }
            // Update the VDI's label to be "detached"
            vdi.setNameLabel(conn, "detached");
            umount(vdi);
            return new AttachVolumeAnswer(cmd);
        }
    } catch (XenAPIException e) {
        String msg = errorMsg + " for uuid: " + cmd.getVolumePath() + " due to " + e.toString();
        s_logger.warn(msg, e);
        return new AttachVolumeAnswer(cmd, msg);
    } catch (Exception e) {
        String msg = errorMsg + " for uuid: " + cmd.getVolumePath() + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new AttachVolumeAnswer(cmd, msg);
    }
}
// Hook invoked after a volume is detached; no-op here, subclasses may
// override to unmount/clean up the VDI's backing storage.
protected void umount(VDI vdi) {
}
protected Answer execute(final AttachIsoCommand cmd) {
    // Inserts an ISO into, or ejects it from, the VM's CD-ROM VBD (always
    // userdevice "3"). On detach, the ISO's SR is also removed unless it is
    // the built-in "XenServer Tools" SR.
    boolean attach = cmd.isAttach();
    String vmName = cmd.getVmName();
    String isoURL = cmd.getIsoPath();
    String errorMsg;
    if (attach) {
        errorMsg = "Failed to attach ISO";
    } else {
        errorMsg = "Failed to detach ISO";
    }
    Connection conn = getConnection();
    try {
        if (attach) {
            VBD isoVBD = null;
            // Find the VM
            VM vm = getVM(conn, vmName);
            // Find the ISO VDI
            VDI isoVDI = getIsoVDIByURL(conn, vmName, isoURL);
            // Find the VM's CD-ROM VBD
            Set<VBD> vbds = vm.getVBDs(conn);
            for (VBD vbd : vbds) {
                String userDevice = vbd.getUserdevice(conn);
                Types.VbdType type = vbd.getType(conn);
                if (userDevice.equals("3") && type == Types.VbdType.CD) {
                    isoVBD = vbd;
                    break;
                }
            }
            if (isoVBD == null) {
                throw new CloudRuntimeException("Unable to find CD-ROM VBD for VM: " + vmName);
            } else {
                // If an ISO is already inserted, eject it
                if (isoVBD.getEmpty(conn) == false) {
                    isoVBD.eject(conn);
                }
                // Insert the new ISO
                isoVBD.insert(conn, isoVDI);
            }
            return new Answer(cmd);
        } else {
            // Find the VM
            VM vm = getVM(conn, vmName);
            String vmUUID = vm.getUuid(conn);
            // Find the ISO VDI
            VDI isoVDI = getIsoVDIByURL(conn, vmName, isoURL);
            SR sr = isoVDI.getSR(conn);
            // Look up all VBDs for this VDI
            Set<VBD> vbds = isoVDI.getVBDs(conn);
            // Iterate through VBDs, and if the VBD belongs the VM, eject
            // the ISO from it
            for (VBD vbd : vbds) {
                VM vbdVM = vbd.getVM(conn);
                String vbdVmUUID = vbdVM.getUuid(conn);
                if (vbdVmUUID.equals(vmUUID)) {
                    // If an ISO is already inserted, eject it
                    if (!vbd.getEmpty(conn)) {
                        vbd.eject(conn);
                    }
                    break;
                }
            }
            // Keep the built-in XenServer Tools SR; remove any other ISO SR.
            if (!sr.getNameLabel(conn).startsWith("XenServer Tools")) {
                removeSR(sr);
            }
            return new Answer(cmd);
        }
    } catch (XenAPIException e) {
        s_logger.warn(errorMsg + ": " + e.toString(), e);
        return new Answer(cmd, false, e.toString());
    } catch (Exception e) {
        s_logger.warn(errorMsg + ": " + e.toString(), e);
        return new Answer(cmd, false, e.getMessage());
    }
}
protected ManageSnapshotAnswer execute(final ManageSnapshotCommand cmd) {
    // Creates or destroys a volume snapshot, selected by the command switch.
    // A freshly created snapshot that shares its VHD parent with the previous
    // snapshot contains no new data and is discarded in favor of the old one.
    long snapshotId = cmd.getSnapshotId();
    String snapshotName = cmd.getSnapshotName();
    // By default assume failure
    boolean success = false;
    String cmdSwitch = cmd.getCommandSwitch();
    String snapshotOp = "Unsupported snapshot command." + cmdSwitch;
    if (cmdSwitch.equals(ManageSnapshotCommand.CREATE_SNAPSHOT)) {
        snapshotOp = "create";
    } else if (cmdSwitch.equals(ManageSnapshotCommand.DESTROY_SNAPSHOT)) {
        snapshotOp = "destroy";
    }
    String details = "ManageSnapshotCommand operation: " + snapshotOp + " Failed for snapshotId: " + snapshotId;
    String snapshotUUID = null;
    Connection conn = getConnection();
    try {
        if (cmdSwitch.equals(ManageSnapshotCommand.CREATE_SNAPSHOT)) {
            // Look up the volume
            String volumeUUID = cmd.getVolumePath();
            VDI volume = VDI.getByUuid(conn, volumeUUID);
            // Create a snapshot
            VDI snapshot = volume.snapshot(conn, new HashMap<String, String>());
            if (snapshotName != null) {
                snapshot.setNameLabel(conn, snapshotName);
            }
            // Determine the UUID of the snapshot
            snapshotUUID = snapshot.getUuid(conn);
            String preSnapshotUUID = cmd.getSnapshotPath();
            //check if it is a empty snapshot
            if( preSnapshotUUID != null) {
                SR sr = volume.getSR(conn);
                String srUUID = sr.getUuid(conn);
                String type = sr.getType(conn);
                Boolean isISCSI = SRType.LVMOISCSI.equals(type);
                // Same VHD parent as the previous snapshot means nothing was
                // written since then, i.e. the new snapshot is empty.
                String snapshotParentUUID = getVhdParent(srUUID, snapshotUUID, isISCSI);
                String preSnapshotParentUUID = getVhdParent(srUUID, preSnapshotUUID, isISCSI);
                if( snapshotParentUUID != null && snapshotParentUUID.equals(preSnapshotParentUUID)) {
                    // this is empty snapshot, remove it
                    snapshot.destroy(conn);
                    snapshotUUID = preSnapshotUUID;
                }
            }
            success = true;
            details = null;
        } else if (cmd.getCommandSwitch().equals(ManageSnapshotCommand.DESTROY_SNAPSHOT)) {
            // Look up the snapshot
            snapshotUUID = cmd.getSnapshotPath();
            VDI snapshot = getVDIbyUuid(snapshotUUID);
            snapshot.destroy(conn);
            snapshotUUID = null;
            success = true;
            details = null;
        }
    } catch (XenAPIException e) {
        details += ", reason: " + e.toString();
        s_logger.warn(details, e);
    } catch (Exception e) {
        details += ", reason: " + e.toString();
        s_logger.warn(details, e);
    }
    return new ManageSnapshotAnswer(cmd, snapshotId, snapshotUUID, success, details);
}
protected CreatePrivateTemplateAnswer execute(final CreatePrivateTemplateFromVolumeCommand cmd) {
    // Builds a private template from an existing volume: creates the template
    // folder on secondary storage, mounts it as a temporary NFS SR, copies the
    // volume VDI into it and writes the template.properties metadata file.
    // The temporary SR is removed in the finally block regardless of outcome.
    String secondaryStoragePoolURL = cmd.getSecondaryStorageURL();
    String volumeUUID = cmd.getVolumePath();
    Long accountId = cmd.getAccountId();
    String userSpecifiedName = cmd.getTemplateName();
    Long templateId = cmd.getTemplateId();
    String details = null;
    SR tmpltSR = null;
    boolean result = false;
    try {
        URI uri = new URI(secondaryStoragePoolURL);
        String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
        String installPath = "template/tmpl/" + accountId + "/" + templateId;
        if (!createSecondaryStorageFolder(secondaryStorageMountPath, installPath)) {
            // Fixed typo in the error message ("Filed" -> "Failed").
            details = " Failed to create folder " + installPath + " in secondary storage";
            s_logger.warn(details);
            return new CreatePrivateTemplateAnswer(cmd, false, details);
        }
        Connection conn = getConnection();
        VDI volume = getVDIbyUuid(volumeUUID);
        // create template SR
        URI tmpltURI = new URI(secondaryStoragePoolURL + "/" + installPath);
        tmpltSR = createNfsSRbyURI(tmpltURI, false);
        // copy volume to template SR
        VDI tmpltVDI = cloudVDIcopy(volume, tmpltSR);
        if (userSpecifiedName != null) {
            tmpltVDI.setNameLabel(conn, userSpecifiedName);
        }
        String tmpltSrUUID = tmpltSR.getUuid(conn);
        String tmpltUUID = tmpltVDI.getUuid(conn);
        String tmpltFilename = tmpltUUID + ".vhd";
        long virtualSize = tmpltVDI.getVirtualSize(conn);
        long size = tmpltVDI.getPhysicalUtilisation(conn);
        // create the template.properties file
        result = postCreatePrivateTemplate(tmpltSrUUID, tmpltFilename, tmpltUUID, userSpecifiedName, null, size, virtualSize, templateId);
        if (!result) {
            throw new CloudRuntimeException("Could not create the template.properties file on secondary storage dir: " + tmpltURI);
        }
        return new CreatePrivateTemplateAnswer(cmd, true, null, installPath, virtualSize, tmpltUUID, ImageFormat.VHD);
    } catch (XenAPIException e) {
        details = "Creating template from volume " + volumeUUID + " failed due to " + e.getMessage();
        s_logger.error(details, e);
    } catch (Exception e) {
        details = "Creating template from volume " + volumeUUID + " failed due to " + e.getMessage();
        s_logger.error(details, e);
    } finally {
        // Remove the secondary storage SR
        removeSR(tmpltSR);
    }
    return new CreatePrivateTemplateAnswer(cmd, result, details);
}
protected CreatePrivateTemplateAnswer execute(final CreatePrivateTemplateFromSnapshotCommand cmd) {
    // Builds a private template from a backed-up snapshot: mounts the snapshot
    // folder and the new template folder as temporary NFS SRs, copies the
    // snapshot VDI across, and writes the template.properties metadata file.
    // Both temporary SRs are removed in the finally block regardless of outcome.
    // (Removed unused locals for the primary storage name label and zone id.)
    Long accountId = cmd.getAccountId();
    Long volumeId = cmd.getVolumeId();
    String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    String backedUpSnapshotUuid = cmd.getSnapshotUuid();
    Long newTemplateId = cmd.getNewTemplateId();
    String userSpecifiedName = cmd.getTemplateName();
    // By default, assume failure
    String details = null;
    SR snapshotSR = null;
    SR tmpltSR = null;
    boolean result = false;
    try {
        URI uri = new URI(secondaryStoragePoolURL);
        String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
        String installPath = "template/tmpl/" + accountId + "/" + newTemplateId;
        if (!createSecondaryStorageFolder(secondaryStorageMountPath, installPath)) {
            // Fixed typo in the error message ("Filed" -> "Failed").
            details = " Failed to create folder " + installPath + " in secondary storage";
            s_logger.warn(details);
            return new CreatePrivateTemplateAnswer(cmd, false, details);
        }
        Connection conn = getConnection();
        // create snapshot SR
        URI snapshotURI = new URI(secondaryStoragePoolURL + "/snapshots/" + accountId + "/" + volumeId );
        snapshotSR = createNfsSRbyURI(snapshotURI, false);
        snapshotSR.scan(conn);
        VDI snapshotVDI = getVDIbyUuid(backedUpSnapshotUuid);
        // create template SR
        URI tmpltURI = new URI(secondaryStoragePoolURL + "/" + installPath);
        tmpltSR = createNfsSRbyURI(tmpltURI, false);
        // copy snapshotVDI to template SR
        VDI tmpltVDI = cloudVDIcopy(snapshotVDI, tmpltSR);
        String tmpltSrUUID = tmpltSR.getUuid(conn);
        String tmpltUUID = tmpltVDI.getUuid(conn);
        String tmpltFilename = tmpltUUID + ".vhd";
        long virtualSize = tmpltVDI.getVirtualSize(conn);
        long size = tmpltVDI.getPhysicalUtilisation(conn);
        // create the template.properties file
        result = postCreatePrivateTemplate(tmpltSrUUID, tmpltFilename, tmpltUUID, userSpecifiedName, null, size, virtualSize, newTemplateId);
        if (!result) {
            throw new CloudRuntimeException("Could not create the template.properties file on secondary storage dir: " + tmpltURI);
        }
        return new CreatePrivateTemplateAnswer(cmd, true, null, installPath, virtualSize, tmpltUUID, ImageFormat.VHD);
    } catch (XenAPIException e) {
        details = "Creating template from snapshot " + backedUpSnapshotUuid + " failed due to " + e.getMessage();
        s_logger.error(details, e);
    } catch (Exception e) {
        details = "Creating template from snapshot " + backedUpSnapshotUuid + " failed due to " + e.getMessage();
        s_logger.error(details, e);
    } finally {
        // Remove the secondary storage SR
        removeSR(snapshotSR);
        removeSR(tmpltSR);
    }
    return new CreatePrivateTemplateAnswer(cmd, result, details);
}
/**
 * Backs up a snapshot VDI from primary storage to secondary storage.
 *
 * The first snapshot of a volume (no previous backup UUID) is copied in
 * full into an NFS SR mounted on the snapshots folder; subsequent
 * snapshots are delegated to the {@code backupSnapshot} host plugin,
 * which performs an incremental backup against the previous backup.
 * On success the previous snapshot VDI on primary storage is destroyed
 * so the VHD chain on secondary storage is not muddled.
 *
 * Fix: corrected the "Filed to create folder" typo in the error message.
 *
 * @param cmd carries storage pool labels, volume/account/DC ids and the
 *            snapshot UUIDs involved
 * @return answer holding success flag, detail message and the UUID of the
 *         backed-up snapshot (null on failure)
 */
protected BackupSnapshotAnswer execute(final BackupSnapshotCommand cmd) {
    String primaryStorageNameLabel = cmd.getPrimaryStoragePoolNameLabel();
    Long dcId = cmd.getDataCenterId();
    Long accountId = cmd.getAccountId();
    Long volumeId = cmd.getVolumeId();
    String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    String snapshotUuid = cmd.getSnapshotUuid(); // not null: Precondition.
    String prevSnapshotUuid = cmd.getPrevSnapshotUuid();
    String prevBackupUuid = cmd.getPrevBackupUuid();
    // By default assume failure
    String details = null;
    boolean success = false;
    String snapshotBackupUuid = null;
    try {
        Connection conn = getConnection();
        SR primaryStorageSR = getSRByNameLabelandHost(primaryStorageNameLabel);
        if (primaryStorageSR == null) {
            throw new InternalErrorException("Could not backup snapshot because the primary Storage SR could not be created from the name label: " + primaryStorageNameLabel);
        }
        String primaryStorageSRUuid = primaryStorageSR.getUuid(conn);
        Boolean isISCSI = SRType.LVMOISCSI.equals(primaryStorageSR.getType(conn));
        URI uri = new URI(secondaryStoragePoolURL);
        String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
        if (prevBackupUuid == null) {
            // the first snapshot is always a full snapshot
            String folder = "snapshots/" + accountId + "/" + volumeId;
            if (!createSecondaryStorageFolder(secondaryStorageMountPath, folder)) {
                details = " Failed to create folder " + folder + " in secondary storage";
                s_logger.warn(details);
                return new BackupSnapshotAnswer(cmd, success, details, snapshotBackupUuid);
            }
            String snapshotMountpoint = secondaryStoragePoolURL + "/" + folder;
            SR snapshotSr = null;
            try {
                // Mount the snapshots folder as a temporary NFS SR and copy
                // the snapshot VDI into it.
                snapshotSr = createNfsSRbyURI(new URI(snapshotMountpoint), false);
                VDI snapshotVdi = getVDIbyUuid(snapshotUuid);
                VDI backedVdi = snapshotVdi.copy(conn, snapshotSr);
                snapshotBackupUuid = backedVdi.getUuid(conn);
                success = true;
            } finally {
                // Always unmount the temporary SR, even on failure.
                if (snapshotSr != null) {
                    removeSR(snapshotSr);
                }
            }
        } else {
            // Incremental backup via the host plugin.
            snapshotBackupUuid = backupSnapshot(primaryStorageSRUuid, dcId, accountId, volumeId, secondaryStorageMountPath,
                    snapshotUuid, prevSnapshotUuid, prevBackupUuid, isISCSI);
            success = (snapshotBackupUuid != null);
        }
        if (success) {
            details = "Successfully backedUp the snapshotUuid: " + snapshotUuid + " to secondary storage.";
            // Mark the snapshot as removed in the database.
            // When the next snapshot is taken, it will be
            // 1) deleted from the DB 2) The snapshotUuid will be deleted from the primary
            // 3) the snapshotBackupUuid will be copied to secondary
            // 4) if possible it will be coalesced with the next snapshot.
            if (prevSnapshotUuid != null) {
                // Destroy the previous snapshot, if it exists.
                // We destroy the previous snapshot only if the current snapshot
                // backup succeeds.
                // The aim is to keep the VDI of the last 'successful' snapshot
                // so that it doesn't get merged with the
                // new one
                // and muddle the vhd chain on the secondary storage.
                destroySnapshotOnPrimaryStorage(prevSnapshotUuid);
            }
        }
    } catch (XenAPIException e) {
        details = "BackupSnapshot Failed due to " + e.toString();
        s_logger.warn(details, e);
    } catch (Exception e) {
        details = "BackupSnapshot Failed due to " + e.getMessage();
        s_logger.warn(details, e);
    }
    return new BackupSnapshotAnswer(cmd, success, details, snapshotBackupUuid);
}
/**
 * Creates a new volume on primary storage from a backed-up snapshot held
 * on secondary storage: mounts the snapshots folder as a temporary NFS SR,
 * copies the snapshot VDI into the primary SR, then tears the temporary
 * SR down in all cases.
 *
 * Fix: {@code details} was initialized to null and then appended to with
 * {@code +=}, which produced messages beginning with the literal "null".
 * Failure paths now assign a complete message instead.
 *
 * @param cmd carries the primary pool label, secondary storage URL,
 *            account/volume ids and the backed-up snapshot UUID
 * @return answer with success flag, detail message and the new volume's
 *         VDI UUID (null on failure)
 */
protected CreateVolumeFromSnapshotAnswer execute(final CreateVolumeFromSnapshotCommand cmd) {
    String primaryStorageNameLabel = cmd.getPrimaryStoragePoolNameLabel();
    Long accountId = cmd.getAccountId();
    Long volumeId = cmd.getVolumeId();
    String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    String backedUpSnapshotUuid = cmd.getSnapshotUuid();
    // By default, assume the command has failed and set the params to be
    // passed to CreateVolumeFromSnapshotAnswer appropriately
    boolean result = false;
    // Stays null on success; holds the full failure message otherwise.
    String details = null;
    String volumeUUID = null;
    SR snapshotSR = null;
    if (secondaryStoragePoolURL == null) {
        details = "Could not create volume from snapshot because the URL passed: " + secondaryStoragePoolURL + " is invalid.";
        return new CreateVolumeFromSnapshotAnswer(cmd, result, details, volumeUUID);
    }
    try {
        Connection conn = getConnection();
        SR primaryStorageSR = getSRByNameLabelandHost(primaryStorageNameLabel);
        if (primaryStorageSR == null) {
            throw new InternalErrorException("Could not create volume from snapshot because the primary Storage SR could not be created from the name label: "
                    + primaryStorageNameLabel);
        }
        // Get the absolute path of the snapshot on the secondary storage.
        URI snapshotURI = new URI(secondaryStoragePoolURL + "/snapshots/" + accountId + "/" + volumeId);
        snapshotSR = createNfsSRbyURI(snapshotURI, false);
        snapshotSR.scan(conn);
        VDI snapshotVDI = getVDIbyUuid(backedUpSnapshotUuid);
        VDI volumeVDI = cloudVDIcopy(snapshotVDI, primaryStorageSR);
        volumeUUID = volumeVDI.getUuid(conn);
        result = true;
    } catch (XenAPIException e) {
        details = "Could not create volume from snapshot " + backedUpSnapshotUuid + " due to " + e.toString();
        s_logger.warn(details, e);
    } catch (Exception e) {
        details = "Could not create volume from snapshot " + backedUpSnapshotUuid + " due to " + e.getMessage();
        s_logger.warn(details, e);
    } finally {
        // In all cases, if the temporary SR was created, forget it.
        if (snapshotSR != null) {
            removeSR(snapshotSR);
        }
    }
    if (!result) {
        // Is this logged at a higher level?
        s_logger.error(details);
    }
    // In all cases return something.
    return new CreateVolumeFromSnapshotAnswer(cmd, result, details, volumeUUID);
}
/**
 * Deletes a single snapshot backup from secondary storage by delegating
 * to the {@code deleteSnapshotBackup} host plugin. Success is reported
 * when the plugin returns "1".
 *
 * @param cmd identifies the backup by DC/account/volume ids, the
 *            secondary storage URL and the backup UUID
 * @return answer with success flag and plugin/detail message
 */
protected DeleteSnapshotBackupAnswer execute(final DeleteSnapshotBackupCommand cmd) {
    final Long dcId = cmd.getDataCenterId();
    final Long accountId = cmd.getAccountId();
    final Long volumeId = cmd.getVolumeId();
    final String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    final String backupUUID = cmd.getSnapshotUuid();

    final URI uri;
    try {
        uri = new URI(secondaryStoragePoolURL);
    } catch (URISyntaxException e) {
        final String details = "Error finding the secondary storage URL" + e.getMessage();
        s_logger.error(details, e);
        return new DeleteSnapshotBackupAnswer(cmd, false, details);
    }

    final String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
    if (secondaryStorageMountPath == null) {
        return new DeleteSnapshotBackupAnswer(cmd, false,
                "Couldn't delete snapshot because the URL passed: " + secondaryStoragePoolURL + " is invalid.");
    }

    final String details = deleteSnapshotBackup(dcId, accountId, volumeId, secondaryStorageMountPath, backupUUID);
    final boolean success = "1".equals(details);
    if (success) {
        s_logger.debug("Successfully deleted snapshot backup " + backupUUID);
    }
    return new DeleteSnapshotBackupAnswer(cmd, success, details);
}
/**
 * Deletes the whole snapshots directory of a volume from secondary
 * storage, and first destroys the (optional) snapshot VDI still on
 * primary storage. Deletion of the directory is delegated to the
 * {@code deleteSnapshotsDir} host plugin; success means the plugin
 * returned "1".
 *
 * Note: {@code details} is reassigned at each stage, so only the last
 * failure (or the plugin output) is reported in the answer.
 *
 * @param cmd identifies the volume's snapshot dir and, optionally, a
 *            snapshot UUID to destroy on primary storage
 * @return answer with success flag and detail message
 */
protected Answer execute(DeleteSnapshotsDirCommand cmd) {
    Long dcId = cmd.getDataCenterId();
    Long accountId = cmd.getAccountId();
    Long volumeId = cmd.getVolumeId();
    String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    String snapshotUUID = cmd.getSnapshotUuid();
    String primaryStorageNameLabel = cmd.getPrimaryStoragePoolNameLabel();
    String details = null;
    boolean success = false;
    SR primaryStorageSR = null;
    // Stage 1: locate the primary SR (failure here is recorded but does not
    // abort the secondary-storage cleanup below).
    try {
        primaryStorageSR = getSRByNameLabelandHost(primaryStorageNameLabel);
        if (primaryStorageSR == null) {
            details = "Primary Storage SR could not be created from the name label: " + primaryStorageNameLabel;
        }
    } catch (XenAPIException e) {
        details = "Couldn't determine primary SR type " + e.getMessage();
        s_logger.error(details, e);
    } catch (Exception e) {
        details = "Couldn't determine primary SR type " + e.getMessage();
        s_logger.error(details, e);
    }
    // Stage 2: destroy the snapshot VDI on primary storage, if one was given.
    if (primaryStorageSR != null) {
        if (snapshotUUID != null) {
            VDI snapshotVDI = getVDIbyUuid(snapshotUUID);
            if (snapshotVDI != null) {
                destroyVDI(snapshotVDI);
            }
        }
    }
    // Stage 3: parse the secondary storage URL and delete the snapshots dir.
    URI uri = null;
    try {
        uri = new URI(secondaryStoragePoolURL);
    } catch (URISyntaxException e) {
        details = "Error finding the secondary storage URL" + e.getMessage();
        s_logger.error(details, e);
    }
    if (uri != null) {
        String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
        if (secondaryStorageMountPath == null) {
            details = "Couldn't delete snapshotsDir because the URL passed: " + secondaryStoragePoolURL + " is invalid.";
        } else {
            details = deleteSnapshotsDir(dcId, accountId, volumeId, secondaryStorageMountPath);
            success = (details != null && details.equals("1"));
            if (success) {
                s_logger.debug("Successfully deleted snapshotsDir for volume: " + volumeId);
            }
        }
    }
    return new Answer(cmd, success, details);
}
/**
 * Resolves a VM by its name label.
 *
 * @param conn   live XenAPI connection
 * @param vmName name label to look up
 * @return the first matching VM (a warning is logged if the label is
 *         ambiguous)
 * @throws CloudRuntimeException if the lookup fails or no VM matches
 */
protected VM getVM(Connection conn, String vmName) {
    final Set<VM> matches;
    try {
        matches = VM.getByNameLabel(conn, vmName);
    } catch (XenAPIException e) {
        throw new CloudRuntimeException("Unable to get " + vmName + ": " + e.toString(), e);
    } catch (Exception e) {
        throw new CloudRuntimeException("Unable to get " + vmName + ": " + e.getMessage(), e);
    }

    if (matches.isEmpty()) {
        throw new CloudRuntimeException("VM with name: " + vmName + " does not exist.");
    }
    if (matches.size() > 1) {
        // Name labels are not unique on XenServer; pick the first and warn.
        s_logger.warn("Found " + matches.size() + " VMs with name: " + vmName);
    }
    return matches.iterator().next();
}
/**
 * Resolves an ISO VDI for the given VM from an ISO URL.
 *
 * Two cases: URLs starting with "xs-tools" refer to the built-in
 * XenServer tools ISO and are looked up by name label directly; any
 * other URL is split into a mount point (directory part) and a file
 * name, the mount point is attached as an ISO SR for the VM (reusing an
 * existing one when present), and the VDI is located inside it.
 *
 * @param conn   live XenAPI connection
 * @param vmName VM the ISO SR is (or will be) associated with
 * @param isoURL either "xs-tools..." or "<nfs-path>/<iso-file>"
 * @return the matching VDI
 * @throws CloudRuntimeException if the URL is malformed or no VDI is found
 */
protected VDI getIsoVDIByURL(Connection conn, String vmName, String isoURL) {
    SR isoSR = null;
    String mountpoint = null;
    if (isoURL.startsWith("xs-tools")) {
        // Built-in PV-tools ISO: resolve by name label, no SR mounting needed.
        try {
            Set<VDI> vdis = VDI.getByNameLabel(conn, isoURL);
            if (vdis.isEmpty()) {
                throw new CloudRuntimeException("Could not find ISO with URL: " + isoURL);
            }
            return vdis.iterator().next();
        } catch (XenAPIException e) {
            throw new CloudRuntimeException("Unable to get pv iso: " + isoURL + " due to " + e.toString());
        } catch (Exception e) {
            throw new CloudRuntimeException("Unable to get pv iso: " + isoURL + " due to " + e.toString());
        }
    }
    // Split "<mountpoint>/<isoName>" at the last slash.
    int index = isoURL.lastIndexOf("/");
    mountpoint = isoURL.substring(0, index);
    URI uri;
    try {
        uri = new URI(mountpoint);
    } catch (URISyntaxException e) {
        throw new CloudRuntimeException("isoURL is wrong: " + isoURL);
    }
    // Reuse the VM's existing ISO SR if there is one; otherwise create it.
    isoSR = getISOSRbyVmName(vmName);
    if (isoSR == null) {
        isoSR = createIsoSRbyURI(uri, vmName, false);
    }
    String isoName = isoURL.substring(index + 1);
    VDI isoVDI = getVDIbyLocationandSR(isoName, isoSR);
    if (isoVDI != null) {
        return isoVDI;
    } else {
        throw new CloudRuntimeException("Could not find ISO with URL: " + isoURL);
    }
}
/**
 * Finds or creates the SR backing a storage pool.
 *
 * Looks the SR up by the pool UUID used as its name label. Exactly one
 * match is verified via {@code checkSR} and returned; zero matches
 * triggers creation of a new NFS or iSCSI SR depending on the pool type;
 * more than one match is an error.
 *
 * @param conn live XenAPI connection
 * @param pool pool descriptor (uuid, type, connection details)
 * @return a verified SR for the pool
 * @throws CloudRuntimeException on lookup failure, ambiguity, failed SR
 *         check, or unsupported pool type
 */
protected SR getStorageRepository(Connection conn, StorageFilerTO pool) {
    Set<SR> srs;
    try {
        srs = SR.getByNameLabel(conn, pool.getUuid());
    } catch (XenAPIException e) {
        throw new CloudRuntimeException("Unable to get SR " + pool.getUuid() + " due to " + e.toString(), e);
    } catch (Exception e) {
        throw new CloudRuntimeException("Unable to get SR " + pool.getUuid() + " due to " + e.getMessage(), e);
    }
    if (srs.size() > 1) {
        throw new CloudRuntimeException("More than one storage repository was found for pool with uuid: " + pool.getUuid());
    } else if (srs.size() == 1) {
        SR sr = srs.iterator().next();
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("SR retrieved for " + pool.getId() + " is mapped to " + sr.toString());
        }
        if (checkSR(sr)) {
            return sr;
        }
        throw new CloudRuntimeException("SR check failed for storage pool: " + pool.getUuid() + "on host:" + _host.uuid);
    } else {
        // No existing SR: create one matching the pool type.
        if (pool.getType() == StoragePoolType.NetworkFilesystem)
            return getNfsSR(pool);
        else if (pool.getType() == StoragePoolType.IscsiLUN)
            return getIscsiSR(pool);
        else
            throw new CloudRuntimeException("The pool type: " + pool.getType().name() + " is not supported.");
    }
}
/** One-shot console proxy load check; delegates to {@link #executeProxyLoadScan}. */
protected Answer execute(final CheckConsoleProxyLoadCommand cmd) {
    return executeProxyLoadScan(cmd, cmd.getProxyVmId(), cmd.getProxyVmName(), cmd.getProxyManagementIp(), cmd.getProxyCmdPort());
}
/** Periodic console proxy load check; delegates to {@link #executeProxyLoadScan}. */
protected Answer execute(final WatchConsoleProxyLoadCommand cmd) {
    return executeProxyLoadScan(cmd, cmd.getProxyVmId(), cmd.getProxyVmName(), cmd.getProxyManagementIp(), cmd.getProxyCmdPort());
}
/**
 * Fetches the status page of a console proxy over HTTP
 * ({@code http://<mgmtIp>:<cmdPort>/cmd/getstatus}) and returns its body.
 *
 * Fixes: the {@code BufferedReader} was never closed (only the raw
 * stream was), and a synchronized {@code StringBuffer} was used for a
 * purely local URL build; closing the reader now also closes the
 * underlying stream.
 *
 * @param cmd               originating command, echoed into the answer
 * @param proxyVmId         proxy VM id, echoed into the answer
 * @param proxyVmName       proxy VM name, echoed into the answer
 * @param proxyManagementIp management IP of the proxy to contact
 * @param cmdPort           HTTP command port on the proxy
 * @return answer whose result holds the status page body, or success=false
 *         on any I/O failure
 */
protected Answer executeProxyLoadScan(final Command cmd, final long proxyVmId, final String proxyVmName, final String proxyManagementIp, final int cmdPort) {
    String result = null;
    final StringBuilder sb = new StringBuilder();
    sb.append("http://").append(proxyManagementIp).append(":" + cmdPort).append("/cmd/getstatus");
    boolean success = true;
    try {
        final URL url = new URL(sb.toString());
        final URLConnection conn = url.openConnection();
        // setting TIMEOUTs to avoid possible waiting until death situations
        conn.setConnectTimeout(5000);
        conn.setReadTimeout(5000);
        final InputStream is = conn.getInputStream();
        final BufferedReader reader = new BufferedReader(new InputStreamReader(is));
        final StringBuilder sb2 = new StringBuilder();
        String line = null;
        try {
            while ((line = reader.readLine()) != null) {
                sb2.append(line + "\n");
            }
            result = sb2.toString();
        } catch (final IOException e) {
            success = false;
        } finally {
            try {
                // Closing the reader also closes the wrapped InputStream.
                reader.close();
            } catch (final IOException e) {
                s_logger.warn("Exception when closing , console proxy address : " + proxyManagementIp);
                success = false;
            }
        }
    } catch (final IOException e) {
        s_logger.warn("Unable to open console proxy command port url, console proxy address : " + proxyManagementIp);
        success = false;
    }
    return new ConsoleProxyLoadAnswer(cmd, proxyVmId, proxyVmName, success, result);
}
/**
 * Creates a folder on the mounted secondary storage via the vmopsSnapshot
 * host plugin; true when the plugin returned any (non-null) result.
 */
protected boolean createSecondaryStorageFolder(String remoteMountPath, String newFolder) {
    String result = callHostPlugin("vmopsSnapshot", "create_secondary_storage_folder", "remoteMountPath", remoteMountPath, "newFolder", newFolder);
    return (result != null);
}
/**
 * Deletes a folder on the mounted secondary storage via the vmopsSnapshot
 * host plugin; true when the plugin returned any (non-null) result.
 */
protected boolean deleteSecondaryStorageFolder(String remoteMountPath, String folder) {
    String result = callHostPlugin("vmopsSnapshot", "delete_secondary_storage_folder", "remoteMountPath", remoteMountPath, "folder", folder);
    return (result != null);
}
/**
 * Writes the template.properties file on secondary storage after a
 * private template has been created, via the vmopsSnapshot host plugin
 * (110-minute timeout). Null description/checksum are normalized to "".
 *
 * @return true iff the plugin reported "1"
 */
protected boolean postCreatePrivateTemplate(String tmpltSrUUID, String tmpltFilename, String templateName, String templateDescription, String checksum, long size, long virtualSize, long templateId) {
    // The plugin cannot take null arguments.
    templateDescription = (templateDescription == null) ? "" : templateDescription;
    checksum = (checksum == null) ? "" : checksum;

    final String result = callHostPluginWithTimeOut("vmopsSnapshot", "post_create_private_template", 110 * 60,
            "tmpltSrUUID", tmpltSrUUID,
            "templateFilename", tmpltFilename,
            "templateName", templateName,
            "templateDescription", templateDescription,
            "checksum", checksum,
            "size", String.valueOf(size),
            "virtualSize", String.valueOf(virtualSize),
            "templateId", String.valueOf(templateId));

    // A null/empty result means the plugin threw; that has already been logged.
    if (result == null || result.isEmpty()) {
        return false;
    }
    if (result.equalsIgnoreCase("1")) {
        s_logger.debug("Successfully created template.properties file on secondary storage for " + tmpltFilename);
        return true;
    }
    s_logger.warn("Could not create template.properties file on secondary storage for " + tmpltFilename + " for templateId: " + templateId);
    return false;
}
/**
 * Performs an incremental snapshot backup to secondary storage via the
 * vmopsSnapshot host plugin (110-minute timeout). The plugin replies
 * "status#backupUuid"; only the backup UUID is used as the success
 * indicator.
 *
 * Fix: guard against plugin output that does not contain '#', which
 * previously threw ArrayIndexOutOfBoundsException on {@code tmp[1]}.
 *
 * @return the UUID of the backup on secondary storage, or null on failure
 */
protected String backupSnapshot(String primaryStorageSRUuid, Long dcId, Long accountId, Long volumeId, String secondaryStorageMountPath,
        String snapshotUuid, String prevSnapshotUuid, String prevBackupUuid, Boolean isISCSI) {
    String backupSnapshotUuid = null;
    // The plugin cannot take null arguments.
    if (prevSnapshotUuid == null) {
        prevSnapshotUuid = "";
    }
    if (prevBackupUuid == null) {
        prevBackupUuid = "";
    }
    String results = callHostPluginWithTimeOut("vmopsSnapshot", "backupSnapshot", 110*60, "primaryStorageSRUuid", primaryStorageSRUuid, "dcId", dcId.toString(), "accountId", accountId.toString(), "volumeId",
            volumeId.toString(), "secondaryStorageMountPath", secondaryStorageMountPath, "snapshotUuid", snapshotUuid, "prevSnapshotUuid", prevSnapshotUuid, "prevBackupUuid",
            prevBackupUuid, "isISCSI", isISCSI.toString());
    if (results == null || results.isEmpty()) {
        // errString is already logged.
        return null;
    }
    String[] tmp = results.split("#");
    if (tmp.length < 2) {
        s_logger.warn("backupSnapshot for volumeId: " + volumeId + " returned an unexpected result: " + results);
        return null;
    }
    String status = tmp[0];
    backupSnapshotUuid = tmp[1];
    // status == "1" if and only if backupSnapshotUuid != null
    // So we don't rely on status value but return backupSnapshotUuid as an
    // indicator of success.
    String failureString = "Could not copy backupUuid: " + backupSnapshotUuid + " of volumeId: " + volumeId + " from primary storage " + primaryStorageSRUuid
            + " to secondary storage " + secondaryStorageMountPath;
    if (status != null && status.equalsIgnoreCase("1") && backupSnapshotUuid != null) {
        s_logger.debug("Successfully copied backupUuid: " + backupSnapshotUuid + " of volumeId: " + volumeId + " to secondary storage");
    } else {
        s_logger.debug(failureString + ". Failed with status: " + status);
        return null;
    }
    return backupSnapshotUuid;
}
/**
 * Asks the vmopsSnapshot host plugin for the parent VHD of a snapshot.
 *
 * @return the parent VHD's UUID, or null when the plugin could not
 *         determine it (the plugin's error is already logged)
 */
protected String getVhdParent(String primaryStorageSRUuid, String snapshotUuid, Boolean isISCSI) {
    final String parentUuid = callHostPlugin("vmopsSnapshot", "getVhdParent", "primaryStorageSRUuid", primaryStorageSRUuid,
            "snapshotUuid", snapshotUuid, "isISCSI", isISCSI.toString());
    if (parentUuid != null && !parentUuid.isEmpty()) {
        return parentUuid;
    }
    s_logger.debug("Unable to get parent of VHD " + snapshotUuid + " in SR " + primaryStorageSRUuid);
    return null;
}
/**
 * Destroys a snapshot VDI on primary storage.
 *
 * @param snapshotUuid UUID of the snapshot VDI; must not be null
 * @return true on success, false when the VDI was missing or destruction
 *         failed (the cause is logged)
 */
protected boolean destroySnapshotOnPrimaryStorage(String snapshotUuid) {
    // Precondition snapshotUuid != null
    try {
        final Connection conn = getConnection();
        final VDI snapshot = getVDIbyUuid(snapshotUuid);
        if (snapshot == null) {
            throw new InternalErrorException("Could not destroy snapshot " + snapshotUuid + " because the snapshot VDI was null");
        }
        snapshot.destroy(conn);
        s_logger.debug("Successfully destroyed snapshotUuid: " + snapshotUuid + " on primary storage");
        return true;
    } catch (XenAPIException e) {
        s_logger.error("Destroy snapshotUuid: " + snapshotUuid + " on primary storage failed due to " + e.toString(), e);
    } catch (Exception e) {
        s_logger.warn("Destroy snapshotUuid: " + snapshotUuid + " on primary storage failed due to " + e.getMessage(), e);
    }
    return false;
}
/**
 * Deletes one snapshot backup file from secondary storage via the
 * vmopsSnapshot host plugin.
 *
 * @return the raw plugin output ("1" on success), or null on plugin failure
 */
protected String deleteSnapshotBackup(Long dcId, Long accountId, Long volumeId, String secondaryStorageMountPath, String backupUUID) {
    String result = callHostPlugin("vmopsSnapshot", "deleteSnapshotBackup", "backupUUID", backupUUID, "dcId", dcId.toString(), "accountId", accountId.toString(),
            "volumeId", volumeId.toString(), "secondaryStorageMountPath", secondaryStorageMountPath);
    return result;
}
/**
 * Deletes a volume's entire snapshots directory from secondary storage
 * via the vmopsSnapshot host plugin.
 *
 * @return the raw plugin output ("1" on success), or null on plugin failure
 */
protected String deleteSnapshotsDir(Long dcId, Long accountId, Long volumeId, String secondaryStorageMountPath) {
    String result = callHostPlugin("vmopsSnapshot", "deleteSnapshotsDir", "dcId", dcId.toString(), "accountId", accountId.toString(), "volumeId", volumeId.toString(),
            "secondaryStorageMountPath", secondaryStorageMountPath);
    return result;
}
/** Resource lifecycle start hook; nothing to initialize here. */
@Override
public boolean start() {
    return true;
}
/** Resource lifecycle stop hook; logs out of the host session. */
@Override
public boolean stop() {
    disconnected();
    return true;
}
/** Returns the name this resource was configured with. */
@Override
public String getName() {
    return _name;
}
/** Returns the agent-control channel injected via {@link #setAgentControl}. */
@Override
public IAgentControl getAgentControl() {
    return _agentControl;
}
/** Injects the agent-control channel used by the agent framework. */
@Override
public void setAgentControl(IAgentControl agentControl) {
    _agentControl = agentControl;
}
/**
 * Ejects a host from the XenServer resource pool.
 *
 * Flow: verify the host is still a pool member (already-ejected hosts
 * return success immediately), strip the CloudStack "vmops-version-*"
 * tags, then call {@code Pool.eject}; if the eject itself fails the host
 * record is destroyed in the XenServer database as a fallback.
 *
 * @param cmd carries the UUID of the host to eject
 * @return success answer, or a failure answer with the error detail
 */
protected Answer execute(PoolEjectCommand cmd) {
    Connection conn = getConnection();
    String hostuuid = cmd.getHostuuid();
    try {
        // Scan all pool members; if the host is gone it was already ejected.
        Map<Host, Host.Record> hostrs = Host.getAllRecords(conn);
        boolean found = false;
        for (Host.Record hr : hostrs.values()) {
            if (hr.uuid.equals(hostuuid)) {
                found = true;
            }
        }
        if (!found) {
            s_logger.debug("host " + hostuuid + " has already been ejected from pool " + _host.pool);
            return new Answer(cmd);
        }
        Host host = Host.getByUuid(conn, hostuuid);
        // remove all tags cloud stack add before eject
        Host.Record hr = host.getRecord(conn);
        Iterator<String> it = hr.tags.iterator();
        while (it.hasNext()) {
            String tag = it.next();
            if (tag.startsWith("vmops-version-")) {
                it.remove();
            }
        }
        // eject from pool
        try {
            Pool.eject(conn, host);
        } catch (XenAPIException e) {
            // Eject failed: fall back to removing the host record outright.
            String msg = "Unable to eject host " + _host.uuid + " due to " + e.toString();
            s_logger.warn(msg);
            host.destroy(conn);
        }
        return new Answer(cmd);
    } catch (XenAPIException e) {
        String msg = "XenAPIException Unable to destroy host " + _host.uuid + " in xenserver database due to " + e.toString();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    } catch (Exception e) {
        String msg = "Exception Unable to destroy host " + _host.uuid + " in xenserver database due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
}
/**
 * Value holder pairing a XenServer network with the PIF it rides on,
 * keeping both objects and their fetched records together.
 * NOTE(review): a non-static inner class retains a reference to the
 * enclosing resource; consider making it static if that reference is
 * not needed.
 */
protected class Nic {
    public Network n;          // the XenAPI network object
    public Network.Record nr;  // its fetched record
    public PIF p;              // physical interface carrying the network
    public PIF.Record pr;      // its fetched record
    public Nic(Network n, Network.Record nr, PIF p, PIF.Record pr) {
        this.n = n;
        this.nr = nr;
        this.p = p;
        this.pr = pr;
    }
}
// A list of UUIDs that are gathered from the XenServer when
// the resource first connects to XenServer. These UUIDs do
// not change over time.
protected class XenServerHost {
    public String systemvmisouuid;   // UUID of the system VM ISO
    public String uuid;              // this host's UUID
    public String ip;                // management IP of the host
    // Network UUIDs for each traffic type.
    public String publicNetwork;
    public String privateNetwork;
    public String linkLocalNetwork;
    public String storageNetwork1;
    public String storageNetwork2;
    public String guestNetwork;
    // PIF UUIDs corresponding to the networks above.
    public String guestPif;
    public String publicPif;
    public String privatePif;
    public String storagePif1;
    public String storagePif2;
    public String pool;              // UUID of the resource pool
    public int speed;                // CPU speed as reported by the host
    public int cpus;                 // CPU count as reported by the host
}
/**
 * Maps a standard guest OS type name to the hypervisor-specific one.
 * The base implementation is an identity mapping; subclasses override
 * this for version-specific XenServer OS type names.
 */
protected String getGuestOsType(String stdType) {
    return stdType;
}
/*
protected boolean patchSystemVm(VDI vdi, String vmName, VirtualMachine.Type type) {
if (type == VirtualMachine.Type.DomainRouter) {
return patchSpecialVM(vdi, vmName, "router");
} else if (type == VirtualMachine.Type.ConsoleProxy) {
return patchSpecialVM(vdi, vmName, "consoleproxy");
} else if (type == VirtualMachine.Type.SecondaryStorageVm) {
return patchSpecialVM(vdi, vmName, "secstorage");
} else {
throw new CloudRuntimeException("Tried to patch unknown type of system vm");
}
}
protected boolean patchSystemVm(VDI vdi, String vmName) {
if (vmName.startsWith("r-")) {
return patchSpecialVM(vdi, vmName, "router");
} else if (vmName.startsWith("v-")) {
return patchSpecialVM(vdi, vmName, "consoleproxy");
} else if (vmName.startsWith("s-")) {
return patchSpecialVM(vdi, vmName, "secstorage");
} else {
throw new CloudRuntimeException("Tried to patch unknown type of system vm");
}
}
protected boolean patchSpecialVM(VDI vdi, String vmname, String vmtype) {
// patch special vm here, domr, domp
VBD vbd = null;
Connection conn = getConnection();
try {
Host host = Host.getByUuid(conn, _host.uuid);
Set<VM> vms = host.getResidentVMs(conn);
for (VM vm : vms) {
VM.Record vmrec = null;
try {
vmrec = vm.getRecord(conn);
} catch (Exception e) {
String msg = "VM.getRecord failed due to " + e.toString() + " " + e.getMessage();
s_logger.warn(msg);
continue;
}
if (vmrec.isControlDomain) {
VBD.Record vbdr = new VBD.Record();
vbdr.VM = vm;
vbdr.VDI = vdi;
vbdr.bootable = false;
vbdr.userdevice = getUnusedDeviceNum(vm);
vbdr.unpluggable = true;
vbdr.mode = Types.VbdMode.RW;
vbdr.type = Types.VbdType.DISK;
vbd = VBD.create(conn, vbdr);
vbd.plug(conn);
String device = vbd.getDevice(conn);
return patchspecialvm(vmname, device, vmtype);
}
}
} catch (XenAPIException e) {
String msg = "patchSpecialVM faile on " + _host.uuid + " due to " + e.toString();
s_logger.warn(msg, e);
} catch (Exception e) {
String msg = "patchSpecialVM faile on " + _host.uuid + " due to " + e.getMessage();
s_logger.warn(msg, e);
} finally {
if (vbd != null) {
try {
if (vbd.getCurrentlyAttached(conn)) {
vbd.unplug(conn);
}
vbd.destroy(conn);
} catch (XmlRpcException e) {
String msg = "Catch XmlRpcException due to " + e.getMessage();
s_logger.warn(msg, e);
} catch (XenAPIException e) {
String msg = "Catch XenAPIException due to " + e.toString();
s_logger.warn(msg, e);
}
}
}
return false;
}
protected boolean patchspecialvm(String vmname, String device, String vmtype) {
String result = callHostPlugin("vmops", "patchdomr", "vmname", vmname, "vmtype", vmtype, "device", "/dev/" + device);
if (result == null || result.isEmpty())
return false;
return true;
}
*/
}
| core/src/com/cloud/hypervisor/xen/resource/CitrixResourceBase.java | /**
* Copyright (C) 2010 Cloud.com, Inc. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.hypervisor.xen.resource;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import javax.ejb.Local;
import javax.naming.ConfigurationException;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.log4j.Logger;
import org.apache.xmlrpc.XmlRpcException;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import com.cloud.agent.IAgentControl;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.AttachIsoCommand;
import com.cloud.agent.api.AttachVolumeAnswer;
import com.cloud.agent.api.AttachVolumeCommand;
import com.cloud.agent.api.BackupSnapshotAnswer;
import com.cloud.agent.api.BackupSnapshotCommand;
import com.cloud.agent.api.CheckHealthAnswer;
import com.cloud.agent.api.CheckHealthCommand;
import com.cloud.agent.api.CheckOnHostAnswer;
import com.cloud.agent.api.CheckOnHostCommand;
import com.cloud.agent.api.CheckVirtualMachineAnswer;
import com.cloud.agent.api.CheckVirtualMachineCommand;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.CreatePrivateTemplateFromSnapshotCommand;
import com.cloud.agent.api.CreatePrivateTemplateFromVolumeCommand;
import com.cloud.agent.api.CreateVolumeFromSnapshotAnswer;
import com.cloud.agent.api.CreateVolumeFromSnapshotCommand;
import com.cloud.agent.api.DeleteSnapshotBackupAnswer;
import com.cloud.agent.api.DeleteSnapshotBackupCommand;
import com.cloud.agent.api.DeleteSnapshotsDirCommand;
import com.cloud.agent.api.DeleteStoragePoolCommand;
import com.cloud.agent.api.GetHostStatsAnswer;
import com.cloud.agent.api.GetHostStatsCommand;
import com.cloud.agent.api.GetStorageStatsAnswer;
import com.cloud.agent.api.GetStorageStatsCommand;
import com.cloud.agent.api.GetVmStatsAnswer;
import com.cloud.agent.api.GetVmStatsCommand;
import com.cloud.agent.api.GetVncPortAnswer;
import com.cloud.agent.api.GetVncPortCommand;
import com.cloud.agent.api.HostStatsEntry;
import com.cloud.agent.api.MaintainAnswer;
import com.cloud.agent.api.MaintainCommand;
import com.cloud.agent.api.ManageSnapshotAnswer;
import com.cloud.agent.api.ManageSnapshotCommand;
import com.cloud.agent.api.MigrateAnswer;
import com.cloud.agent.api.MigrateCommand;
import com.cloud.agent.api.ModifySshKeysCommand;
import com.cloud.agent.api.ModifyStoragePoolAnswer;
import com.cloud.agent.api.ModifyStoragePoolCommand;
import com.cloud.agent.api.PingCommand;
import com.cloud.agent.api.PingRoutingCommand;
import com.cloud.agent.api.PingRoutingWithNwGroupsCommand;
import com.cloud.agent.api.PingTestCommand;
import com.cloud.agent.api.PoolEjectCommand;
import com.cloud.agent.api.PrepareForMigrationAnswer;
import com.cloud.agent.api.PrepareForMigrationCommand;
import com.cloud.agent.api.ReadyAnswer;
import com.cloud.agent.api.ReadyCommand;
import com.cloud.agent.api.RebootAnswer;
import com.cloud.agent.api.RebootCommand;
import com.cloud.agent.api.RebootRouterCommand;
import com.cloud.agent.api.SetupAnswer;
import com.cloud.agent.api.SetupCommand;
import com.cloud.agent.api.Start2Answer;
import com.cloud.agent.api.Start2Command;
import com.cloud.agent.api.StartAnswer;
import com.cloud.agent.api.StartCommand;
import com.cloud.agent.api.StartConsoleProxyAnswer;
import com.cloud.agent.api.StartConsoleProxyCommand;
import com.cloud.agent.api.StartRouterAnswer;
import com.cloud.agent.api.StartRouterCommand;
import com.cloud.agent.api.StartSecStorageVmAnswer;
import com.cloud.agent.api.StartSecStorageVmCommand;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupRoutingCommand;
import com.cloud.agent.api.StartupStorageCommand;
import com.cloud.agent.api.StopAnswer;
import com.cloud.agent.api.StopCommand;
import com.cloud.agent.api.StoragePoolInfo;
import com.cloud.agent.api.VmStatsEntry;
import com.cloud.agent.api.proxy.CheckConsoleProxyLoadCommand;
import com.cloud.agent.api.proxy.ConsoleProxyLoadAnswer;
import com.cloud.agent.api.proxy.WatchConsoleProxyLoadCommand;
import com.cloud.agent.api.routing.DhcpEntryCommand;
import com.cloud.agent.api.routing.IPAssocCommand;
import com.cloud.agent.api.routing.LoadBalancerCfgCommand;
import com.cloud.agent.api.routing.SavePasswordCommand;
import com.cloud.agent.api.routing.SetFirewallRuleCommand;
import com.cloud.agent.api.routing.RemoteAccessVpnCfgCommand;
import com.cloud.agent.api.routing.VmDataCommand;
import com.cloud.agent.api.storage.CopyVolumeAnswer;
import com.cloud.agent.api.storage.CopyVolumeCommand;
import com.cloud.agent.api.storage.CreateAnswer;
import com.cloud.agent.api.storage.CreateCommand;
import com.cloud.agent.api.storage.CreatePrivateTemplateAnswer;
import com.cloud.agent.api.storage.DestroyCommand;
import com.cloud.agent.api.storage.DownloadAnswer;
import com.cloud.agent.api.storage.PrimaryStorageDownloadCommand;
import com.cloud.agent.api.storage.ShareAnswer;
import com.cloud.agent.api.storage.ShareCommand;
import com.cloud.agent.api.to.NicTO;
import com.cloud.agent.api.to.StorageFilerTO;
import com.cloud.agent.api.to.VirtualMachineTO;
import com.cloud.agent.api.to.VirtualMachineTO.Monitor;
import com.cloud.agent.api.to.VirtualMachineTO.SshMonitor;
import com.cloud.agent.api.to.VolumeTO;
import com.cloud.exception.InternalErrorException;
import com.cloud.host.Host.Type;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.network.Network.BroadcastDomainType;
import com.cloud.network.Network.TrafficType;
import com.cloud.resource.ServerResource;
import com.cloud.storage.Storage;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.storage.StorageLayer;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.Volume.VolumeType;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.resource.StoragePoolResource;
import com.cloud.storage.template.TemplateInfo;
import com.cloud.template.VirtualMachineTemplate.BootloaderType;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.Ternary;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.script.Script;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.DiskProfile;
import com.cloud.vm.DomainRouter;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.State;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachineName;
import com.trilead.ssh2.SCPClient;
import com.xensource.xenapi.APIVersion;
import com.xensource.xenapi.Bond;
import com.xensource.xenapi.Connection;
import com.xensource.xenapi.Console;
import com.xensource.xenapi.Host;
import com.xensource.xenapi.HostCpu;
import com.xensource.xenapi.HostMetrics;
import com.xensource.xenapi.Network;
import com.xensource.xenapi.PBD;
import com.xensource.xenapi.PIF;
import com.xensource.xenapi.Pool;
import com.xensource.xenapi.SR;
import com.xensource.xenapi.Session;
import com.xensource.xenapi.Types;
import com.xensource.xenapi.Types.BadServerResponse;
import com.xensource.xenapi.Types.IpConfigurationMode;
import com.xensource.xenapi.Types.VmPowerState;
import com.xensource.xenapi.Types.XenAPIException;
import com.xensource.xenapi.VBD;
import com.xensource.xenapi.VDI;
import com.xensource.xenapi.VIF;
import com.xensource.xenapi.VLAN;
import com.xensource.xenapi.VM;
import com.xensource.xenapi.VMGuestMetrics;
import com.xensource.xenapi.XenAPIObject;
/**
* Encapsulates the interface to the XenServer API.
*
*/
@Local(value = ServerResource.class)
public abstract class CitrixResourceBase implements StoragePoolResource, ServerResource {
private static final Logger s_logger = Logger.getLogger(CitrixResourceBase.class);
// Shared pool of XenAPI connections, one instance per JVM.
protected static final XenServerConnectionPool _connPool = XenServerConnectionPool.getInstance();
protected static final int MB = 1024 * 1024;
// Resource identity and credentials, populated from configuration.
protected String _name;
protected String _username;
protected String _password;
protected final int _retry = 24;      // retry attempts for host operations
protected final int _sleep = 10000;   // ms between retries
// Placement of this resource: data center, pod, cluster.
protected long _dcId;
protected String _pod;
protected String _cluster;
// Last-known power state per VM name, used for state sync.
protected HashMap<String, State> _vms = new HashMap<String, State>(71);
protected String _patchPath;
// Configured network name labels for each traffic type.
protected String _privateNetworkName;
protected String _linkLocalPrivateNetworkName;
protected String _publicNetworkName;
protected String _storageNetworkName1;
protected String _storageNetworkName2;
protected String _guestNetworkName;
protected int _wait;                  // generic operation timeout (seconds)
protected IAgentControl _agentControl;
int _userVMCap = 0;
final int _maxWeight = 256;
// Per-host UUIDs gathered once at connect time; see XenServerHost.
protected final XenServerHost _host = new XenServerHost();
// Guest and Host Performance Statistics
protected boolean _collectHostStats = false;
protected String _consolidationFunction = "AVERAGE";
protected int _pollingIntervalInSeconds = 60;
protected StorageLayer _storage;
protected boolean _canBridgeFirewall = false;
// Delegate resources per storage pool type.
protected HashMap<StoragePoolType, StoragePoolResource> _pools = new HashMap<StoragePoolType, StoragePoolResource>(5);
public enum SRType {
    NFS, LVM, ISCSI, ISO, LVMOISCSI;

    /**
     * Lower-case form matching the strings XenServer stores in SR.type.
     */
    @Override
    public String toString() {
        return name().toLowerCase();
    }

    /**
     * Case-insensitive comparison against an SR type string.
     * NOTE: this overloads (not overrides) Object.equals(Object).
     */
    public boolean equals(String type) {
        return name().equalsIgnoreCase(type);
    }
}
// Maps XenServer VM power states onto the cloud's coarser State enum.
// PAUSED and SUSPENDED count as Running (the VM still exists and holds
// resources); only HALTED maps to Stopped.
protected static HashMap<Types.VmPowerState, State> s_statesTable;
// Gateway of this host's local network; presumably set during resource
// configuration — not visible in this chunk.
protected String _localGateway;
static {
    s_statesTable = new HashMap<Types.VmPowerState, State>();
    s_statesTable.put(Types.VmPowerState.HALTED, State.Stopped);
    s_statesTable.put(Types.VmPowerState.PAUSED, State.Running);
    s_statesTable.put(Types.VmPowerState.RUNNING, State.Running);
    s_statesTable.put(Types.VmPowerState.SUSPENDED, State.Running);
    s_statesTable.put(Types.VmPowerState.UNKNOWN, State.Unknown);
    s_statesTable.put(Types.VmPowerState.UNRECOGNIZED, State.Unknown);
}
/**
 * Returns true when the XAPI object reference is absent — either a Java
 * null or XenServer's sentinel "OpaqueRef:NULL" wire reference.
 */
protected boolean isRefNull(XenAPIObject object) {
    if (object == null) {
        return true;
    }
    return "OpaqueRef:NULL".equals(object.toWireString());
}
/**
 * Called when the agent disconnects: releases this host's pooled
 * XenServer connection, if the host ever joined a pool.
 */
@Override
public void disconnected() {
    s_logger.debug("Logging out of " + _host.uuid);
    if (_host.pool != null) {
        _connPool.disconnect(_host.uuid, _host.pool);
    }
}
/**
 * Copies a VDI into the given storage repository on this host's connection.
 *
 * @param vdi source virtual disk image
 * @param sr  destination storage repository
 * @return the newly created copy of the VDI
 */
protected VDI cloudVDIcopy(VDI vdi, SR sr) throws BadServerResponse, XenAPIException, XmlRpcException {
    Connection conn = getConnection();
    return vdi.copy(conn, sr);
}
/**
 * Destroys HALTED VMs whose affinity points at this host.  Control
 * domains, snapshots and templates are skipped.  Fetching the full VM
 * record set is retried once after a short sleep because XAPI calls can
 * fail transiently; all per-VM failures are logged and tolerated.
 */
protected void destroyStoppedVm() {
    Map<VM, VM.Record> vmentries = null;
    Connection conn = getConnection();
    // Up to two attempts to read all VM records; give up silently after that.
    for (int i = 0; i < 2; i++) {
        try {
            vmentries = VM.getAllRecords(conn);
            break;
        } catch (final Throwable e) {
            s_logger.warn("Unable to get vms", e);
        }
        try {
            Thread.sleep(1000);
        } catch (final InterruptedException ex) {
        }
    }
    if (vmentries == null) {
        return;
    }
    for (Map.Entry<VM, VM.Record> vmentry : vmentries.entrySet()) {
        VM.Record record = vmentry.getValue();
        if (record.isControlDomain || record.isASnapshot || record.isATemplate) {
            continue; // Skip DOM0
        }
        // Only halted VMs are candidates for destruction.
        if (record.powerState != Types.VmPowerState.HALTED) {
            continue;
        }
        try {
            // Only touch VMs pinned (affinity) to this particular host.
            if (isRefNull(record.affinity) || !record.affinity.getUuid(conn).equals(_host.uuid)) {
                continue;
            }
            vmentry.getKey().destroy(conn);
        } catch (Exception e) {
            String msg = "VM destroy failed for " + record.nameLabel + " due to " + e.getMessage();
            s_logger.warn(msg, e);
        }
    }
}
/**
 * Repairs SR attachment state on this host: NFS SRs and template-ISO SRs
 * with no PBDs at all are cleaned away via cleanSR; PBDs belonging to
 * this host that are present but not attached are re-plugged.
 *
 * @throws CloudRuntimeException when the SR record set cannot be read
 */
protected void cleanupDiskMounts() {
    Connection conn = getConnection();
    Map<SR, SR.Record> srs;
    try {
        srs = SR.getAllRecords(conn);
    } catch (XenAPIException e) {
        s_logger.warn("Unable to get the SRs " + e.toString(), e);
        throw new CloudRuntimeException("Unable to get SRs " + e.toString(), e);
    } catch (XmlRpcException e) {
        throw new CloudRuntimeException("Unable to get SRs " + e.getMessage());
    }
    for (Map.Entry<SR, SR.Record> sr : srs.entrySet()) {
        SR.Record rec = sr.getValue();
        // Only NFS SRs and ISO SRs whose label ends in "iso" are managed here.
        if (SRType.NFS.equals(rec.type) || (SRType.ISO.equals(rec.type) && rec.nameLabel.endsWith("iso"))) {
            if (rec.PBDs == null || rec.PBDs.size() == 0) {
                // SR with no physical attachment left anywhere — remove it.
                cleanSR(sr.getKey(), rec);
                continue;
            }
            for (PBD pbd : rec.PBDs) {
                if (isRefNull(pbd)) {
                    continue;
                }
                PBD.Record pbdr = null;
                try {
                    pbdr = pbd.getRecord(conn);
                } catch (XenAPIException e) {
                    s_logger.warn("Unable to get pbd record " + e.toString());
                } catch (XmlRpcException e) {
                    s_logger.warn("Unable to get pbd record " + e.getMessage());
                }
                if (pbdr == null) {
                    continue;
                }
                try {
                    // Re-plug only this host's detached PBDs.
                    if (pbdr.host.getUuid(conn).equals(_host.uuid)) {
                        if (!pbdr.currentlyAttached) {
                            pbdPlug(conn, pbd);
                        }
                    }
                } catch (XenAPIException e) {
                    s_logger.warn("Catch XenAPIException due to" + e.toString(), e);
                } catch (XmlRpcException e) {
                    s_logger.warn("Catch XmlRpcException due to" + e.getMessage(), e);
                }
            }
        }
    }
}
/**
 * Finds a VM resident on the given host by its name label.
 *
 * @param getRecord when true, the full VM record is fetched and returned
 *                  in the pair's second slot; otherwise it is null
 * @return the matching VM (and record, if requested), or null if none
 */
protected Pair<VM, VM.Record> getVmByNameLabel(Connection conn, Host host, String nameLabel, boolean getRecord) throws XmlRpcException, XenAPIException {
    for (VM candidate : host.getResidentVMs(conn)) {
        VM.Record rec = getRecord ? candidate.getRecord(conn) : null;
        String name = (rec != null) ? rec.nameLabel : candidate.getNameLabel(conn);
        if (name.equals(nameLabel)) {
            return new Pair<VM, VM.Record>(candidate, rec);
        }
    }
    return null;
}
/**
 * Best-effort check of whether the storage behind a PBD is actually
 * attached on this host, probing beyond XAPI's own bookkeeping: NFS SRs
 * are verified via the "checkMount" host plugin, LVM-over-iSCSI SRs via
 * "checkIscsi".  All other SR types are assumed attached.
 *
 * @return true when the plugin reports "1" (or the SR type is not probed)
 */
protected boolean currentlyAttached(SR sr, SR.Record rec, PBD pbd, PBD.Record pbdr) {
    String status = null;
    if (SRType.NFS.equals(rec.type)) {
        status = callHostPlugin("vmops", "checkMount", "mount", rec.uuid);
    } else if (SRType.LVMOISCSI.equals(rec.type) ) {
        String scsiid = pbdr.deviceConfig.get("SCSIid");
        // Fix: deviceConfig may carry no "SCSIid" entry at all; the old code
        // dereferenced the value unconditionally and could NPE here.
        if (scsiid == null || scsiid.isEmpty()) {
            return false;
        }
        status = callHostPlugin("vmops", "checkIscsi", "scsiid", scsiid);
    } else {
        // No host-side probe exists for other SR types; trust XAPI.
        return true;
    }
    if (status != null && status.equalsIgnoreCase("1")) {
        s_logger.debug("currently attached " + pbdr.uuid);
        return true;
    } else {
        s_logger.debug("currently not attached " + pbdr.uuid);
        return false;
    }
}
/**
 * Pings a domR's command port via the "pingdomr" host plugin.
 *
 * @return true when the plugin returned a non-empty result
 */
protected boolean pingdomr(String host, String port) {
    String result = callHostPlugin("vmops", "pingdomr", "host", host, "port", port);
    return result != null && !result.isEmpty();
}
/**
 * Checks that the XenServer host is reachable by opening a short-lived
 * slave-local session against it.
 *
 * @return true when the login succeeds, false on any failure
 */
protected boolean pingxenserver() {
    Session slaveSession = null;
    Connection slaveConn = null;
    try {
        URL slaveUrl = new URL("http://" + _host.ip);
        slaveConn = new Connection(slaveUrl, 100);
        slaveSession = Session.slaveLocalLoginWithPassword(slaveConn, _username, _password);
        return true;
    } catch (Exception e) {
        return false;
    } finally {
        if (slaveSession != null) {
            try {
                Session.localLogout(slaveConn);
            } catch (Exception e) {
            }
        }
        // Fix: the old code disposed the connection only when the session
        // login had succeeded, leaking the Connection when login failed.
        if (slaveConn != null) {
            slaveConn.dispose();
        }
    }
}
/**
 * Prefixes a log message with this host's IP and the XAPI object's wire
 * reference, e.g. "Host 1.2.3.4 OpaqueRef:... : msg".
 */
protected String logX(XenAPIObject obj, String msg) {
    return "Host " + _host.ip + " " + obj.toWireString() + ": " + msg;
}
/**
 * Tears down an SR from this host's point of view: for every VDI still
 * referenced by a VBD of a VM resident on (or pinned to) this host, the
 * VM is hard-shut-down and destroyed and the VBD removed; then all of
 * the SR's PBDs are unplugged and destroyed; finally the SR is
 * forgotten once no PBDs remain.  Every failure is logged and tolerated
 * so cleanup proceeds as far as possible.
 */
protected void cleanSR(SR sr, SR.Record rec) {
    Connection conn = getConnection();
    if (rec.VDIs != null) {
        for (VDI vdi : rec.VDIs) {
            VDI.Record vdir;
            try {
                vdir = vdi.getRecord(conn);
            } catch (XenAPIException e) {
                s_logger.debug("Unable to get VDI: " + e.toString());
                continue;
            } catch (XmlRpcException e) {
                s_logger.debug("Unable to get VDI: " + e.getMessage());
                continue;
            }
            if (vdir.VBDs == null)
                continue;
            for (VBD vbd : vdir.VBDs) {
                try {
                    VBD.Record vbdr = vbd.getRecord(conn);
                    VM.Record vmr = vbdr.VM.getRecord(conn);
                    // Only act on VMs that live on, or are pinned to, this host.
                    if ((!isRefNull(vmr.residentOn) && vmr.residentOn.getUuid(conn).equals(_host.uuid))
                            || (isRefNull(vmr.residentOn) && !isRefNull(vmr.affinity) && vmr.affinity.getUuid(conn).equals(_host.uuid))) {
                        // Shut the VM down first if it is in any running-like state.
                        if (vmr.powerState != VmPowerState.HALTED && vmr.powerState != VmPowerState.UNKNOWN && vmr.powerState != VmPowerState.UNRECOGNIZED) {
                            try {
                                vbdr.VM.hardShutdown(conn);
                            } catch (XenAPIException e) {
                                s_logger.debug("Shutdown hit error " + vmr.nameLabel + ": " + e.toString());
                            }
                        }
                        try {
                            vbdr.VM.destroy(conn);
                        } catch (XenAPIException e) {
                            s_logger.debug("Destroy hit error " + vmr.nameLabel + ": " + e.toString());
                        } catch (XmlRpcException e) {
                            s_logger.debug("Destroy hit error " + vmr.nameLabel + ": " + e.getMessage());
                        }
                        vbd.destroy(conn);
                        break;
                    }
                } catch (XenAPIException e) {
                    s_logger.debug("Unable to get VBD: " + e.toString());
                    continue;
                } catch (XmlRpcException e) {
                    // Fix: corrected the garbled "Uanbel to get VBD" message.
                    s_logger.debug("Unable to get VBD: " + e.getMessage());
                    continue;
                }
            }
        }
    }
    for (PBD pbd : rec.PBDs) {
        PBD.Record pbdr = null;
        try {
            pbdr = pbd.getRecord(conn);
            pbd.unplug(conn);
            pbd.destroy(conn);
        } catch (XenAPIException e) {
            s_logger.warn("PBD " + ((pbdr != null) ? "(uuid:" + pbdr.uuid + ")" : "") + "destroy failed due to " + e.toString());
        } catch (XmlRpcException e) {
            s_logger.warn("PBD " + ((pbdr != null) ? "(uuid:" + pbdr.uuid + ")" : "") + "destroy failed due to " + e.getMessage());
        }
    }
    try {
        // Re-read the SR; forget it only if no PBDs remain attached to it.
        rec = sr.getRecord(conn);
        if (rec.PBDs == null || rec.PBDs.size() == 0) {
            sr.forget(conn);
            return;
        }
    } catch (XenAPIException e) {
        s_logger.warn("Unable to retrieve sr again: " + e.toString(), e);
    } catch (XmlRpcException e) {
        s_logger.warn("Unable to retrieve sr again: " + e.getMessage(), e);
    }
}
/**
 * Central command dispatcher for this resource: routes each incoming
 * agent Command to its type-specific execute(...) overload.  Unknown
 * command types get an "unsupported command" answer rather than an
 * exception.  New command types must be added to this chain.
 */
@Override
public Answer executeRequest(Command cmd) {
    if (cmd instanceof CreateCommand) {
        return execute((CreateCommand) cmd);
    } else if (cmd instanceof SetFirewallRuleCommand) {
        return execute((SetFirewallRuleCommand) cmd);
    } else if (cmd instanceof LoadBalancerCfgCommand) {
        return execute((LoadBalancerCfgCommand) cmd);
    } else if (cmd instanceof IPAssocCommand) {
        return execute((IPAssocCommand) cmd);
    } else if (cmd instanceof CheckConsoleProxyLoadCommand) {
        return execute((CheckConsoleProxyLoadCommand) cmd);
    } else if (cmd instanceof WatchConsoleProxyLoadCommand) {
        return execute((WatchConsoleProxyLoadCommand) cmd);
    } else if (cmd instanceof SavePasswordCommand) {
        return execute((SavePasswordCommand) cmd);
    } else if (cmd instanceof DhcpEntryCommand) {
        return execute((DhcpEntryCommand) cmd);
    } else if (cmd instanceof VmDataCommand) {
        return execute((VmDataCommand) cmd);
    } else if (cmd instanceof StartCommand) {
        return execute((StartCommand) cmd);
    } else if (cmd instanceof StartRouterCommand) {
        return execute((StartRouterCommand) cmd);
    } else if (cmd instanceof ReadyCommand) {
        return execute((ReadyCommand) cmd);
    } else if (cmd instanceof GetHostStatsCommand) {
        return execute((GetHostStatsCommand) cmd);
    } else if (cmd instanceof GetVmStatsCommand) {
        return execute((GetVmStatsCommand) cmd);
    } else if (cmd instanceof CheckHealthCommand) {
        return execute((CheckHealthCommand) cmd);
    } else if (cmd instanceof StopCommand) {
        return execute((StopCommand) cmd);
    } else if (cmd instanceof RebootRouterCommand) {
        // NOTE: RebootRouterCommand must be tested before RebootCommand —
        // instanceof order matters for subtypes.
        return execute((RebootRouterCommand) cmd);
    } else if (cmd instanceof RebootCommand) {
        return execute((RebootCommand) cmd);
    } else if (cmd instanceof CheckVirtualMachineCommand) {
        return execute((CheckVirtualMachineCommand) cmd);
    } else if (cmd instanceof PrepareForMigrationCommand) {
        return execute((PrepareForMigrationCommand) cmd);
    } else if (cmd instanceof MigrateCommand) {
        return execute((MigrateCommand) cmd);
    } else if (cmd instanceof DestroyCommand) {
        return execute((DestroyCommand) cmd);
    } else if (cmd instanceof ShareCommand) {
        return execute((ShareCommand) cmd);
    } else if (cmd instanceof ModifyStoragePoolCommand) {
        return execute((ModifyStoragePoolCommand) cmd);
    } else if (cmd instanceof DeleteStoragePoolCommand) {
        return execute((DeleteStoragePoolCommand) cmd);
    } else if (cmd instanceof CopyVolumeCommand) {
        return execute((CopyVolumeCommand) cmd);
    } else if (cmd instanceof AttachVolumeCommand) {
        return execute((AttachVolumeCommand) cmd);
    } else if (cmd instanceof AttachIsoCommand) {
        return execute((AttachIsoCommand) cmd);
    } else if (cmd instanceof ManageSnapshotCommand) {
        return execute((ManageSnapshotCommand) cmd);
    } else if (cmd instanceof BackupSnapshotCommand) {
        return execute((BackupSnapshotCommand) cmd);
    } else if (cmd instanceof DeleteSnapshotBackupCommand) {
        return execute((DeleteSnapshotBackupCommand) cmd);
    } else if (cmd instanceof CreateVolumeFromSnapshotCommand) {
        return execute((CreateVolumeFromSnapshotCommand) cmd);
    } else if (cmd instanceof DeleteSnapshotsDirCommand) {
        return execute((DeleteSnapshotsDirCommand) cmd);
    } else if (cmd instanceof CreatePrivateTemplateFromVolumeCommand) {
        return execute((CreatePrivateTemplateFromVolumeCommand) cmd);
    } else if (cmd instanceof CreatePrivateTemplateFromSnapshotCommand) {
        return execute((CreatePrivateTemplateFromSnapshotCommand) cmd);
    } else if (cmd instanceof GetStorageStatsCommand) {
        return execute((GetStorageStatsCommand) cmd);
    } else if (cmd instanceof PrimaryStorageDownloadCommand) {
        return execute((PrimaryStorageDownloadCommand) cmd);
    } else if (cmd instanceof StartConsoleProxyCommand) {
        return execute((StartConsoleProxyCommand) cmd);
    } else if (cmd instanceof StartSecStorageVmCommand) {
        return execute((StartSecStorageVmCommand) cmd);
    } else if (cmd instanceof GetVncPortCommand) {
        return execute((GetVncPortCommand) cmd);
    } else if (cmd instanceof SetupCommand) {
        return execute((SetupCommand) cmd);
    } else if (cmd instanceof MaintainCommand) {
        return execute((MaintainCommand) cmd);
    } else if (cmd instanceof PingTestCommand) {
        return execute((PingTestCommand) cmd);
    } else if (cmd instanceof CheckOnHostCommand) {
        return execute((CheckOnHostCommand) cmd);
    } else if (cmd instanceof ModifySshKeysCommand) {
        return execute((ModifySshKeysCommand) cmd);
    } else if (cmd instanceof PoolEjectCommand) {
        return execute((PoolEjectCommand) cmd);
    } else if (cmd instanceof Start2Command) {
        return execute((Start2Command)cmd);
    } else if (cmd instanceof RemoteAccessVpnCfgCommand) {
        return execute((RemoteAccessVpnCfgCommand)cmd);
    } else {
        return Answer.createUnsupportedCommandAnswer(cmd);
    }
}
/**
 * Resolves the XenServer network (and its PIF UUID) that carries the
 * given traffic type on this host.  Control traffic lazily sets up the
 * link-local network and has no PIF; Public and Vpn share the public
 * network.
 *
 * @throws CloudRuntimeException for traffic types with no mapping
 */
Pair<Network, String> getNetworkForTraffic(Connection conn, TrafficType type) throws XenAPIException, XmlRpcException {
    if (type == TrafficType.Control) {
        setupLinkLocalNetwork();
        return new Pair<Network, String>(Network.getByUuid(conn, _host.linkLocalNetwork), null);
    }
    String networkUuid;
    String pif;
    if (type == TrafficType.Guest) {
        networkUuid = _host.guestNetwork;
        pif = _host.guestPif;
    } else if (type == TrafficType.Management) {
        networkUuid = _host.privateNetwork;
        pif = _host.privatePif;
    } else if (type == TrafficType.Public || type == TrafficType.Vpn) {
        networkUuid = _host.publicNetwork;
        pif = _host.publicPif;
    } else if (type == TrafficType.Storage) {
        networkUuid = _host.storageNetwork1;
        pif = _host.storagePif1;
    } else {
        throw new CloudRuntimeException("Unsupported network type: " + type);
    }
    return new Pair<Network, String>(Network.getByUuid(conn, networkUuid), pif);
}
/**
 * Creates (but does not plug) a VIF for the given NIC spec: resolves the
 * backing network by traffic type, creating/attaching a VLAN network
 * when the NIC's broadcast domain is a VLAN, and applies an optional
 * rate limit.
 *
 * @return the newly created VIF
 */
protected VIF createVif(Connection conn, String vmName, VM vm, NicTO nic) throws XmlRpcException, XenAPIException {
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Creating VIF for " + vmName + " on nic " + nic);
    }
    VIF.Record vifr = new VIF.Record();
    vifr.VM = vm;
    vifr.device = Integer.toString(nic.getDeviceId());
    vifr.MAC = nic.getMac();
    Pair<Network, String> network = getNetworkForTraffic(conn, nic.getType());
    if (nic.getBroadcastType() == BroadcastDomainType.Vlan) {
        // Broadcast URI carries the VLAN id in its host part, e.g. vlan://123.
        URI broadcastUri = nic.getBroadcastUri();
        assert broadcastUri.getScheme().equals(BroadcastDomainType.Vlan.scheme());
        long vlan = Long.parseLong(broadcastUri.getHost());
        vifr.network = enableVlanNetwork(conn, vlan, network.first(), network.second());
    } else if (nic.getBroadcastType() == BroadcastDomainType.Native || nic.getBroadcastType() == BroadcastDomainType.LinkLocal) {
        vifr.network = network.first();
    }
    if (nic.getNetworkRateMbps() != null) {
        vifr.qosAlgorithmType = "ratelimit";
        vifr.qosAlgorithmParams = new HashMap<String, String>();
        // convert mbs to kilobyte per second
        vifr.qosAlgorithmParams.put("kbps", Integer.toString(nic.getNetworkRateMbps() * 128));
    }
    VIF vif = VIF.create(conn, vifr);
    if (s_logger.isDebugEnabled()) {
        vifr = vif.getRecord(conn);
        s_logger.debug("Created a vif " + vifr.uuid + " on " + nic.getDeviceId());
    }
    return vif;
}
/**
 * Resolves the VDI backing a volume.  For ISO volumes the path is split
 * into mount point + file name, an ISO SR is created/attached for the
 * mount point, and the named ISO is looked up inside it.  For all other
 * volume types, the path itself is the VDI UUID.
 *
 * @throws CloudRuntimeException on a malformed mount-point URI or when
 *         the ISO cannot be found in its SR
 */
protected VDI mount(Connection conn, String vmName, VolumeTO volume) throws XmlRpcException, XenAPIException {
    if (volume.getType() == VolumeType.ISO) {
        String isopath = volume.getPath();
        int index = isopath.lastIndexOf("/");
        String mountpoint = isopath.substring(0, index);
        URI uri;
        try {
            uri = new URI(mountpoint);
        } catch (URISyntaxException e) {
            throw new CloudRuntimeException("Incorrect uri " + mountpoint, e);
        }
        SR isoSr = createIsoSRbyURI(uri, vmName, false);
        String isoname = isopath.substring(index + 1);
        VDI isoVdi = getVDIbyLocationandSR(isoname, isoSr);
        if (isoVdi == null) {
            throw new CloudRuntimeException("Unable to find ISO " + volume.getPath());
        }
        return isoVdi;
    } else {
        // Non-ISO volumes store the VDI uuid directly in the path field.
        return VDI.getByUuid(conn, volume.getPath());
    }
}
/**
 * Creates a VBD attaching the given volume to a VM: ROOT volumes are
 * marked bootable, ISO volumes become read-only CD devices, everything
 * else a read-write disk at the volume's device id.
 *
 * @return the newly created VBD
 */
protected VBD createVbd(Connection conn, VolumeTO volume, String vmName, VM vm) throws XmlRpcException, XenAPIException {
    VolumeType volType = volume.getType();
    boolean isIso = (volType == VolumeType.ISO);
    VBD.Record rec = new VBD.Record();
    rec.VM = vm;
    rec.VDI = mount(conn, vmName, volume);
    if (volType == VolumeType.ROOT) {
        rec.bootable = true;
    }
    rec.userdevice = Long.toString(volume.getDeviceId());
    rec.mode = isIso ? Types.VbdMode.RO : Types.VbdMode.RW;
    rec.type = isIso ? Types.VbdType.CD : Types.VbdType.DISK;
    VBD vbd = VBD.create(conn, rec);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("VBD " + vbd.getUuid(conn) + " created for " + volume);
    }
    return vbd;
}
/**
 * Clones a VM from the built-in XenServer template matching the guest OS
 * type, pins it to the given host, and configures memory, vCPUs (with
 * weight/cap based on requested speed), crash/shutdown behavior, PV boot
 * args, and — for non-Windows/Citrix/Other guests — the PV bootloader.
 * The VM is created but not started.
 *
 * @throws CloudRuntimeException on an unsupported bootloader type (the
 *         half-built VM is destroyed first)
 */
protected VM createVmFromTemplate(Connection conn, VirtualMachineTO vmSpec, Host host) throws XenAPIException, XmlRpcException {
    String guestOsTypeName = getGuestOsType(vmSpec.getOs());
    Set<VM> templates = VM.getByNameLabel(conn, guestOsTypeName);
    assert templates.size() == 1 : "Should only have 1 template but found " + templates.size();
    VM template = templates.iterator().next();
    VM vm = template.createClone(conn, vmSpec.getName());
    vm.setAffinity(conn, host);
    VM.Record vmr = vm.getRecord(conn);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Created VM " + vmr.uuid + " for " + vmSpec.getName());
    }
    // Drop consoles inherited from the template clone.
    for (Console console : vmr.consoles) {
        console.destroy(conn);
    }
    vm.setIsATemplate(conn, false);
    vm.removeFromOtherConfig(conn, "disks");
    vm.setNameLabel(conn, vmSpec.getName());
    setMemory(conn, vm, vmSpec.getMinRam());
    vm.setVCPUsAtStartup(conn, (long)vmSpec.getCpus());
    vm.setVCPUsMax(conn, (long)vmSpec.getCpus());
    Map<String, String> vcpuParams = new HashMap<String, String>();
    Integer speed = vmSpec.getSpeed();
    if (speed != null) {
        int utilization = _userVMCap; //cpu_cap
        //Configuration cpu.uservm.cap is not available in default installation. Using this parameter is not encouraged
        int cpuWeight = _maxWeight; //cpu_weight
        // weight based allocation
        cpuWeight = (int)((speed*0.99) / _host.speed * _maxWeight);
        if (cpuWeight > _maxWeight) {
            cpuWeight = _maxWeight;
        }
        vcpuParams.put("weight", Integer.toString(cpuWeight));
        vcpuParams.put("cap", Integer.toString(utilization));
    }
    if (vcpuParams.size() > 0) {
        vm.setVCPUsParams(conn, vcpuParams);
    }
    vm.setActionsAfterCrash(conn, Types.OnCrashBehaviour.DESTROY);
    vm.setActionsAfterShutdown(conn, Types.OnNormalExit.DESTROY);
    String bootArgs = vmSpec.getBootArgs();
    if (bootArgs != null && bootArgs.length() > 0) {
        // Append the spec's boot args to the template's existing PV args.
        String pvargs = vm.getPVArgs(conn);
        pvargs = pvargs + vmSpec.getBootArgs();
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("PV args are " + pvargs);
        }
        vm.setPVArgs(conn, pvargs);
    }
    if (!(guestOsTypeName.startsWith("Windows") || guestOsTypeName.startsWith("Citrix") || guestOsTypeName.startsWith("Other"))) {
        if (vmSpec.getBootloader() == BootloaderType.CD) {
            vm.setPVBootloader(conn, "eliloader");
            vm.addToOtherConfig(conn, "install-repository", "cdrom");
        } else if (vmSpec.getBootloader() == BootloaderType.PyGrub ){
            vm.setPVBootloader(conn, "pygrub");
        } else {
            vm.destroy(conn);
            throw new CloudRuntimeException("Unable to handle boot loader type: " + vmSpec.getBootloader());
        }
    }
    return vm;
}
/**
 * Cleans up after a failed VM start: force-stops the VM if it ended up
 * running, destroys it if halted, and unplugs/destroys its VBDs and
 * VIFs.  All cleanup failures are logged but swallowed.
 *
 * @return the composed failure message to report to the caller
 */
protected String handleVmStartFailure(String vmName, VM vm, String message, Throwable th) {
    String msg = "Unable to start " + vmName + " due to " + message;
    s_logger.warn(msg, th);
    if (vm == null) {
        return msg;
    }
    Connection conn = getConnection();
    try {
        VM.Record vmr = vm.getRecord(conn);
        if (vmr.powerState == VmPowerState.RUNNING) {
            try {
                vm.hardShutdown(conn);
            } catch (Exception e) {
                s_logger.warn("VM hardshutdown failed due to ", e);
            }
        }
        // Re-check power state after the shutdown attempt before destroying.
        if (vm.getPowerState(conn) == VmPowerState.HALTED) {
            try {
                vm.destroy(conn);
            } catch (Exception e) {
                s_logger.warn("VM destroy failed due to ", e);
            }
        }
        for (VBD vbd : vmr.VBDs) {
            try {
                vbd.unplug(conn);
                vbd.destroy(conn);
            } catch (Exception e) {
                s_logger.warn("Unable to clean up VBD due to ", e);
            }
        }
        for (VIF vif : vmr.VIFs) {
            try {
                vif.unplug(conn);
                vif.destroy(conn);
            } catch (Exception e) {
                s_logger.warn("Unable to cleanup VIF", e);
            }
        }
    } catch (Exception e) {
        s_logger.warn("VM getRecord failed due to ", e);
    }
    return msg;
}
/**
 * Attaches the system-VM patch ISO to a VM: creates an empty, read-only,
 * non-bootable CD drive at device 3 and inserts the ISO identified by
 * _host.systemvmisouuid.
 *
 * @return the created CD-ROM VBD
 */
protected VBD createPatchVbd(Connection conn, String vmName, VM vm) throws XmlRpcException, XenAPIException {
    VBD.Record rec = new VBD.Record();
    rec.VM = vm;
    rec.empty = true;
    rec.bootable = false;
    rec.userdevice = "3";
    rec.mode = Types.VbdMode.RO;
    rec.type = Types.VbdType.CD;
    VBD cdrom = VBD.create(conn, rec);
    cdrom.insert(conn, VDI.getByUuid(conn, _host.systemvmisouuid));
    return cdrom;
}
/**
 * Starts a VM from its spec: clones from template, attaches disks (plus
 * the patch ISO for system VMs), wires up NICs, starts the VM on this
 * host, optionally programs default bridge-firewall rules, and — when a
 * monitor is set — waits for the VM's command port to answer.  The _vms
 * state map is kept in sync (Starting -> Running, or removed on failure).
 */
protected Start2Answer execute(Start2Command cmd) {
    VirtualMachineTO vmSpec = cmd.getVirtualMachine();
    String vmName = vmSpec.getName();
    Connection conn = getConnection();
    State state = State.Stopped;
    VM vm = null;
    try {
        Host host = Host.getByUuid(conn, _host.uuid);
        synchronized (_vms) {
            _vms.put(vmName, State.Starting);
        }
        vm = createVmFromTemplate(conn, vmSpec, host);
        for (VolumeTO disk : vmSpec.getDisks()) {
            createVbd(conn, disk, vmName, vm);
        }
        // System VMs additionally get the patch ISO attached.
        if (vmSpec.getType() != VirtualMachine.Type.User) {
            createPatchVbd(conn, vmName, vm);
        }
        for (NicTO nic : vmSpec.getNics()) {
            createVif(conn, vmName, vm, nic);
        }
        vm.startOn(conn, host, false, true);
        if (_canBridgeFirewall) {
            String result = null;
            if (vmSpec.getType() != VirtualMachine.Type.User) {
                result = callHostPlugin("vmops", "default_network_rules_systemvm", "vmName", vmName);
            } else {
                // NOTE(review): user-VM default rules are not programmed here —
                // confirm whether this branch was intentionally left empty.
            }
            if (result == null || result.isEmpty() || !Boolean.parseBoolean(result)) {
                s_logger.warn("Failed to program default network rules for " + vmName);
            } else {
                s_logger.info("Programmed default network rules for " + vmName);
            }
        }
        Monitor monitor = vmSpec.getMonitor();
        if (monitor != null && monitor instanceof SshMonitor) {
            SshMonitor sshMon = (SshMonitor)monitor;
            String privateIp = sshMon.getIp();
            int cmdPort = sshMon.getPort();
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Ping command port, " + privateIp + ":" + cmdPort);
            }
            String result = connect(vmName, privateIp, cmdPort);
            if (result != null) {
                throw new CloudRuntimeException("Can not ping System vm " + vmName + "due to:" + result);
            }
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Ping command port succeeded for vm " + vmName);
            }
        }
        state = State.Running;
        return new Start2Answer(cmd);
    } catch (XmlRpcException e) {
        s_logger.warn("Exception ", e);
        String msg = handleVmStartFailure(vmName, vm, "", e);
        return new Start2Answer(cmd, msg);
    } catch (XenAPIException e) {
        s_logger.warn("Exception ", e);
        String msg = handleVmStartFailure(vmName, vm, "", e);
        return new Start2Answer(cmd, msg);
    } catch (Exception e) {
        s_logger.warn("Exception ", e);
        String msg = handleVmStartFailure(vmName, vm, "", e);
        return new Start2Answer(cmd, msg);
    } finally {
        // Record the final state; a VM that never got past Stopped is removed.
        synchronized (_vms) {
            if (state != State.Stopped) {
                _vms.put(vmName, state);
            } else {
                _vms.remove(vmName);
            }
        }
    }
}
/**
 * Acknowledges ModifySshKeysCommand without doing any work on this
 * hypervisor — presumably key distribution happens elsewhere (not
 * visible from this file).
 */
protected Answer execute(ModifySshKeysCommand cmd) {
    return new Answer(cmd);
}
/**
 * Pings another computing host from dom0 via the "pingtest" plugin.
 *
 * @return true when the plugin returned a non-empty result
 */
private boolean doPingTest(final String computingHostIp) {
    String result = callHostPlugin("vmops", "pingtest", "args", "-h " + computingHostIp);
    return result != null && !result.isEmpty();
}
/**
 * CheckOnHostCommand is not supported by this resource; always answers
 * with an explanatory message.
 */
protected CheckOnHostAnswer execute(CheckOnHostCommand cmd) {
    // Fix: corrected the misspelled "Not Implmeneted" message.
    return new CheckOnHostAnswer(cmd, null, "Not Implemented");
}
/**
 * Pings a user VM through its domR via the "pingtest" plugin.
 *
 * @return true when the plugin returned a non-empty result
 */
private boolean doPingTest(final String domRIp, final String vmIp) {
    String result = callHostPlugin("vmops", "pingtest", "args", "-i " + domRIp + " -p " + vmIp);
    return result != null && !result.isEmpty();
}
/**
 * Runs a ping test: directly against a computing host when one is given,
 * otherwise against a VM through its router.
 */
private Answer execute(PingTestCommand cmd) {
    final String computingHostIp = cmd.getComputingHostIp(); // TODO, split the command into 2 types
    boolean ok = (computingHostIp != null)
            ? doPingTest(computingHostIp)
            : doPingTest(cmd.getRouterIp(), cmd.getPrivateIp());
    if (!ok) {
        return new Answer(cmd, false, "PingTestCommand failed");
    }
    return new Answer(cmd);
}
/**
 * Prepares this host for maintenance.  A slave needs no special
 * handling; if this host is the pool master, another pool member is
 * promoted first (each candidate tried in turn).  A single-host pool
 * cannot hand over mastership and fails the command.
 */
protected MaintainAnswer execute(MaintainCommand cmd) {
    Connection conn = getConnection();
    try {
        Pool pool = Pool.getByUuid(conn, _host.pool);
        Pool.Record poolr = pool.getRecord(conn);
        Host.Record hostr = poolr.master.getRecord(conn);
        if (!_host.uuid.equals(hostr.uuid)) {
            s_logger.debug("Not the master node so just return ok: " + _host.ip);
            return new MaintainAnswer(cmd);
        }
        Map<Host, Host.Record> hostMap = Host.getAllRecords(conn);
        if (hostMap.size() == 1) {
            s_logger.debug("There's no one to take over as master");
            return new MaintainAnswer(cmd,false, "Only master in the pool");
        }
        Host newMaster = null;
        Host.Record newMasterRecord = null;
        // Try each other pool member as the new master until one succeeds.
        for (Map.Entry<Host, Host.Record> entry : hostMap.entrySet()) {
            if (!_host.uuid.equals(entry.getValue().uuid)) {
                newMaster = entry.getKey();
                newMasterRecord = entry.getValue();
                s_logger.debug("New master for the XenPool is " + newMasterRecord.uuid + " : " + newMasterRecord.address);
                try {
                    _connPool.switchMaster(_host.ip, _host.pool, conn, newMaster, _username, _password, _wait);
                    return new MaintainAnswer(cmd, "New Master is " + newMasterRecord.address);
                } catch (XenAPIException e) {
                    s_logger.warn("Unable to switch the new master to " + newMasterRecord.uuid + ": " + newMasterRecord.address + " Trying again...");
                } catch (XmlRpcException e) {
                    s_logger.warn("Unable to switch the new master to " + newMasterRecord.uuid + ": " + newMasterRecord.address + " Trying again...");
                }
            }
        }
        return new MaintainAnswer(cmd, false, "Unable to find an appropriate host to set as the new master");
    } catch (XenAPIException e) {
        s_logger.warn("Unable to put server in maintainence mode", e);
        return new MaintainAnswer(cmd, false, e.getMessage());
    } catch (XmlRpcException e) {
        s_logger.warn("Unable to put server in maintainence mode", e);
        return new MaintainAnswer(cmd, false, e.getMessage());
    }
}
/**
 * Acknowledges SetupCommand; no host-side setup work is performed here.
 */
protected SetupAnswer execute(SetupCommand cmd) {
    return new SetupAnswer(cmd);
}
/**
 * Starts a secondary-storage system VM on the private network, appending
 * zone/pod/local-gateway parameters to the boot args before delegating
 * to startSystemVM.  Returns a failure answer (never throws) on error.
 */
protected Answer execute(StartSecStorageVmCommand cmd) {
    final String vmName = cmd.getVmName();
    SecondaryStorageVmVO storage = cmd.getSecondaryStorageVmVO();
    try {
        Connection conn = getConnection();
        Network network = Network.getByUuid(conn, _host.privateNetwork);
        String bootArgs = cmd.getBootArgs();
        bootArgs += " zone=" + _dcId;
        bootArgs += " pod=" + _pod;
        bootArgs += " localgw=" + _localGateway;
        // startSystemVM returns null on success, an error message otherwise.
        String result = startSystemVM(vmName, storage.getVlanId(), network, cmd.getVolumes(), bootArgs, storage.getGuestMacAddress(), storage.getGuestIpAddress(), storage
                .getPrivateMacAddress(), storage.getPublicMacAddress(), cmd.getProxyCmdPort(), storage.getRamSize(), cmd.getGuestOSDescription(), cmd.getNetworkRateMbps());
        if (result == null) {
            return new StartSecStorageVmAnswer(cmd);
        }
        return new StartSecStorageVmAnswer(cmd, result);
    } catch (Exception e) {
        String msg = "Exception caught while starting router vm " + vmName + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new StartSecStorageVmAnswer(cmd, msg);
    }
}
/**
 * Adds (-A) or deletes (-D) a port-forwarding firewall rule on a domR
 * via the "setFirewallRule" host plugin.  Old private IP/port, when
 * present, are passed so the previous mapping can be replaced.
 */
protected Answer execute(final SetFirewallRuleCommand cmd) {
    StringBuilder args = new StringBuilder(cmd.isEnable() ? "-A" : "-D");
    args.append(" -P ").append(cmd.getProtocol().toLowerCase());
    args.append(" -l ").append(cmd.getPublicIpAddress());
    args.append(" -p ").append(cmd.getPublicPort());
    args.append(" -n ").append(cmd.getRouterName());
    args.append(" -i ").append(cmd.getRouterIpAddress());
    args.append(" -r ").append(cmd.getPrivateIpAddress());
    args.append(" -d ").append(cmd.getPrivatePort());
    args.append(" -N ").append(cmd.getVlanNetmask());
    String oldPrivateIP = cmd.getOldPrivateIP();
    if (oldPrivateIP != null) {
        args.append(" -w ").append(oldPrivateIP);
    }
    String oldPrivatePort = cmd.getOldPrivatePort();
    if (oldPrivatePort != null) {
        args.append(" -x ").append(oldPrivatePort);
    }
    String result = callHostPlugin("vmops", "setFirewallRule", "args", args.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "SetFirewallRule failed");
    }
    return new Answer(cmd);
}
/**
 * Pushes an HAProxy configuration to a router: writes the cfg lines to a
 * dom0 temp file named after the router IP, invokes the
 * "setLoadBalancerRule" host plugin with the file plus comma-separated
 * add/remove firewall rules, and removes the temp file afterwards.
 */
protected Answer execute(final LoadBalancerCfgCommand cmd) {
    String routerIp = cmd.getRouterIp();
    if (routerIp == null) {
        return new Answer(cmd);
    }
    String tmpCfgFilePath = "/tmp/" + cmd.getRouterIp().replace('.', '_') + ".cfg";
    // StringBuilder instead of repeated String += (was O(n^2)).
    StringBuilder tmpCfgFileContents = new StringBuilder();
    for (int i = 0; i < cmd.getConfig().length; i++) {
        tmpCfgFileContents.append(cmd.getConfig()[i]);
        tmpCfgFileContents.append("\n");
    }
    String result = callHostPlugin("vmops", "createFile", "filepath", tmpCfgFilePath, "filecontents", tmpCfgFileContents.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "LoadBalancerCfgCommand failed to create HA proxy cfg file.");
    }
    String[] addRules = cmd.getAddFwRules();
    String[] removeRules = cmd.getRemoveFwRules();
    StringBuilder args = new StringBuilder();
    args.append("-i ").append(routerIp);
    args.append(" -f ").append(tmpCfgFilePath);
    if (addRules.length > 0) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < addRules.length; i++) {
            sb.append(addRules[i]).append(',');
        }
        args.append(" -a ").append(sb);
    }
    if (removeRules.length > 0) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < removeRules.length; i++) {
            sb.append(removeRules[i]).append(',');
        }
        args.append(" -d ").append(sb);
    }
    result = callHostPlugin("vmops", "setLoadBalancerRule", "args", args.toString());
    // Fix: always remove the temp cfg file; the old code deleted it only on
    // success and leaked it in dom0 when setLoadBalancerRule failed.
    callHostPlugin("vmops", "deleteFile", "filepath", tmpCfgFilePath);
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "LoadBalancerCfgCommand failed");
    }
    return new Answer(cmd);
}
/**
 * Records a DHCP lease entry (IP, MAC, hostname) on a domR via the
 * "saveDhcpEntry" host plugin.  Synchronized so concurrent entries for
 * the same router do not interleave.
 */
protected synchronized Answer execute(final DhcpEntryCommand cmd) {
    StringBuilder args = new StringBuilder();
    args.append("-r ").append(cmd.getRouterPrivateIpAddress());
    args.append(" -v ").append(cmd.getVmIpAddress());
    args.append(" -m ").append(cmd.getVmMac());
    args.append(" -n ").append(cmd.getVmName());
    String result = callHostPlugin("vmops", "saveDhcpEntry", "args", args.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "DhcpEntry failed");
    }
    return new Answer(cmd);
}
/**
 * Creates (-c) or removes (-d) a remote-access L2TP VPN on a router via
 * the "lt2p_vpn" host plugin.  Synchronized to serialize VPN changes.
 */
protected synchronized Answer execute(final RemoteAccessVpnCfgCommand cmd) {
    StringBuilder args = new StringBuilder(cmd.getRouterPrivateIpAddress());
    if (cmd.isCreate()) {
        args.append(" -r ").append(cmd.getIpRange());
        args.append(" -p ").append(cmd.getPresharedKey());
        args.append(" -s ").append(cmd.getVpnServerIp());
        args.append(" -l ").append(cmd.getLocalIp());
        args.append(" -c");
    } else {
        args.append(" -d");
    }
    String result = callHostPlugin("vmops", "lt2p_vpn", "args", args.toString());
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "Configure VPN failed");
    }
    return new Answer(cmd);
}
/**
 * Pushes VM metadata (folder/file/content triples) to a VM through its
 * router via the "vm_data" host plugin.  Null content is sent as the
 * literal "none".
 */
protected Answer execute(final VmDataCommand cmd) {
    List<String[]> vmData = cmd.getVmData();
    // Layout expected by the plugin: routerIP, <ip>, vmIP, <ip>, then
    // alternating "folder,file" / contents pairs.
    String[] pluginArgs = new String[vmData.size() * 2 + 4];
    pluginArgs[0] = "routerIP";
    pluginArgs[1] = cmd.getRouterPrivateIpAddress();
    pluginArgs[2] = "vmIP";
    pluginArgs[3] = cmd.getVmIpAddress();
    int idx = 4;
    for (String[] entry : vmData) {
        String contents = (entry[2] != null) ? entry[2] : "none";
        pluginArgs[idx++] = entry[0] + "," + entry[1];
        pluginArgs[idx++] = contents;
    }
    String result = callHostPlugin("vmops", "vm_data", pluginArgs);
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "vm_data failed");
    }
    return new Answer(cmd);
}
/**
 * Forwards a VM's password to its router via the "savePassword" host
 * plugin (save_password_to_domr.sh on the router side).
 *
 * NOTE(review): the cleartext password travels on the plugin argument
 * line — confirm dom0 plugin invocations are not logged anywhere.
 */
protected Answer execute(final SavePasswordCommand cmd) {
    final String password = cmd.getPassword();
    final String routerPrivateIPAddress = cmd.getRouterPrivateIpAddress();
    final String vmName = cmd.getVmName();
    final String vmIpAddress = cmd.getVmIpAddress();
    final String local = vmName;
    // Run save_password_to_domr.sh
    String args = "-r " + routerPrivateIPAddress;
    args += " -v " + vmIpAddress;
    args += " -p " + password;
    args += " " + local;
    String result = callHostPlugin("vmops", "savePassword", "args", args);
    if (result == null || result.isEmpty()) {
        return new Answer(cmd, false, "savePassword failed");
    }
    return new Answer(cmd);
}
/**
 * Associates or disassociates a public IP address on the domR (router) VM,
 * creating or removing the VLAN-specific VIF as needed, then applies the
 * change inside the router through the Xen "ipassoc" plugin.
 *
 * @param vmName           name label of the router VM
 * @param privateIpAddress router private IP; used for the plugin call and
 *                         network-usage accounting
 * @param publicIpAddress  public IP being (dis)associated
 * @param add              true to associate, false to disassociate
 * @param firstIP          on disassociate, true triggers removal of the VIF
 * @param sourceNat        true sends the address with its CIDR size (-f)
 * @param vlanId           VLAN tag used to pick the matching router VIF
 * @param vlanGateway      gateway passed to the plugin (-g)
 * @param vlanNetmask      netmask used to compute the source-NAT CIDR size
 * @param vifMacAddress    MAC to use when a new VIF must be created
 * @param guestIp          optional guest IP forwarded to the plugin (-G)
 * @throws InternalErrorException on any Xen API or plugin failure
 */
protected void assignPublicIpAddress(final String vmName, final String privateIpAddress, final String publicIpAddress, final boolean add, final boolean firstIP,
        final boolean sourceNat, final String vlanId, final String vlanGateway, final String vlanNetmask, final String vifMacAddress, String guestIp) throws InternalErrorException {
    try {
        Connection conn = getConnection();
        VM router = getVM(conn, vmName);
        // Determine the correct VIF on DomR to associate/disassociate the
        // IP address with
        VIF correctVif = getCorrectVif(router, vlanId);
        // If we are associating an IP address and DomR doesn't have a VIF
        // for the specified vlan ID, we need to add a VIF
        // If we are disassociating the last IP address in the VLAN, we need
        // to remove a VIF
        boolean addVif = false;
        boolean removeVif = false;
        if (add && correctVif == null) {
            addVif = true;
        } else if (!add && firstIP) {
            removeVif = true;
        }
        if (addVif) {
            // Add a new VIF to DomR
            String vifDeviceNum = getLowestAvailableVIFDeviceNum(router);
            if (vifDeviceNum == null) {
                throw new InternalErrorException("There were no more available slots for a new VIF on router: " + router.getNameLabel(conn));
            }
            correctVif = createVIF(conn, router, vifMacAddress, vlanId, 0, vifDeviceNum, true);
            correctVif.plug(conn);
            // Add iptables rule for network usage
            networkUsage(privateIpAddress, "addVif", "eth" + correctVif.getDevice(conn));
        }
        if (correctVif == null) {
            throw new InternalErrorException("Failed to find DomR VIF to associate/disassociate IP with.");
        }
        // Plugin mode flag: -A associates, -D disassociates.
        String args = null;
        if (add) {
            args = "-A";
        } else {
            args = "-D";
        }
        if (sourceNat) {
            // Source-NAT addresses carry their CIDR size (-f -l ip/size).
            args += " -f";
            long cidrSize = NetUtils.getCidrSize(vlanNetmask);
            args += " -l ";
            args += publicIpAddress + "/" + Long.toString(cidrSize);
        } else {
            args += " -l ";
            args += publicIpAddress;
        }
        args += " -i ";
        args += privateIpAddress;
        args += " -c ";
        args += "eth" + correctVif.getDevice(conn);
        args += " -g ";
        args += vlanGateway;
        if(guestIp!=null){
            args += " -G ";
            args += guestIp;
        }
        String result = callHostPlugin("vmops", "ipassoc", "args", args);
        if (result == null || result.isEmpty()) {
            throw new InternalErrorException("Xen plugin \"ipassoc\" failed.");
        }
        // VIF removal happens only after the plugin call succeeded.
        if (removeVif) {
            Network network = correctVif.getNetwork(conn);
            // Mark this vif to be removed from network usage
            networkUsage(privateIpAddress, "deleteVif", "eth" + correctVif.getDevice(conn));
            // Remove the VIF from DomR
            correctVif.unplug(conn);
            correctVif.destroy(conn);
            // Disable the VLAN network if necessary
            disableVlanNetwork(network);
        }
    } catch (XenAPIException e) {
        String msg = "Unable to assign public IP address due to " + e.toString();
        s_logger.warn(msg, e);
        throw new InternalErrorException(msg);
    } catch (final XmlRpcException e) {
        String msg = "Unable to assign public IP address due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new InternalErrorException(msg);
    }
}
/**
 * Stub network-usage hook: answers only the "get" option, with a zeroed
 * "sent:received" counter pair; every other option yields null.
 */
protected String networkUsage(final String privateIpAddress, final String option, final String vif) {
    return option.equals("get") ? "0:0" : null;
}
/**
 * Applies an IP association/disassociation on the router, delegating all
 * the work to {@link #assignPublicIpAddress}.
 */
protected Answer execute(final IPAssocCommand cmd) {
    try {
        assignPublicIpAddress(cmd.getRouterName(), cmd.getRouterIp(), cmd.getPublicIp(),
                cmd.isAdd(), cmd.isFirstIP(), cmd.isSourceNat(), cmd.getVlanId(),
                cmd.getVlanGateway(), cmd.getVlanNetmask(), cmd.getVifMacAddress(), cmd.getGuestIp());
        return new Answer(cmd);
    } catch (InternalErrorException e) {
        return new Answer(cmd, false, e.getMessage());
    }
}
/**
 * Looks up the VNC port of the named VM; a failure is reported inside the
 * answer rather than thrown.
 */
protected GetVncPortAnswer execute(GetVncPortCommand cmd) {
    final Connection conn = getConnection();
    try {
        final Set<VM> vms = VM.getByNameLabel(conn, cmd.getName());
        final VM vm = vms.iterator().next();
        return new GetVncPortAnswer(cmd, getVncPort(vm));
    } catch (XenAPIException e) {
        s_logger.warn("Unable to get vnc port " + e.toString(), e);
        return new GetVncPortAnswer(cmd, e.toString());
    } catch (Exception e) {
        s_logger.warn("Unable to get vnc port ", e);
        return new GetVncPortAnswer(cmd, e.getMessage());
    }
}
/** This resource always reports pool-backed storage. */
protected Storage.StorageResourceType getStorageResourceType() {
    return Storage.StorageResourceType.STORAGE_POOL;
}
/** Health is simply whether the XenServer answers a ping. */
protected CheckHealthAnswer execute(CheckHealthCommand cmd) {
    return new CheckHealthAnswer(cmd, pingxenserver());
}
/**
 * Fetches the router's network usage counters.
 *
 * The "get" usage result is a colon-separated list of alternating values;
 * they are parsed with {@code Long.parseLong} (the original
 * {@code new Long(...).longValue()} did a pointless box/unbox round trip)
 * and summed pairwise.
 *
 * @param privateIP private address of the router to query
 * @return two-element array: [0] = total of the first value of each pair,
 *         [1] = total of the second; both zero when no data is available
 */
protected long[] getNetworkStats(String privateIP) {
    final String result = networkUsage(privateIP, "get", null);
    final long[] stats = new long[2];
    if (result != null) {
        final String[] splitResult = result.split(":");
        int i = 0;
        while (i < splitResult.length - 1) {
            stats[0] += Long.parseLong(splitResult[i++]);
            stats[1] += Long.parseLong(splitResult[i++]);
        }
    }
    return stats;
}
/**
 * This is the method called for getting the HOST stats.
 *
 * @param cmd the host-stats request
 * @return answer holding the collected stats; carries null on failure
 */
protected GetHostStatsAnswer execute(GetHostStatsCommand cmd) {
    try {
        final HostStatsEntry entry = getHostStats(cmd, cmd.getHostGuid(), cmd.getHostId());
        return new GetHostStatsAnswer(cmd, entry);
    } catch (Exception e) {
        s_logger.warn("Unable to get Host stats" + e.toString(), e);
        return new GetHostStatsAnswer(cmd, null);
    }
}
/**
 * Builds a host-level stats entry from the host RRD data.
 *
 * Each RRD legend column is "cf:type:uuid:param"; only columns whose type
 * is "host" are consumed, and the param substring selects which metric the
 * column feeds. CPU columns are counted and their averages summed, then
 * normalized to a per-cpu utilization at the end.
 *
 * @param hostGuid unused here; kept for the caller's signature
 * @return populated stats entry, or null when no RRD data is available
 */
protected HostStatsEntry getHostStats(GetHostStatsCommand cmd, String hostGuid, long hostId) {
    HostStatsEntry hostStats = new HostStatsEntry(hostId, 0, 0, 0, 0, "host", 0, 0, 0, 0);
    Object[] rrdData = getRRDData(1); // call rrd method with 1 for host
    if (rrdData == null) {
        return null;
    }
    // Unpack getRRDData's tuple: {rows, columns, legend node, data node}.
    Integer numRows = (Integer) rrdData[0];
    Integer numColumns = (Integer) rrdData[1];
    Node legend = (Node) rrdData[2];
    Node dataNode = (Node) rrdData[3];
    NodeList legendChildren = legend.getChildNodes();
    for (int col = 0; col < numColumns; col++) {
        if (legendChildren == null || legendChildren.item(col) == null) {
            continue;
        }
        String columnMetadata = getXMLNodeValue(legendChildren.item(col));
        if (columnMetadata == null) {
            continue;
        }
        // Legend format: four colon-separated fields; skip malformed entries.
        String[] columnMetadataList = columnMetadata.split(":");
        if (columnMetadataList.length != 4) {
            continue;
        }
        String type = columnMetadataList[1];
        String param = columnMetadataList[3];
        if (type.equalsIgnoreCase("host")) {
            if (param.contains("pif_eth0_rx")) {
                hostStats.setNetworkReadKBs(getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("pif_eth0_tx")) {
                hostStats.setNetworkWriteKBs(getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("memory_total_kib")) {
                hostStats.setTotalMemoryKBs(getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("memory_free_kib")) {
                hostStats.setFreeMemoryKBs(getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("cpu")) {
                // One column per cpu: count them and accumulate utilization.
                hostStats.setNumCpus(hostStats.getNumCpus() + 1);
                hostStats.setCpuUtilization(hostStats.getCpuUtilization() + getDataAverage(dataNode, col, numRows));
            }
            if (param.contains("loadavg")) {
                hostStats.setAverageLoad((hostStats.getAverageLoad() + getDataAverage(dataNode, col, numRows)));
            }
        }
    }
    // add the host cpu utilization
    if (hostStats.getNumCpus() != 0) {
        hostStats.setCpuUtilization(hostStats.getCpuUtilization() / hostStats.getNumCpus());
        s_logger.debug("Host cpu utilization " + hostStats.getCpuUtilization());
    }
    return hostStats;
}
/**
 * Collects per-VM stats for the requested VM names.
 *
 * The RRD data is keyed by VM UUID, so the names are first resolved to
 * UUIDs, stats are gathered per UUID, and the result map is re-keyed back
 * to the original names before answering. Any failure yields an answer
 * with whatever (possibly empty) name map was built.
 */
protected GetVmStatsAnswer execute(GetVmStatsCommand cmd) {
    List<String> vmNames = cmd.getVmNames();
    HashMap<String, VmStatsEntry> vmStatsNameMap = new HashMap<String, VmStatsEntry>();
    if( vmNames.size() == 0 ) {
        return new GetVmStatsAnswer(cmd, vmStatsNameMap);
    }
    Connection conn = getConnection();
    try {
        // Determine the UUIDs of the requested VMs
        List<String> vmUUIDs = new ArrayList<String>();
        for (String vmName : vmNames) {
            VM vm = getVM(conn, vmName);
            vmUUIDs.add(vm.getUuid(conn));
        }
        HashMap<String, VmStatsEntry> vmStatsUUIDMap = getVmStats(cmd, vmUUIDs, cmd.getHostGuid());
        if( vmStatsUUIDMap == null )
            return new GetVmStatsAnswer(cmd, vmStatsNameMap);
        // Re-key the stats from UUID back to VM name (indexes are parallel).
        for (String vmUUID : vmStatsUUIDMap.keySet()) {
            vmStatsNameMap.put(vmNames.get(vmUUIDs.indexOf(vmUUID)), vmStatsUUIDMap.get(vmUUID));
        }
        return new GetVmStatsAnswer(cmd, vmStatsNameMap);
    } catch (XenAPIException e) {
        String msg = "Unable to get VM stats" + e.toString();
        s_logger.warn(msg, e);
        return new GetVmStatsAnswer(cmd, vmStatsNameMap);
    } catch (XmlRpcException e) {
        String msg = "Unable to get VM stats" + e.getMessage();
        s_logger.warn(msg, e);
        return new GetVmStatsAnswer(cmd, vmStatsNameMap);
    }
}
/**
 * Builds per-VM stats from the VM-level RRD data.
 *
 * A zeroed entry is pre-created for every requested UUID so the caller
 * always gets a complete map. Legend columns are "cf:type:uuid:param";
 * only type "vm" columns whose uuid is in the request are consumed.
 *
 * @return map keyed by VM UUID, or null when no RRD data is available
 */
protected HashMap<String, VmStatsEntry> getVmStats(GetVmStatsCommand cmd, List<String> vmUUIDs, String hostGuid) {
    HashMap<String, VmStatsEntry> vmResponseMap = new HashMap<String, VmStatsEntry>();
    for (String vmUUID : vmUUIDs) {
        vmResponseMap.put(vmUUID, new VmStatsEntry(0, 0, 0, 0, "vm"));
    }
    Object[] rrdData = getRRDData(2); // call rrddata with 2 for vm
    if (rrdData == null) {
        return null;
    }
    // Unpack getRRDData's tuple: {rows, columns, legend node, data node}.
    Integer numRows = (Integer) rrdData[0];
    Integer numColumns = (Integer) rrdData[1];
    Node legend = (Node) rrdData[2];
    Node dataNode = (Node) rrdData[3];
    NodeList legendChildren = legend.getChildNodes();
    for (int col = 0; col < numColumns; col++) {
        if (legendChildren == null || legendChildren.item(col) == null) {
            continue;
        }
        String columnMetadata = getXMLNodeValue(legendChildren.item(col));
        if (columnMetadata == null) {
            continue;
        }
        String[] columnMetadataList = columnMetadata.split(":");
        if (columnMetadataList.length != 4) {
            continue;
        }
        String type = columnMetadataList[1];
        String uuid = columnMetadataList[2];
        String param = columnMetadataList[3];
        if (type.equals("vm") && vmResponseMap.keySet().contains(uuid)) {
            VmStatsEntry vmStatsAnswer = vmResponseMap.get(uuid);
            vmStatsAnswer.setEntityType("vm");
            if (param.contains("cpu")) {
                // NOTE(review): the running total is rescaled by 100 on every
                // cpu column, so with more than one vCPU earlier sums get
                // multiplied repeatedly — confirm this is intended.
                vmStatsAnswer.setNumCPUs(vmStatsAnswer.getNumCPUs() + 1);
                vmStatsAnswer.setCPUUtilization((vmStatsAnswer.getCPUUtilization() + getDataAverage(dataNode, col, numRows))*100);
            } else if (param.equals("vif_0_rx")) {
                // Convert from bits per sample window to KBs (divide by 8*2).
                vmStatsAnswer.setNetworkReadKBs(getDataAverage(dataNode, col, numRows)/(8*2));
            } else if (param.equals("vif_0_tx")) {
                vmStatsAnswer.setNetworkWriteKBs(getDataAverage(dataNode, col, numRows)/(8*2));
            }
        }
    }
    // Normalize accumulated cpu utilization to a per-cpu figure.
    for (String vmUUID : vmResponseMap.keySet()) {
        VmStatsEntry vmStatsAnswer = vmResponseMap.get(vmUUID);
        if (vmStatsAnswer.getNumCPUs() != 0) {
            vmStatsAnswer.setCPUUtilization(vmStatsAnswer.getCPUUtilization() / vmStatsAnswer.getNumCPUs());
            s_logger.debug("Vm cpu utilization " + vmStatsAnswer.getCPUUtilization());
        }
    }
    return vmResponseMap;
}
/**
 * Fetches and parses the raw RRD XML from the host plugin.
 *
 * @param flag 1 for host-level stats, 2 for vm-level stats (any other
 *             value leaves the stats string empty and parsing fails)
 * @return {Integer numRows, Integer numColumns, Node legend, Node data},
 *         or null when fetching or parsing fails
 */
protected Object[] getRRDData(int flag) {
    /*
     * Note: 1 => called from host, hence host stats 2 => called from vm, hence vm stats
     */
    String stats = "";
    try {
        if (flag == 1)
            stats = getHostStatsRawXML();
        if (flag == 2)
            stats = getVmStatsRawXML();
    } catch (Exception e1) {
        s_logger.warn("Error whilst collecting raw stats from plugin:" + e1);
        return null;
    }
    // s_logger.debug("The raw xml stream is:"+stats);
    // s_logger.debug("Length of raw xml is:"+stats.length());
    //stats are null when the host plugin call fails (host down state)
    if(stats == null)
        return null;
    StringReader statsReader = new StringReader(stats);
    InputSource statsSource = new InputSource(statsReader);
    Document doc = null;
    try {
        doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(statsSource);
    } catch (Exception e) {
        s_logger.warn("Exception caught whilst processing the document via document factory:"+e);
        return null;
    }
    if(doc==null){
        s_logger.warn("Null document found after tryinh to parse the stats source");
        return null;
    }
    // Document layout: root -> {meta, data}; meta holds rows/columns/legend.
    NodeList firstLevelChildren = doc.getChildNodes();
    NodeList secondLevelChildren = (firstLevelChildren.item(0)).getChildNodes();
    Node metaNode = secondLevelChildren.item(0);
    Node dataNode = secondLevelChildren.item(1);
    Integer numRows = 0;
    Integer numColumns = 0;
    Node legend = null;
    NodeList metaNodeChildren = metaNode.getChildNodes();
    for (int i = 0; i < metaNodeChildren.getLength(); i++) {
        Node n = metaNodeChildren.item(i);
        if (n.getNodeName().equals("rows")) {
            numRows = Integer.valueOf(getXMLNodeValue(n));
        } else if (n.getNodeName().equals("columns")) {
            numColumns = Integer.valueOf(getXMLNodeValue(n));
        } else if (n.getNodeName().equals("legend")) {
            legend = n;
        }
    }
    return new Object[] { numRows, numColumns, legend, dataNode };
}
/**
 * Returns the node value of {@code n}'s first child (i.e. the text content
 * for a simple text-bearing element). Assumes such a child exists.
 */
protected String getXMLNodeValue(Node n) {
    final Node firstChild = n.getChildNodes().item(0);
    return firstChild.getNodeValue();
}
/**
 * Averages one column of the RRD data node over the last {@code numRows}
 * samples, skipping NaN samples.
 *
 * The original code carried an unreachable infinity/NaN check in the
 * zero-sample branch: {@code value} is only incremented together with the
 * sample counter, so with zero samples it is necessarily 0 and both arms
 * returned 0. That dead code is removed; the observable behavior — 0 for
 * no samples or a non-finite average (with a warning), otherwise the mean
 * of the finite samples — is unchanged.
 *
 * @param dataNode <data> element whose children are rows, newest first
 * @param col      zero-based data column; +1 skips the timestamp column
 * @param numRows  number of rows to fold into the average
 * @return mean of the non-NaN samples, or 0 when there are none or the
 *         mean is not finite
 */
protected double getDataAverage(Node dataNode, int col, int numRows) {
    double sum = 0;
    int samples = 0;
    for (int row = 0; row < numRows; row++) {
        // Rows are stored newest-first, so walk them from the oldest end.
        Node data = dataNode.getChildNodes().item(numRows - 1 - row).getChildNodes().item(col + 1);
        double v = Double.parseDouble(getXMLNodeValue(data));
        if (!Double.isNaN(v)) {
            samples++;
            sum += v;
        }
    }
    if (samples == 0) {
        // No usable samples; sum is necessarily 0 here.
        return 0;
    }
    double avg = sum / samples;
    if (Double.isInfinite(avg) || Double.isNaN(avg)) {
        s_logger.warn("Found an invalid value (infinity/NaN) in getDataAverage(), numRows>0");
        return 0;
    }
    return avg;
}
/**
 * Fetches raw host-level RRD XML via the "gethostvmstats" plugin, covering
 * roughly the last 1000 seconds.
 *
 * The original wrapped the literal {@code "true"} in
 * {@code String.valueOf(...)}, a no-op on a String; the literal is passed
 * directly now.
 */
protected String getHostStatsRawXML() {
    // Window start: ~1000 seconds ago, expressed in epoch seconds.
    final String startTime = String.valueOf(new Date().getTime() / 1000 - 1000);
    return callHostPlugin("vmops", "gethostvmstats",
            "collectHostStats", "true",
            "consolidationFunction", _consolidationFunction,
            "interval", String.valueOf(_pollingIntervalInSeconds),
            "startTime", startTime);
}
/**
 * Fetches raw vm-level RRD XML via the "gethostvmstats" plugin (host stats
 * collection disabled), covering roughly the last 1000 seconds.
 *
 * The original wrapped the literal {@code "false"} in
 * {@code String.valueOf(...)}, a no-op on a String; the literal is passed
 * directly now.
 */
protected String getVmStatsRawXML() {
    // Window start: ~1000 seconds ago, expressed in epoch seconds.
    final String startTime = String.valueOf(new Date().getTime() / 1000 - 1000);
    return callHostPlugin("vmops", "gethostvmstats",
            "collectHostStats", "false",
            "consolidationFunction", _consolidationFunction,
            "interval", String.valueOf(_pollingIntervalInSeconds),
            "startTime", startTime);
}
/**
 * Builds a "[domId] message" warning string for the given VM.
 *
 * NOTE(review): the assembled message is never logged, stored, or returned
 * by this method as written, and the three catch blocks deliberately
 * swallow failures from the domid lookup (falling back to -1) — confirm
 * whether an emit step is missing here.
 *
 * @param vm      VM whose domain id prefixes the message
 * @param message warning text to append
 * @param e       associated throwable; currently unused
 */
protected void recordWarning(final VM vm, final String message, final Throwable e) {
    Connection conn = getConnection();
    final StringBuilder msg = new StringBuilder();
    try {
        final Long domId = vm.getDomid(conn);
        msg.append("[").append(domId != null ? domId : -1l).append("] ");
    } catch (final BadServerResponse e1) {
    } catch (final XmlRpcException e1) {
    } catch (XenAPIException e1) {
    }
    msg.append(message);
}
/** Maps a Xen power state to a cloud State, defaulting to Unknown. */
protected State convertToState(Types.VmPowerState ps) {
    State mapped = s_statesTable.get(ps);
    if (mapped == null) {
        return State.Unknown;
    }
    return mapped;
}
/**
 * Builds a name-label -> State map of all guest VMs resident on this host.
 *
 * The resident-VM lookup and each per-VM record fetch are retried once
 * after a one-second pause. Control domains, snapshots and templates are
 * filtered out; VMs whose record could not be fetched are skipped.
 *
 * @return the state map, or null when the resident-VM list itself could
 *         not be obtained
 */
protected HashMap<String, State> getAllVms() {
    final HashMap<String, State> vmStates = new HashMap<String, State>();
    Connection conn = getConnection();
    Set<VM> vms = null;
    // Up to two attempts to list resident VMs, with a 1s pause between.
    for (int i = 0; i < 2; i++) {
        try {
            Host host = Host.getByUuid(conn, _host.uuid);
            vms = host.getResidentVMs(conn);
            break;
        } catch (final Throwable e) {
            s_logger.warn("Unable to get vms", e);
        }
        try {
            Thread.sleep(1000);
        } catch (final InterruptedException ex) {
        }
    }
    if (vms == null) {
        return null;
    }
    for (VM vm : vms) {
        VM.Record record = null;
        // Up to two attempts per VM record, with a 1s pause between.
        for (int i = 0; i < 2; i++) {
            try {
                record = vm.getRecord(conn);
                break;
            } catch (XenAPIException e1) {
                s_logger.debug("VM.getRecord failed on host:" + _host.uuid + " due to " + e1.toString());
            } catch (XmlRpcException e1) {
                s_logger.debug("VM.getRecord failed on host:" + _host.uuid + " due to " + e1.getMessage());
            }
            try {
                Thread.sleep(1000);
            } catch (final InterruptedException ex) {
            }
        }
        if (record == null) {
            continue;
        }
        if (record.isControlDomain || record.isASnapshot || record.isATemplate) {
            continue; // Skip DOM0
        }
        VmPowerState ps = record.powerState;
        final State state = convertToState(ps);
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("VM " + record.nameLabel + ": powerstate = " + ps + "; vm state=" + state.toString());
        }
        vmStates.put(record.nameLabel, state);
    }
    return vmStates;
}
/**
 * Resolves the State of the named VM, retrying up to three times around a
 * known XenServer race on recently-deleted VMs.
 *
 * @param vmName name label of the VM
 * @return the converted power state of the first matching VM, or Stopped
 *         when the VM cannot be found or all attempts fail
 */
protected State getVmState(final String vmName) {
    Connection conn = getConnection();
    int retry = 3;
    while (retry-- > 0) {
        try {
            Set<VM> vms = VM.getByNameLabel(conn, vmName);
            // Only the first match matters; the loop returns immediately.
            for (final VM vm : vms) {
                return convertToState(vm.getPowerState(conn));
            }
        } catch (final BadServerResponse e) {
            // There is a race condition within xen such that if a vm is
            // deleted and we
            // happen to ask for it, it throws this stupid response. So
            // if this happens,
            // we take a nap and try again which then avoids the race
            // condition because
            // the vm's information is now cleaned up by xen. The error
            // is as follows
            // com.xensource.xenapi.Types$BadServerResponse
            // [HANDLE_INVALID, VM,
            // 3dde93f9-c1df-55a7-2cde-55e1dce431ab]
            s_logger.info("Unable to get a vm PowerState due to " + e.toString() + ". We are retrying. Count: " + retry);
            try {
                Thread.sleep(3000);
            } catch (final InterruptedException ex) {
            }
        } catch (XenAPIException e) {
            // Non-retryable API failure: give up and report Stopped.
            String msg = "Unable to get a vm PowerState due to " + e.toString();
            s_logger.warn(msg, e);
            break;
        } catch (final XmlRpcException e) {
            String msg = "Unable to get a vm PowerState due to " + e.getMessage();
            s_logger.warn(msg, e);
            break;
        }
    }
    return State.Stopped;
}
/**
 * Reports the current state of a VM; a Running state is also recorded in
 * the local state map. The VNC port is not resolved here and stays null.
 */
protected CheckVirtualMachineAnswer execute(final CheckVirtualMachineCommand cmd) {
    final String vmName = cmd.getVmName();
    final State state = getVmState(vmName);
    final Integer vncPort = null;
    if (State.Running == state) {
        synchronized (_vms) {
            _vms.put(vmName, State.Running);
        }
    }
    return new CheckVirtualMachineAnswer(cmd, state, vncPort);
}
/**
 * Prepares this host to receive a migrating VM: shares the VM's ISO SR
 * onto this host if needed, ensures each VLAN network the VM's VIFs use is
 * enabled locally, and marks the VM as Migrating in the state map.
 *
 * Migration is refused when the connection sees only one host (no pool).
 */
protected PrepareForMigrationAnswer execute(final PrepareForMigrationCommand cmd) {
    /*
     *
     * String result = null;
     *
     * List<VolumeVO> vols = cmd.getVolumes(); result = mountwithoutvdi(vols, cmd.getMappings()); if (result !=
     * null) { return new PrepareForMigrationAnswer(cmd, false, result); }
     */
    final String vmName = cmd.getVmName();
    try {
        Connection conn = getConnection();
        Set<Host> hosts = Host.getAll(conn);
        // workaround before implementing xenserver pool
        // no migration
        if (hosts.size() <= 1) {
            return new PrepareForMigrationAnswer(cmd, false, "not in a same xenserver pool");
        }
        // if the vm have CD
        // 1. make iosSR shared
        // 2. create pbd in target xenserver
        SR sr = getISOSRbyVmName(cmd.getVmName());
        if (sr != null) {
            Set<PBD> pbds = sr.getPBDs(conn);
            boolean found = false;
            // Does this host already have a PBD for the ISO SR?
            for (PBD pbd : pbds) {
                if (Host.getByUuid(conn, _host.uuid).equals(pbd.getHost(conn))) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                // Clone an existing PBD's device config onto this host.
                sr.setShared(conn, true);
                PBD pbd = pbds.iterator().next();
                PBD.Record pbdr = new PBD.Record();
                pbdr.deviceConfig = pbd.getDeviceConfig(conn);
                pbdr.host = Host.getByUuid(conn, _host.uuid);
                pbdr.SR = sr;
                PBD newpbd = PBD.create(conn, pbdr);
                newpbd.plug(conn);
            }
        }
        Set<VM> vms = VM.getByNameLabel(conn, vmName);
        if (vms.size() != 1) {
            String msg = "There are " + vms.size() + " " + vmName;
            s_logger.warn(msg);
            return new PrepareForMigrationAnswer(cmd, false, msg);
        }
        VM vm = vms.iterator().next();
        // check network
        Set<VIF> vifs = vm.getVIFs(conn);
        for (VIF vif : vifs) {
            Network network = vif.getNetwork(conn);
            Set<PIF> pifs = network.getPIFs(conn);
            long vlan = -1;
            PIF npif = null;
            // Find the tagged PIF backing this network's VLAN, if any.
            for (PIF pif : pifs) {
                try {
                    vlan = pif.getVLAN(conn);
                    if (vlan != -1 ) {
                        VLAN vland = pif.getVLANMasterOf(conn);
                        npif = vland.getTaggedPIF(conn);
                    }
                    break;
                }catch (Exception e) {
                    continue;
                }
            }
            if (npif == null) {
                continue;
            }
            // Map the tagged PIF's network to our private/public PIF uuid;
            // skip networks that are neither.
            network = npif.getNetwork(conn);
            String nwuuid = network.getUuid(conn);
            String pifuuid = null;
            if(nwuuid.equalsIgnoreCase(_host.privateNetwork)) {
                pifuuid = _host.privatePif;
            } else if(nwuuid.equalsIgnoreCase(_host.publicNetwork)) {
                pifuuid = _host.publicPif;
            } else {
                continue;
            }
            Network vlanNetwork = enableVlanNetwork(vlan, pifuuid);
            if (vlanNetwork == null) {
                throw new InternalErrorException("Failed to enable VLAN network with tag: " + vlan);
            }
        }
        synchronized (_vms) {
            _vms.put(cmd.getVmName(), State.Migrating);
        }
        return new PrepareForMigrationAnswer(cmd, true, null);
    } catch (Exception e) {
        String msg = "catch exception " + e.getMessage();
        s_logger.warn(msg, e);
        return new PrepareForMigrationAnswer(cmd, false, msg);
    }
}
/**
 * Ensures a template VDI exists on the primary storage pool, copying it
 * from secondary storage when absent.
 *
 * When a copy is needed, the secondary-storage NFS SR is mounted
 * temporarily (always removed in the finally block), the template VDI is
 * copied into the pool, snapshotted, and the copy destroyed — the snapshot
 * becomes the pool-resident template.
 *
 * @return DownloadAnswer with the template VDI uuid and physical size, or
 *         a DOWNLOAD_ERROR answer on any failure
 */
@Override
public DownloadAnswer execute(final PrimaryStorageDownloadCommand cmd) {
    SR tmpltsr = null;
    String tmplturl = cmd.getUrl();
    // Split the URL into NFS mountpoint and (uuid-like) template name.
    int index = tmplturl.lastIndexOf("/");
    String mountpoint = tmplturl.substring(0, index);
    String tmpltname = null;
    if (index < tmplturl.length() - 1)
        tmpltname = tmplturl.substring(index + 1).replace(".vhd", "");
    try {
        Connection conn = getConnection();
        String pUuid = cmd.getPoolUuid();
        SR poolsr = null;
        // The pool SR is located by name label, which must be unique.
        Set<SR> srs = SR.getByNameLabel(conn, pUuid);
        if (srs.size() != 1) {
            String msg = "There are " + srs.size() + " SRs with same name: " + pUuid;
            s_logger.warn(msg);
            return new DownloadAnswer(null, 0, msg, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR, "", "", 0);
        } else {
            poolsr = srs.iterator().next();
        }
        /* Does the template exist in primary storage pool? If yes, no copy */
        VDI vmtmpltvdi = null;
        VDI snapshotvdi = null;
        Set<VDI> vdis = VDI.getByNameLabel(conn, "Template " + cmd.getName());
        for (VDI vdi : vdis) {
            VDI.Record vdir = vdi.getRecord(conn);
            if (vdir.SR.equals(poolsr)) {
                vmtmpltvdi = vdi;
                break;
            }
        }
        String uuid;
        if (vmtmpltvdi == null) {
            // Not in the pool yet: mount secondary storage and copy it in.
            tmpltsr = createNfsSRbyURI(new URI(mountpoint), false);
            tmpltsr.scan(conn);
            VDI tmpltvdi = null;
            if (tmpltname != null) {
                tmpltvdi = getVDIbyUuid(tmpltname);
            }
            if (tmpltvdi == null) {
                // Fall back to the first VDI found on the secondary SR.
                vdis = tmpltsr.getVDIs(conn);
                for (VDI vdi : vdis) {
                    tmpltvdi = vdi;
                    break;
                }
            }
            if (tmpltvdi == null) {
                String msg = "Unable to find template vdi on secondary storage" + "host:" + _host.uuid + "pool: " + tmplturl;
                s_logger.warn(msg);
                return new DownloadAnswer(null, 0, msg, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR, "", "", 0);
            }
            // Copy into the pool, then keep a snapshot and drop the copy.
            vmtmpltvdi = cloudVDIcopy(tmpltvdi, poolsr);
            snapshotvdi = vmtmpltvdi.snapshot(conn, new HashMap<String, String>());
            vmtmpltvdi.destroy(conn);
            snapshotvdi.setNameLabel(conn, "Template " + cmd.getName());
            // vmtmpltvdi.setNameDescription(conn, cmd.getDescription());
            uuid = snapshotvdi.getUuid(conn);
            vmtmpltvdi = snapshotvdi;
        } else
            uuid = vmtmpltvdi.getUuid(conn);
        // Determine the size of the template
        long phySize = vmtmpltvdi.getPhysicalUtilisation(conn);
        DownloadAnswer answer = new DownloadAnswer(null, 100, cmd, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOADED, uuid, uuid);
        answer.setTemplateSize(phySize);
        return answer;
    } catch (XenAPIException e) {
        String msg = "XenAPIException:" + e.toString() + "host:" + _host.uuid + "pool: " + tmplturl;
        s_logger.warn(msg, e);
        return new DownloadAnswer(null, 0, msg, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR, "", "", 0);
    } catch (Exception e) {
        String msg = "XenAPIException:" + e.getMessage() + "host:" + _host.uuid + "pool: " + tmplturl;
        s_logger.warn(msg, e);
        return new DownloadAnswer(null, 0, msg, com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOAD_ERROR, "", "", 0);
    } finally {
        // Always unmount the temporary secondary-storage SR (null-safe).
        removeSR(tmpltsr);
    }
}
/**
 * Removes an SR, first waiting (30s steps, up to 30 minutes total) for any
 * in-flight operations on its VDIs to drain.
 *
 * @param sr SR to remove; null is a no-op
 * @return null on success, otherwise a human-readable error message
 */
protected String removeSRSync(SR sr) {
    if (sr == null) {
        return null;
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug(logX(sr, "Removing SR"));
    }
    Connection conn = getConnection();
    long waittime = 0;
    try {
        Set<VDI> vdis = sr.getVDIs(conn);
        for (VDI vdi : vdis) {
            Map<java.lang.String, Types.VdiOperations> currentOperation = vdi.getCurrentOperations(conn);
            if (currentOperation == null || currentOperation.size() == 0) {
                continue;
            }
            // A VDI is busy: give up after 30 minutes of accumulated waiting.
            if (waittime >= 1800000) {
                String msg = "This template is being used, try late time";
                s_logger.warn(msg);
                return msg;
            }
            waittime += 30000;
            try {
                Thread.sleep(30000);
            } catch (final InterruptedException ex) {
            }
        }
        removeSR(sr);
        return null;
    } catch (XenAPIException e) {
        s_logger.warn(logX(sr, "Unable to get current opertions " + e.toString()), e);
    } catch (XmlRpcException e) {
        s_logger.warn(logX(sr, "Unable to get current opertions " + e.getMessage()), e);
    }
    String msg = "Remove SR failed";
    s_logger.warn(msg);
    return msg;
}
/**
 * Best-effort removal of an SR: forgets its VDIs, unplugs and destroys its
 * PBDs, then forgets the SR itself once no PBDs remain. The whole sequence
 * is attempted up to twice; a final failure is only logged.
 *
 * @param sr SR to remove; null is a no-op
 */
protected void removeSR(SR sr) {
    if (sr == null) {
        return;
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug(logX(sr, "Removing SR"));
    }
    for (int i = 0; i < 2; i++) {
        Connection conn = getConnection();
        try {
            Set<VDI> vdis = sr.getVDIs(conn);
            for (VDI vdi : vdis) {
                vdi.forget(conn);
            }
            Set<PBD> pbds = sr.getPBDs(conn);
            for (PBD pbd : pbds) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug(logX(pbd, "Unplugging pbd"));
                }
                if (pbd.getCurrentlyAttached(conn)) {
                    pbd.unplug(conn);
                }
                pbd.destroy(conn);
            }
            // Re-read the PBD list; the SR can only be forgotten when empty.
            pbds = sr.getPBDs(conn);
            if (pbds.size() == 0) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug(logX(sr, "Forgetting"));
                }
                sr.forget(conn);
                return;
            }
            if (s_logger.isDebugEnabled()) {
                s_logger.debug(logX(sr, "There are still pbd attached"));
                if (s_logger.isTraceEnabled()) {
                    for (PBD pbd : pbds) {
                        s_logger.trace(logX(pbd, " Still attached"));
                    }
                }
            }
        } catch (XenAPIException e) {
            s_logger.debug(logX(sr, "Catch XenAPIException: " + e.toString()));
        } catch (XmlRpcException e) {
            s_logger.debug(logX(sr, "Catch Exception: " + e.getMessage()));
        }
    }
    s_logger.warn(logX(sr, "Unable to remove SR"));
}
/**
 * Migrates the named VM to the destination host in the same pool.
 *
 * The VM is marked Stopping for the duration; the finally block restores
 * whatever state was last assigned (Stopping after a successful migrate,
 * the original state otherwise). For non-Windows guests the
 * "preparemigration" plugin fakes PV drivers first and the code polls up
 * to ~20s for the faked PV version to appear.
 */
protected MigrateAnswer execute(final MigrateCommand cmd) {
    final String vmName = cmd.getVmName();
    State state = null;
    synchronized (_vms) {
        state = _vms.get(vmName);
        _vms.put(vmName, State.Stopping);
    }
    try {
        Connection conn = getConnection();
        Set<VM> vms = VM.getByNameLabel(conn, vmName);
        String ipaddr = cmd.getDestinationIp();
        // Resolve the destination Host object by its management address.
        Set<Host> hosts = Host.getAll(conn);
        Host dsthost = null;
        for (Host host : hosts) {
            if (host.getAddress(conn).equals(ipaddr)) {
                dsthost = host;
                break;
            }
        }
        // if it is windows, we will not fake it is migrateable,
        // windows requires PV driver to migrate
        for (VM vm : vms) {
            if (!cmd.isWindows()) {
                String uuid = vm.getUuid(conn);
                String result = callHostPlugin("vmops", "preparemigration", "uuid", uuid);
                if (result == null || result.isEmpty()) {
                    return new MigrateAnswer(cmd, false, "migration failed", null);
                }
                // check if pv version is successfully set up
                int i = 0;
                for (; i < 20; i++) {
                    try {
                        Thread.sleep(1000);
                    } catch (final InterruptedException ex) {
                    }
                    VMGuestMetrics vmmetric = vm.getGuestMetrics(conn);
                    if (isRefNull(vmmetric))
                        continue;
                    Map<String, String> PVversion = vmmetric.getPVDriversVersion(conn);
                    if (PVversion != null && PVversion.containsKey("major")) {
                        break;
                    }
                }
                if (i >= 20) {
                    String msg = "migration failed due to can not fake PV driver for " + vmName;
                    s_logger.warn(msg);
                    return new MigrateAnswer(cmd, false, msg, null);
                }
            }
            final Map<String, String> options = new HashMap<String, String>();
            vm.poolMigrate(conn, dsthost, options);
            // Leave the VM marked Stopping; sync() reconciles the final state.
            state = State.Stopping;
        }
        return new MigrateAnswer(cmd, true, "migration succeeded", null);
    } catch (XenAPIException e) {
        String msg = "migration failed due to " + e.toString();
        s_logger.warn(msg, e);
        return new MigrateAnswer(cmd, false, msg, null);
    } catch (XmlRpcException e) {
        String msg = "migration failed due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new MigrateAnswer(cmd, false, msg, null);
    } finally {
        synchronized (_vms) {
            _vms.put(vmName, state);
        }
    }
}
/**
 * Polls (one-second intervals, up to 20 attempts) for the labeled VM to
 * report a power state other than HALTED/UNKNOWN/UNRECOGNIZED.
 *
 * @return the converted state once observed, otherwise Stopped
 */
protected State getRealPowerState(String label) {
    final Connection conn = getConnection();
    s_logger.trace("Checking on the HALTED State");
    for (int attempt = 0; attempt < 20; attempt++) {
        try {
            final Set<VM> vms = VM.getByNameLabel(conn, label);
            if (vms != null && vms.size() > 0) {
                final VM vm = vms.iterator().next();
                final VmPowerState vps = vm.getPowerState(conn);
                if (vps != null && vps != VmPowerState.HALTED && vps != VmPowerState.UNKNOWN && vps != VmPowerState.UNRECOGNIZED) {
                    return convertToState(vps);
                }
            }
        } catch (XenAPIException e) {
            s_logger.warn("Unable to get real power state due to " + e.toString(), e);
        } catch (XmlRpcException e) {
            s_logger.warn("Unable to get real power state due to " + e.getMessage(), e);
        }
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
        }
    }
    return State.Stopped;
}
/**
 * Finds dom0 (the control domain) among this host's resident VMs.
 *
 * @return the control-domain VM paired with its record
 * @throws CloudRuntimeException if no resident VM is a control domain
 */
protected Pair<VM, VM.Record> getControlDomain(Connection conn) throws XenAPIException, XmlRpcException {
    final Host host = Host.getByUuid(conn, _host.uuid);
    for (final VM vm : host.getResidentVMs(conn)) {
        if (vm.getIsControlDomain(conn)) {
            return new Pair<VM, VM.Record>(vm, vm.getRecord(conn));
        }
    }
    throw new CloudRuntimeException("Com'on no control domain? What the crap?!#@!##$@");
}
/**
 * Reconciles the locally tracked VM state map (_vms) with what Xen
 * actually reports, returning only the differences worth acting on.
 *
 * Transition states (Starting/Stopping/Migrating) suppress noisy changes
 * caused by lag; VMs missing from Xen are reported as Stopped unless they
 * are mid-transition. The whole pass runs under the _vms lock.
 *
 * @return map of vm name -> new State for changes the caller should
 *         process, or null when the Xen VM list could not be fetched
 */
protected HashMap<String, State> sync() {
    HashMap<String, State> newStates;
    HashMap<String, State> oldStates = null;
    final HashMap<String, State> changes = new HashMap<String, State>();
    synchronized (_vms) {
        newStates = getAllVms();
        if (newStates == null) {
            s_logger.debug("Unable to get the vm states so no state sync at this point.");
            return null;
        }
        // oldStates starts as a copy of _vms; entries seen in newStates are
        // removed, so what remains afterwards are VMs Xen no longer reports.
        oldStates = new HashMap<String, State>(_vms.size());
        oldStates.putAll(_vms);
        for (final Map.Entry<String, State> entry : newStates.entrySet()) {
            final String vm = entry.getKey();
            State newState = entry.getValue();
            final State oldState = oldStates.remove(vm);
            // A fresh Stopped report may just be HALTED lag; double-check.
            if (newState == State.Stopped && oldState != State.Stopping && oldState != null && oldState != State.Stopped) {
                newState = getRealPowerState(vm);
            }
            if (s_logger.isTraceEnabled()) {
                s_logger.trace("VM " + vm + ": xen has state " + newState + " and we have state " + (oldState != null ? oldState.toString() : "null"));
            }
            if (vm.startsWith("migrating")) {
                s_logger.debug("Migrating from xen detected.  Skipping");
                continue;
            }
            if (oldState == null) {
                _vms.put(vm, newState);
                s_logger.debug("Detecting a new state but couldn't find a old state so adding it to the changes: " + vm);
                changes.put(vm, newState);
            } else if (oldState == State.Starting) {
                if (newState == State.Running) {
                    _vms.put(vm, newState);
                } else if (newState == State.Stopped) {
                    s_logger.debug("Ignoring vm " + vm + " because of a lag in starting the vm.");
                }
            } else if (oldState == State.Migrating) {
                if (newState == State.Running) {
                    s_logger.debug("Detected that an migrating VM is now running: " + vm);
                    _vms.put(vm, newState);
                }
            } else if (oldState == State.Stopping) {
                if (newState == State.Stopped) {
                    _vms.put(vm, newState);
                } else if (newState == State.Running) {
                    s_logger.debug("Ignoring vm " + vm + " because of a lag in stopping the vm. ");
                }
            } else if (oldState != newState) {
                _vms.put(vm, newState);
                if (newState == State.Stopped) {
                    /*
                     * if (_vmsKilled.remove(vm)) { s_logger.debug("VM " + vm + " has been killed for storage. ");
                     * newState = State.Error; }
                     */
                }
                changes.put(vm, newState);
            }
        }
        // Whatever is left in oldStates was not reported by Xen at all.
        for (final Map.Entry<String, State> entry : oldStates.entrySet()) {
            final String vm = entry.getKey();
            final State oldState = entry.getValue();
            if (s_logger.isTraceEnabled()) {
                s_logger.trace("VM " + vm + " is now missing from xen so reporting stopped");
            }
            if (oldState == State.Stopping) {
                s_logger.debug("Ignoring VM " + vm + " in transition state stopping.");
                _vms.remove(vm);
            } else if (oldState == State.Starting) {
                s_logger.debug("Ignoring VM " + vm + " in transition state starting.");
            } else if (oldState == State.Stopped) {
                _vms.remove(vm);
            } else if (oldState == State.Migrating) {
                s_logger.debug("Ignoring VM " + vm + " in migrating state.");
            } else {
                State state = State.Stopped;
                /*
                 * if (_vmsKilled.remove(entry.getKey())) { s_logger.debug("VM " + vm +
                 * " has been killed by storage monitor"); state = State.Error; }
                 */
                changes.put(entry.getKey(), state);
            }
        }
    }
    return changes;
}
/**
 * Marks the host ready, unmounting the snapshots directory on a
 * best-effort basis: the plugin result is deliberately ignored so Ready
 * always succeeds.
 */
protected ReadyAnswer execute(ReadyCommand cmd) {
    final Long dcId = cmd.getDataCenterId();
    callHostPlugin("vmopsSnapshot", "unmountSnapshotsDir", "dcId", dcId.toString());
    return new ReadyAnswer(cmd);
}
//
// using synchronized on VM name in the caller does not prevent multiple
// commands being sent against
// the same VM, there will be a race condition here in finally clause and
// the main block if
// there are multiple requests going on
//
// Therefore, a lazy solution is to add a synchronized guard here
/**
 * Resolves a VM's VNC port through the "getvncport" plugin.
 *
 * @return the port number, or -1 when the record or plugin call fails or
 *         yields nothing parseable
 */
protected int getVncPort(VM vm) {
    final Connection conn = getConnection();
    final VM.Record record;
    try {
        record = vm.getRecord(conn);
    } catch (XenAPIException e) {
        s_logger.warn("Unable to get vnc-port due to " + e.toString(), e);
        return -1;
    } catch (XmlRpcException e) {
        s_logger.warn("Unable to get vnc-port due to " + e.getMessage(), e);
        return -1;
    }
    // An empty HVM boot policy means the guest is paravirtualized.
    final String hvm = record.HVMBootPolicy.isEmpty() ? "false" : "true";
    final String vncport = callHostPlugin("vmops", "getvncport", "domID", record.domid.toString(), "hvm", hvm);
    if (vncport == null || vncport.isEmpty()) {
        return -1;
    }
    return NumbersUtil.parseInt(vncport.replace("\n", ""), -1);
}
/**
 * Reboots the named VM: tries a clean (in-guest) reboot first and falls back
 * to a hard reboot if clean reboot is not supported.
 *
 * NOTE(review): the VM is marked State.Starting during the reboot and is
 * always restored to State.Running in the finally block, even when the reboot
 * failed — confirm this is the intended state handling.
 */
protected Answer execute(final RebootCommand cmd) {
    synchronized (_vms) {
        _vms.put(cmd.getVmName(), State.Starting);
    }
    try {
        Connection conn = getConnection();
        Set<VM> vms = null;
        try {
            vms = VM.getByNameLabel(conn, cmd.getVmName());
        } catch (XenAPIException e0) {
            s_logger.debug("getByNameLabel failed " + e0.toString());
            return new RebootAnswer(cmd, "getByNameLabel failed " + e0.toString());
        } catch (Exception e0) {
            s_logger.debug("getByNameLabel failed " + e0.getMessage());
            return new RebootAnswer(cmd, "getByNameLabel failed");
        }
        // Several VM records may share the label; reboot each of them.
        for (VM vm : vms) {
            try {
                vm.cleanReboot(conn);
            } catch (XenAPIException e) {
                s_logger.debug("Do Not support Clean Reboot, fall back to hard Reboot: " + e.toString());
                try {
                    vm.hardReboot(conn);
                } catch (XenAPIException e1) {
                    s_logger.debug("Caught exception on hard Reboot " + e1.toString());
                    return new RebootAnswer(cmd, "reboot failed: " + e1.toString());
                } catch (XmlRpcException e1) {
                    s_logger.debug("Caught exception on hard Reboot " + e1.getMessage());
                    return new RebootAnswer(cmd, "reboot failed");
                }
            } catch (XmlRpcException e) {
                String msg = "Clean Reboot failed due to " + e.getMessage();
                s_logger.warn(msg, e);
                return new RebootAnswer(cmd, msg);
            }
        }
        return new RebootAnswer(cmd, "reboot succeeded", null, null);
    } finally {
        synchronized (_vms) {
            _vms.put(cmd.getVmName(), State.Running);
        }
    }
}
/**
 * Reboots a router VM. For a valid router name the traffic byte counters are
 * snapshotted before the reboot and attached to the answer; after a
 * successful reboot the router's command port is probed and network usage
 * tracking is re-created.
 */
protected Answer execute(RebootRouterCommand cmd) {
    Long bytesSent = 0L;
    Long bytesRcvd = 0L;
    if (VirtualMachineName.isValidRouterName(cmd.getVmName())) {
        long[] stats = getNetworkStats(cmd.getPrivateIpAddress());
        bytesSent = stats[0];
        bytesRcvd = stats[1];
    }
    // Delegate the actual reboot to the generic RebootCommand handler.
    RebootAnswer answer = (RebootAnswer) execute((RebootCommand) cmd);
    answer.setBytesSent(bytesSent);
    answer.setBytesReceived(bytesRcvd);
    if (answer.getResult()) {
        String cnct = connect(cmd.getVmName(), cmd.getPrivateIpAddress());
        networkUsage(cmd.getPrivateIpAddress(), "create", null);
        if (cnct == null) {
            return answer;
        } else {
            // Reboot succeeded but the router never answered on its command port.
            return new Answer(cmd, false, cnct);
        }
    }
    return answer;
}
/**
 * Clones a new VM from the template matching the command's guest OS type and
 * strips the template's disk configuration. PV guests (anything that is not a
 * Windows/Citrix/Other type) additionally get a PV bootloader: eliloader when
 * booting from ISO, pygrub otherwise.
 */
protected VM createVmFromTemplate(Connection conn, StartCommand cmd) throws XenAPIException, XmlRpcException {
    String guestOsTypeName = getGuestOsType(cmd.getGuestOSDescription());
    Set<VM> templates = VM.getByNameLabel(conn, guestOsTypeName);
    assert templates.size() == 1 : "Should only have 1 template but found " + templates.size();
    VM clone = templates.iterator().next().createClone(conn, cmd.getVmName());
    clone.removeFromOtherConfig(conn, "disks");
    boolean hvmGuest = guestOsTypeName.startsWith("Windows") || guestOsTypeName.startsWith("Citrix") || guestOsTypeName.startsWith("Other");
    if (!hvmGuest) {
        if (cmd.getBootFromISO()) {
            clone.setPVBootloader(conn, "eliloader");
        } else {
            clone.setPVBootloader(conn, "pygrub");
        }
        clone.addToOtherConfig(conn, "install-repository", "cdrom");
    }
    return clone;
}
/**
 * Joins this host to the XenServer resource pool whose master is
 * {@code masterIp}. If the host is already a member of that pool this is a
 * no-op that records the pool uuid. Otherwise Pool.join is issued against
 * this host and the method polls (up to ~30 seconds, 15 x 2s after a 10s
 * xapi restart grace) until the local xapi reports HostIsSlave.
 *
 * Fix: the conversion-timeout message used to read "30 secondary"; corrected
 * to "30 seconds".
 *
 * @return true when the host is (now) a member of the pool
 * @throws CloudRuntimeException on a malformed host URL or conversion timeout
 * @throws RuntimeException when the XenServer API rejects the join
 */
public boolean joinPool(String masterIp, String username, String password) {
    Connection hostConn = null;
    Connection poolConn = null;
    Session hostSession = null;
    URL hostUrl = null;
    try {
        // Connect and find out about the new connection to the new pool.
        poolConn = _connPool.masterConnect(masterIp, username, password);
        Set<Pool> pools = Pool.getAll(poolConn);
        Pool pool = pools.iterator().next();
        String poolUUID = pool.getUuid(poolConn);
        // check if this host is already in pool
        Set<Host> hosts = Host.getAll(poolConn);
        for (Host host : hosts) {
            if (host.getAddress(poolConn).equals(_host.ip)) {
                _host.pool = poolUUID;
                return true;
            }
        }
        hostUrl = new URL("http://" + _host.ip);
        hostConn = new Connection(hostUrl, 100);
        hostSession = Session.loginWithPassword(hostConn, _username, _password, APIVersion.latest().toString());
        // Now join it.
        Pool.join(hostConn, masterIp, username, password);
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Joined the pool at " + masterIp);
        }
        try {
            // slave will restart xapi in 10 sec
            Thread.sleep(10000);
        } catch (InterruptedException e) {
        }
        // check if the master of this host is set correctly.
        Connection c = new Connection(hostUrl, 100);
        int i;
        for (i = 0; i < 15; i++) {
            try {
                // A successful login means this host still believes it is a
                // master; keep polling until login fails with HostIsSlave.
                Session.loginWithPassword(c, _username, _password, APIVersion.latest().toString());
                s_logger.debug(_host.ip + " is still master, waiting for the conversion to the slave");
                Session.logout(c);
                c.dispose();
            } catch (Types.HostIsSlave e) {
                try {
                    Session.logout(c);
                    c.dispose();
                } catch (XmlRpcException e1) {
                    s_logger.debug("Unable to logout of test connection due to " + e1.getMessage());
                } catch (XenAPIException e1) {
                    s_logger.debug("Unable to logout of test connection due to " + e1.getMessage());
                }
                break;
            } catch (XmlRpcException e) {
                s_logger.debug("XmlRpcException: Still waiting for the conversion to the master");
            } catch (Exception e) {
                s_logger.debug("Exception: Still waiting for the conversion to the master");
            }
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
            }
        }
        if (i >= 15) {
            throw new CloudRuntimeException(_host.ip + " didn't change to slave after waiting 30 seconds");
        }
        _host.pool = poolUUID;
        return true;
    } catch (MalformedURLException e) {
        throw new CloudRuntimeException("Problem with url " + _host.ip);
    } catch (XenAPIException e) {
        String msg = "Unable to allow host " + _host.uuid
                + " to join pool " + masterIp + " due to " + e.toString();
        s_logger.warn(msg, e);
        throw new RuntimeException(msg);
    } catch (XmlRpcException e) {
        String msg = "Unable to allow host " + _host.uuid
                + " to join pool " + masterIp + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new RuntimeException(msg);
    } finally {
        if (poolConn != null) {
            try {
                Session.logout(poolConn);
            } catch (Exception e) {
            }
            poolConn.dispose();
        }
        if (hostSession != null) {
            try {
                Session.logout(hostConn);
            } catch (Exception e) {
            }
        }
    }
}
/**
 * Best-effort cleanup after a failed VM start: hard-shuts down and destroys
 * the half-created VM (if any), then unplugs and destroys the VBDs of every
 * mounted VDI. All failures are logged and swallowed so cleanup continues.
 */
protected void startvmfailhandle(VM vm, List<Ternary<SR, VDI, VolumeVO>> mounts) {
    Connection conn = getConnection();
    if (vm != null) {
        try {
            if (vm.getPowerState(conn) == VmPowerState.RUNNING) {
                try {
                    vm.hardShutdown(conn);
                } catch (Exception e) {
                    String msg = "VM hardshutdown failed due to " + e.toString();
                    s_logger.warn(msg);
                }
            }
            if (vm.getPowerState(conn) == VmPowerState.HALTED) {
                try {
                    vm.destroy(conn);
                } catch (Exception e) {
                    String msg = "VM destroy failed due to " + e.toString();
                    s_logger.warn(msg);
                }
            }
        } catch (Exception e) {
            String msg = "VM getPowerState failed due to " + e.toString();
            s_logger.warn(msg);
        }
    }
    if (mounts != null) {
        for (Ternary<SR, VDI, VolumeVO> mount : mounts) {
            VDI vdi = mount.second();
            Set<VBD> vbds = null;
            try {
                vbds = vdi.getVBDs(conn);
            } catch (Exception e) {
                // Can't enumerate this VDI's VBDs; move on to the next mount.
                String msg = "VDI getVBDS failed due to " + e.toString();
                s_logger.warn(msg);
                continue;
            }
            for (VBD vbd : vbds) {
                try {
                    vbd.unplug(conn);
                    vbd.destroy(conn);
                } catch (Exception e) {
                    String msg = "VBD destroy failed due to " + e.toString();
                    s_logger.warn(msg);
                }
            }
        }
    }
}
/**
 * Sets all four XenServer memory limits (static/dynamic min and max) to the
 * same value, i.e. a fixed memory size with no ballooning range.
 */
protected void setMemory(Connection conn, VM vm, long memsize) throws XmlRpcException, XenAPIException {
    vm.setMemoryStaticMin(conn, memsize);
    vm.setMemoryDynamicMin(conn, memsize);
    vm.setMemoryDynamicMax(conn, memsize);
    vm.setMemoryStaticMax(conn, memsize);
}
/**
 * Starts a user VM from a StartCommand: clones the VM from its guest-OS
 * template, sizes memory and CPU, creates the root/data/CD-ROM VBDs, inserts
 * the ISO when requested, creates the guest (and optionally public) VIFs, and
 * boots the VM.
 *
 * Makes up to two attempts: if the first (PV) start fails with a
 * kernel/repository error, the retry forces the "Other install media" (HVM)
 * template.
 *
 * @return a StartAnswer reporting success or the failure message
 */
protected StartAnswer execute(StartCommand cmd) {
    State state = State.Stopped;
    Connection conn = getConnection();
    VM vm = null;
    SR isosr = null;
    List<Ternary<SR, VDI, VolumeVO>> mounts = null;
    for (int retry = 0; retry < 2; retry++) {
        try {
            synchronized (_vms) {
                _vms.put(cmd.getVmName(), State.Starting);
            }
            List<VolumeVO> vols = cmd.getVolumes();
            mounts = mount(vols);
            if (retry == 1) {
                // at the second time, try hvm
                cmd.setGuestOSDescription("Other install media");
            }
            vm = createVmFromTemplate(conn, cmd);
            long memsize = cmd.getRamSize() * 1024L * 1024L;
            setMemory(conn, vm, memsize);
            vm.setIsATemplate(conn, false);
            vm.setVCPUsMax(conn, (long) cmd.getCpu());
            vm.setVCPUsAtStartup(conn, (long) cmd.getCpu());
            // Pin the VM to this host.
            Host host = Host.getByUuid(conn, _host.uuid);
            vm.setAffinity(conn, host);
            Map<String, String> vcpuparam = new HashMap<String, String>();
            vcpuparam.put("weight", Integer.toString(cmd.getCpuWeight()));
            vcpuparam.put("cap", Integer.toString(cmd.getUtilization()));
            vm.setVCPUsParams(conn, vcpuparam);
            boolean bootFromISO = cmd.getBootFromISO();
            /* create root VBD */
            VBD.Record vbdr = new VBD.Record();
            Ternary<SR, VDI, VolumeVO> mount = mounts.get(0);
            vbdr.VM = vm;
            vbdr.VDI = mount.second();
            vbdr.bootable = !bootFromISO;
            vbdr.userdevice = "0";
            vbdr.mode = Types.VbdMode.RW;
            vbdr.type = Types.VbdType.DISK;
            VBD.create(conn, vbdr);
            /* create data VBDs */
            // NOTE: the same Record instance is reused and mutated for each
            // data disk; every field that matters is overwritten below.
            for (int i = 1; i < mounts.size(); i++) {
                mount = mounts.get(i);
                // vdi.setNameLabel(conn, cmd.getVmName() + "-DATA");
                vbdr.VM = vm;
                vbdr.VDI = mount.second();
                vbdr.bootable = false;
                vbdr.userdevice = Long.toString(mount.third().getDeviceId());
                vbdr.mode = Types.VbdMode.RW;
                vbdr.type = Types.VbdType.DISK;
                vbdr.unpluggable = true;
                VBD.create(conn, vbdr);
            }
            /* create CD-ROM VBD */
            VBD.Record cdromVBDR = new VBD.Record();
            cdromVBDR.VM = vm;
            cdromVBDR.empty = true;
            cdromVBDR.bootable = bootFromISO;
            cdromVBDR.userdevice = "3";
            cdromVBDR.mode = Types.VbdMode.RO;
            cdromVBDR.type = Types.VbdType.CD;
            VBD cdromVBD = VBD.create(conn, cdromVBDR);
            /* insert the ISO VDI if isoPath is not null */
            String isopath = cmd.getISOPath();
            if (isopath != null) {
                // isoPath is "<mountpoint>/<isoname>"; mount the SR for the
                // directory and look up the named VDI inside it.
                int index = isopath.lastIndexOf("/");
                String mountpoint = isopath.substring(0, index);
                URI uri = new URI(mountpoint);
                isosr = createIsoSRbyURI(uri, cmd.getVmName(), false);
                String isoname = isopath.substring(index + 1);
                VDI isovdi = getVDIbyLocationandSR(isoname, isosr);
                if (isovdi == null) {
                    String msg = " can not find ISO " + cmd.getISOPath();
                    s_logger.warn(msg);
                    return new StartAnswer(cmd, msg);
                } else {
                    cdromVBD.insert(conn, isovdi);
                }
            }
            // Guest VIF on device 0; optional public VIF on device 1.
            createVIF(conn, vm, cmd.getGuestMacAddress(), cmd.getGuestNetworkId(), cmd.getNetworkRateMbps(), "0", false);
            if (cmd.getExternalMacAddress() != null && cmd.getExternalVlan() != null) {
                createVIF(conn, vm, cmd.getExternalMacAddress(), cmd.getExternalVlan(), 0, "1", true);
            }
            /* set action after crash as destroy */
            vm.setActionsAfterCrash(conn, Types.OnCrashBehaviour.DESTROY);
            vm.start(conn, false, true);
            if (_canBridgeFirewall) {
                // Program default security-group rules for the new VM.
                String result = callHostPlugin("vmops", "default_network_rules",
                        "vmName", cmd.getVmName(),
                        "vmIP", cmd.getGuestIpAddress(),
                        "vmMAC", cmd.getGuestMacAddress(),
                        "vmID", Long.toString(cmd.getId()));
                if (result == null || result.isEmpty() || !Boolean.parseBoolean(result)) {
                    s_logger.warn("Failed to program default network rules for vm " + cmd.getVmName());
                } else {
                    s_logger.info("Programmed default network rules for vm " + cmd.getVmName());
                }
            }
            state = State.Running;
            return new StartAnswer(cmd);
        } catch (XenAPIException e) {
            String errormsg = e.toString();
            String msg = "Exception caught while starting VM due to message:" + errormsg + " (" + e.getClass().getName() + ")";
            // Only kernel/repository errors are retried (with the HVM
            // template); anything else fails immediately.
            if (!errormsg.contains("Unable to find partition containing kernel") && !errormsg.contains("Unable to access a required file in the specified repository")) {
                s_logger.warn(msg, e);
                startvmfailhandle(vm, mounts);
                removeSR(isosr);
            } else {
                startvmfailhandle(vm, mounts);
                removeSR(isosr);
                continue;
            }
            state = State.Stopped;
            return new StartAnswer(cmd, msg);
        } catch (Exception e) {
            String msg = "Exception caught while starting VM due to message:" + e.getMessage();
            s_logger.warn(msg, e);
            startvmfailhandle(vm, mounts);
            removeSR(isosr);
            state = State.Stopped;
            return new StartAnswer(cmd, msg);
        } finally {
            synchronized (_vms) {
                _vms.put(cmd.getVmName(), state);
            }
        }
    }
    String msg = "Start VM failed";
    return new StartAnswer(cmd, msg);
}
/**
 * Creates a rate-limited VIF on the given network and device number. A rate
 * of 0 falls back to a default of 200; the plugin parameter is expressed in
 * kbps as rate * 128.
 */
protected VIF createVIF(Connection conn, VM vm, String mac, int rate, String devNum, Network network) throws XenAPIException, XmlRpcException,
        InternalErrorException {
    VIF.Record rec = new VIF.Record();
    rec.VM = vm;
    rec.device = devNum;
    rec.MAC = mac;
    rec.network = network;
    if (rate == 0) {
        rate = 200;
    }
    rec.qosAlgorithmType = "ratelimit";
    rec.qosAlgorithmParams = new HashMap<String, String>();
    // convert mbs to kilobyte per second
    rec.qosAlgorithmParams.put("kbps", Integer.toString(rate * 128));
    return VIF.create(conn, rec);
}
/**
 * Creates a VIF on the public or guest network, optionally on a VLAN. For a
 * tagged VLAN the VLAN network is created/enabled on the corresponding PIF
 * first.
 *
 * NOTE(review): a non-numeric vlanTag other than "untagged" makes
 * Long.valueOf throw NumberFormatException — confirm callers always pass a
 * numeric tag.
 *
 * @throws InternalErrorException when the VLAN network cannot be enabled
 */
protected VIF createVIF(Connection conn, VM vm, String mac, String vlanTag, int rate, String devNum, boolean isPub) throws XenAPIException, XmlRpcException,
        InternalErrorException {
    String nwUuid = (isPub ? _host.publicNetwork : _host.guestNetwork);
    String pifUuid = (isPub ? _host.publicPif : _host.guestPif);
    Network vlanNetwork = null;
    if ("untagged".equalsIgnoreCase(vlanTag)) {
        vlanNetwork = Network.getByUuid(conn, nwUuid);
    } else {
        vlanNetwork = enableVlanNetwork(Long.valueOf(vlanTag), pifUuid);
    }
    if (vlanNetwork == null) {
        throw new InternalErrorException("Failed to enable VLAN network with tag: " + vlanTag);
    }
    return createVIF(conn, vm, mac, rate, devNum, vlanNetwork);
}
/**
 * Stops the named VM. VMs with the same label that are running on another
 * host are filtered out first; the remaining VM is cleanly shut down (with a
 * hard-shutdown fallback), destroyed, and its VDIs, ISO SR and now-unused
 * VLAN networks are cleaned up. For router VMs the traffic counters are
 * collected before shutdown and returned in the answer.
 *
 * Fix: the filtering loop removed elements from {@code vms} while iterating
 * it with a for-each loop, which throws ConcurrentModificationException as
 * soon as a VM resident on another host is encountered. An explicit Iterator
 * with {@code remove()} is used instead.
 *
 * @return a StopAnswer carrying the collected byte counters on success
 */
protected StopAnswer execute(final StopCommand cmd) {
    String vmName = cmd.getVmName();
    try {
        Connection conn = getConnection();
        Set<VM> vms = VM.getByNameLabel(conn, vmName);
        // stop vm which is running on this host or is in halted state
        Iterator<VM> iter = vms.iterator();
        while (iter.hasNext()) {
            VM vm = iter.next();
            VM.Record vmr = vm.getRecord(conn);
            if (vmr.powerState != VmPowerState.RUNNING)
                continue;
            if (isRefNull(vmr.residentOn))
                continue;
            if (vmr.residentOn.getUuid(conn).equals(_host.uuid))
                continue;
            // Running, but resident on a different host: not ours to stop.
            iter.remove();
        }
        if (vms.size() == 0) {
            s_logger.warn("VM does not exist on XenServer" + _host.uuid);
            synchronized (_vms) {
                _vms.remove(vmName);
            }
            return new StopAnswer(cmd, "VM does not exist", 0, 0L, 0L);
        }
        Long bytesSent = 0L;
        Long bytesRcvd = 0L;
        for (VM vm : vms) {
            VM.Record vmr = vm.getRecord(conn);
            if (vmr.isControlDomain) {
                String msg = "Tring to Shutdown control domain";
                s_logger.warn(msg);
                return new StopAnswer(cmd, msg);
            }
            if (vmr.powerState == VmPowerState.RUNNING && !isRefNull(vmr.residentOn) && !vmr.residentOn.getUuid(conn).equals(_host.uuid)) {
                String msg = "Stop Vm " + vmName + " failed due to this vm is not running on this host: " + _host.uuid + " but host:" + vmr.residentOn.getUuid(conn);
                s_logger.warn(msg);
                return new StopAnswer(cmd, msg);
            }
            State state = null;
            synchronized (_vms) {
                state = _vms.get(vmName);
                _vms.put(vmName, State.Stopping);
            }
            try {
                if (vmr.powerState == VmPowerState.RUNNING) {
                    /* when stop a vm, set affinity to current xenserver */
                    vm.setAffinity(conn, vm.getResidentOn(conn));
                    try {
                        // For routers, snapshot traffic counters before shutdown.
                        if (VirtualMachineName.isValidRouterName(vmName)) {
                            if (cmd.getPrivateRouterIpAddress() != null) {
                                long[] stats = getNetworkStats(cmd.getPrivateRouterIpAddress());
                                bytesSent = stats[0];
                                bytesRcvd = stats[1];
                            }
                        }
                        if (_canBridgeFirewall) {
                            String result = callHostPlugin("vmops", "destroy_network_rules_for_vm", "vmName", cmd.getVmName());
                            if (result == null || result.isEmpty() || !Boolean.parseBoolean(result)) {
                                s_logger.warn("Failed to remove network rules for vm " + cmd.getVmName());
                            } else {
                                s_logger.info("Removed network rules for vm " + cmd.getVmName());
                            }
                        }
                        vm.cleanShutdown(conn);
                    } catch (XenAPIException e) {
                        s_logger.debug("Do Not support Clean Shutdown, fall back to hard Shutdown: " + e.toString());
                        try {
                            vm.hardShutdown(conn);
                        } catch (XenAPIException e1) {
                            String msg = "Hard Shutdown failed due to " + e1.toString();
                            s_logger.warn(msg, e1);
                            return new StopAnswer(cmd, msg);
                        } catch (XmlRpcException e1) {
                            String msg = "Hard Shutdown failed due to " + e1.getMessage();
                            s_logger.warn(msg, e1);
                            return new StopAnswer(cmd, msg);
                        }
                    } catch (XmlRpcException e) {
                        String msg = "Clean Shutdown failed due to " + e.getMessage();
                        s_logger.warn(msg, e);
                        return new StopAnswer(cmd, msg);
                    }
                }
            } catch (Exception e) {
                String msg = "Catch exception " + e.getClass().toString() + " when stop VM:" + cmd.getVmName();
                s_logger.debug(msg);
                return new StopAnswer(cmd, msg);
            } finally {
                try {
                    // Once halted: destroy the VM and clean up its VDIs, ISO SR
                    // and any no-longer-used VLAN networks.
                    if (vm.getPowerState(conn) == VmPowerState.HALTED) {
                        Set<VIF> vifs = vm.getVIFs(conn);
                        List<Network> networks = new ArrayList<Network>();
                        for (VIF vif : vifs) {
                            networks.add(vif.getNetwork(conn));
                        }
                        List<VDI> vdis = getVdis(vm);
                        vm.destroy(conn);
                        for (VDI vdi : vdis) {
                            umount(vdi);
                        }
                        state = State.Stopped;
                        SR sr = getISOSRbyVmName(cmd.getVmName());
                        removeSR(sr);
                        // Disable any VLAN networks that aren't used
                        // anymore
                        for (Network network : networks) {
                            if (network.getNameLabel(conn).startsWith("VLAN")) {
                                disableVlanNetwork(network);
                            }
                        }
                    }
                } catch (XenAPIException e) {
                    String msg = "VM destroy failed in Stop " + vmName + " Command due to " + e.toString();
                    s_logger.warn(msg, e);
                } catch (Exception e) {
                    String msg = "VM destroy failed in Stop " + vmName + " Command due to " + e.getMessage();
                    s_logger.warn(msg, e);
                } finally {
                    synchronized (_vms) {
                        _vms.put(vmName, state);
                    }
                }
            }
        }
        return new StopAnswer(cmd, "Stop VM " + vmName + " Succeed", 0, bytesSent, bytesRcvd);
    } catch (XenAPIException e) {
        String msg = "Stop Vm " + vmName + " fail due to " + e.toString();
        s_logger.warn(msg, e);
        return new StopAnswer(cmd, msg);
    } catch (XmlRpcException e) {
        String msg = "Stop Vm " + vmName + " fail due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new StopAnswer(cmd, msg);
    }
}
/**
 * Returns the VDIs behind every VBD of the given VM. On a XenServer API
 * failure the error is logged and whatever was collected so far is returned.
 */
private List<VDI> getVdis(VM vm) {
    List<VDI> found = new ArrayList<VDI>();
    try {
        Connection conn = getConnection();
        for (VBD vbd : vm.getVBDs(conn)) {
            found.add(vbd.getVDI(conn));
        }
    } catch (XenAPIException e) {
        s_logger.warn("getVdis can not get VPD due to " + e.toString(), e);
    } catch (XmlRpcException e) {
        s_logger.warn("getVdis can not get VPD due to " + e.getMessage(), e);
    }
    return found;
}
/**
 * Waits for the VM's in-guest agent to answer on the given port. Retries up
 * to _retry times, sleeping _sleep ms between attempts, re-checking each time
 * that the VM still exists.
 *
 * @return null on success, otherwise a failure/timeout message
 */
protected String connect(final String vmName, final String ipAddress, final int port) {
    for (int i = 0; i <= _retry; i++) {
        try {
            Connection conn = getConnection();
            Set<VM> vms = VM.getByNameLabel(conn, vmName);
            if (vms.size() < 1) {
                String msg = "VM " + vmName + " is not running";
                s_logger.warn(msg);
                return msg;
            }
        } catch (Exception e) {
            String msg = "VM.getByNameLabel " + vmName + " failed due to " + e.toString();
            s_logger.warn(msg, e);
            return msg;
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Trying to connect to " + ipAddress);
        }
        if (pingdomr(ipAddress, Integer.toString(port)))
            return null;
        try {
            Thread.sleep(_sleep);
        } catch (final InterruptedException e) {
        }
    }
    String msg = "Timeout, Unable to logon to " + ipAddress;
    s_logger.debug(msg);
    return msg;
}
/**
 * Convenience overload: probes the default in-guest agent port 3922.
 */
protected String connect(final String vmname, final String ipAddress) {
    return connect(vmname, ipAddress, 3922);
}
/**
 * Starts a domain-router system VM: resolves (or enables) the guest network
 * for the router's vnet tag, boots it via startSystemVM, and on success
 * initializes network usage accounting on the router's private IP.
 */
protected StartRouterAnswer execute(StartRouterCommand cmd) {
    final String vmName = cmd.getVmName();
    final DomainRouter router = cmd.getRouter();
    try {
        String tag = router.getVnet();
        Network network = null;
        if ("untagged".equalsIgnoreCase(tag)) {
            Connection conn = getConnection();
            network = Network.getByUuid(conn, _host.guestNetwork);
        } else {
            network = enableVlanNetwork(Long.parseLong(tag), _host.guestPif);
        }
        if (network == null) {
            throw new InternalErrorException("Failed to enable VLAN network with tag: " + tag);
        }
        String bootArgs = cmd.getBootArgs();
        // 3922 is the router's in-guest agent command port.
        String result = startSystemVM(vmName, router.getVlanId(), network, cmd.getVolumes(), bootArgs, router.getGuestMacAddress(), router.getPrivateIpAddress(), router
                .getPrivateMacAddress(), router.getPublicMacAddress(), 3922, router.getRamSize(), cmd.getGuestOSDescription(), cmd.getNetworkRateMbps());
        if (result == null) {
            networkUsage(router.getPrivateIpAddress(), "create", null);
            return new StartRouterAnswer(cmd);
        }
        return new StartRouterAnswer(cmd, result);
    } catch (Exception e) {
        String msg = "Exception caught while starting router vm " + vmName + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new StartRouterAnswer(cmd, msg);
    }
}
/**
 * Common start path for system VMs (domain router, console proxy, secondary
 * storage VM). Clones the system-VM template, configures memory/CPU and host
 * affinity, creates the root and CD-ROM VBDs (the CD-ROM gets the systemvm
 * ISO), wires up to three VIFs (guest/link-local, private, optional public),
 * appends the boot args to the PV kernel args, boots the VM and finally waits
 * for the in-guest agent to answer on cmdPort.
 *
 * @return null on success, otherwise a failure message
 */
protected String startSystemVM(String vmName, String vlanId, Network nw0, List<VolumeVO> vols, String bootArgs, String guestMacAddr, String privateIp, String privateMacAddr,
        String publicMacAddr, int cmdPort, long ramSize, String getGuestOSDescription, int networkRateMbps) {
    setupLinkLocalNetwork();
    VM vm = null;
    List<Ternary<SR, VDI, VolumeVO>> mounts = null;
    Connection conn = getConnection();
    State state = State.Stopped;
    try {
        synchronized (_vms) {
            _vms.put(vmName, State.Starting);
        }
        mounts = mount(vols);
        assert mounts.size() == 1 : "System VMs should have only 1 partition but we actually have " + mounts.size();
        Ternary<SR, VDI, VolumeVO> mount = mounts.get(0);
        Set<VM> templates = VM.getByNameLabel(conn, getGuestOsType(getGuestOSDescription));
        if (templates.size() == 0) {
            String msg = " can not find systemvm template " + getGuestOsType(getGuestOSDescription) ;
            s_logger.warn(msg);
            return msg;
        }
        VM template = templates.iterator().next();
        vm = template.createClone(conn, vmName);
        vm.removeFromOtherConfig(conn, "disks");
        vm.setPVBootloader(conn, "pygrub");
        long memsize = ramSize * 1024L * 1024L;
        setMemory(conn, vm, memsize);
        vm.setIsATemplate(conn, false);
        vm.setVCPUsAtStartup(conn, 1L);
        Host host = Host.getByUuid(conn, _host.uuid);
        vm.setAffinity(conn, host);
        /* create VBD */
        VBD.Record vbdr = new VBD.Record();
        vbdr.VM = vm;
        vbdr.VDI = mount.second();
        vbdr.bootable = true;
        vbdr.userdevice = "0";
        vbdr.mode = Types.VbdMode.RW;
        vbdr.type = Types.VbdType.DISK;
        VBD.create(conn, vbdr);
        /* create CD-ROM VBD */
        VBD.Record cdromVBDR = new VBD.Record();
        cdromVBDR.VM = vm;
        cdromVBDR.empty = true;
        cdromVBDR.bootable = false;
        cdromVBDR.userdevice = "3";
        cdromVBDR.mode = Types.VbdMode.RO;
        cdromVBDR.type = Types.VbdType.CD;
        VBD cdromVBD = VBD.create(conn, cdromVBDR);
        cdromVBD.insert(conn, VDI.getByUuid(conn, _host.systemvmisouuid));
        /* create VIF0 */
        Network network = null;
        // Console proxy and secondary storage VMs take their first NIC on the
        // link-local network; other system VMs use the provided guest network.
        if (VirtualMachineName.isValidConsoleProxyName(vmName) || VirtualMachineName.isValidSecStorageVmName(vmName, null)) {
            network = Network.getByUuid(conn, _host.linkLocalNetwork);
        } else {
            network = nw0;
        }
        createVIF(conn, vm, guestMacAddr, networkRateMbps, "0", network);
        /* create VIF1 */
        /* For routing vm, set its network as link local bridge */
        if (VirtualMachineName.isValidRouterName(vmName) && privateIp.startsWith("169.254")) {
            network = Network.getByUuid(conn, _host.linkLocalNetwork);
        } else {
            network = Network.getByUuid(conn, _host.privateNetwork);
        }
        createVIF(conn, vm, privateMacAddr, networkRateMbps, "1", network);
        /* create VIF2 */
        // The all-FE MAC is a sentinel meaning "no public interface".
        if( !publicMacAddr.equalsIgnoreCase("FE:FF:FF:FF:FF:FF") ) {
            network = null;
            if ("untagged".equalsIgnoreCase(vlanId)) {
                network = Network.getByUuid(conn, _host.publicNetwork);
            } else {
                network = enableVlanNetwork(Long.valueOf(vlanId), _host.publicPif);
                if (network == null) {
                    throw new InternalErrorException("Failed to enable VLAN network with tag: " + vlanId);
                }
            }
            createVIF(conn, vm, publicMacAddr, networkRateMbps, "2", network);
        }
        /* set up PV dom argument */
        String pvargs = vm.getPVArgs(conn);
        pvargs = pvargs + bootArgs;
        if (s_logger.isInfoEnabled())
            s_logger.info("PV args for system vm are " + pvargs);
        vm.setPVArgs(conn, pvargs);
        /* destroy console */
        Set<Console> consoles = vm.getRecord(conn).consoles;
        for (Console console : consoles) {
            console.destroy(conn);
        }
        /* set action after crash as destroy */
        vm.setActionsAfterCrash(conn, Types.OnCrashBehaviour.DESTROY);
        vm.start(conn, false, true);
        if (_canBridgeFirewall) {
            String result = callHostPlugin("vmops", "default_network_rules_systemvm", "vmName", vmName);
            if (result == null || result.isEmpty() || !Boolean.parseBoolean(result)) {
                s_logger.warn("Failed to program default system vm network rules for " + vmName);
            } else {
                s_logger.info("Programmed default system vm network rules for " + vmName);
            }
        }
        if (s_logger.isInfoEnabled())
            s_logger.info("Ping system vm command port, " + privateIp + ":" + cmdPort);
        state = State.Running;
        String result = connect(vmName, privateIp, cmdPort);
        if (result != null) {
            // The thrown CloudRuntimeException is caught by the generic
            // Exception handler below, which runs the failure cleanup.
            String msg = "Can not ping System vm " + vmName + "due to:" + result;
            s_logger.warn(msg);
            throw new CloudRuntimeException(msg);
        } else {
            if (s_logger.isInfoEnabled())
                s_logger.info("Ping system vm command port succeeded for vm " + vmName);
        }
        return null;
    } catch (XenAPIException e) {
        String msg = "Exception caught while starting System vm " + vmName + " due to " + e.toString();
        s_logger.warn(msg, e);
        startvmfailhandle(vm, mounts);
        state = State.Stopped;
        return msg;
    } catch (Exception e) {
        String msg = "Exception caught while starting System vm " + vmName + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        startvmfailhandle(vm, mounts);
        state = State.Stopped;
        return msg;
    } finally {
        synchronized (_vms) {
            _vms.put(vmName, state);
        }
    }
}
// TODO : need to refactor it to reuse code with StartRouter
/**
 * Starts a console-proxy system VM on the private network, passing the proxy
 * identity, zone/pod and local gateway to the guest via kernel boot args.
 */
protected Answer execute(final StartConsoleProxyCommand cmd) {
    final String vmName = cmd.getVmName();
    final ConsoleProxyVO proxy = cmd.getProxy();
    try {
        Connection conn = getConnection();
        Network network = Network.getByUuid(conn, _host.privateNetwork);
        String bootArgs = cmd.getBootArgs();
        bootArgs += " zone=" + _dcId;
        bootArgs += " pod=" + _pod;
        bootArgs += " guid=Proxy." + proxy.getId();
        bootArgs += " proxy_vm=" + proxy.getId();
        bootArgs += " localgw=" + _localGateway;
        String result = startSystemVM(vmName, proxy.getVlanId(), network, cmd.getVolumes(), bootArgs, proxy.getGuestMacAddress(), proxy.getGuestIpAddress(), proxy
                .getPrivateMacAddress(), proxy.getPublicMacAddress(), cmd.getProxyCmdPort(), proxy.getRamSize(), cmd.getGuestOSDescription(), cmd.getNetworkRateMbps());
        if (result == null) {
            return new StartConsoleProxyAnswer(cmd);
        }
        return new StartConsoleProxyAnswer(cmd, result);
    } catch (Exception e) {
        String msg = "Exception caught while starting router vm " + vmName + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new StartConsoleProxyAnswer(cmd, msg);
    }
}
/**
 * Returns true when the given device slot is already occupied on the VM.
 * XenServer reports free slots via getAllowedVBDDevices; a device id absent
 * from that set is considered in use.
 *
 * @throws CloudRuntimeException when the free-device list cannot be queried
 */
protected boolean isDeviceUsed(VM vm, Long deviceId) {
    String msg = null;
    try {
        Connection conn = getConnection();
        Set<String> freeDevices = vm.getAllowedVBDDevices(conn);
        return !freeDevices.contains(deviceId.toString());
    } catch (XmlRpcException e) {
        msg = "Catch XmlRpcException due to: " + e.getMessage();
        s_logger.warn(msg, e);
    } catch (XenAPIException e) {
        msg = "Catch XenAPIException due to: " + e.toString();
        s_logger.warn(msg, e);
    }
    throw new CloudRuntimeException("When check deviceId " + msg);
}
/**
 * Picks a free VBD device slot on the VM, as reported by XenServer.
 *
 * @throws CloudRuntimeException when no slot is free or the query fails
 */
protected String getUnusedDeviceNum(VM vm) {
    try {
        Connection conn = getConnection();
        Set<String> freeDevices = vm.getAllowedVBDDevices(conn);
        if (freeDevices.isEmpty()) {
            throw new CloudRuntimeException("Could not find an available slot in VM with name: " + vm.getNameLabel(conn) + " to attach a new disk.");
        }
        return freeDevices.iterator().next();
    } catch (XmlRpcException e) {
        s_logger.warn("Catch XmlRpcException due to: " + e.getMessage(), e);
    } catch (XenAPIException e) {
        s_logger.warn("Catch XenAPIException due to: " + e.toString(), e);
    }
    throw new CloudRuntimeException("Could not find an available slot in VM with name to attach a new disk.");
}
/**
 * Invokes a XenServer host plugin with the default 300-second timeout.
 *
 * @param params alternating key/value argument pairs
 * @return the plugin output with newlines stripped, or null on failure
 */
protected String callHostPlugin(String plugin, String cmd, String... params) {
    // default time out is 300 s
    return callHostPluginWithTimeOut(plugin, cmd, 300, params);
}
/**
 * Invokes a XenServer host plugin on this host.
 *
 * NOTE(review): the {@code timeout} parameter is accepted but never
 * referenced in this body — the call runs with the connection's own timeout.
 * Confirm whether a per-call timeout was intended.
 *
 * @param params alternating key/value argument pairs
 * @return the plugin output with newlines stripped, or null when the call fails
 */
protected String callHostPluginWithTimeOut(String plugin, String cmd, int timeout, String... params) {
    Map<String, String> args = new HashMap<String, String>();
    try {
        Connection conn = getConnection();
        for (int i = 0; i < params.length; i += 2) {
            args.put(params[i], params[i + 1]);
        }
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("callHostPlugin executing for command " + cmd + " with " + getArgsString(args));
        }
        Host host = Host.getByUuid(conn, _host.uuid);
        String result = host.callPlugin(conn, plugin, cmd, args);
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("callHostPlugin Result: " + result);
        }
        return result.replace("\n", "");
    } catch (XenAPIException e) {
        s_logger.warn("callHostPlugin failed for cmd: " + cmd + " with args " + getArgsString(args) + " due to " + e.toString());
    } catch (XmlRpcException e) {
        s_logger.debug("callHostPlugin failed for cmd: " + cmd + " with args " + getArgsString(args) + " due to " + e.getMessage());
    }
    return null;
}
/**
 * Renders a plugin argument map as "key: value, key: value, " for log
 * output. Note the result keeps a trailing ", " separator.
 */
protected String getArgsString(Map<String, String> args) {
    StringBuilder rendered = new StringBuilder();
    for (Map.Entry<String, String> entry : args.entrySet()) {
        rendered.append(entry.getKey()).append(": ").append(entry.getValue()).append(", ");
    }
    return rendered.toString();
}
/**
 * Invokes the vmops "setIptables" host plugin.
 *
 * @return true when the plugin returned a non-empty result, false otherwise
 */
protected boolean setIptables() {
    String result = callHostPlugin("vmops", "setIptables");
    return result != null && !result.isEmpty();
}
/**
 * Looks up the network with the given name label and returns the Nic
 * (network/PIF pair) whose PIF lives on this host. If that PIF is the master
 * of a single bond and one of the bond slaves carries the management
 * interface, the management network is transferred off the slave first.
 * Returns null when the name is null, no local PIF is found, the network has
 * more than one bond, or the management transfer fails.
 */
protected Nic getLocalNetwork(Connection conn, String name) throws XmlRpcException, XenAPIException {
    if( name == null) {
        return null;
    }
    Set<Network> networks = Network.getByNameLabel(conn, name);
    for (Network network : networks) {
        Network.Record nr = network.getRecord(conn);
        for (PIF pif : nr.PIFs) {
            PIF.Record pr = pif.getRecord(conn);
            // Only the PIF attached to this host is relevant.
            if (_host.uuid.equals(pr.host.getUuid(conn))) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Found a network called " + name + " on host=" + _host.ip + "; Network=" + nr.uuid + "; pif=" + pr.uuid);
                }
                if (pr.bondMasterOf != null && pr.bondMasterOf.size() > 0) {
                    if (pr.bondMasterOf.size() > 1) {
                        String msg = new StringBuilder("Unsupported configuration. Network " + name + " has more than one bond. Network=").append(nr.uuid)
                                .append("; pif=").append(pr.uuid).toString();
                        s_logger.warn(msg);
                        return null;
                    }
                    Bond bond = pr.bondMasterOf.iterator().next();
                    Set<PIF> slaves = bond.getSlaves(conn);
                    for (PIF slave : slaves) {
                        PIF.Record spr = slave.getRecord(conn);
                        if (spr.management) {
                            // Move the management interface from the bond
                            // slave to the bond master before using it.
                            Host host = Host.getByUuid(conn, _host.uuid);
                            if (!transferManagementNetwork(conn, host, slave, spr, pif)) {
                                String msg = new StringBuilder("Unable to transfer management network. slave=" + spr.uuid + "; master=" + pr.uuid + "; host="
                                        + _host.uuid).toString();
                                s_logger.warn(msg);
                                return null;
                            }
                            break;
                        }
                    }
                }
                return new Nic(network, nr, pif, pr);
            }
        }
    }
    return null;
}
/**
 * Finds the router VIF attached to the network for the given VLAN: for
 * "untagged" this is the public network, otherwise the network named
 * "VLAN&lt;vlanId&gt;". Returns null when no matching VIF exists or the
 * query fails.
 */
protected VIF getCorrectVif(VM router, String vlanId) {
    try {
        Connection conn = getConnection();
        Set<VIF> routerVIFs = router.getVIFs(conn);
        for (VIF vif : routerVIFs) {
            Network vifNetwork = vif.getNetwork(conn);
            if (vlanId.equals("untagged")) {
                if (vifNetwork.getUuid(conn).equals(_host.publicNetwork)) {
                    return vif;
                }
            } else {
                if (vifNetwork.getNameLabel(conn).equals("VLAN" + vlanId)) {
                    return vif;
                }
            }
        }
    } catch (XmlRpcException e) {
        String msg = "Caught XmlRpcException: " + e.getMessage();
        s_logger.warn(msg, e);
    } catch (XenAPIException e) {
        String msg = "Caught XenAPIException: " + e.toString();
        s_logger.warn(msg, e);
    }
    return null;
}
/**
 * Returns the lowest free VIF device number on the VM, or null when no slot
 * is available, a device number cannot be parsed, or the query fails.
 *
 * Fix: an empty allowed-device set previously caused an unhandled
 * IndexOutOfBoundsException on {@code sortedDeviceNums.get(0)}; it now
 * returns null like the other failure paths.
 */
protected String getLowestAvailableVIFDeviceNum(VM vm) {
    try {
        Connection conn = getConnection();
        Set<String> availableDeviceNums = vm.getAllowedVIFDevices(conn);
        if (availableDeviceNums.isEmpty()) {
            s_logger.debug("No available VIF device numbers for VM: " + vm.getNameLabel(conn));
            return null;
        }
        int lowest = Integer.MAX_VALUE;
        for (String deviceNum : availableDeviceNums) {
            try {
                int candidate = Integer.parseInt(deviceNum);
                if (candidate < lowest) {
                    lowest = candidate;
                }
            } catch (NumberFormatException e) {
                s_logger.debug("Obtained an invalid value for an available VIF device number for VM: " + vm.getNameLabel(conn));
                return null;
            }
        }
        return String.valueOf(lowest);
    } catch (XmlRpcException e) {
        String msg = "Caught XmlRpcException: " + e.getMessage();
        s_logger.warn(msg, e);
    } catch (XenAPIException e) {
        String msg = "Caught XenAPIException: " + e.toString();
        s_logger.warn(msg, e);
    }
    return null;
}
/**
 * Resolves a volume to its VDI. The pool type and folder arguments are not
 * used here — the VDI is addressed purely by its uuid (volumePath).
 */
protected VDI mount(StoragePoolType pooltype, String volumeFolder, String volumePath) {
    return getVDIbyUuid(volumePath);
}
/**
 * Builds (SR, VDI, Volume) mount triples for the given volumes. The SR slot
 * is left null; each VDI is looked up by the volume's path (its uuid). The
 * ROOT volume is inserted at position 0 so callers can treat index 0 as the
 * boot disk.
 */
protected List<Ternary<SR, VDI, VolumeVO>> mount(List<VolumeVO> vos) {
    ArrayList<Ternary<SR, VDI, VolumeVO>> mounts = new ArrayList<Ternary<SR, VDI, VolumeVO>>(vos.size());
    for (VolumeVO vol : vos) {
        // Look up the VDI by the volume's uuid.
        VDI vdi = getVDIbyUuid(vol.getPath());
        Ternary<SR, VDI, VolumeVO> entry = new Ternary<SR, VDI, VolumeVO>(null, vdi, vol);
        if (vol.getVolumeType() == VolumeType.ROOT) {
            mounts.add(0, entry);
        } else {
            mounts.add(entry);
        }
    }
    return mounts;
}
/**
 * Resolves a network by its name label, or null when none exists. Name
 * labels are expected to be unique.
 */
protected Network getNetworkByName(String name) throws BadServerResponse, XenAPIException, XmlRpcException {
    Connection conn = getConnection();
    Set<Network> networks = Network.getByNameLabel(conn, name);
    if (networks.isEmpty()) {
        return null;
    }
    assert networks.size() == 1 : "How did we find more than one network with this name label" + name + "? Strange....";
    return networks.iterator().next(); // Found it.
}
/**
 * Finds a network by name label on the given connection, resolving duplicate
 * labels. With {@code lookForPif == false} the first duplicate is taken; with
 * {@code lookForPif == true} the scan keeps going over all duplicates, so the
 * last one that has a PIF on this host wins.
 *
 * @return the chosen network, or null when no network carries that label
 */
protected synchronized Network getNetworkByName(Connection conn, String name, boolean lookForPif) throws XenAPIException, XmlRpcException {
    Network found = null;
    Set<Network> networks = Network.getByNameLabel(conn, name);
    if (networks.size() == 1) {
        found = networks.iterator().next();
    } else if (networks.size() > 1) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Found more than one network with the name " + name);
        }
        for (Network network : networks) {
            if (!lookForPif) {
                // Caller does not care which duplicate we pick; take the first.
                found = network;
                break;
            }
            Network.Record netr = network.getRecord(conn);
            s_logger.debug("Checking network " + netr.uuid);
            if (netr.PIFs.size() == 0) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Network " + netr.uuid + " has no pifs so skipping that.");
                }
            } else {
                for (PIF pif : netr.PIFs) {
                    PIF.Record pifr = pif.getRecord(conn);
                    // Remember any duplicate that has a PIF on this host; only
                    // the inner loop breaks, so a later match can replace it.
                    if (_host.uuid.equals(pifr.host.getUuid(conn))) {
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug("Network " + netr.uuid + " has a pif " + pifr.uuid + " for our host ");
                        }
                        found = network;
                        break;
                    }
                }
            }
        }
    }
    return found;
}
/**
 * Ensures a network named VLAN&lt;tag&gt; exists and is associated, via a VLAN
 * object, with the PIF identified by pifUuid. Reuses an existing VLAN setup on
 * the same host/device when present; refuses to re-create the VLAN on a
 * different device of the same host.
 *
 * @return the VLAN network
 * @throws CloudRuntimeException when the VLAN already exists on another device
 */
protected Network enableVlanNetwork(long tag, String pifUuid) throws XenAPIException, XmlRpcException {
    // In XenServer, vlan is added by
    // 1. creating a network.
    // 2. creating a vlan associating network with the pif.
    // We always create
    // 1. a network with VLAN[vlan id in decimal]
    // 2. a vlan associating the network created with the pif to private
    // network.
    Connection conn = getConnection();
    Network vlanNetwork = null;
    String name = "VLAN" + Long.toString(tag);
    // Serialize concurrent creation of the same VLAN network via the interned
    // name string. NOTE(review): this is only effective within this JVM.
    synchronized (name.intern()) {
        vlanNetwork = getNetworkByName(name);
        if (vlanNetwork == null) { // Can't find it, then create it.
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Creating VLAN network for " + tag + " on host " + _host.ip);
            }
            Network.Record nwr = new Network.Record();
            nwr.nameLabel = name;
            nwr.bridge = name;
            vlanNetwork = Network.create(conn, nwr);
        }
        PIF nPif = PIF.getByUuid(conn, pifUuid);
        PIF.Record nPifr = nPif.getRecord(conn);
        Network.Record vlanNetworkr = vlanNetwork.getRecord(conn);
        // Check whether the network already has a PIF on the same host.
        if (vlanNetworkr.PIFs != null) {
            for (PIF pif : vlanNetworkr.PIFs) {
                PIF.Record pifr = pif.getRecord(conn);
                if(pifr.host.equals(nPifr.host)) {
                    if (pifr.device.equals(nPifr.device) ) {
                        // Already set up on this device; just make sure it is plugged.
                        pif.plug(conn);
                        return vlanNetwork;
                    } else {
                        throw new CloudRuntimeException("Creating VLAN " + tag + " on " + nPifr.device + " failed due to this VLAN is already created on " + pifr.device);
                    }
                }
            }
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Creating VLAN " + tag + " on host " + _host.ip + " on device " + nPifr.device);
        }
        VLAN vlan = VLAN.create(conn, nPif, tag, vlanNetwork);
        // Plug the untagged PIF created for this VLAN if it is not attached yet.
        PIF untaggedPif = vlan.getUntaggedPIF(conn);
        if (!untaggedPif.getCurrentlyAttached(conn)) {
            untaggedPif.plug(conn);
        }
    }
    return vlanNetwork;
}
/**
 * Ensures a network named VLAN&lt;tag&gt; exists and has a VLAN association to the
 * PIF identified by pifUuid, creating both when missing. Unlike the
 * single-argument overload, this variant does not plug PIFs and does not
 * reject a VLAN existing on a different device.
 *
 * NOTE(review): the {@code network} parameter is never read in this method —
 * confirm against callers whether it can be dropped.
 *
 * @return the VLAN network
 */
protected Network enableVlanNetwork(Connection conn, long tag, Network network, String pifUuid) throws XenAPIException, XmlRpcException {
    // In XenServer, vlan is added by
    // 1. creating a network.
    // 2. creating a vlan associating network with the pif.
    // We always create
    // 1. a network with VLAN[vlan id in decimal]
    // 2. a vlan associating the network created with the pif to private
    // network.
    Network vlanNetwork = null;
    String name = "VLAN" + Long.toString(tag);
    vlanNetwork = getNetworkByName(conn, name, true);
    if (vlanNetwork == null) { // Can't find it, then create it.
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Creating VLAN network for " + tag + " on host " + _host.ip);
        }
        Network.Record nwr = new Network.Record();
        nwr.nameLabel = name;
        nwr.bridge = name;
        vlanNetwork = Network.create(conn, nwr);
    }
    PIF nPif = PIF.getByUuid(conn, pifUuid);
    PIF.Record nPifr = nPif.getRecord(conn);
    Network.Record vlanNetworkr = vlanNetwork.getRecord(conn);
    // If the VLAN is already associated on the same host and device, reuse it.
    if (vlanNetworkr.PIFs != null) {
        for (PIF pif : vlanNetworkr.PIFs) {
            PIF.Record pifr = pif.getRecord(conn);
            if (pifr.device.equals(nPifr.device) && pifr.host.equals(nPifr.host)) {
                return vlanNetwork;
            }
        }
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Creating VLAN " + tag + " on host " + _host.ip + " on device " + nPifr.device);
    }
    VLAN vlan = VLAN.create(conn, nPif, tag, vlanNetwork);
    VLAN.Record vlanr = vlan.getRecord(conn);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("VLAN is created for " + tag + ". The uuid is " + vlanr.uuid);
    }
    return vlanNetwork;
}
/**
 * Unplugs all of a VLAN network's PIFs, but only when no VIFs reference the
 * network anymore. Failures are logged and swallowed (best effort).
 */
protected void disableVlanNetwork(Network network) throws InternalErrorException {
    try {
        Connection connection = getConnection();
        // Only tear down the PIFs once the network has no remaining VIFs.
        if (network.getVIFs(connection).isEmpty()) {
            for (PIF pif : network.getPIFs(connection)) {
                pif.unplug(connection);
            }
        }
    } catch (XenAPIException e) {
        String msg = "Unable to disable VLAN network due to " + e.toString();
        s_logger.warn(msg, e);
    } catch (Exception e) {
        String msg = "Unable to disable VLAN network due to " + e.getMessage();
        s_logger.warn(msg, e);
    }
}
/**
 * Finds the LVM SR attached to this host, plugging its PBD if it is not
 * currently attached and rescanning the SR before returning it.
 *
 * @return the local LVM SR, or null when none is found or an error occurs
 */
protected SR getLocalLVMSR() {
    Connection conn = getConnection();
    try {
        Map<SR, SR.Record> map = SR.getAllRecords(conn);
        for (Map.Entry<SR, SR.Record> entry : map.entrySet()) {
            SR.Record srRec = entry.getValue();
            if (SRType.LVM.equals(srRec.type)) {
                Set<PBD> pbds = srRec.PBDs;
                if (pbds == null) {
                    continue;
                }
                for (PBD pbd : pbds) {
                    Host host = pbd.getHost(conn);
                    // Only accept the SR whose PBD belongs to this host.
                    if (!isRefNull(host) && host.getUuid(conn).equals(_host.uuid)) {
                        if (!pbd.getCurrentlyAttached(conn)) {
                            pbd.plug(conn);
                        }
                        SR sr = entry.getKey();
                        sr.scan(conn);
                        return sr;
                    }
                }
            }
        }
    } catch (XenAPIException e) {
        String msg = "Unable to get local LVMSR in host:" + _host.uuid + e.toString();
        s_logger.warn(msg);
    } catch (XmlRpcException e) {
        String msg = "Unable to get local LVMSR in host:" + _host.uuid + e.getCause();
        s_logger.warn(msg);
    }
    return null;
}
/**
 * Builds the StartupStorageCommand describing this host's local LVM SR,
 * renaming the SR to its uuid and giving it a descriptive label on the way.
 *
 * @return the populated command, or null when there is no local LVM SR, its
 *         capacity is negative, or an error occurs
 */
protected StartupStorageCommand initializeLocalSR() {
    SR lvmsr = getLocalLVMSR();
    if (lvmsr == null) {
        return null;
    }
    try {
        Connection conn = getConnection();
        String lvmuuid = lvmsr.getUuid(conn);
        long cap = lvmsr.getPhysicalSize(conn);
        if (cap < 0)
            return null;
        long avail = cap - lvmsr.getPhysicalUtilisation(conn);
        // Rename the SR to its own uuid so it is identifiable later.
        lvmsr.setNameLabel(conn, lvmuuid);
        String name = "VMOps local storage pool in host : " + _host.uuid;
        lvmsr.setNameDescription(conn, name);
        Host host = Host.getByUuid(conn, _host.uuid);
        String address = host.getAddress(conn);
        StoragePoolInfo pInfo = new StoragePoolInfo(name, lvmuuid, address, SRType.LVM.toString(), SRType.LVM.toString(), StoragePoolType.LVM, cap, avail);
        StartupStorageCommand cmd = new StartupStorageCommand();
        cmd.setPoolInfo(pInfo);
        cmd.setGuid(_host.uuid);
        cmd.setResourceType(Storage.StorageResourceType.STORAGE_POOL);
        return cmd;
    } catch (XenAPIException e) {
        String msg = "build startupstoragecommand err in host:" + _host.uuid + e.toString();
        s_logger.warn(msg);
    } catch (XmlRpcException e) {
        String msg = "build startupstoragecommand err in host:" + _host.uuid + e.getMessage();
        s_logger.warn(msg);
    }
    return null;
}
/**
 * Answers the management server's periodic ping. Verifies the XenServer is
 * reachable (one retry after a 1s pause), syncs VM states, and — when bridge
 * firewalling is enabled — also reports network-group rule state.
 *
 * @return a PingRoutingCommand (with network-group data when applicable), or
 *         null when the host is unreachable or an error occurs
 */
@Override
public PingCommand getCurrentStatus(long id) {
    try {
        if (!pingxenserver()) {
            // Give the host a second chance before declaring it unreachable.
            Thread.sleep(1000);
            if (!pingxenserver()) {
                s_logger.warn(" can not ping xenserver " + _host.uuid);
                return null;
            }
        }
        HashMap<String, State> newStates = sync();
        if (newStates == null) {
            newStates = new HashMap<String, State>();
        }
        if (!_canBridgeFirewall) {
            return new PingRoutingCommand(getType(), id, newStates);
        } else {
            HashMap<String, Pair<Long, Long>> nwGrpStates = syncNetworkGroups(id);
            return new PingRoutingWithNwGroupsCommand(getType(), id, newStates, nwGrpStates);
        }
    } catch (Exception e) {
        s_logger.warn("Unable to get current status", e);
        return null;
    }
}
/**
 * Collects per-VM network-group rule state from the vmops host plugin.
 * Each semicolon-separated record is a comma-separated 6-tuple; entries whose
 * vmID/seqno do not parse are recorded with the (-1, -1) sentinel pair.
 */
private HashMap<String, Pair<Long,Long>> syncNetworkGroups(long id) {
    HashMap<String, Pair<Long,Long>> vmStates = new HashMap<String, Pair<Long,Long>>();
    String pluginOutput = callHostPlugin("vmops", "get_rule_logs_for_vms", "host_uuid", _host.uuid);
    s_logger.trace("syncNetworkGroups: id=" + id + " got: " + pluginOutput);
    String[] ruleLogs;
    if (pluginOutput != null) {
        ruleLogs = pluginOutput.split(";");
    } else {
        ruleLogs = new String[0];
    }
    //output = ','.join([vmName, vmID, vmIP, domID, signature, seqno])
    for (String vmRecord : ruleLogs) {
        String[] fields = vmRecord.split(",");
        if (fields.length != 6) {
            continue; // malformed record
        }
        try {
            vmStates.put(fields[0], new Pair<Long,Long>(Long.parseLong(fields[1]), Long.parseLong(fields[5])));
        } catch (NumberFormatException nfe) {
            vmStates.put(fields[0], new Pair<Long,Long>(-1L, -1L));
        }
    }
    return vmStates;
}
/** This resource always represents a routing (computing) host. */
@Override
public Type getType() {
    return com.cloud.host.Host.Type.Routing;
}
/**
 * Locates the XenServer paravirtual tools ISO ("xs-tools.iso") and, when
 * found, reports it on the startup command as the "xs-tools" template.
 * Lookup failures are logged at debug level and ignored.
 */
protected void getPVISO(StartupStorageCommand sscmd) {
    Connection connection = getConnection();
    try {
        Set<VDI> isoVdis = VDI.getByNameLabel(connection, "xs-tools.iso");
        if (isoVdis.isEmpty()) {
            return;
        }
        VDI toolsIso = isoVdis.iterator().next();
        TemplateInfo tmplt = new TemplateInfo("xs-tools.iso", toolsIso.getUuid(connection), toolsIso.getVirtualSize(connection), true);
        Map<String, TemplateInfo> templates = new HashMap<String, TemplateInfo>();
        templates.put("xs-tools", tmplt);
        sscmd.setTemplateInfo(templates);
    } catch (XenAPIException e) {
        s_logger.debug("Can't get xs-tools.iso: " + e.toString());
    } catch (XmlRpcException e) {
        s_logger.debug("Can't get xs-tools.iso: " + e.toString());
    }
}
/**
 * Whether this resource can enforce security-group (firewall) rules on the
 * bridge. The base implementation says no; subclasses may override.
 */
protected boolean can_bridge_firewall() {
    return false;
}
/**
 * Gathers host capabilities and network layout and caches them in _host:
 * CPU count/speed, the system-VM ISO uuid, the local gateway, and the
 * private/guest/public/storage networks (each falling back to the private
 * network when its configured name is absent or identical).
 *
 * @return true when everything required was resolved; false on a recoverable
 *         failure (logged)
 * @throws IllegalArgumentException when a configured guest or public network
 *         name cannot be found on this host
 */
protected boolean getHostInfo() throws IllegalArgumentException{
    Connection conn = getConnection();
    try {
        // CPU count; speed taken from the first CPU only.
        Host myself = Host.getByUuid(conn, _host.uuid);
        Set<HostCpu> hcs = myself.getHostCPUs(conn);
        _host.cpus = hcs.size();
        for (final HostCpu hc : hcs) {
            _host.speed = hc.getSpeed(conn).intValue();
            break;
        }
        // Find the system-VM ISO on the "XenServer Tools" SR, preferring the
        // premium variant over the plain one.
        Set<SR> srs = SR.getByNameLabel(conn, "XenServer Tools");
        if( srs.size() != 1 ) {
            throw new CloudRuntimeException("There are " + srs.size() + " SRs with name XenServer Tools");
        }
        SR sr = srs.iterator().next();
        sr.scan(conn);
        SR.Record srr = sr.getRecord(conn);
        _host.systemvmisouuid = null;
        for( VDI vdi : srr.VDIs ) {
            VDI.Record vdir = vdi.getRecord(conn);
            if(vdir.nameLabel.contains("systemvm-premium")){
                _host.systemvmisouuid = vdir.uuid;
                break;
            }
        }
        if( _host.systemvmisouuid == null ) {
            for( VDI vdi : srr.VDIs ) {
                VDI.Record vdir = vdi.getRecord(conn);
                if(vdir.nameLabel.contains("systemvm")){
                    _host.systemvmisouuid = vdir.uuid;
                    break;
                }
            }
        }
        if( _host.systemvmisouuid == null ) {
            throw new CloudRuntimeException("can not find systemvmiso");
        }
        // Private (management) network; default name unless configured.
        String name = "cloud-private";
        if (_privateNetworkName != null) {
            name = _privateNetworkName;
        }
        _localGateway = callHostPlugin("vmops", "getgateway", "mgmtIP", myself.getAddress(conn));
        if (_localGateway == null || _localGateway.isEmpty()) {
            s_logger.warn("can not get gateway for host :" + _host.uuid);
            return false;
        }
        _canBridgeFirewall = can_bridge_firewall();
        Nic privateNic = getLocalNetwork(conn, name);
        if (privateNic == null) {
            // Fall back: ask the host plugin which network carries the mgmt IP.
            s_logger.debug("Unable to find any private network. Trying to determine that by route for host " + _host.ip);
            name = callHostPlugin("vmops", "getnetwork", "mgmtIP", myself.getAddress(conn));
            if (name == null || name.isEmpty()) {
                s_logger.warn("Unable to determine the private network for host " + _host.ip);
                return false;
            }
            _privateNetworkName = name;
            privateNic = getLocalNetwork(conn, name);
            if (privateNic == null) {
                s_logger.warn("Unable to get private network " + name);
                return false;
            }
        } else {
            _privateNetworkName = name;
        }
        _host.privatePif = privateNic.pr.uuid;
        _host.privateNetwork = privateNic.nr.uuid;
        // Guest network: defaults to the private network when unset/identical.
        Nic guestNic = null;
        if (_guestNetworkName != null && !_guestNetworkName.equals(_privateNetworkName)) {
            guestNic = getLocalNetwork(conn, _guestNetworkName);
            if (guestNic == null) {
                s_logger.warn("Unable to find guest network " + _guestNetworkName);
                throw new IllegalArgumentException("Unable to find guest network " + _guestNetworkName + " for host " + _host.ip);
            }
        } else {
            guestNic = privateNic;
            _guestNetworkName = _privateNetworkName;
        }
        _host.guestNetwork = guestNic.nr.uuid;
        _host.guestPif = guestNic.pr.uuid;
        // Public network: defaults to the guest network when unset/identical.
        Nic publicNic = null;
        if (_publicNetworkName != null && !_publicNetworkName.equals(_guestNetworkName)) {
            publicNic = getLocalNetwork(conn, _publicNetworkName);
            if (publicNic == null) {
                s_logger.warn("Unable to find public network " + _publicNetworkName + " for host " + _host.ip);
                throw new IllegalArgumentException("Unable to find public network " + _publicNetworkName + " for host " + _host.ip);
            }
        } else {
            publicNic = guestNic;
            _publicNetworkName = _guestNetworkName;
        }
        _host.publicPif = publicNic.pr.uuid;
        _host.publicNetwork = publicNic.nr.uuid;
        // Storage networks: each falls back to the private network when absent.
        Nic storageNic1 = getLocalNetwork(conn, _storageNetworkName1);
        if (storageNic1 == null) {
            storageNic1 = privateNic;
            _storageNetworkName1 = _privateNetworkName;
        }
        _host.storageNetwork1 = storageNic1.nr.uuid;
        _host.storagePif1 = storageNic1.pr.uuid;
        Nic storageNic2 = getLocalNetwork(conn, _storageNetworkName2);
        if (storageNic2 == null) {
            storageNic2 = privateNic;
            _storageNetworkName2 = _privateNetworkName;
        }
        _host.storageNetwork2 = storageNic2.nr.uuid;
        _host.storagePif2 = storageNic2.pr.uuid;
        s_logger.info("Private Network is " + _privateNetworkName + " for host " + _host.ip);
        s_logger.info("Guest Network is " + _guestNetworkName + " for host " + _host.ip);
        s_logger.info("Public Network is " + _publicNetworkName + " for host " + _host.ip);
        s_logger.info("Storage Network 1 is " + _storageNetworkName1 + " for host " + _host.ip);
        s_logger.info("Storage Network 2 is " + _storageNetworkName2 + " for host " + _host.ip);
        return true;
    } catch (XenAPIException e) {
        s_logger.warn("Unable to get host information for " + _host.ip, e);
        return false;
    } catch (XmlRpcException e) {
        s_logger.warn("Unable to get host information for " + _host.ip, e);
        return false;
    }
}
/**
 * Sets up the link-local network used by system VMs: creates (or reuses) the
 * network, ensures dom0 has a plugged VIF on it (tagged via other-config
 * "nameLabel" = "link_local_network_vif"), assigns the bridge its link-local
 * IP through the vmops plugin, and records the network uuid in _host.
 * Failures are logged and swallowed.
 */
private void setupLinkLocalNetwork() {
    try {
        Network.Record rec = new Network.Record();
        Connection conn = getConnection();
        Set<Network> networks = Network.getByNameLabel(conn, _linkLocalPrivateNetworkName);
        Network linkLocal = null;
        if (networks.size() == 0) {
            // Create the network with the link-local address range in
            // other-config so the DHCP range is known.
            rec.nameDescription = "link local network used by system vms";
            rec.nameLabel = _linkLocalPrivateNetworkName;
            Map<String, String> configs = new HashMap<String, String>();
            configs.put("ip_begin", NetUtils.getLinkLocalGateway());
            configs.put("ip_end", NetUtils.getLinkLocalIpEnd());
            configs.put("netmask", NetUtils.getLinkLocalNetMask());
            rec.otherConfig = configs;
            linkLocal = Network.create(conn, rec);
        } else {
            linkLocal = networks.iterator().next();
        }
        /* Make sure there is a physical bridge on this network */
        VIF dom0vif = null;
        Pair<VM, VM.Record> vm = getControlDomain(conn);
        VM dom0 = vm.first();
        // Look for an existing dom0 VIF tagged as the link-local VIF.
        Set<VIF> vifs = dom0.getVIFs(conn);
        if (vifs.size() != 0) {
            for (VIF vif : vifs) {
                Map<String, String> otherConfig = vif.getOtherConfig(conn);
                if (otherConfig != null) {
                    String nameLabel = otherConfig.get("nameLabel");
                    if ((nameLabel != null) && nameLabel.equalsIgnoreCase("link_local_network_vif")) {
                        dom0vif = vif;
                    }
                }
            }
        }
        /* create temp VIF0 */
        if (dom0vif == null) {
            s_logger.debug("Can't find a vif on dom0 for link local, creating a new one");
            VIF.Record vifr = new VIF.Record();
            vifr.VM = dom0;
            vifr.device = getLowestAvailableVIFDeviceNum(dom0);
            if (vifr.device == null) {
                s_logger.debug("Failed to create link local network, no vif available");
                return;
            }
            Map<String, String> config = new HashMap<String, String>();
            config.put("nameLabel", "link_local_network_vif");
            vifr.otherConfig = config;
            vifr.MAC = "FE:FF:FF:FF:FF:FF";
            vifr.network = linkLocal;
            dom0vif = VIF.create(conn, vifr);
            dom0vif.plug(conn);
        } else {
            s_logger.debug("already have a vif on dom0 for link local network");
            if (!dom0vif.getCurrentlyAttached(conn)) {
                dom0vif.plug(conn);
            }
        }
        // Assign the link-local IP to the bridge and remember the network.
        String brName = linkLocal.getBridge(conn);
        callHostPlugin("vmops", "setLinkLocalIP", "brName", brName);
        _host.linkLocalNetwork = linkLocal.getUuid(conn);
    } catch (XenAPIException e) {
        s_logger.warn("Unable to create local link network", e);
    } catch (XmlRpcException e) {
        s_logger.warn("Unable to create local link network", e);
    }
}
/**
 * Moves the management interface from the src PIF to the dest PIF: copies the
 * IP configuration over, tells XenServer to reconfigure management onto dest,
 * then polls (up to 10 attempts, 10s apart) for the host to come back before
 * clearing the IP configuration on src.
 *
 * @return true when the host came back and src was cleared; false on timeout
 *         or interruption
 */
protected boolean transferManagementNetwork(Connection conn, Host host, PIF src, PIF.Record spr, PIF dest) throws XmlRpcException, XenAPIException {
    dest.reconfigureIp(conn, spr.ipConfigurationMode, spr.IP, spr.netmask, spr.gateway, spr.DNS);
    Host.managementReconfigure(conn, dest);
    String hostUuid = null;
    int count = 0;
    while (count < 10) {
        // FIX: count was never incremented, so the loop could spin forever
        // when the host never came back (every attempt throwing).
        count++;
        try {
            Thread.sleep(10000);
            hostUuid = host.getUuid(conn);
            if (hostUuid != null) {
                break;
            }
        } catch (XmlRpcException e) {
            s_logger.debug("Waiting for host to come back: " + e.getMessage());
        } catch (XenAPIException e) {
            s_logger.debug("Waiting for host to come back: " + e.getMessage());
        } catch (InterruptedException e) {
            // FIX: restore the interrupt status so callers can observe it.
            Thread.currentThread().interrupt();
            s_logger.debug("Gotta run");
            return false;
        }
    }
    if (hostUuid == null) {
        s_logger.warn("Unable to transfer the management network from " + spr.uuid);
        return false;
    }
    src.reconfigureIp(conn, IpConfigurationMode.NONE, null, null, null, null);
    return true;
}
/**
 * Startup handshake with the management server: re-establishes the host
 * connection, pushes setup scripts, gathers host info, removes stopped VMs
 * and stale disk mounts, syncs VM state, and reports a routing startup
 * command plus — when a local SR exists — a storage startup command.
 *
 * @return the startup commands, or null when host info cannot be obtained
 */
@Override
public StartupCommand[] initialize() throws IllegalArgumentException{
    disconnected();
    setupServer();
    if (!getHostInfo()) {
        s_logger.warn("Unable to get host information for " + _host.ip);
        return null;
    }
    destroyStoppedVm();
    StartupRoutingCommand cmd = new StartupRoutingCommand();
    fillHostInfo(cmd);
    cleanupDiskMounts();
    // Rebuild the VM state map atomically.
    Map<String, State> changes = null;
    synchronized (_vms) {
        _vms.clear();
        changes = sync();
    }
    cmd.setHypervisorType(HypervisorType.XenServer);
    cmd.setChanges(changes);
    cmd.setCluster(_cluster);
    StartupStorageCommand sscmd = initializeLocalSR();
    if (sscmd != null) {
        /* report pv driver iso */
        getPVISO(sscmd);
        return new StartupCommand[] { cmd, sscmd };
    }
    return new StartupCommand[] { cmd };
}
/**
 * Returns the uuid of the (single) XenServer pool this host belongs to.
 *
 * @throws CloudRuntimeException when the pool records cannot be fetched
 */
protected String getPoolUuid() {
    Connection connection = getConnection();
    try {
        Map<Pool, Pool.Record> poolRecords = Pool.getAllRecords(connection);
        assert (poolRecords.size() == 1) : "Tell me how pool size can be " + poolRecords.size();
        return poolRecords.values().iterator().next().uuid;
    } catch (XenAPIException e) {
        throw new CloudRuntimeException("Unable to get pool ", e);
    } catch (XmlRpcException e) {
        throw new CloudRuntimeException("Unable to get pool ", e);
    }
}
/**
 * Prepares the host for use: enables it, pushes the patch-file payload listed
 * in the patch properties file(s) over SCP, applies iptables setup, and tags
 * the host with the current implementation version so the work is skipped on
 * subsequent connects with the same version.
 *
 * @throws CloudRuntimeException when authentication, file transfer, or the
 *         XenAPI calls fail
 */
protected void setupServer() {
    Connection conn = getConnection();
    String version = CitrixResourceBase.class.getPackage().getImplementationVersion();
    try {
        Host host = Host.getByUuid(conn, _host.uuid);
        /* enable host in case it is disabled somehow */
        host.enable(conn);
        /* push patches to XenServer */
        Host.Record hr = host.getRecord(conn);
        Iterator<String> it = hr.tags.iterator();
        while (it.hasNext()) {
            String tag = it.next();
            if (tag.startsWith("vmops-version-")) {
                if (tag.contains(version)) {
                    // Already set up with this exact version; nothing to do.
                    s_logger.info(logX(host, "Host " + hr.address + " is already setup."));
                    return;
                } else {
                    // Stale version tag: drop it and re-push the files below.
                    it.remove();
                }
            }
        }
        com.trilead.ssh2.Connection sshConnection = new com.trilead.ssh2.Connection(hr.address, 22);
        try {
            sshConnection.connect(null, 60000, 60000);
            if (!sshConnection.authenticateWithPassword(_username, _password)) {
                throw new CloudRuntimeException("Unable to authenticate");
            }
            SCPClient scp = new SCPClient(sshConnection);
            String path = _patchPath.substring(0, _patchPath.lastIndexOf(File.separator) + 1);
            List<File> files = getPatchFiles();
            if( files == null || files.isEmpty() ) {
                throw new CloudRuntimeException("Can not find patch file");
            }
            for( File file :files) {
                Properties props = new Properties();
                // FIX: the FileInputStream was never closed, leaking a file
                // handle on every setup pass.
                FileInputStream in = new FileInputStream(file);
                try {
                    props.load(in);
                } finally {
                    in.close();
                }
                for (Map.Entry<Object, Object> entry : props.entrySet()) {
                    // Each property is: filename = [srcdir,][perm,]destdir
                    String k = (String) entry.getKey();
                    String v = (String) entry.getValue();
                    assert (k != null && k.length() > 0 && v != null && v.length() > 0) : "Problems with " + k + "=" + v;
                    String[] tokens = v.split(",");
                    String f = null;
                    if (tokens.length == 3 && tokens[0].length() > 0) {
                        if (tokens[0].startsWith("/")) {
                            f = tokens[0];
                        } else if (tokens[0].startsWith("~")) {
                            String homedir = System.getenv("HOME");
                            f = homedir + tokens[0].substring(1) + k;
                        } else {
                            f = path + tokens[0] + '/' + k;
                        }
                    } else {
                        f = path + k;
                    }
                    String d = tokens[tokens.length - 1];
                    f = f.replace('/', File.separatorChar);
                    String p = "0755";
                    if (tokens.length == 3) {
                        p = tokens[1];
                    } else if (tokens.length == 2) {
                        p = tokens[0];
                    }
                    if (!new File(f).exists()) {
                        s_logger.warn("We cannot locate " + f);
                        continue;
                    }
                    if (s_logger.isDebugEnabled()) {
                        s_logger.debug("Copying " + f + " to " + d + " on " + hr.address + " with permission " + p);
                    }
                    scp.put(f, d, p);
                }
            }
        } catch (IOException e) {
            throw new CloudRuntimeException("Unable to setup the server correctly", e);
        } finally {
            sshConnection.close();
        }
        if (!setIptables()) {
            s_logger.warn("set xenserver Iptable failed");
        }
        // Mark the host so the next connect with this version is a no-op.
        hr.tags.add("vmops-version-" + version);
        host.setTags(conn, hr.tags);
    } catch (XenAPIException e) {
        String msg = "Xen setup failed due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException("Unable to get host information " + e.toString(), e);
    } catch (XmlRpcException e) {
        String msg = "Xen setup failed due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException("Unable to get host information ", e);
    }
}
/**
 * Returns the patch properties file(s) to process during server setup.
 * The base implementation wraps the single configured patch path.
 */
protected List<File> getPatchFiles() {
    List<File> patchFiles = new ArrayList<File>(1);
    patchFiles.add(new File(_patchPath));
    return patchFiles;
}
/**
 * Finds an SR by name label that has a PBD on this host, plugging that PBD if
 * it is not attached. All SRs with the label are scanned; when several match,
 * the last one wins (the scan does not stop early, mirroring original
 * behavior).
 *
 * @return the matching SR, or null when none has a PBD on this host
 */
protected SR getSRByNameLabelandHost(String name) throws BadServerResponse, XenAPIException, XmlRpcException {
    Connection connection = getConnection();
    SR result = null;
    for (SR candidate : SR.getByNameLabel(connection, name)) {
        for (PBD pbd : candidate.getPBDs(connection)) {
            PBD.Record pbdRec = pbd.getRecord(connection);
            if (pbdRec.host == null || !pbdRec.host.getUuid(connection).equals(_host.uuid)) {
                continue;
            }
            if (!pbdRec.currentlyAttached) {
                pbd.plug(connection);
            }
            result = candidate;
            break; // done with this SR's PBDs; keep scanning remaining SRs
        }
    }
    return result;
}
/**
 * Reports capacity and utilisation for the SR named by the command's storage
 * id, rescanning it first. Exactly one SR must carry that label.
 */
protected GetStorageStatsAnswer execute(final GetStorageStatsCommand cmd) {
    try {
        Connection connection = getConnection();
        Set<SR> matched = SR.getByNameLabel(connection, cmd.getStorageId());
        if (matched.size() != 1) {
            String msg = "There are " + matched.size() + " storageid: " + cmd.getStorageId();
            s_logger.warn(msg);
            return new GetStorageStatsAnswer(cmd, msg);
        }
        SR storage = matched.iterator().next();
        storage.scan(connection);
        // Physical size first, utilisation second (same order as before).
        return new GetStorageStatsAnswer(cmd, storage.getPhysicalSize(connection), storage.getPhysicalUtilisation(connection));
    } catch (XenAPIException e) {
        String msg = "GetStorageStats Exception:" + e.toString() + "host:" + _host.uuid + "storageid: " + cmd.getStorageId();
        s_logger.warn(msg);
        return new GetStorageStatsAnswer(cmd, msg);
    } catch (XmlRpcException e) {
        String msg = "GetStorageStats Exception:" + e.getMessage() + "host:" + _host.uuid + "storageid: " + cmd.getStorageId();
        s_logger.warn(msg);
        return new GetStorageStatsAnswer(cmd, msg);
    }
}
/**
 * Best-effort plug of a PBD; a failure (including failures while resolving
 * the PBD's uuid or host address for the message) is logged and swallowed.
 */
private void pbdPlug(Connection conn, PBD pbd) {
    String uuid = "";
    String address = "";
    try {
        uuid = pbd.getUuid(conn);
        address = pbd.getHost(conn).getAddress(conn);
        pbd.plug(conn);
    } catch (Exception e) {
        String msg = "PBD " + uuid + " is not attached! and PBD plug failed due to "
                + e.toString() + ". Please check this PBD in host : " + address;
        s_logger.warn(msg, e);
    }
}
/**
 * Verifies that the SR is usable: it must have PBDs, and every relevant PBD
 * must be attached. For a shared SR, every host in the pool must have an
 * attached PBD; missing ones are created by cloning an existing PBD record.
 *
 * @return true when all required PBDs are (now) attached; false on failure
 */
protected boolean checkSR(SR sr) {
    try {
        Connection conn = getConnection();
        SR.Record srr = sr.getRecord(conn);
        Set<PBD> pbds = sr.getPBDs(conn);
        if (pbds.size() == 0) {
            String msg = "There is no PBDs for this SR: " + srr.nameLabel + " on host:" + _host.uuid;
            s_logger.warn(msg);
            return false;
        }
        Set<Host> hosts = null;
        if (srr.shared) {
            hosts = Host.getAll(conn);
            for (Host host : hosts) {
                boolean found = false;
                for (PBD pbd : pbds) {
                    if (host.equals(pbd.getHost(conn))) {
                        PBD.Record pbdr = pbd.getRecord(conn);
                        if (!pbdr.currentlyAttached) {
                            pbdPlug(conn, pbd);
                        }
                        // Shrink the candidate set for later hosts; removing
                        // inside the loop is safe only because we break
                        // immediately afterwards.
                        pbds.remove(pbd);
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    // This host has no PBD yet: clone an existing record,
                    // retarget it, and create + plug a new PBD.
                    PBD.Record pbdr = srr.PBDs.iterator().next().getRecord(conn);
                    pbdr.host = host;
                    pbdr.uuid = "";
                    PBD pbd = PBD.create(conn, pbdr);
                    pbdPlug(conn, pbd);
                }
            }
        } else {
            // Non-shared SR: just make sure each of its PBDs is attached.
            for (PBD pbd : pbds) {
                PBD.Record pbdr = pbd.getRecord(conn);
                if (!pbdr.currentlyAttached) {
                    pbdPlug(conn, pbd);
                }
            }
        }
    } catch (Exception e) {
        String msg = "checkSR failed host:" + _host.uuid + " due to " + e.toString();
        s_logger.warn(msg);
        return false;
    }
    return true;
}
/**
 * Attaches/validates the storage pool on this host and reports its capacity
 * and available space.
 */
protected Answer execute(ModifyStoragePoolCommand cmd) {
    StoragePoolVO pool = cmd.getPool();
    StorageFilerTO poolTO = new StorageFilerTO(pool);
    try {
        Connection conn = getConnection();
        SR sr = getStorageRepository(conn, poolTO);
        long capacity = sr.getPhysicalSize(conn);
        // FIX: validate capacity BEFORE deriving available space from it;
        // previously `available` was computed from the bogus -1 first.
        if (capacity == -1) {
            String msg = "Pool capacity is -1! pool: " + pool.getName() + pool.getHostAddress() + pool.getPath();
            s_logger.warn(msg);
            return new Answer(cmd, false, msg);
        }
        long available = capacity - sr.getPhysicalUtilisation(conn);
        Map<String, TemplateInfo> tInfo = new HashMap<String, TemplateInfo>();
        ModifyStoragePoolAnswer answer = new ModifyStoragePoolAnswer(cmd, capacity, available, tInfo);
        return answer;
    } catch (XenAPIException e) {
        String msg = "ModifyStoragePoolCommand XenAPIException:" + e.toString() + " host:" + _host.uuid + " pool: " + pool.getName() + pool.getHostAddress() + pool.getPath();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    } catch (Exception e) {
        // FIX: the log label previously claimed "XenAPIException" even though
        // this branch catches every other Exception type.
        String msg = "ModifyStoragePoolCommand Exception:" + e.getMessage() + " host:" + _host.uuid + " pool: " + pool.getName() + pool.getHostAddress() + pool.getPath();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
}
/**
 * Detaches and removes the SR backing the given storage pool from this host.
 */
protected Answer execute(DeleteStoragePoolCommand cmd) {
    StoragePoolVO pool = cmd.getPool();
    StorageFilerTO filer = new StorageFilerTO(pool);
    try {
        Connection connection = getConnection();
        removeSR(getStorageRepository(connection, filer));
        return new Answer(cmd, true, "success");
    } catch (Exception e) {
        String msg = "DeleteStoragePoolCommand XenAPIException:" + e.getMessage() + " host:" + _host.uuid + " pool: " + pool.getName() + pool.getHostAddress() + pool.getPath();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
}
/**
 * Obtains a (pooled) connection to this host's XenServer using the
 * credentials and timeout captured during configure().
 */
public Connection getConnection() {
    return _connPool.connect(_host.uuid, _host.pool, _host.ip, _username, _password, _wait);
}
/**
 * Populates the StartupRoutingCommand with this host's identity, capability
 * string, CPU/memory figures (memory reduced by dom0's share plus an assumed
 * 1/64 virtualization overhead), and the IP/MAC/netmask of the private and
 * storage PIFs.
 *
 * @throws CloudRuntimeException when the XenAPI/XML-RPC calls fail
 */
protected void fillHostInfo(StartupRoutingCommand cmd) {
    long speed = 0;
    int cpus = 0;
    long ram = 0;
    Connection conn = getConnection();
    long dom0Ram = 0;
    final StringBuilder caps = new StringBuilder();
    try {
        Host host = Host.getByUuid(conn, _host.uuid);
        Host.Record hr = host.getRecord(conn);
        // Pass the configured network device names back as host details.
        Map<String, String> details = cmd.getHostDetails();
        if (details == null) {
            details = new HashMap<String, String>();
        }
        if (_privateNetworkName != null) {
            details.put("private.network.device", _privateNetworkName);
        }
        if (_publicNetworkName != null) {
            details.put("public.network.device", _publicNetworkName);
        }
        if (_guestNetworkName != null) {
            details.put("guest.network.device", _guestNetworkName);
        }
        details.put("can_bridge_firewall", Boolean.toString(_canBridgeFirewall));
        cmd.setHostDetails(details);
        cmd.setName(hr.nameLabel);
        cmd.setGuid(_host.uuid);
        cmd.setDataCenter(Long.toString(_dcId));
        // Join the non-empty capability strings with " , ".
        for (final String cap : hr.capabilities) {
            if (cap.length() > 0) {
                caps.append(cap).append(" , ");
            }
        }
        if (caps.length() > 0) {
            // Strip the trailing " , " separator.
            caps.delete(caps.length() - 3, caps.length());
        }
        cmd.setCaps(caps.toString());
        cmd.setSpeed(_host.speed);
        cmd.setCpus(_host.cpus);
        long free = 0;
        HostMetrics hm = host.getMetrics(conn);
        ram = hm.getMemoryTotal(conn);
        free = hm.getMemoryFree(conn);
        // dom0's dynamic-max memory is deducted from what we advertise.
        Set<VM> vms = host.getResidentVMs(conn);
        for (VM vm : vms) {
            if (vm.getIsControlDomain(conn)) {
                dom0Ram = vm.getMemoryDynamicMax(conn);
                break;
            }
        }
        // assume the memory Virtualization overhead is 1/64
        ram = (ram - dom0Ram) * 63/64;
        cmd.setMemory(ram);
        cmd.setDom0MinMemory(dom0Ram);
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Total Ram: " + ram + " Free Ram: " + free + " dom0 Ram: " + dom0Ram);
        }
        // Private PIF addressing.
        PIF pif = PIF.getByUuid(conn, _host.privatePif);
        PIF.Record pifr = pif.getRecord(conn);
        if (pifr.IP != null && pifr.IP.length() > 0) {
            cmd.setPrivateIpAddress(pifr.IP);
            cmd.setPrivateMacAddress(pifr.MAC);
            cmd.setPrivateNetmask(pifr.netmask);
        }
        // Storage PIF 1 addressing.
        pif = PIF.getByUuid(conn, _host.storagePif1);
        pifr = pif.getRecord(conn);
        if (pifr.IP != null && pifr.IP.length() > 0) {
            cmd.setStorageIpAddress(pifr.IP);
            cmd.setStorageMacAddress(pifr.MAC);
            cmd.setStorageNetmask(pifr.netmask);
        }
        // Storage PIF 2 addressing (optional).
        if (_host.storagePif2 != null) {
            pif = PIF.getByUuid(conn, _host.storagePif2);
            pifr = pif.getRecord(conn);
            if (pifr.IP != null && pifr.IP.length() > 0) {
                cmd.setStorageIpAddressDeux(pifr.IP);
                cmd.setStorageMacAddressDeux(pifr.MAC);
                cmd.setStorageNetmaskDeux(pifr.netmask);
            }
        }
        Map<String, String> configs = hr.otherConfig;
        cmd.setIqn(configs.get("iscsi_iqn"));
        cmd.setPod(_pod);
        cmd.setVersion(CitrixResourceBase.class.getPackage().getImplementationVersion());
    } catch (final XmlRpcException e) {
        throw new CloudRuntimeException("XML RPC Exception" + e.getMessage(), e);
    } catch (XenAPIException e) {
        throw new CloudRuntimeException("XenAPIException" + e.toString(), e);
    }
}
/** No-arg constructor; all fields are populated later via configure(). */
public CitrixResourceBase() {
}
/**
 * Relative path (resolved via Script.findScript in configure()) under which
 * this resource's patch file lives.
 */
protected String getPatchPath() {
    return "scripts/vm/hypervisor/xenserver/xcpserver";
}
/**
 * Reads connection, network-naming and storage parameters from the params
 * map, applying defaults for the link-local and storage network names, and
 * resolves the patch file and storage layer implementation.
 *
 * @return true on success
 * @throws ConfigurationException when a required parameter (zone, pod, host
 *         address, credentials, uuid, patch file) is missing or invalid
 */
@Override
public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
    _name = name;
    _host.uuid = (String) params.get("guid");
    try {
        _dcId = Long.parseLong((String) params.get("zone"));
    } catch (NumberFormatException e) {
        throw new ConfigurationException("Unable to get the zone " + params.get("zone"));
    }
    // The resource is named after the host's guid, overriding the given name.
    _name = _host.uuid;
    _host.ip = (String) params.get("url");
    _host.pool = (String) params.get("pool");
    _username = (String) params.get("username");
    _password = (String) params.get("password");
    _pod = (String) params.get("pod");
    _cluster = (String)params.get("cluster");
    _privateNetworkName = (String) params.get("private.network.device");
    _publicNetworkName = (String) params.get("public.network.device");
    _guestNetworkName = (String)params.get("guest.network.device");
    _linkLocalPrivateNetworkName = (String) params.get("private.linkLocal.device");
    if (_linkLocalPrivateNetworkName == null)
        _linkLocalPrivateNetworkName = "cloud_link_local_network";
    _storageNetworkName1 = (String) params.get("storage.network.device1");
    if (_storageNetworkName1 == null) {
        _storageNetworkName1 = "cloud-stor1";
    }
    _storageNetworkName2 = (String) params.get("storage.network.device2");
    if (_storageNetworkName2 == null) {
        _storageNetworkName2 = "cloud-stor2";
    }
    // Operation timeout in seconds; defaults to 1800.
    String value = (String) params.get("wait");
    _wait = NumbersUtil.parseInt(value, 1800);
    if (_pod == null) {
        throw new ConfigurationException("Unable to get the pod");
    }
    if (_host.ip == null) {
        throw new ConfigurationException("Unable to get the host address");
    }
    if (_username == null) {
        throw new ConfigurationException("Unable to get the username");
    }
    if (_password == null) {
        throw new ConfigurationException("Unable to get the password");
    }
    if (_host.uuid == null) {
        throw new ConfigurationException("Unable to get the uuid");
    }
    String patchPath = getPatchPath();
    _patchPath = Script.findScript(patchPath, "patch");
    if (_patchPath == null) {
        throw new ConfigurationException("Unable to find all of patch files for xenserver");
    }
    // Use the injected storage layer, or instantiate the configured class
    // (defaulting to JavaStorageLayer) when none was provided.
    _storage = (StorageLayer) params.get(StorageLayer.InstanceConfigKey);
    if (_storage == null) {
        value = (String) params.get(StorageLayer.ClassConfigKey);
        if (value == null) {
            value = "com.cloud.storage.JavaStorageLayer";
        }
        try {
            Class<?> clazz = Class.forName(value);
            _storage = (StorageLayer) ComponentLocator.inject(clazz);
            _storage.configure("StorageLayer", params);
        } catch (ClassNotFoundException e) {
            throw new ConfigurationException("Unable to find class " + value);
        }
    }
    return true;
}
/** Best-effort destruction of a VDI; failures are logged and swallowed. */
void destroyVDI(VDI vdi) {
    try {
        vdi.destroy(getConnection());
    } catch (Exception e) {
        s_logger.warn("destroy VDI failed due to " + e.toString());
    }
}
/**
 * Creates a volume on the pool's SR: either by cloning the template VDI named
 * by the command's template URL, or by creating a fresh user VDI sized from
 * the command (when non-zero) or the disk profile.
 *
 * @return a CreateAnswer carrying the new volume, or the failure cause
 */
@Override
public CreateAnswer execute(CreateCommand cmd) {
    StorageFilerTO pool = cmd.getPool();
    DiskProfile dskch = cmd.getDiskCharacteristics();
    VDI vdi = null;
    Connection conn = getConnection();
    try {
        SR poolSr = getStorageRepository(conn, pool);
        if (cmd.getTemplateUrl() != null) {
            // Clone the template VDI and rename the clone after the disk.
            VDI tmpltvdi = null;
            tmpltvdi = getVDIbyUuid(cmd.getTemplateUrl());
            vdi = tmpltvdi.createClone(conn, new HashMap<String, String>());
            vdi.setNameLabel(conn, dskch.getName());
        } else {
            VDI.Record vdir = new VDI.Record();
            vdir.nameLabel = dskch.getName();
            vdir.SR = poolSr;
            vdir.type = Types.VdiType.USER;
            // Command-supplied size wins when non-zero; else the profile size.
            if(cmd.getSize()!=0)
                vdir.virtualSize = cmd.getSize();
            else
                vdir.virtualSize = dskch.getSize();
            vdi = VDI.create(conn, vdir);
        }
        VDI.Record vdir;
        vdir = vdi.getRecord(conn);
        s_logger.debug("Succesfully created VDI for " + cmd + ". Uuid = " + vdir.uuid);
        VolumeTO vol = new VolumeTO(cmd.getVolumeId(), dskch.getType(), Storage.StorageResourceType.STORAGE_POOL, pool.getType(), vdir.nameLabel, pool.getPath(), vdir.uuid,
                vdir.virtualSize);
        return new CreateAnswer(cmd, vol);
    } catch (Exception e) {
        s_logger.warn("Unable to create volume; Pool=" + pool + "; Disk: " + dskch, e);
        return new CreateAnswer(cmd, e);
    }
}
/**
 * Looks up the per-VM ISO SR labelled "&lt;vmName&gt;-ISO".
 * Returns null when no such SR exists, when the label is ambiguous
 * (more than one SR carries it), or on any API error.
 */
protected SR getISOSRbyVmName(String vmName) {
    Connection conn = getConnection();
    try {
        Set<SR> srs = SR.getByNameLabel(conn, vmName + "-ISO");
        if (srs.size() == 1) {
            return srs.iterator().next();
        }
        if (srs.size() > 1) {
            s_logger.warn("getIsoSRbyVmName failed due to there are more than 1 SR having same Label");
        }
    } catch (XenAPIException e) {
        s_logger.warn("getIsoSRbyVmName failed due to " + e.toString(), e);
    } catch (Exception e) {
        s_logger.warn("getIsoSRbyVmName failed due to " + e.getMessage(), e);
    }
    return null;
}
/**
 * Creates an NFS SR for the given nfs URI and returns it after a scan.
 * The SR name is a deterministic UUID derived from host+path, so repeated
 * calls map to the same SR. When {@code shared} is false, any existing
 * non-shared user NFS SR with the same name is removed first so the new
 * host-local SR can take its place. Throws CloudRuntimeException on any
 * failure (including an SR whose PBD did not attach).
 *
 * @param uri    nfs://host/path of the export to mount
 * @param shared whether the SR is pool-wide shared
 */
protected SR createNfsSRbyURI(URI uri, boolean shared) {
    try {
        Connection conn = getConnection();
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Creating a " + (shared ? "shared SR for " : "not shared SR for ") + uri);
        }
        Map<String, String> deviceConfig = new HashMap<String, String>();
        String path = uri.getPath();
        path = path.replace("//", "/");
        deviceConfig.put("server", uri.getHost());
        deviceConfig.put("serverpath", path);
        // Fix: dropped the redundant `new String(...)` wrapper — the concatenation
        // already yields a String. Still uses the platform default charset, as
        // before, so existing SR names stay stable.
        String name = UUID.nameUUIDFromBytes((uri.getHost() + path).getBytes()).toString();
        if (!shared) {
            // Clear out stale private SRs with the same derived name.
            Set<SR> srs = SR.getByNameLabel(conn, name);
            for (SR sr : srs) {
                SR.Record record = sr.getRecord(conn);
                if (SRType.NFS.equals(record.type) && record.contentType.equals("user") && !record.shared) {
                    removeSRSync(sr);
                }
            }
        }
        Host host = Host.getByUuid(conn, _host.uuid);
        SR sr = SR.create(conn, host, deviceConfig, new Long(0), name, uri.getHost() + uri.getPath(), SRType.NFS.toString(), "user", shared, new HashMap<String, String>());
        if( !checkSR(sr) ) {
            throw new Exception("no attached PBD");
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug(logX(sr, "Created a SR; UUID is " + sr.getUuid(conn) + " device config is " + deviceConfig));
        }
        sr.scan(conn);
        return sr;
    } catch (XenAPIException e) {
        String msg = "Can not create second storage SR mountpoint: " + uri.getHost() + uri.getPath() + " due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    } catch (Exception e) {
        String msg = "Can not create second storage SR mountpoint: " + uri.getHost() + uri.getPath() + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    }
}
/**
 * Creates an ISO-type SR backed by an NFS ISO library at host:path and
 * labels it "&lt;vmName&gt;-ISO" so {@code getISOSRbyVmName} can find it later.
 * Throws CloudRuntimeException on any failure.
 *
 * @param uri    location of the ISO export (host + path are used)
 * @param vmName VM whose name prefixes the SR label
 * @param shared whether the SR is pool-wide shared
 */
protected SR createIsoSRbyURI(URI uri, String vmName, boolean shared) {
    try {
        Connection conn = getConnection();
        Map<String, String> deviceConfig = new HashMap<String, String>();
        // NOTE(review): the original computed a "//"->"/" normalized copy of
        // uri.getPath() here but never used it — the raw path is what the SR
        // gets, unchanged below. The dead local was removed; confirm the raw
        // path was indeed the intended behavior.
        deviceConfig.put("location", uri.getHost() + ":" + uri.getPath());
        Host host = Host.getByUuid(conn, _host.uuid);
        SR sr = SR.create(conn, host, deviceConfig, new Long(0), uri.getHost() + uri.getPath(), "iso", "iso", "iso", shared, new HashMap<String, String>());
        sr.setNameLabel(conn, vmName + "-ISO");
        sr.setNameDescription(conn, deviceConfig.get("location"));
        sr.scan(conn);
        return sr;
    } catch (XenAPIException e) {
        String msg = "createIsoSRbyURI failed! mountpoint: " + uri.getHost() + uri.getPath() + " due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    } catch (Exception e) {
        String msg = "createIsoSRbyURI failed! mountpoint: " + uri.getHost() + uri.getPath() + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    }
}
/**
 * Scans an SR for a VDI whose location starts with the given prefix.
 * Returns null (after a warning) when nothing matches; wraps API errors
 * in CloudRuntimeException.
 */
protected VDI getVDIbyLocationandSR(String loc, SR sr) {
    Connection conn = getConnection();
    try {
        for (VDI candidate : sr.getVDIs(conn)) {
            if (candidate.getLocation(conn).startsWith(loc)) {
                return candidate;
            }
        }
        s_logger.warn("can not getVDIbyLocationandSR " + loc);
        return null;
    } catch (XenAPIException e) {
        String msg = "getVDIbyLocationandSR exception " + loc + " due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    } catch (Exception e) {
        String msg = "getVDIbyLocationandSR exception " + loc + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    }
}
/**
 * Resolves a VDI by its UUID, wrapping any lookup failure in a
 * CloudRuntimeException.
 */
protected VDI getVDIbyUuid(String uuid) {
    try {
        return VDI.getByUuid(getConnection(), uuid);
    } catch (XenAPIException e) {
        String msg = "VDI getByUuid for uuid: " + uuid + " failed due to " + e.toString();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    } catch (Exception e) {
        String msg = "VDI getByUuid for uuid: " + uuid + " failed due to " + e.getMessage();
        s_logger.warn(msg, e);
        throw new CloudRuntimeException(msg, e);
    }
}
/**
 * Finds or creates the lvmoiscsi SR for the given storage pool.
 * The pool path is expected in the form "/targetIQN/LUN". An existing SR
 * whose PBD device config matches target/IQN/LUN is reused (after a health
 * check); otherwise the SR is created via a two-step dance: a first
 * SR.create without a SCSIid, whose expected SR_BACKEND_FAILURE_107 error
 * payload lists the LUNs and their SCSI ids, followed by a second
 * SR.create with the discovered SCSIid.
 * Throws CloudRuntimeException on any failure.
 */
protected SR getIscsiSR(StorageFilerTO pool) {
    Connection conn = getConnection();
    // Serialize per pool: interning the UUID string yields one lock object
    // per distinct pool id across threads.
    synchronized (pool.getUuid().intern()) {
        Map<String, String> deviceConfig = new HashMap<String, String>();
        try {
            String target = pool.getHost();
            String path = pool.getPath();
            if (path.endsWith("/")) {
                path = path.substring(0, path.length() - 1);
            }
            // split("/") on "/iqn/lun" yields ["", iqn, lun] — hence length 3.
            String tmp[] = path.split("/");
            if (tmp.length != 3) {
                String msg = "Wrong iscsi path " + pool.getPath() + " it should be /targetIQN/LUN";
                s_logger.warn(msg);
                throw new CloudRuntimeException(msg);
            }
            String targetiqn = tmp[1].trim();
            String lunid = tmp[2].trim();
            String scsiid = "";
            // Reuse pass: look for an existing lvmoiscsi SR whose first PBD's
            // device config matches this target/IQN/LUN triple.
            Set<SR> srs = SR.getByNameLabel(conn, pool.getUuid());
            for (SR sr : srs) {
                if (!SRType.LVMOISCSI.equals(sr.getType(conn)))
                    continue;
                Set<PBD> pbds = sr.getPBDs(conn);
                if (pbds.isEmpty())
                    continue;
                PBD pbd = pbds.iterator().next();
                Map<String, String> dc = pbd.getDeviceConfig(conn);
                if (dc == null)
                    continue;
                if (dc.get("target") == null)
                    continue;
                if (dc.get("targetIQN") == null)
                    continue;
                if (dc.get("lunid") == null)
                    continue;
                if (target.equals(dc.get("target")) && targetiqn.equals(dc.get("targetIQN")) && lunid.equals(dc.get("lunid"))) {
                    if (checkSR(sr)) {
                        return sr;
                    }
                    throw new CloudRuntimeException("SR check failed for storage pool: " + pool.getUuid() + "on host:" + _host.uuid);
                }
            }
            deviceConfig.put("target", target);
            deviceConfig.put("targetIQN", targetiqn);
            Host host = Host.getByUuid(conn, _host.uuid);
            SR sr = null;
            try {
                // Probe create: without a SCSIid this is expected to fail with
                // SR_BACKEND_FAILURE_107, whose message carries the LUN list.
                sr = SR.create(conn, host, deviceConfig, new Long(0), pool.getUuid(), Long.toString(pool.getId()), SRType.LVMOISCSI.toString(), "user", true,
                        new HashMap<String, String>());
            } catch (XenAPIException e) {
                String errmsg = e.toString();
                if (errmsg.contains("SR_BACKEND_FAILURE_107")) {
                    // Parse the pseudo-XML payload: each <LUN> entry contains
                    // <LUNid>..</LUNid> and <SCSIid>..</SCSIid> (offsets 7 and 8
                    // are the tag lengths).
                    String lun[] = errmsg.split("<LUN>");
                    boolean found = false;
                    for (int i = 1; i < lun.length; i++) {
                        int blunindex = lun[i].indexOf("<LUNid>") + 7;
                        int elunindex = lun[i].indexOf("</LUNid>");
                        String ilun = lun[i].substring(blunindex, elunindex);
                        ilun = ilun.trim();
                        if (ilun.equals(lunid)) {
                            int bscsiindex = lun[i].indexOf("<SCSIid>") + 8;
                            int escsiindex = lun[i].indexOf("</SCSIid>");
                            scsiid = lun[i].substring(bscsiindex, escsiindex);
                            scsiid = scsiid.trim();
                            found = true;
                            break;
                        }
                    }
                    if (!found) {
                        String msg = "can not find LUN " + lunid + " in " + errmsg;
                        s_logger.warn(msg);
                        throw new CloudRuntimeException(msg);
                    }
                } else {
                    String msg = "Unable to create Iscsi SR " + deviceConfig + " due to " + e.toString();
                    s_logger.warn(msg, e);
                    throw new CloudRuntimeException(msg, e);
                }
            }
            // Real create with the discovered SCSIid.
            // NOTE(review): this second SR.create runs even if the probe create
            // above succeeded (scsiid would then be "") — presumably the probe
            // always fails with 107 on lvmoiscsi; confirm before changing.
            deviceConfig.put("SCSIid", scsiid);
            sr = SR.create(conn, host, deviceConfig, new Long(0), pool.getUuid(), Long.toString(pool.getId()), SRType.LVMOISCSI.toString(), "user", true,
                    new HashMap<String, String>());
            sr.scan(conn);
            return sr;
        } catch (XenAPIException e) {
            String msg = "Unable to create Iscsi SR " + deviceConfig + " due to " + e.toString();
            s_logger.warn(msg, e);
            throw new CloudRuntimeException(msg, e);
        } catch (Exception e) {
            String msg = "Unable to create Iscsi SR " + deviceConfig + " due to " + e.getMessage();
            s_logger.warn(msg, e);
            throw new CloudRuntimeException(msg, e);
        }
    }
}
/**
 * Finds or creates the NFS SR for the given storage pool.
 * An existing NFS SR whose first PBD's device config matches the pool's
 * server and serverpath is reused after a health check; otherwise a new
 * shared SR is created on this host and scanned.
 * Throws CloudRuntimeException on failure.
 */
protected SR getNfsSR(StorageFilerTO pool) {
    Connection conn = getConnection();
    Map<String, String> deviceConfig = new HashMap<String, String>();
    try {
        String server = pool.getHost();
        String serverpath = pool.getPath();
        serverpath = serverpath.replace("//", "/");
        // Reuse pass: scan all SRs for an NFS SR pointing at the same export.
        Set<SR> srs = SR.getAll(conn);
        for (SR sr : srs) {
            if (!SRType.NFS.equals(sr.getType(conn)))
                continue;
            Set<PBD> pbds = sr.getPBDs(conn);
            if (pbds.isEmpty())
                continue;
            PBD pbd = pbds.iterator().next();
            Map<String, String> dc = pbd.getDeviceConfig(conn);
            if (dc == null)
                continue;
            if (dc.get("server") == null)
                continue;
            if (dc.get("serverpath") == null)
                continue;
            if (server.equals(dc.get("server")) && serverpath.equals(dc.get("serverpath"))) {
                if (checkSR(sr)) {
                    return sr;
                }
                throw new CloudRuntimeException("SR check failed for storage pool: " + pool.getUuid() + "on host:" + _host.uuid);
            }
        }
        // No match — create a new shared NFS SR named after the pool UUID.
        deviceConfig.put("server", server);
        deviceConfig.put("serverpath", serverpath);
        Host host = Host.getByUuid(conn, _host.uuid);
        SR sr = SR.create(conn, host, deviceConfig, new Long(0), pool.getUuid(), Long.toString(pool.getId()), SRType.NFS.toString(), "user", true,
                new HashMap<String, String>());
        sr.scan(conn);
        return sr;
    } catch (XenAPIException e) {
        throw new CloudRuntimeException("Unable to create NFS SR " + pool.toString(), e);
    } catch (XmlRpcException e) {
        throw new CloudRuntimeException("Unable to create NFS SR " + pool.toString(), e);
    }
}
/**
 * Destroys a volume: unplugs and destroys every VBD still referencing the
 * VDI, then destroys the VDI itself. A failed VDI lookup is treated as
 * "already destroyed" and reported as success.
 */
@Override
public Answer execute(DestroyCommand cmd) {
    final VolumeTO vol = cmd.getVolume();
    final Connection conn = getConnection();
    final String volumeUUID = vol.getPath();

    // Resolve the VDI; if it cannot be found, the volume is gone already.
    VDI vdi;
    try {
        vdi = getVDIbyUuid(volumeUUID);
    } catch (Exception e) {
        s_logger.warn("getVDIbyUuid for " + volumeUUID + " failed due to " + e.toString());
        return new Answer(cmd, true, "Success");
    }

    // Detach and destroy every VBD attached to this VDI.
    Set<VBD> vbds;
    try {
        vbds = vdi.getVBDs(conn);
    } catch (Exception e) {
        String msg = "VDI getVBDS for " + volumeUUID + " failed due to " + e.toString();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
    for (VBD vbd : vbds) {
        try {
            vbd.unplug(conn);
            vbd.destroy(conn);
        } catch (Exception e) {
            String msg = "VM destroy for " + volumeUUID + " failed due to " + e.toString();
            s_logger.warn(msg, e);
            return new Answer(cmd, false, msg);
        }
    }

    // Finally destroy the VDI itself.
    try {
        vdi.destroy(conn);
    } catch (Exception e) {
        String msg = "VDI destroy for " + volumeUUID + " failed due to " + e.toString();
        s_logger.warn(msg, e);
        return new Answer(cmd, false, msg);
    }
    return new Answer(cmd, true, "Success");
}
/**
 * Handles a ShareCommand. Only the un-share case does work here: the
 * per-VM ISO SR is removed once no VM with that name remains. Errors are
 * logged and ignored; the answer always reports an empty mapping.
 */
@Override
public ShareAnswer execute(final ShareCommand cmd) {
    if (!cmd.isShare()) {
        SR isoSr = getISOSRbyVmName(cmd.getVmName());
        Connection conn = getConnection();
        try {
            if (isoSr != null && VM.getByNameLabel(conn, cmd.getVmName()).size() == 0) {
                removeSR(isoSr);
            }
        } catch (Exception e) {
            s_logger.warn("SR.getNameLabel failed due to " + e.getMessage() + e.toString());
        }
    }
    return new ShareAnswer(cmd, new HashMap<String, Integer>());
}
/**
 * Copies a volume between primary storage and secondary storage, in either
 * direction per {@code cmd.toSecondaryStorage()}.
 * To secondary: create the volume folder, mount it as a temporary NFS SR,
 * and copy the primary VDI into it. From secondary: mount the folder, find
 * the source VDI by UUID, and copy it onto the primary pool's SR.
 * The temporary SR is always removed in the finally block; when copying
 * from secondary, the source VDI and its folder are deleted afterwards.
 */
@Override
public CopyVolumeAnswer execute(final CopyVolumeCommand cmd) {
    String volumeUUID = cmd.getVolumePath();
    StoragePoolVO pool = cmd.getPool();
    StorageFilerTO poolTO = new StorageFilerTO(pool);
    String secondaryStorageURL = cmd.getSecondaryStorageURL();
    URI uri = null;
    try {
        uri = new URI(secondaryStorageURL);
    } catch (URISyntaxException e) {
        return new CopyVolumeAnswer(cmd, false, "Invalid secondary storage URL specified.", null, null);
    }
    String remoteVolumesMountPath = uri.getHost() + ":" + uri.getPath() + "/volumes/";
    String volumeFolder = String.valueOf(cmd.getVolumeId()) + "/";
    boolean toSecondaryStorage = cmd.toSecondaryStorage();
    String errorMsg = "Failed to copy volume";
    SR primaryStoragePool = null;
    SR secondaryStorage = null;
    VDI srcVolume = null;
    VDI destVolume = null;
    Connection conn = getConnection();
    try {
        if (toSecondaryStorage) {
            // Create the volume folder
            if (!createSecondaryStorageFolder(remoteVolumesMountPath, volumeFolder)) {
                throw new InternalErrorException("Failed to create the volume folder.");
            }
            // Create a SR for the volume UUID folder
            secondaryStorage = createNfsSRbyURI(new URI(secondaryStorageURL + "/volumes/" + volumeFolder), false);
            // Look up the volume on the source primary storage pool
            srcVolume = getVDIbyUuid(volumeUUID);
            // Copy the volume to secondary storage
            destVolume = cloudVDIcopy(srcVolume, secondaryStorage);
        } else {
            // Mount the volume folder
            secondaryStorage = createNfsSRbyURI(new URI(secondaryStorageURL + "/volumes/" + volumeFolder), false);
            // Look up the volume on secondary storage
            Set<VDI> vdis = secondaryStorage.getVDIs(conn);
            for (VDI vdi : vdis) {
                if (vdi.getUuid(conn).equals(volumeUUID)) {
                    srcVolume = vdi;
                    break;
                }
            }
            if (srcVolume == null) {
                throw new InternalErrorException("Failed to find volume on secondary storage.");
            }
            // Copy the volume to the primary storage pool
            primaryStoragePool = getStorageRepository(conn, poolTO);
            destVolume = cloudVDIcopy(srcVolume, primaryStoragePool);
        }
        // Report the UUID of the SR the copy landed on (primary when copying
        // from secondary, otherwise the temporary secondary SR).
        String srUUID;
        if (primaryStoragePool == null) {
            srUUID = secondaryStorage.getUuid(conn);
        } else {
            srUUID = primaryStoragePool.getUuid(conn);
        }
        String destVolumeUUID = destVolume.getUuid(conn);
        return new CopyVolumeAnswer(cmd, true, null, srUUID, destVolumeUUID);
    } catch (XenAPIException e) {
        s_logger.warn(errorMsg + ": " + e.toString(), e);
        return new CopyVolumeAnswer(cmd, false, e.toString(), null, null);
    } catch (Exception e) {
        s_logger.warn(errorMsg + ": " + e.toString(), e);
        return new CopyVolumeAnswer(cmd, false, e.getMessage(), null, null);
    } finally {
        // Cleanup runs on both success and failure paths.
        if (!toSecondaryStorage && srcVolume != null) {
            // Delete the volume on secondary storage
            destroyVDI(srcVolume);
        }
        removeSR(secondaryStorage);
        if (!toSecondaryStorage) {
            // Delete the volume folder on secondary storage
            deleteSecondaryStorageFolder(remoteVolumesMountPath, volumeFolder);
        }
    }
}
/**
 * Attaches or detaches a data volume to/from a running VM.
 * HVM guests without up-to-date PV drivers are rejected, since hot
 * attach/detach requires them. Attach creates and plugs a new RW disk VBD
 * on a free (or caller-chosen) device number; detach unplugs and destroys
 * every VBD of the VDI. Device 3 is reserved for the CD-ROM.
 */
protected AttachVolumeAnswer execute(final AttachVolumeCommand cmd) {
    boolean attach = cmd.getAttach();
    String vmName = cmd.getVmName();
    Long deviceId = cmd.getDeviceId();
    String errorMsg;
    if (attach) {
        errorMsg = "Failed to attach volume";
    } else {
        errorMsg = "Failed to detach volume";
    }
    Connection conn = getConnection();
    try {
        // Look up the VDI
        VDI vdi = mount(cmd.getPooltype(), cmd.getVolumeFolder(),cmd.getVolumePath());
        // Look up the VM
        VM vm = getVM(conn, vmName);
        /* For HVM guest, if no pv driver installed, no attach/detach */
        // An empty PV bootloader marks the guest as HVM.
        boolean isHVM;
        if (vm.getPVBootloader(conn).equalsIgnoreCase(""))
            isHVM = true;
        else
            isHVM = false;
        VMGuestMetrics vgm = vm.getGuestMetrics(conn);
        boolean pvDrvInstalled = false;
        if (!isRefNull(vgm) && vgm.getPVDriversUpToDate(conn)) {
            pvDrvInstalled = true;
        }
        if (isHVM && !pvDrvInstalled) {
            s_logger.warn(errorMsg + ": You attempted an operation on a VM which requires PV drivers to be installed but the drivers were not detected");
            return new AttachVolumeAnswer(cmd, "You attempted an operation that requires PV drivers to be installed on the VM. Please install them by inserting xen-pv-drv.iso.");
        }
        if (attach) {
            // Figure out the disk number to attach the VM to
            String diskNumber = null;
            if( deviceId != null ) {
                // Caller-specified device: reject the CD-ROM slot and slots
                // already in use on this VM.
                if( deviceId.longValue() == 3 ) {
                    String msg = "Device 3 is reserved for CD-ROM, choose other device";
                    return new AttachVolumeAnswer(cmd,msg);
                }
                if(isDeviceUsed(vm, deviceId)) {
                    String msg = "Device " + deviceId + " is used in VM " + vmName;
                    return new AttachVolumeAnswer(cmd,msg);
                }
                diskNumber = deviceId.toString();
            } else {
                diskNumber = getUnusedDeviceNum(vm);
            }
            // Create a new VBD
            VBD.Record vbdr = new VBD.Record();
            vbdr.VM = vm;
            vbdr.VDI = vdi;
            vbdr.bootable = false;
            vbdr.userdevice = diskNumber;
            vbdr.mode = Types.VbdMode.RW;
            vbdr.type = Types.VbdType.DISK;
            vbdr.unpluggable = true;
            VBD vbd = VBD.create(conn, vbdr);
            // Attach the VBD to the VM
            vbd.plug(conn);
            // Update the VDI's label to include the VM name
            vdi.setNameLabel(conn, vmName + "-DATA");
            return new AttachVolumeAnswer(cmd, Long.parseLong(diskNumber));
        } else {
            // Look up all VBDs for this VDI
            Set<VBD> vbds = vdi.getVBDs(conn);
            // Detach each VBD from its VM, and then destroy it
            for (VBD vbd : vbds) {
                VBD.Record vbdr = vbd.getRecord(conn);
                if (vbdr.currentlyAttached) {
                    vbd.unplug(conn);
                }
                vbd.destroy(conn);
            }
            // Update the VDI's label to be "detached"
            vdi.setNameLabel(conn, "detached");
            umount(vdi);
            return new AttachVolumeAnswer(cmd);
        }
    } catch (XenAPIException e) {
        String msg = errorMsg + " for uuid: " + cmd.getVolumePath() + " due to " + e.toString();
        s_logger.warn(msg, e);
        return new AttachVolumeAnswer(cmd, msg);
    } catch (Exception e) {
        String msg = errorMsg + " for uuid: " + cmd.getVolumePath() + " due to " + e.getMessage();
        s_logger.warn(msg, e);
        return new AttachVolumeAnswer(cmd, msg);
    }
}
/**
 * Hook invoked after a volume is detached (see the detach path above).
 * Intentionally a no-op here; presumably subclasses that mount per-volume
 * SRs override it to unmount — TODO confirm against subclasses.
 */
protected void umount(VDI vdi) {
}
/**
 * Inserts an ISO into, or ejects it from, a VM's CD-ROM drive.
 * Attach: find the VM's CD VBD on device 3, eject anything present, then
 * insert the requested ISO. Detach: find the VBD of this VM holding the
 * ISO, eject it, and remove the backing ISO SR unless it is the built-in
 * "XenServer Tools" SR.
 */
protected Answer execute(final AttachIsoCommand cmd) {
    boolean attach = cmd.isAttach();
    String vmName = cmd.getVmName();
    String isoURL = cmd.getIsoPath();
    String errorMsg;
    if (attach) {
        errorMsg = "Failed to attach ISO";
    } else {
        errorMsg = "Failed to detach ISO";
    }
    Connection conn = getConnection();
    try {
        if (attach) {
            VBD isoVBD = null;
            // Find the VM
            VM vm = getVM(conn, vmName);
            // Find the ISO VDI
            VDI isoVDI = getIsoVDIByURL(conn, vmName, isoURL);
            // Find the VM's CD-ROM VBD (device 3 by convention, see the
            // AttachVolume path which reserves it).
            Set<VBD> vbds = vm.getVBDs(conn);
            for (VBD vbd : vbds) {
                String userDevice = vbd.getUserdevice(conn);
                Types.VbdType type = vbd.getType(conn);
                if (userDevice.equals("3") && type == Types.VbdType.CD) {
                    isoVBD = vbd;
                    break;
                }
            }
            if (isoVBD == null) {
                throw new CloudRuntimeException("Unable to find CD-ROM VBD for VM: " + vmName);
            } else {
                // If an ISO is already inserted, eject it
                if (isoVBD.getEmpty(conn) == false) {
                    isoVBD.eject(conn);
                }
                // Insert the new ISO
                isoVBD.insert(conn, isoVDI);
            }
            return new Answer(cmd);
        } else {
            // Find the VM
            VM vm = getVM(conn, vmName);
            String vmUUID = vm.getUuid(conn);
            // Find the ISO VDI
            VDI isoVDI = getIsoVDIByURL(conn, vmName, isoURL);
            SR sr = isoVDI.getSR(conn);
            // Look up all VBDs for this VDI
            Set<VBD> vbds = isoVDI.getVBDs(conn);
            // Iterate through VBDs, and if the VBD belongs the VM, eject
            // the ISO from it
            for (VBD vbd : vbds) {
                VM vbdVM = vbd.getVM(conn);
                String vbdVmUUID = vbdVM.getUuid(conn);
                if (vbdVmUUID.equals(vmUUID)) {
                    // If an ISO is already inserted, eject it
                    if (!vbd.getEmpty(conn)) {
                        vbd.eject(conn);
                    }
                    break;
                }
            }
            // Drop the backing SR unless it is the XenServer Tools SR, which
            // must stay available.
            if (!sr.getNameLabel(conn).startsWith("XenServer Tools")) {
                removeSR(sr);
            }
            return new Answer(cmd);
        }
    } catch (XenAPIException e) {
        s_logger.warn(errorMsg + ": " + e.toString(), e);
        return new Answer(cmd, false, e.toString());
    } catch (Exception e) {
        s_logger.warn(errorMsg + ": " + e.toString(), e);
        return new Answer(cmd, false, e.getMessage());
    }
}
/**
 * Creates or destroys a volume snapshot, per the command switch.
 * Create: snapshot the volume VDI, then — when a previous snapshot exists —
 * compare VHD parents: if the new snapshot shares its parent with the
 * previous one, no blocks changed, so the empty snapshot is destroyed and
 * the previous snapshot's UUID is reported instead.
 * Destroy: destroy the snapshot VDI named by the command's snapshot path.
 */
protected ManageSnapshotAnswer execute(final ManageSnapshotCommand cmd) {
    long snapshotId = cmd.getSnapshotId();
    String snapshotName = cmd.getSnapshotName();
    // By default assume failure
    boolean success = false;
    String cmdSwitch = cmd.getCommandSwitch();
    String snapshotOp = "Unsupported snapshot command." + cmdSwitch;
    if (cmdSwitch.equals(ManageSnapshotCommand.CREATE_SNAPSHOT)) {
        snapshotOp = "create";
    } else if (cmdSwitch.equals(ManageSnapshotCommand.DESTROY_SNAPSHOT)) {
        snapshotOp = "destroy";
    }
    String details = "ManageSnapshotCommand operation: " + snapshotOp + " Failed for snapshotId: " + snapshotId;
    String snapshotUUID = null;
    Connection conn = getConnection();
    try {
        if (cmdSwitch.equals(ManageSnapshotCommand.CREATE_SNAPSHOT)) {
            // Look up the volume
            String volumeUUID = cmd.getVolumePath();
            VDI volume = VDI.getByUuid(conn, volumeUUID);
            // Create a snapshot
            VDI snapshot = volume.snapshot(conn, new HashMap<String, String>());
            if (snapshotName != null) {
                snapshot.setNameLabel(conn, snapshotName);
            }
            // Determine the UUID of the snapshot
            snapshotUUID = snapshot.getUuid(conn);
            String preSnapshotUUID = cmd.getSnapshotPath();
            //check if it is a empty snapshot
            if( preSnapshotUUID != null) {
                SR sr = volume.getSR(conn);
                String srUUID = sr.getUuid(conn);
                String type = sr.getType(conn);
                Boolean isISCSI = SRType.LVMOISCSI.equals(type);
                // Same VHD parent as the previous snapshot means nothing was
                // written since — the new snapshot carries no data.
                String snapshotParentUUID = getVhdParent(srUUID, snapshotUUID, isISCSI);
                String preSnapshotParentUUID = getVhdParent(srUUID, preSnapshotUUID, isISCSI);
                if( snapshotParentUUID != null && snapshotParentUUID.equals(preSnapshotParentUUID)) {
                    // this is empty snapshot, remove it
                    snapshot.destroy(conn);
                    snapshotUUID = preSnapshotUUID;
                }
            }
            success = true;
            details = null;
        } else if (cmd.getCommandSwitch().equals(ManageSnapshotCommand.DESTROY_SNAPSHOT)) {
            // Look up the snapshot
            snapshotUUID = cmd.getSnapshotPath();
            VDI snapshot = getVDIbyUuid(snapshotUUID);
            snapshot.destroy(conn);
            snapshotUUID = null;
            success = true;
            details = null;
        }
    } catch (XenAPIException e) {
        details += ", reason: " + e.toString();
        s_logger.warn(details, e);
    } catch (Exception e) {
        details += ", reason: " + e.toString();
        s_logger.warn(details, e);
    }
    return new ManageSnapshotAnswer(cmd, snapshotId, snapshotUUID, success, details);
}
/**
 * Creates a private template from a volume: mounts the template install
 * path on secondary storage as a temporary NFS SR, copies the volume VDI
 * into it, and writes the template.properties metadata file.
 * The temporary SR is always removed in the finally block.
 */
protected CreatePrivateTemplateAnswer execute(final CreatePrivateTemplateFromVolumeCommand cmd) {
    String secondaryStoragePoolURL = cmd.getSecondaryStorageURL();
    String volumeUUID = cmd.getVolumePath();
    Long accountId = cmd.getAccountId();
    String userSpecifiedName = cmd.getTemplateName();
    Long templateId = cmd.getTemplateId();
    String details = null;
    SR tmpltSR = null;
    boolean result = false;
    try {
        URI uri = new URI(secondaryStoragePoolURL);
        String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
        String installPath = "template/tmpl/" + accountId + "/" + templateId;
        if( !createSecondaryStorageFolder(secondaryStorageMountPath, installPath)) {
            // Fixed typo in the reported error message ("Filed" -> "Failed").
            details = " Failed to create folder " + installPath + " in secondary storage";
            s_logger.warn(details);
            return new CreatePrivateTemplateAnswer(cmd, false, details);
        }
        Connection conn = getConnection();
        VDI volume = getVDIbyUuid(volumeUUID);
        // Mount the install dir as a temporary template SR.
        URI tmpltURI = new URI(secondaryStoragePoolURL + "/" + installPath);
        tmpltSR = createNfsSRbyURI(tmpltURI, false);
        // Copy the volume into the template SR.
        VDI tmpltVDI = cloudVDIcopy(volume, tmpltSR);
        if (userSpecifiedName != null) {
            tmpltVDI.setNameLabel(conn, userSpecifiedName);
        }
        String tmpltSrUUID = tmpltSR.getUuid(conn);
        String tmpltUUID = tmpltVDI.getUuid(conn);
        String tmpltFilename = tmpltUUID + ".vhd";
        long virtualSize = tmpltVDI.getVirtualSize(conn);
        long size = tmpltVDI.getPhysicalUtilisation(conn);
        // Write template.properties next to the vhd on secondary storage.
        result = postCreatePrivateTemplate(tmpltSrUUID, tmpltFilename, tmpltUUID, userSpecifiedName, null, size, virtualSize, templateId);
        if (!result) {
            throw new CloudRuntimeException("Could not create the template.properties file on secondary storage dir: " + tmpltURI);
        }
        return new CreatePrivateTemplateAnswer(cmd, true, null, installPath, virtualSize, tmpltUUID, ImageFormat.VHD);
    } catch (XenAPIException e) {
        details = "Creating template from volume " + volumeUUID + " failed due to " + e.getMessage();
        s_logger.error(details, e);
    } catch (Exception e) {
        details = "Creating template from volume " + volumeUUID + " failed due to " + e.getMessage();
        s_logger.error(details, e);
    } finally {
        // Remove the secondary storage SR
        removeSR(tmpltSR);
    }
    return new CreatePrivateTemplateAnswer(cmd, result, details);
}
/**
 * Creates a private template from a backed-up snapshot: mounts both the
 * snapshot folder and the template install path on secondary storage as
 * temporary NFS SRs, copies the snapshot VDI into the template SR, and
 * writes template.properties. Both temporary SRs are removed in the
 * finally block.
 */
protected CreatePrivateTemplateAnswer execute(final CreatePrivateTemplateFromSnapshotCommand cmd) {
    String primaryStorageNameLabel = cmd.getPrimaryStoragePoolNameLabel();
    Long dcId = cmd.getDataCenterId();
    Long accountId = cmd.getAccountId();
    Long volumeId = cmd.getVolumeId();
    String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    String backedUpSnapshotUuid = cmd.getSnapshotUuid();
    Long newTemplateId = cmd.getNewTemplateId();
    String userSpecifiedName = cmd.getTemplateName();
    // By default, assume failure
    String details = null;
    SR snapshotSR = null;
    SR tmpltSR = null;
    boolean result = false;
    try {
        URI uri = new URI(secondaryStoragePoolURL);
        String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
        String installPath = "template/tmpl/" + accountId + "/" + newTemplateId;
        if( !createSecondaryStorageFolder(secondaryStorageMountPath, installPath)) {
            // Fixed typo in the reported error message ("Filed" -> "Failed").
            details = " Failed to create folder " + installPath + " in secondary storage";
            s_logger.warn(details);
            return new CreatePrivateTemplateAnswer(cmd, false, details);
        }
        Connection conn = getConnection();
        // Mount the snapshot folder so the backed-up snapshot VDI is visible.
        URI snapshotURI = new URI(secondaryStoragePoolURL + "/snapshots/" + accountId + "/" + volumeId );
        snapshotSR = createNfsSRbyURI(snapshotURI, false);
        snapshotSR.scan(conn);
        VDI snapshotVDI = getVDIbyUuid(backedUpSnapshotUuid);
        // Mount the install dir as a temporary template SR.
        URI tmpltURI = new URI(secondaryStoragePoolURL + "/" + installPath);
        tmpltSR = createNfsSRbyURI(tmpltURI, false);
        // Copy the snapshot into the template SR.
        VDI tmpltVDI = cloudVDIcopy(snapshotVDI, tmpltSR);
        String tmpltSrUUID = tmpltSR.getUuid(conn);
        String tmpltUUID = tmpltVDI.getUuid(conn);
        String tmpltFilename = tmpltUUID + ".vhd";
        long virtualSize = tmpltVDI.getVirtualSize(conn);
        long size = tmpltVDI.getPhysicalUtilisation(conn);
        // Write template.properties next to the vhd on secondary storage.
        result = postCreatePrivateTemplate(tmpltSrUUID, tmpltFilename, tmpltUUID, userSpecifiedName, null, size, virtualSize, newTemplateId);
        if (!result) {
            throw new CloudRuntimeException("Could not create the template.properties file on secondary storage dir: " + tmpltURI);
        }
        return new CreatePrivateTemplateAnswer(cmd, true, null, installPath, virtualSize, tmpltUUID, ImageFormat.VHD);
    } catch (XenAPIException e) {
        details = "Creating template from snapshot " + backedUpSnapshotUuid + " failed due to " + e.getMessage();
        s_logger.error(details, e);
    } catch (Exception e) {
        details = "Creating template from snapshot " + backedUpSnapshotUuid + " failed due to " + e.getMessage();
        s_logger.error(details, e);
    } finally {
        // Remove the secondary storage SR
        removeSR(snapshotSR);
        removeSR(tmpltSR);
    }
    return new CreatePrivateTemplateAnswer(cmd, result, details);
}
/**
 * Backs up a snapshot to secondary storage. The first backup of a volume
 * (no previous backup UUID) is a full copy: the snapshots folder is
 * created, mounted as a temporary NFS SR, and the snapshot VDI copied
 * over. Subsequent backups delegate to {@code backupSnapshot} for an
 * incremental copy. On success the previous snapshot on primary storage
 * is destroyed so the VHD chain stays clean.
 */
protected BackupSnapshotAnswer execute(final BackupSnapshotCommand cmd) {
    String primaryStorageNameLabel = cmd.getPrimaryStoragePoolNameLabel();
    Long dcId = cmd.getDataCenterId();
    Long accountId = cmd.getAccountId();
    Long volumeId = cmd.getVolumeId();
    String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    String snapshotUuid = cmd.getSnapshotUuid(); // not null: Precondition.
    String prevSnapshotUuid = cmd.getPrevSnapshotUuid();
    String prevBackupUuid = cmd.getPrevBackupUuid();
    // By default assume failure
    String details = null;
    boolean success = false;
    String snapshotBackupUuid = null;
    try {
        Connection conn = getConnection();
        SR primaryStorageSR = getSRByNameLabelandHost(primaryStorageNameLabel);
        if (primaryStorageSR == null) {
            throw new InternalErrorException("Could not backup snapshot because the primary Storage SR could not be created from the name label: " + primaryStorageNameLabel);
        }
        String primaryStorageSRUuid = primaryStorageSR.getUuid(conn);
        Boolean isISCSI = SRType.LVMOISCSI.equals(primaryStorageSR.getType(conn));
        URI uri = new URI(secondaryStoragePoolURL);
        String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
        if (prevBackupUuid == null) {
            // the first snapshot is always a full snapshot
            String folder = "snapshots/" + accountId + "/" + volumeId;
            if( !createSecondaryStorageFolder(secondaryStorageMountPath, folder)) {
                // Fixed typo in the reported error message ("Filed" -> "Failed").
                details = " Failed to create folder " + folder + " in secondary storage";
                s_logger.warn(details);
                return new BackupSnapshotAnswer(cmd, success, details, snapshotBackupUuid);
            }
            String snapshotMountpoint = secondaryStoragePoolURL + "/" + folder;
            SR snapshotSr = null;
            try {
                snapshotSr = createNfsSRbyURI(new URI(snapshotMountpoint), false);
                VDI snapshotVdi = getVDIbyUuid(snapshotUuid);
                VDI backedVdi = snapshotVdi.copy(conn, snapshotSr);
                snapshotBackupUuid = backedVdi.getUuid(conn);
                success = true;
            } finally {
                if( snapshotSr != null) {
                    removeSR(snapshotSr);
                }
            }
        } else {
            // Incremental backup relative to the previous backup.
            snapshotBackupUuid = backupSnapshot(primaryStorageSRUuid, dcId, accountId, volumeId, secondaryStorageMountPath,
                    snapshotUuid, prevSnapshotUuid, prevBackupUuid, isISCSI);
            success = (snapshotBackupUuid != null);
        }
        if (success) {
            details = "Successfully backedUp the snapshotUuid: " + snapshotUuid + " to secondary storage.";
            // Mark the snapshot as removed in the database.
            // When the next snapshot is taken, it will be
            // 1) deleted from the DB 2) The snapshotUuid will be deleted from the primary
            // 3) the snapshotBackupUuid will be copied to secondary
            // 4) if possible it will be coalesced with the next snapshot.
            if (prevSnapshotUuid != null) {
                // Destroy the previous snapshot, if it exists.
                // We destroy the previous snapshot only if the current snapshot
                // backup succeeds.
                // The aim is to keep the VDI of the last 'successful' snapshot
                // so that it doesn't get merged with the
                // new one
                // and muddle the vhd chain on the secondary storage.
                destroySnapshotOnPrimaryStorage(prevSnapshotUuid);
            }
        }
    } catch (XenAPIException e) {
        details = "BackupSnapshot Failed due to " + e.toString();
        s_logger.warn(details, e);
    } catch (Exception e) {
        details = "BackupSnapshot Failed due to " + e.getMessage();
        s_logger.warn(details, e);
    }
    return new BackupSnapshotAnswer(cmd, success, details, snapshotBackupUuid);
}
/**
 * Creates a new volume on primary storage from a backed-up snapshot:
 * mounts the snapshot folder on secondary storage as a temporary NFS SR,
 * then copies the snapshot VDI onto the primary SR. The temporary SR is
 * removed in the finally block.
 */
protected CreateVolumeFromSnapshotAnswer execute(final CreateVolumeFromSnapshotCommand cmd) {
    String primaryStorageNameLabel = cmd.getPrimaryStoragePoolNameLabel();
    Long accountId = cmd.getAccountId();
    Long volumeId = cmd.getVolumeId();
    String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    String backedUpSnapshotUuid = cmd.getSnapshotUuid();
    // By default, assume the command has failed and set the params to be
    // passed to CreateVolumeFromSnapshotAnswer appropriately
    boolean result = false;
    // Fix: details was previously initialized to null, so every "details +="
    // below produced messages beginning with the literal "null". Start from
    // a meaningful base message instead.
    String details = "Failed to create volume from snapshot " + backedUpSnapshotUuid;
    String volumeUUID = null;
    SR snapshotSR = null;
    if (secondaryStoragePoolURL == null) {
        details += " because the URL passed: " + secondaryStoragePoolURL + " is invalid.";
        return new CreateVolumeFromSnapshotAnswer(cmd, result, details, volumeUUID);
    }
    try {
        Connection conn = getConnection();
        SR primaryStorageSR = getSRByNameLabelandHost(primaryStorageNameLabel);
        if (primaryStorageSR == null) {
            throw new InternalErrorException("Could not create volume from snapshot because the primary Storage SR could not be created from the name label: "
                    + primaryStorageNameLabel);
        }
        // Get the absolute path of the snapshot on the secondary storage.
        URI snapshotURI = new URI(secondaryStoragePoolURL + "/snapshots/" + accountId + "/" + volumeId );
        snapshotSR = createNfsSRbyURI(snapshotURI, false);
        snapshotSR.scan(conn);
        VDI snapshotVDI = getVDIbyUuid(backedUpSnapshotUuid);
        VDI volumeVDI = cloudVDIcopy(snapshotVDI, primaryStorageSR);
        volumeUUID = volumeVDI.getUuid(conn);
        result = true;
        details = null;
    } catch (XenAPIException e) {
        details += " due to " + e.toString();
        s_logger.warn(details, e);
    } catch (Exception e) {
        details += " due to " + e.getMessage();
        s_logger.warn(details, e);
    } finally {
        // In all cases, if the temporary SR was created, forget it.
        if (snapshotSR != null) {
            removeSR(snapshotSR);
        }
    }
    if (!result) {
        // Is this logged at a higher level?
        s_logger.error(details);
    }
    // In all cases return something.
    return new CreateVolumeFromSnapshotAnswer(cmd, result, details, volumeUUID);
}
/**
 * Deletes a backed-up snapshot from secondary storage. The helper
 * {@code deleteSnapshotBackup} reports "1" on success; any other value
 * (or an invalid secondary-storage URL) yields a failure answer.
 */
protected DeleteSnapshotBackupAnswer execute(final DeleteSnapshotBackupCommand cmd) {
    final Long dcId = cmd.getDataCenterId();
    final Long accountId = cmd.getAccountId();
    final Long volumeId = cmd.getVolumeId();
    final String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
    final String backupUUID = cmd.getSnapshotUuid();

    String details = null;
    boolean success = false;

    URI uri = null;
    try {
        uri = new URI(secondaryStoragePoolURL);
    } catch (URISyntaxException e) {
        details = "Error finding the secondary storage URL" + e.getMessage();
        s_logger.error(details, e);
    }
    if (uri != null) {
        String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
        if (secondaryStorageMountPath == null) {
            details = "Couldn't delete snapshot because the URL passed: " + secondaryStoragePoolURL + " is invalid.";
        } else {
            details = deleteSnapshotBackup(dcId, accountId, volumeId, secondaryStorageMountPath, backupUUID);
            success = (details != null && details.equals("1"));
            if (success) {
                s_logger.debug("Successfully deleted snapshot backup " + backupUUID);
            }
        }
    }
    return new DeleteSnapshotBackupAnswer(cmd, success, details);
}
protected Answer execute(DeleteSnapshotsDirCommand cmd) {
Long dcId = cmd.getDataCenterId();
Long accountId = cmd.getAccountId();
Long volumeId = cmd.getVolumeId();
String secondaryStoragePoolURL = cmd.getSecondaryStoragePoolURL();
String snapshotUUID = cmd.getSnapshotUuid();
String primaryStorageNameLabel = cmd.getPrimaryStoragePoolNameLabel();
String details = null;
boolean success = false;
SR primaryStorageSR = null;
try {
primaryStorageSR = getSRByNameLabelandHost(primaryStorageNameLabel);
if (primaryStorageSR == null) {
details = "Primary Storage SR could not be created from the name label: " + primaryStorageNameLabel;
}
} catch (XenAPIException e) {
details = "Couldn't determine primary SR type " + e.getMessage();
s_logger.error(details, e);
} catch (Exception e) {
details = "Couldn't determine primary SR type " + e.getMessage();
s_logger.error(details, e);
}
if (primaryStorageSR != null) {
if (snapshotUUID != null) {
VDI snapshotVDI = getVDIbyUuid(snapshotUUID);
if (snapshotVDI != null) {
destroyVDI(snapshotVDI);
}
}
}
URI uri = null;
try {
uri = new URI(secondaryStoragePoolURL);
} catch (URISyntaxException e) {
details = "Error finding the secondary storage URL" + e.getMessage();
s_logger.error(details, e);
}
if (uri != null) {
String secondaryStorageMountPath = uri.getHost() + ":" + uri.getPath();
if (secondaryStorageMountPath == null) {
details = "Couldn't delete snapshotsDir because the URL passed: " + secondaryStoragePoolURL + " is invalid.";
} else {
details = deleteSnapshotsDir(dcId, accountId, volumeId, secondaryStorageMountPath);
success = (details != null && details.equals("1"));
if (success) {
s_logger.debug("Successfully deleted snapshotsDir for volume: " + volumeId);
}
}
}
return new Answer(cmd, success, details);
}
protected VM getVM(Connection conn, String vmName) {
// Look up VMs with the specified name
Set<VM> vms;
try {
vms = VM.getByNameLabel(conn, vmName);
} catch (XenAPIException e) {
throw new CloudRuntimeException("Unable to get " + vmName + ": " + e.toString(), e);
} catch (Exception e) {
throw new CloudRuntimeException("Unable to get " + vmName + ": " + e.getMessage(), e);
}
// If there are no VMs, throw an exception
if (vms.size() == 0)
throw new CloudRuntimeException("VM with name: " + vmName + " does not exist.");
// If there is more than one VM, print a warning
if (vms.size() > 1)
s_logger.warn("Found " + vms.size() + " VMs with name: " + vmName);
// Return the first VM in the set
return vms.iterator().next();
}
    /**
     * Resolves the VDI holding an ISO, given its URL.
     *
     * For the built-in "xs-tools" ISO the VDI is looked up directly by name
     * label. For any other URL, the URL is split into a mount point and an ISO
     * file name; an ISO SR for the VM is looked up (and created from the mount
     * point URI when missing) and the VDI is located inside that SR.
     *
     * @param conn   the XenServer connection.
     * @param vmName the VM the ISO SR is associated with.
     * @param isoURL either "xs-tools..." or a "mountpoint/isoname" style URL.
     * @return the matching VDI.
     * @throws CloudRuntimeException when the URL is malformed or no VDI matches.
     */
    protected VDI getIsoVDIByURL(Connection conn, String vmName, String isoURL) {
        SR isoSR = null;
        String mountpoint = null;
        if (isoURL.startsWith("xs-tools")) {
            try {
                Set<VDI> vdis = VDI.getByNameLabel(conn, isoURL);
                if (vdis.isEmpty()) {
                    throw new CloudRuntimeException("Could not find ISO with URL: " + isoURL);
                }
                // Name labels are not unique; the first match is used.
                return vdis.iterator().next();
            } catch (XenAPIException e) {
                throw new CloudRuntimeException("Unable to get pv iso: " + isoURL + " due to " + e.toString());
            } catch (Exception e) {
                throw new CloudRuntimeException("Unable to get pv iso: " + isoURL + " due to " + e.toString());
            }
        }
        // Everything before the last '/' is the mount point, the rest is the ISO name.
        int index = isoURL.lastIndexOf("/");
        mountpoint = isoURL.substring(0, index);
        URI uri;
        try {
            uri = new URI(mountpoint);
        } catch (URISyntaxException e) {
            throw new CloudRuntimeException("isoURL is wrong: " + isoURL);
        }
        // Reuse the VM's ISO SR when it exists, otherwise create one from the URI.
        isoSR = getISOSRbyVmName(vmName);
        if (isoSR == null) {
            isoSR = createIsoSRbyURI(uri, vmName, false);
        }
        String isoName = isoURL.substring(index + 1);
        VDI isoVDI = getVDIbyLocationandSR(isoName, isoSR);
        if (isoVDI != null) {
            return isoVDI;
        } else {
            throw new CloudRuntimeException("Could not find ISO with URL: " + isoURL);
        }
    }
protected SR getStorageRepository(Connection conn, StorageFilerTO pool) {
Set<SR> srs;
try {
srs = SR.getByNameLabel(conn, pool.getUuid());
} catch (XenAPIException e) {
throw new CloudRuntimeException("Unable to get SR " + pool.getUuid() + " due to " + e.toString(), e);
} catch (Exception e) {
throw new CloudRuntimeException("Unable to get SR " + pool.getUuid() + " due to " + e.getMessage(), e);
}
if (srs.size() > 1) {
throw new CloudRuntimeException("More than one storage repository was found for pool with uuid: " + pool.getUuid());
} else if (srs.size() == 1) {
SR sr = srs.iterator().next();
if (s_logger.isDebugEnabled()) {
s_logger.debug("SR retrieved for " + pool.getId() + " is mapped to " + sr.toString());
}
if (checkSR(sr)) {
return sr;
}
throw new CloudRuntimeException("SR check failed for storage pool: " + pool.getUuid() + "on host:" + _host.uuid);
} else {
if (pool.getType() == StoragePoolType.NetworkFilesystem)
return getNfsSR(pool);
else if (pool.getType() == StoragePoolType.IscsiLUN)
return getIscsiSR(pool);
else
throw new CloudRuntimeException("The pool type: " + pool.getType().name() + " is not supported.");
}
}
protected Answer execute(final CheckConsoleProxyLoadCommand cmd) {
return executeProxyLoadScan(cmd, cmd.getProxyVmId(), cmd.getProxyVmName(), cmd.getProxyManagementIp(), cmd.getProxyCmdPort());
}
protected Answer execute(final WatchConsoleProxyLoadCommand cmd) {
return executeProxyLoadScan(cmd, cmd.getProxyVmId(), cmd.getProxyVmName(), cmd.getProxyManagementIp(), cmd.getProxyCmdPort());
}
    /**
     * Polls the console proxy's status URL (http://&lt;mgmt ip&gt;:&lt;port&gt;/cmd/getstatus)
     * and wraps the raw response body in a ConsoleProxyLoadAnswer.
     *
     * @param cmd the originating command (echoed back in the answer).
     * @param proxyVmId id of the console proxy VM.
     * @param proxyVmName name of the console proxy VM.
     * @param proxyManagementIp management IP of the console proxy.
     * @param cmdPort command port the proxy listens on.
     * @return the answer; success is false when any part of the HTTP exchange failed.
     */
    protected Answer executeProxyLoadScan(final Command cmd, final long proxyVmId, final String proxyVmName, final String proxyManagementIp, final int cmdPort) {
        String result = null;
        final StringBuffer sb = new StringBuffer();
        sb.append("http://").append(proxyManagementIp).append(":" + cmdPort).append("/cmd/getstatus");
        boolean success = true;
        try {
            final URL url = new URL(sb.toString());
            final URLConnection conn = url.openConnection();
            // setting TIMEOUTs to avoid possible waiting until death situations
            conn.setConnectTimeout(5000);
            conn.setReadTimeout(5000);
            final InputStream is = conn.getInputStream();
            final BufferedReader reader = new BufferedReader(new InputStreamReader(is));
            final StringBuilder sb2 = new StringBuilder();
            String line = null;
            try {
                // Read the whole response body, normalising line breaks to '\n'.
                while ((line = reader.readLine()) != null)
                    sb2.append(line + "\n");
                result = sb2.toString();
            } catch (final IOException e) {
                success = false;
            } finally {
                try {
                    // Closing the underlying stream releases the connection.
                    is.close();
                } catch (final IOException e) {
                    s_logger.warn("Exception when closing , console proxy address : " + proxyManagementIp);
                    success = false;
                }
            }
        } catch (final IOException e) {
            s_logger.warn("Unable to open console proxy command port url, console proxy address : " + proxyManagementIp);
            success = false;
        }
        return new ConsoleProxyLoadAnswer(cmd, proxyVmId, proxyVmName, success, result);
    }
protected boolean createSecondaryStorageFolder(String remoteMountPath, String newFolder) {
String result = callHostPlugin("vmopsSnapshot", "create_secondary_storage_folder", "remoteMountPath", remoteMountPath, "newFolder", newFolder);
return (result != null);
}
protected boolean deleteSecondaryStorageFolder(String remoteMountPath, String folder) {
String result = callHostPlugin("vmopsSnapshot", "delete_secondary_storage_folder", "remoteMountPath", remoteMountPath, "folder", folder);
return (result != null);
}
protected boolean postCreatePrivateTemplate(String tmpltSrUUID,String tmpltFilename, String templateName, String templateDescription, String checksum, long size, long virtualSize, long templateId) {
if (templateDescription == null) {
templateDescription = "";
}
if (checksum == null) {
checksum = "";
}
String result = callHostPluginWithTimeOut("vmopsSnapshot", "post_create_private_template", 110*60, "tmpltSrUUID", tmpltSrUUID, "templateFilename", tmpltFilename, "templateName", templateName, "templateDescription", templateDescription,
"checksum", checksum, "size", String.valueOf(size), "virtualSize", String.valueOf(virtualSize), "templateId", String.valueOf(templateId));
boolean success = false;
if (result != null && !result.isEmpty()) {
// Else, command threw an exception which has already been logged.
if (result.equalsIgnoreCase("1")) {
s_logger.debug("Successfully created template.properties file on secondary storage for " + tmpltFilename);
success = true;
} else {
s_logger.warn("Could not create template.properties file on secondary storage for " + tmpltFilename + " for templateId: " + templateId);
}
}
return success;
}
// Each argument is put in a separate line for readability.
// Using more lines does not harm the environment.
protected String backupSnapshot(String primaryStorageSRUuid, Long dcId, Long accountId, Long volumeId, String secondaryStorageMountPath,
String snapshotUuid, String prevSnapshotUuid, String prevBackupUuid, Boolean isISCSI) {
String backupSnapshotUuid = null;
if (prevSnapshotUuid == null) {
prevSnapshotUuid = "";
}
if (prevBackupUuid == null) {
prevBackupUuid = "";
}
// Each argument is put in a separate line for readability.
// Using more lines does not harm the environment.
String results = callHostPluginWithTimeOut("vmopsSnapshot", "backupSnapshot", 110*60, "primaryStorageSRUuid", primaryStorageSRUuid, "dcId", dcId.toString(), "accountId", accountId.toString(), "volumeId",
volumeId.toString(), "secondaryStorageMountPath", secondaryStorageMountPath, "snapshotUuid", snapshotUuid, "prevSnapshotUuid", prevSnapshotUuid, "prevBackupUuid",
prevBackupUuid, "isISCSI", isISCSI.toString());
if (results == null || results.isEmpty()) {
// errString is already logged.
return null;
}
String[] tmp = results.split("#");
String status = tmp[0];
backupSnapshotUuid = tmp[1];
// status == "1" if and only if backupSnapshotUuid != null
// So we don't rely on status value but return backupSnapshotUuid as an
// indicator of success.
String failureString = "Could not copy backupUuid: " + backupSnapshotUuid + " of volumeId: " + volumeId + " from primary storage " + primaryStorageSRUuid
+ " to secondary storage " + secondaryStorageMountPath;
if (status != null && status.equalsIgnoreCase("1") && backupSnapshotUuid != null) {
s_logger.debug("Successfully copied backupUuid: " + backupSnapshotUuid + " of volumeId: " + volumeId + " to secondary storage");
} else {
s_logger.debug(failureString + ". Failed with status: " + status);
return null;
}
return backupSnapshotUuid;
}
protected String getVhdParent(String primaryStorageSRUuid, String snapshotUuid, Boolean isISCSI) {
String parentUuid = callHostPlugin("vmopsSnapshot", "getVhdParent", "primaryStorageSRUuid", primaryStorageSRUuid,
"snapshotUuid", snapshotUuid, "isISCSI", isISCSI.toString());
if (parentUuid == null || parentUuid.isEmpty()) {
s_logger.debug("Unable to get parent of VHD " + snapshotUuid + " in SR " + primaryStorageSRUuid);
// errString is already logged.
return null;
}
return parentUuid;
}
protected boolean destroySnapshotOnPrimaryStorage(String snapshotUuid) {
// Precondition snapshotUuid != null
try {
Connection conn = getConnection();
VDI snapshot = getVDIbyUuid(snapshotUuid);
if (snapshot == null) {
throw new InternalErrorException("Could not destroy snapshot " + snapshotUuid + " because the snapshot VDI was null");
}
snapshot.destroy(conn);
s_logger.debug("Successfully destroyed snapshotUuid: " + snapshotUuid + " on primary storage");
return true;
} catch (XenAPIException e) {
String msg = "Destroy snapshotUuid: " + snapshotUuid + " on primary storage failed due to " + e.toString();
s_logger.error(msg, e);
} catch (Exception e) {
String msg = "Destroy snapshotUuid: " + snapshotUuid + " on primary storage failed due to " + e.getMessage();
s_logger.warn(msg, e);
}
return false;
}
protected String deleteSnapshotBackup(Long dcId, Long accountId, Long volumeId, String secondaryStorageMountPath, String backupUUID) {
// If anybody modifies the formatting below again, I'll skin them
String result = callHostPlugin("vmopsSnapshot", "deleteSnapshotBackup", "backupUUID", backupUUID, "dcId", dcId.toString(), "accountId", accountId.toString(),
"volumeId", volumeId.toString(), "secondaryStorageMountPath", secondaryStorageMountPath);
return result;
}
protected String deleteSnapshotsDir(Long dcId, Long accountId, Long volumeId, String secondaryStorageMountPath) {
// If anybody modifies the formatting below again, I'll skin them
String result = callHostPlugin("vmopsSnapshot", "deleteSnapshotsDir", "dcId", dcId.toString(), "accountId", accountId.toString(), "volumeId", volumeId.toString(),
"secondaryStorageMountPath", secondaryStorageMountPath);
return result;
}
    /**
     * {@inheritDoc} Nothing to do at start time; always reports success.
     */
    @Override
    public boolean start() {
        return true;
    }
    /**
     * Stops the resource by signalling disconnection; always reports success.
     */
    @Override
    public boolean stop() {
        disconnected();
        return true;
    }
    /**
     * @return the name this resource was configured with.
     */
    @Override
    public String getName() {
        return _name;
    }
    /**
     * @return the agent control channel currently associated with this resource.
     */
    @Override
    public IAgentControl getAgentControl() {
        return _agentControl;
    }
    /**
     * Associates an agent control channel with this resource.
     */
    @Override
    public void setAgentControl(IAgentControl agentControl) {
        _agentControl = agentControl;
    }
    /**
     * Ejects a host from the XenServer pool, falling back to destroying it in
     * the pool database when a clean eject is not possible.
     *
     * @param cmd carries the uuid of the host to eject.
     * @return a successful answer when the host is gone from the pool, a failed
     *         answer when the XenAPI calls themselves fail.
     */
    protected Answer execute(PoolEjectCommand cmd) {
        Connection conn = getConnection();
        String hostuuid = cmd.getHostuuid();
        try {
            // If the host is no longer in the pool's host table, there is nothing to do.
            Map<Host, Host.Record> hostrs = Host.getAllRecords(conn);
            boolean found = false;
            for( Host.Record hr : hostrs.values() ) {
                if( hr.uuid.equals(hostuuid)) {
                    found = true;
                }
            }
            if( ! found) {
                s_logger.debug("host " + hostuuid + " has already been ejected from pool " + _host.pool);
                return new Answer(cmd);
            }
            Host host = Host.getByUuid(conn, hostuuid);
            // remove all tags cloud stack add before eject
            // NOTE(review): this only mutates the local copy of the host record;
            // the modified tag set is never written back to the host (e.g. via a
            // setTags call) - confirm whether that is intended.
            Host.Record hr = host.getRecord(conn);
            Iterator<String> it = hr.tags.iterator();
            while (it.hasNext()) {
                String tag = it.next();
                if (tag.startsWith("vmops-version-")) {
                    it.remove();
                }
            }
            // eject from pool
            try {
                Pool.eject(conn, host);
            } catch (XenAPIException e) {
                // Eject failed: remove the host from the pool database instead.
                String msg = "Unable to eject host " + _host.uuid + " due to " + e.toString();
                s_logger.warn(msg);
                host.destroy(conn);
            }
            return new Answer(cmd);
        } catch (XenAPIException e) {
            String msg = "XenAPIException Unable to destroy host " + _host.uuid + " in xenserver database due to " + e.toString();
            s_logger.warn(msg, e);
            return new Answer(cmd, false, msg);
        } catch (Exception e) {
            String msg = "Exception Unable to destroy host " + _host.uuid + " in xenserver database due to " + e.getMessage();
            s_logger.warn(msg, e);
            return new Answer(cmd, false, msg);
        }
    }
    /**
     * Value holder pairing a XenServer network (and its record) with the
     * physical interface (PIF) it rides on.
     */
    protected class Nic {
        public Network n;
        public Network.Record nr;
        public PIF p;
        public PIF.Record pr;

        public Nic(Network n, Network.Record nr, PIF p, PIF.Record pr) {
            this.n = n;
            this.nr = nr;
            this.p = p;
            this.pr = pr;
        }
    }
    /**
     * A list of UUIDs that are gathered from the XenServer when
     * the resource first connects to XenServer. These UUIDs do
     * not change over time.
     */
    protected class XenServerHost {
        public String systemvmisouuid;
        public String uuid;
        public String ip;
        public String publicNetwork;
        public String privateNetwork;
        public String linkLocalNetwork;
        public String storageNetwork1;
        public String storageNetwork2;
        public String guestNetwork;
        public String guestPif;
        public String publicPif;
        public String privatePif;
        public String storagePif1;
        public String storagePif2;
        public String pool;
        public int speed;
        public int cpus;
    }
    /**
     * Maps a standard guest OS type string to the hypervisor-specific one.
     * This base implementation returns the input unchanged; subclasses
     * override it for version-specific mappings.
     */
    protected String getGuestOsType(String stdType) {
        return stdType;
    }
/*
protected boolean patchSystemVm(VDI vdi, String vmName, VirtualMachine.Type type) {
if (type == VirtualMachine.Type.DomainRouter) {
return patchSpecialVM(vdi, vmName, "router");
} else if (type == VirtualMachine.Type.ConsoleProxy) {
return patchSpecialVM(vdi, vmName, "consoleproxy");
} else if (type == VirtualMachine.Type.SecondaryStorageVm) {
return patchSpecialVM(vdi, vmName, "secstorage");
} else {
throw new CloudRuntimeException("Tried to patch unknown type of system vm");
}
}
protected boolean patchSystemVm(VDI vdi, String vmName) {
if (vmName.startsWith("r-")) {
return patchSpecialVM(vdi, vmName, "router");
} else if (vmName.startsWith("v-")) {
return patchSpecialVM(vdi, vmName, "consoleproxy");
} else if (vmName.startsWith("s-")) {
return patchSpecialVM(vdi, vmName, "secstorage");
} else {
throw new CloudRuntimeException("Tried to patch unknown type of system vm");
}
}
protected boolean patchSpecialVM(VDI vdi, String vmname, String vmtype) {
// patch special vm here, domr, domp
VBD vbd = null;
Connection conn = getConnection();
try {
Host host = Host.getByUuid(conn, _host.uuid);
Set<VM> vms = host.getResidentVMs(conn);
for (VM vm : vms) {
VM.Record vmrec = null;
try {
vmrec = vm.getRecord(conn);
} catch (Exception e) {
String msg = "VM.getRecord failed due to " + e.toString() + " " + e.getMessage();
s_logger.warn(msg);
continue;
}
if (vmrec.isControlDomain) {
VBD.Record vbdr = new VBD.Record();
vbdr.VM = vm;
vbdr.VDI = vdi;
vbdr.bootable = false;
vbdr.userdevice = getUnusedDeviceNum(vm);
vbdr.unpluggable = true;
vbdr.mode = Types.VbdMode.RW;
vbdr.type = Types.VbdType.DISK;
vbd = VBD.create(conn, vbdr);
vbd.plug(conn);
String device = vbd.getDevice(conn);
return patchspecialvm(vmname, device, vmtype);
}
}
} catch (XenAPIException e) {
String msg = "patchSpecialVM faile on " + _host.uuid + " due to " + e.toString();
s_logger.warn(msg, e);
} catch (Exception e) {
String msg = "patchSpecialVM faile on " + _host.uuid + " due to " + e.getMessage();
s_logger.warn(msg, e);
} finally {
if (vbd != null) {
try {
if (vbd.getCurrentlyAttached(conn)) {
vbd.unplug(conn);
}
vbd.destroy(conn);
} catch (XmlRpcException e) {
String msg = "Catch XmlRpcException due to " + e.getMessage();
s_logger.warn(msg, e);
} catch (XenAPIException e) {
String msg = "Catch XenAPIException due to " + e.toString();
s_logger.warn(msg, e);
}
}
}
return false;
}
protected boolean patchspecialvm(String vmname, String device, String vmtype) {
String result = callHostPlugin("vmops", "patchdomr", "vmname", vmname, "vmtype", vmtype, "device", "/dev/" + device);
if (result == null || result.isEmpty())
return false;
return true;
}
*/
}
| Bug 6807: the first IP is different from the source NAT IP.
Status of 6807: resolved (fixed).
| core/src/com/cloud/hypervisor/xen/resource/CitrixResourceBase.java | bug 6807: first ip is different from sourcenat status 6807: resolved fixed |
|
Java | apache-2.0 | 0e6c2a928790cc89d135167f6ed68ae64dd20c2a | 0 | apache/karaf-cellar,apache/karaf-cellar | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.karaf.cellar.core;
import org.apache.karaf.cellar.core.event.EventType;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ConfigurationAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Cellar generic support. This class provides a set of util methods used by other classes.
*/
public class CellarSupport {

    protected static final transient Logger LOGGER = LoggerFactory.getLogger(CellarSupport.class);

    protected ClusterManager clusterManager;
    protected GroupManager groupManager;
    protected ConfigurationAdmin configurationAdmin;

    /**
     * If the entry is not present in the list, add it. If the entry is present in the list, remove it.
     *
     * @param listType the list type (e.g. whitelist or blacklist).
     * @param group the cluster group name.
     * @param category the resource category name.
     * @param type the event type (inbound, outbound).
     * @param entry the entry to switch.
     * @throws Exception in case of failure to read or update the configuration.
     */
    public void switchListEntry(String listType, String group, String category, EventType type, String entry) throws Exception {
        if (group != null) {
            Configuration configuration = configurationAdmin.getConfiguration(Configurations.GROUP, null);
            Dictionary dictionary = configuration.getProperties();
            if (dictionary == null) {
                dictionary = new Properties();
            }
            String key = group + Configurations.SEPARATOR + category + Configurations.SEPARATOR + listType + Configurations.SEPARATOR + type.name().toLowerCase();
            if (dictionary.get(key) != null) {
                // Operate on whole comma-separated items: a plain substring
                // replace() would corrupt entries that merely contain the
                // switched entry (e.g. removing "foo" would mangle "foobar").
                List<String> items = new ArrayList<String>();
                for (String item : dictionary.get(key).toString().split(",")) {
                    item = item.trim();
                    if (item.length() > 0) {
                        items.add(item);
                    }
                }
                // remove() returns false when the entry was absent; in that case add it.
                if (!items.remove(entry)) {
                    items.add(entry);
                }
                StringBuilder value = new StringBuilder();
                for (int i = 0; i < items.size(); i++) {
                    if (i > 0) {
                        value.append(",");
                    }
                    value.append(items.get(i));
                }
                dictionary.put(key, value.toString());
            } else {
                dictionary.put(key, entry);
            }
            configuration.update(dictionary);
        }
    }

    /**
     * Get a set of resources in the Cellar cluster groups configuration.
     *
     * @param listType the list type (e.g. whitelist or blacklist).
     * @param group the cluster group name.
     * @param category the resource category name.
     * @param type the event type (inbound, outbound).
     * @return the set of resources, or null when none is configured.
     */
    public Set<String> getListEntries(String listType, String group, String category, EventType type) {
        Set<String> result = null;
        if (group != null) {
            try {
                Configuration configuration = configurationAdmin.getConfiguration(Configurations.GROUP, null);
                Dictionary<String, Object> dictionary = configuration.getProperties();
                if (dictionary != null) {
                    // A cluster group may inherit the entries of its parent group.
                    String parent = (String) dictionary.get(group + Configurations.SEPARATOR + Configurations.PARENT);
                    if (parent != null) {
                        result = getListEntries(listType, parent, category, type);
                    }
                    String propertyName = group + Configurations.SEPARATOR + category + Configurations.SEPARATOR + listType + Configurations.SEPARATOR + type.name().toLowerCase();
                    String propertyValue = (String) dictionary.get(propertyName);
                    if (propertyValue != null) {
                        propertyValue = propertyValue.replaceAll("\n", "");
                        String[] itemList = propertyValue.split(Configurations.DELIMETER);
                        if (itemList != null && itemList.length > 0) {
                            if (result == null) {
                                result = new HashSet<String>();
                            }
                            for (String item : itemList) {
                                if (item != null) {
                                    result.add(item.trim());
                                }
                            }
                        }
                    }
                }
            } catch (IOException e) {
                // Keep the cause in the log; it was previously dropped.
                LOGGER.error("Error looking up for clustering group configuration cfg", e);
            }
        }
        return result;
    }

    /**
     * Get the resources in the Cellar cluster groups configuration.
     *
     * @param listType the list type (e.g. whitelist or blacklist).
     * @param groups the cluster groups names.
     * @param category the resource category name.
     * @param type the event type (inbound, outbound).
     * @return the set of resources, or null when none is configured.
     */
    public Set<String> getListEntries(String listType, Collection<String> groups, String category, EventType type) {
        Set<String> result = null;
        if (groups != null && !groups.isEmpty()) {
            for (String group : groups) {
                Set<String> items = getListEntries(listType, group, category, type);
                if (items != null && !items.isEmpty()) {
                    if (result == null) {
                        result = new HashSet<String>();
                    }
                    result.addAll(items);
                }
            }
        }
        return result;
    }

    /**
     * Get a set of resources in the Cellar cluster groups configuration.
     *
     * @param listType the list type (e.g. whitelist or blacklist).
     * @param group the cluster group.
     * @param category the resource category name.
     * @param type the event type (inbound, outbound).
     * @return the set of resources, or null when none is configured.
     */
    public Set<String> getListEntries(String listType, Group group, String category, EventType type) {
        Set<String> result = null;
        if (group != null) {
            String groupName = group.getName();
            Set<String> items = getListEntries(listType, groupName, category, type);
            if (items != null && !items.isEmpty()) {
                if (result == null) {
                    result = new HashSet<String>();
                }
                result.addAll(items);
            }
        }
        return result;
    }

    /**
     * Check if a resource is allowed for a type of cluster event.
     *
     * @param group the cluster group.
     * @param category the resource category name.
     * @param event the resource name.
     * @param type the event type (inbound, outbound).
     * @return true when the resource matches the whitelist (or the whitelist is
     *         empty) and matches no blacklist entry, false otherwise.
     */
    public Boolean isAllowed(Group group, String category, String event, EventType type) {
        Set<String> whiteList = getListEntries(Configurations.WHITELIST, group, category, type);
        Set<String> blackList = getListEntries(Configurations.BLACKLIST, group, category, type);
        if (blackList == null || whiteList == null) {
            // If one list is missing, we probably have a configuration issue - do not synchronize anything
            LOGGER.warn("No whitelist/blacklist found for " + group.getName() + ", check your configuration !");
            return false;
        }
        // if no white listed items we assume all are accepted.
        Boolean result = true;
        if (!whiteList.isEmpty()) {
            result = false;
            for (String whiteListItem : whiteList) {
                if (wildCardMatch(event, whiteListItem)) {
                    result = true;
                    // One match is enough, no need to scan further.
                    break;
                }
            }
        }
        if (result) {
            // we passed the whitelist, now check the blacklist:
            // if any blacklist item matches, the event is rejected.
            for (String blackListItem : blackList) {
                if (wildCardMatch(event, blackListItem)) {
                    return false;
                }
            }
        }
        return result;
    }

    /**
     * Check if a string matches a wildcard pattern.
     *
     * @param item the string to check.
     * @param pattern the pattern, where '*' matches any sequence of characters.
     * @return true if the item string matches the pattern, false else.
     */
    protected boolean wildCardMatch(String item, String pattern) {
        if (item == null || pattern == null) {
            return false;
        }
        // update the pattern to have a valid regex pattern
        pattern = pattern.replace("*", ".*");
        // use the regex
        Pattern p = Pattern.compile(pattern);
        Matcher m = p.matcher(item);
        return m.matches();
    }

    public ConfigurationAdmin getConfigurationAdmin() {
        return configurationAdmin;
    }

    public void setConfigurationAdmin(ConfigurationAdmin configurationAdmin) {
        this.configurationAdmin = configurationAdmin;
    }

    public ClusterManager getClusterManager() {
        return clusterManager;
    }

    public void setClusterManager(ClusterManager clusterManager) {
        this.clusterManager = clusterManager;
    }

    public GroupManager getGroupManager() {
        return groupManager;
    }

    public void setGroupManager(GroupManager groupManager) {
        this.groupManager = groupManager;
    }
}
| core/src/main/java/org/apache/karaf/cellar/core/CellarSupport.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.karaf.cellar.core;
import org.apache.karaf.cellar.core.event.EventType;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ConfigurationAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Cellar generic support. This class provides a set of util methods used by other classes.
*/
public class CellarSupport {
protected static final transient Logger LOGGER = LoggerFactory.getLogger(CellarSupport.class);
protected ClusterManager clusterManager;
protected GroupManager groupManager;
protected ConfigurationAdmin configurationAdmin;
/**
* If the entry is not present in the list, add it. If the entry is present in the list, remove it.
*
* @param listType the comma separated list of resources.
* @param group the cluster group name.
* @param category the resource category name.
* @param entry the entry to switch.
*/
public void switchListEntry(String listType, String group, String category, EventType type, String entry) throws Exception {
if (group != null) {
Configuration configuration = configurationAdmin.getConfiguration(Configurations.GROUP, null);
Dictionary dictionary = configuration.getProperties();
if (dictionary == null) {
dictionary = new Properties();
}
String key = group + Configurations.SEPARATOR + category + Configurations.SEPARATOR + listType + Configurations.SEPARATOR + type.name().toLowerCase();
if (dictionary.get(key) != null) {
String value = dictionary.get(key).toString();
if (value.contains(entry)) {
value = value.replace(entry, "");
} else {
value = value + "," + entry;
}
if (value.startsWith(",")) value = value.substring(1);
if (value.endsWith(",")) value = value.substring(0, value.length() - 1);
value = value.replace("\n\n", "");
value = value.replace(",,", ",");
dictionary.put(key, value);
} else {
dictionary.put(key, entry);
}
configuration.update(dictionary);
}
}
/**
* Get a set of resources in the Cellar cluster groups configuration.
*
* @param listType the comma separated list of resources.
* @param group the cluster group name.
* @param category the resource category name.
* @param type the event type (inbound, outbound).
* @return the set of resources.
*/
public Set<String> getListEntries(String listType, String group, String category, EventType type) {
Set<String> result = null;
if (group != null) {
try {
Configuration configuration = configurationAdmin.getConfiguration(Configurations.GROUP, null);
Dictionary<String, Object> dictionary = configuration.getProperties();
if (dictionary != null) {
String parent = (String) dictionary.get(group + Configurations.SEPARATOR + Configurations.PARENT);
if (parent != null) {
result = getListEntries(listType, parent, category, type);
}
String propertyName = group + Configurations.SEPARATOR + category + Configurations.SEPARATOR + listType + Configurations.SEPARATOR + type.name().toLowerCase();
String propertyValue = (String) dictionary.get(propertyName);
if (propertyValue != null) {
propertyValue = propertyValue.replaceAll("\n","");
String[] itemList = propertyValue.split(Configurations.DELIMETER);
if (itemList != null && itemList.length > 0) {
if (result == null) {
result = new HashSet<String>();
}
for (String item : itemList) {
if (item != null) {
result.add(item.trim());
}
}
}
}
}
} catch (IOException e) {
LOGGER.error("Error looking up for clustering group configuration cfg");
}
}
return result;
}
/**
* Get the resources in the Cellar cluster groups configuration.
*
* @param listType the comma separated string of resources.
* @param groups the cluster groups names.
* @param category the resource category name.
* @param type the event type (inbound, outbound).
* @return the set of resources.
*/
public Set<String> getListEntries(String listType, Collection<String> groups, String category, EventType type) {
Set<String> result = null;
if (groups != null && !groups.isEmpty()) {
for (String group : groups) {
Set<String> items = getListEntries(listType, group, category, type);
if (items != null && !items.isEmpty()) {
if (result == null)
result = new HashSet<String>();
result.addAll(items);
}
}
}
return result;
}
/**
* Get a set of resources in the Cellar cluster groups configuration.
*
* @param listType a comma separated string of resources.
* @param group the cluster group.
* @param category the resource category name.
* @param type the event type (inbound, outbound).
* @return the set of resources.
*/
public Set<String> getListEntries(String listType, Group group, String category, EventType type) {
Set<String> result = null;
if (group != null) {
String groupName = group.getName();
Set<String> items = getListEntries(listType, groupName, category, type);
if (items != null && !items.isEmpty()) {
if (result == null)
result = new HashSet<String>();
result.addAll(items);
}
}
return result;
}
/**
 * Check if a resource is allowed for a type of cluster event.
 *
 * A resource is allowed when it matches the whitelist (an empty whitelist
 * accepts everything) and does not match any blacklist entry. If either
 * list is missing entirely, nothing is allowed, since that most likely
 * indicates a configuration problem.
 *
 * @param group the cluster group.
 * @param category the resource category name.
 * @param event the resource name.
 * @param type the event type (inbound, outbound).
 * @return true if the event is allowed, false otherwise.
 */
public Boolean isAllowed(Group group, String category, String event, EventType type) {
    Set<String> whiteList = getListEntries(Configurations.WHITELIST, group, category, type);
    Set<String> blackList = getListEntries(Configurations.BLACKLIST, group, category, type);
    if (blackList == null || whiteList == null) {
        // If one list is missing, we probably have a configuration issue - do not synchronize anything
        LOGGER.warn("No whitelist/blacklist found for " + group.getName() + ", check your configuration !");
        return false;
    }
    // if no white listed items we assume all are accepted
    boolean allowed = whiteList.isEmpty();
    if (!allowed) {
        for (String whiteListItem : whiteList) {
            if (wildCardMatch(event, whiteListItem)) {
                allowed = true;
                break; // one whitelist hit is enough; no need to scan further
            }
        }
    }
    if (allowed) {
        // any blacklist match vetoes the event
        for (String blackListItem : blackList) {
            if (wildCardMatch(event, blackListItem)) {
                return false;
            }
        }
    }
    return allowed;
}
/**
 * Check if a string matches a wildcard pattern, where '*' matches any
 * sequence of characters.
 *
 * NOTE(review): regex metacharacters other than '*' (e.g. '.') are passed
 * to the regex engine unescaped, so "org.foo*" also matches "orgXfoo..." -
 * confirm this leniency is intended before tightening it.
 *
 * @param item the string to check.
 * @param pattern the wildcard pattern.
 * @return true if the item string matches the pattern, false else.
 */
protected boolean wildCardMatch(String item, String pattern) {
    if (item == null || pattern == null) {
        return false;
    }
    // translate the wildcard into an equivalent regex and match the whole item
    String regex = pattern.replace("*", ".*");
    return Pattern.matches(regex, item);
}
/** @return the ConfigurationAdmin service used to read the Cellar configuration. */
public ConfigurationAdmin getConfigurationAdmin() {
    return configurationAdmin;
}

/** Injects the ConfigurationAdmin service. */
public void setConfigurationAdmin(ConfigurationAdmin configurationAdmin) {
    this.configurationAdmin = configurationAdmin;
}

/** @return the cluster manager in use. */
public ClusterManager getClusterManager() {
    return clusterManager;
}

/** Injects the cluster manager. */
public void setClusterManager(ClusterManager clusterManager) {
    this.clusterManager = clusterManager;
}

/** @return the cluster group manager in use. */
public GroupManager getGroupManager() {
    return groupManager;
}

/** Injects the cluster group manager. */
public void setGroupManager(GroupManager groupManager) {
    this.groupManager = groupManager;
}
}
| Slightly optimized code for group configuration
management
| core/src/main/java/org/apache/karaf/cellar/core/CellarSupport.java | Slightly optimized code for group configuration management |
|
Java | apache-2.0 | c084b9b8504c1848b061ff8d2e35606bcb0d55e3 | 0 | danberindei/JGroups,deepnarsay/JGroups,vjuranek/JGroups,Sanne/JGroups,rpelisse/JGroups,ibrahimshbat/JGroups,ibrahimshbat/JGroups,ibrahimshbat/JGroups,TarantulaTechnology/JGroups,deepnarsay/JGroups,kedzie/JGroups,slaskawi/JGroups,belaban/JGroups,pruivo/JGroups,danberindei/JGroups,pruivo/JGroups,ibrahimshbat/JGroups,slaskawi/JGroups,TarantulaTechnology/JGroups,kedzie/JGroups,Sanne/JGroups,rpelisse/JGroups,rhusar/JGroups,kedzie/JGroups,ligzy/JGroups,vjuranek/JGroups,vjuranek/JGroups,belaban/JGroups,deepnarsay/JGroups,dimbleby/JGroups,TarantulaTechnology/JGroups,rvansa/JGroups,slaskawi/JGroups,Sanne/JGroups,pferraro/JGroups,danberindei/JGroups,tristantarrant/JGroups,pferraro/JGroups,ligzy/JGroups,rhusar/JGroups,pferraro/JGroups,pruivo/JGroups,ligzy/JGroups,dimbleby/JGroups,rvansa/JGroups,rhusar/JGroups,rpelisse/JGroups,belaban/JGroups,dimbleby/JGroups,tristantarrant/JGroups | package org.jgroups.protocols;
import org.jgroups.*;
import org.jgroups.annotations.*;
import org.jgroups.conf.PropertyConverters;
import org.jgroups.stack.IpAddress;
import org.jgroups.stack.Protocol;
import org.jgroups.stack.ProtocolStack;
import org.jgroups.util.*;
import org.jgroups.util.Queue;
import org.jgroups.util.ThreadFactory;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.*;
import java.text.NumberFormat;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.*;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.Lock;
/**
* Generic transport - specific implementations should extend this abstract class.
* Features which are provided to the subclasses include
* <ul>
* <li>version checking
* <li>marshalling and unmarshalling
* <li>message bundling (handling single messages, and message lists)
* <li>incoming packet handler
* <li>loopback
* </ul>
* A subclass has to override
* <ul>
* <li>{@link #sendToAllMembers(byte[], int, int)}
* <li>{@link #sendToSingleMember(org.jgroups.Address, byte[], int, int)}
* <li>{@link #init()}
* <li>{@link #start()}: subclasses <em>must</em> call super.start() <em>after</em> they initialize themselves
* (e.g., created their sockets).
* <li>{@link #stop()}: subclasses <em>must</em> call super.stop() after they deinitialized themselves
* <li>{@link #destroy()}
* </ul>
* The create() or start() method has to create a local address.<br>
* The {@link #receive(Address, Address, byte[], int, int)} method must
* be called by subclasses when a unicast or multicast message has been received.
* @author staBela Ban
* @version $Id: TP.java,v 1.228 2008/09/25 13:15:06 belaban Exp $
*/
@MBean(description="Transport protocol")
@DeprecatedProperty(names={"bind_to_all_interfaces", "use_outgoing_packet_handler"})
public abstract class TP extends Protocol {
private static final byte LIST=1; // we have a list of messages rather than a single message when set
private static final byte MULTICAST=2; // message is a multicast (versus a unicast) message when set
private static final byte OOB=4; // message has OOB flag set (Message.OOB)
private static NumberFormat f;
private static final int INITIAL_BUFSIZE=4095;
// Configure the shared NumberFormat once: no digit grouping and at most
// two fraction digits when formatting numeric values.
static {
    f=NumberFormat.getNumberInstance();
    f.setGroupingUsed(false);
    f.setMaximumFractionDigits(2);
}
private ExposedByteArrayOutputStream out_stream=null;
private ExposedDataOutputStream dos=null;
private final Lock out_stream_lock=new ReentrantLock();
/* ------------------------------------------ JMX and Properties ------------------------------------------ */
@ManagedAttribute
@Property(converter=PropertyConverters.BindAddress.class,
description="The interface (NIC) which should be used by this transport ")
protected InetAddress bind_addr=null;
@Property(description="Ignores all bind address parameters and let's the OS return the local host address. Default is false")
protected boolean use_local_host=false;
@ManagedAttribute
@Property(description=" If true, the transport should use all available interfaces to receive multicast messages. Default is false")
protected boolean receive_on_all_interfaces=false;
/**
* List<NetworkInterface> of interfaces to receive multicasts on. The
* multicast receive socket will listen on all of these interfaces. This is
* a comma-separated list of IP addresses or interface names. E.g.
* "192.168.5.1,eth1,127.0.0.1". Duplicates are discarded; we only bind to
* an interface once. If this property is set, it override
* receive_on_all_interfaces.
*/
@ManagedAttribute
@Property(converter=PropertyConverters.NetworkInterfaceList.class,
description="Comma delimited list of interfaces (IP addresses or interface names) to receive multicasts on")
protected List<NetworkInterface> receive_interfaces=null;
/**
* If true, the transport should use all available interfaces to send
* multicast messages. This means the same multicast message is sent N
* times, so use with care
*/
@Property(description="If true, the transport should use all available interfaces to send multicast messages. Default is false",
deprecatedMessage="This property is deprecated. Use IP bonding or something similar")
protected boolean send_on_all_interfaces=false;
/**
* List<NetworkInterface> of interfaces to send multicasts on. The
* multicast send socket will send the same multicast message on all of
* these interfaces. This is a comma-separated list of IP addresses or
* interface names. E.g. "192.168.5.1,eth1,127.0.0.1". Duplicates are
* discarded. If this property is set, it override send_on_all_interfaces.
*/
@Property(converter=PropertyConverters.NetworkInterfaceList.class,
description="Comma delimited list of interfaces (IP addresses or interface names) to send multicasts on",
deprecatedMessage="This property is deprecated. Use IP bonding or something similar")
protected List<NetworkInterface> send_interfaces=null;
/**
* The port to which the transport binds. 0 means to bind to any (ephemeral)
* port
*/
@Property(name="start_port", deprecatedMessage="start_port is deprecated; use bind_port instead",
description="The port to which the transport binds. Default of 0 binds to any (ephemeral) port")
protected int bind_port=0;
@Property(name="end_port", deprecatedMessage="end_port is deprecated; use port_range instead")
protected int port_range=1; // 27-6-2003 bgooren, Only try one port by default
@Property(description="TODO")
protected boolean prevent_port_reuse=false;
/**
* If true, messages sent to self are treated specially: unicast messages
* are looped back immediately, multicast messages get a local copy first
* and - when the real copy arrives - it will be discarded. Useful for
* Window media (non)sense
*/
@ManagedAttribute(description="", writable=true)
@Property(description="Messages to self are looped back immediatelly if true. Default is false")
protected boolean loopback=false;
/**
* Discard packets with a different version. Usually minor version
* differences are okay. Setting this property to true means that we expect
* the exact same version on all incoming packets
*/
@ManagedAttribute(description="Discard packets with a different version", writable=true)
@Property(description="Discard packets with a different version if true. Default is false")
protected boolean discard_incompatible_packets=false;
/**
* Sometimes receivers are overloaded (they have to handle de-serialization
* etc). Packet handler is a separate thread taking care of
* de-serialization, receiver thread(s) simply put packet in queue and
* return immediately. Setting this to true adds one more thread
*/
@ManagedAttribute(description="Should additional thread be used for message deserialization", writable=true)
@Property(name="use_packet_handler",
deprecatedMessage="'use_packet_handler' is deprecated; use 'use_incoming_packet_handler' instead",
description="Should additional thread be used for message deserialization. Default is true")
protected boolean use_incoming_packet_handler=true;
@Property(description="Should concurrent stack with thread pools be used to deliver messages up the stack. Default is true")
protected boolean use_concurrent_stack=true;
@Property(description="Thread naming pattern for threads in this channel. Default is cl")
protected String thread_naming_pattern="cl";
@Property(name="oob_thread_pool.enabled",description="Switch for enabling thread pool for OOB messages. Default true")
protected boolean oob_thread_pool_enabled=true;
@ManagedAttribute(description="Minimum thread pool size for OOB messages. Default is 2")
@Property(name="oob_thread_pool.min_threads")
protected int oob_thread_pool_min_threads=2;
@ManagedAttribute(description="Maximum thread pool size for OOB messages. Default is 10")
@Property(name="oob_thread_pool.max_threads")
protected int oob_thread_pool_max_threads=10;
@ManagedAttribute(description="Timeout in milliseconds to remove idle thread from OOB pool. Default is 30000")
@Property(name="oob_thread_pool.keep_alive_time")
protected long oob_thread_pool_keep_alive_time=30000;
@ManagedAttribute(description="Use queue to enqueue incoming OOB messages. Default is true")
@Property(name="oob_thread_pool.queue_enabled",
description="Use queue to enqueue incoming OOB messages. Default is true")
protected boolean oob_thread_pool_queue_enabled=true;
@ManagedAttribute(description="Maximum queue size for incoming OOB messages. Default is 500")
@Property(name="oob_thread_pool.queue_max_size")
protected int oob_thread_pool_queue_max_size=500;
@ManagedAttribute
@Property(name="oob_thread_pool.rejection_policy",
description="Thread rejection policy. Possible values are Abort, Discard, DiscardOldest and Run. Default is Run")
String oob_thread_pool_rejection_policy="Run";
@ManagedAttribute(description="Minimum thread pool size for regular messages. Default is 2")
@Property(name="thread_pool.min_threads")
protected int thread_pool_min_threads=2;
@ManagedAttribute(description="Maximum thread pool size for regular messages. Default is 10")
@Property(name="thread_pool.max_threads")
protected int thread_pool_max_threads=10;
@ManagedAttribute(description="Timeout in milliseconds to remove idle thread from regular pool. Default is 30000")
@Property(name="thread_pool.keep_alive_time")
protected long thread_pool_keep_alive_time=30000;
@ManagedAttribute(description="Switch for enabling thread pool for regular messages. Default true")
@Property(name="thread_pool.enabled")
protected boolean thread_pool_enabled=true;
@ManagedAttribute(description="Use queue to enqueue incoming regular messages")
@Property(name="thread_pool.queue_enabled",
description="Use queue to enqueue incoming regular messages. Default is true")
protected boolean thread_pool_queue_enabled=true;
@ManagedAttribute(description="Maximum queue size for incoming OOB messages")
@Property(name="thread_pool.queue_max_size",
description="Maximum queue size for incoming OOB messages. Default is 500")
protected int thread_pool_queue_max_size=500;
@ManagedAttribute
@Property(name="thread_pool.rejection_policy",
description="Thread rejection policy. Possible values are Abort, Discard, DiscardOldest and Run Default is Run")
protected String thread_pool_rejection_policy="Run";
@ManagedAttribute(description="Number of threads to be used by the timer thread pool")
@Property(name="timer.num_threads",description="Number of threads to be used by the timer thread pool. Default is 4")
protected int num_timer_threads=4;
@ManagedAttribute(description="Enable bundling of smaller messages into bigger ones", writable=true)
@Property(description="Enable bundling of smaller messages into bigger ones. Default is false")
protected boolean enable_bundling=false;
/** Enable bundling for unicast messages. Ignored if enable_bundling is off */
@Property(description="Enable bundling of smaller messages into bigger ones for unicast messages. Default is true")
protected boolean enable_unicast_bundling=true;
@Property(description="Switch to enbale diagnostic probing. Default is true")
protected boolean enable_diagnostics=true;
@Property(description="Address for diagnostic probing. Default is 224.0.75.75")
protected String diagnostics_addr="224.0.75.75";
@Property(description="Port for diagnostic probing. Default is 7500")
protected int diagnostics_port=7500;
@Property(description="If assigned enable this transport to be a singleton (shared) transport")
protected String singleton_name=null;
@Property(description="Path to a file to store currently used ports on this machine.")
protected String persistent_ports_file=null;
@Property(name="ports_expiry_time",description="Timeout to expire ports used with PortManager. Default is 30000 msec")
protected long pm_expiry_time=30000L;
@Property(description="Switch to enable tracking of currently used ports on this machine. Default is false")
protected boolean persistent_ports=false;
/**
* Maximum number of bytes for messages to be queued until they are sent.
* This value needs to be smaller than the largest datagram packet size in
* case of UDP
*/
protected int max_bundle_size=64000;
/**
* Max number of milliseconds until queued messages are sent. Messages are
* sent when max_bundle_size or max_bundle_timeout has been exceeded
* (whichever occurs faster)
*/
protected long max_bundle_timeout=20;
/* --------------------------------------------- JMX ---------------------------------------------- */
@ManagedAttribute
protected long num_msgs_sent=0;
@ManagedAttribute
protected long num_msgs_received=0;
@ManagedAttribute
protected long num_bytes_sent=0;
@ManagedAttribute
protected long num_bytes_received=0;
/** The name of the group to which this member is connected */
@ManagedAttribute
protected String channel_name=null;
/**
* whether or not warnings about messages from different groups are logged -
* private flag, not for common use
*/
@ManagedAttribute(writable=true, description="whether or not warnings about messages from different groups are logged")
private boolean log_discard_msgs=true;
@ManagedAttribute
protected long num_oob_msgs_received=0;
@ManagedAttribute
protected long num_incoming_msgs_received=0;
/* --------------------------------------------- Fields ------------------------------------------------------ */
/** The address (host and port) of this member */
protected Address local_addr=null;
/** The members of this group (updated when a member joins or leaves) */
protected final HashSet<Address> members=new HashSet<Address>(11);
protected View view=null;
protected final ExposedByteArrayInputStream in_stream=new ExposedByteArrayInputStream(new byte[] { '0' });
protected final DataInputStream dis=new DataInputStream(in_stream);
/** Used by packet handler to store incoming DatagramPackets */
protected Queue incoming_packet_queue=null;
/**
* Dequeues DatagramPackets from packet_queue, unmarshalls them and calls
* <tt>handleIncomingUdpPacket()</tt>
*/
protected IncomingPacketHandler incoming_packet_handler=null;
/** Used by packet handler to store incoming Messages */
protected Queue incoming_msg_queue=null;
protected IncomingMessageHandler incoming_msg_handler;
protected ThreadGroup pool_thread_group=new ThreadGroup(Util.getGlobalThreadGroup(), "Thread Pools");
/**
* Keeps track of connects and disconnects, in order to start and stop
* threads
*/
protected int connect_count=0;
/**
* ================================== OOB thread pool ========================
*/
protected Executor oob_thread_pool;
/** Factory which is used by oob_thread_pool */
protected ThreadFactory oob_thread_factory=null;
/**
* Used if oob_thread_pool is a ThreadPoolExecutor and
* oob_thread_pool_queue_enabled is true
*/
protected BlockingQueue<Runnable> oob_thread_pool_queue=null;
/**
* ================================== Regular thread pool =======================
*/
/**
* The thread pool which handles unmarshalling, version checks and
* dispatching of regular messages
*/
protected Executor thread_pool;
/** Factory which is used by oob_thread_pool */
protected ThreadFactory default_thread_factory=null;
/**
* Used if thread_pool is a ThreadPoolExecutor and thread_pool_queue_enabled
* is true
*/
protected BlockingQueue<Runnable> thread_pool_queue=null;
/**
* ================================== Timer thread pool =========================
*/
protected TimeScheduler timer=null;
protected ThreadFactory timer_thread_factory;
/**
* =================================Default thread factory ========================
*/
/** Used by all threads created by JGroups outside of the thread pools */
protected ThreadFactory global_thread_factory=null;
/**
* If set it will be added to <tt>local_addr</tt>. Used to implement for
* example transport independent addresses
*/
protected byte[] additional_data=null;
private Bundler bundler=null;
private DiagnosticsHandler diag_handler=null;
/**
* If singleton_name is enabled, this map is used to de-multiplex incoming
* messages according to their cluster names (attached to the message by the
* transport anyway). The values are the next protocols above the
* transports.
*/
private final ConcurrentMap<String,Protocol> up_prots=new ConcurrentHashMap<String,Protocol>();
protected TpHeader header;
protected final String name=getName();
protected PortsManager pm=null;
/**
 * Creates the TP protocol, and initializes the state variables, does
 * however not start any sockets or threads. Subclasses create their
 * sockets in start() (see the class-level documentation).
 */
protected TP() {
}
/**
 * Debug-only string form: the protocol name, plus the local address when
 * it is already known.
 */
public String toString() {
    if(local_addr == null)
        return name;
    return name + "(local address: " + local_addr + ')';
}
/** Resets all JMX message/byte counters of this transport to zero. */
public void resetStats() {
    num_msgs_sent=0;
    num_msgs_received=0;
    num_bytes_sent=0;
    num_bytes_received=0;
    num_oob_msgs_received=0;
    num_incoming_msgs_received=0;
}
/** Enables/disables the queue in front of the regular thread pool. */
public void setThreadPoolQueueEnabled(boolean flag) {thread_pool_queue_enabled=flag;}

/** @return the pool handling regular (non-OOB) incoming messages. */
public Executor getDefaultThreadPool() {
    return thread_pool;
}

/** Replaces the regular thread pool; the previous pool (if any) is shut down first. */
public void setDefaultThreadPool(Executor thread_pool) {
    if(this.thread_pool != null)
        shutdownThreadPool(this.thread_pool);
    this.thread_pool=thread_pool;
}

/** @return the factory used to create threads for the regular pool. */
public ThreadFactory getDefaultThreadPoolThreadFactory() {
    return default_thread_factory;
}

/** Sets the regular pool's factory; applied immediately if the pool is a ThreadPoolExecutor. */
public void setDefaultThreadPoolThreadFactory(ThreadFactory factory) {
    default_thread_factory=factory;
    if(thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)thread_pool).setThreadFactory(factory);
}

/** @return the pool handling OOB (out-of-band) messages. */
public Executor getOOBThreadPool() {
    return oob_thread_pool;
}

/** Replaces the OOB thread pool; the previous pool (if any) is shut down first. */
public void setOOBThreadPool(Executor oob_thread_pool) {
    if(this.oob_thread_pool != null) {
        shutdownThreadPool(this.oob_thread_pool);
    }
    this.oob_thread_pool=oob_thread_pool;
}

/** @return the factory used to create threads for the OOB pool. */
public ThreadFactory getOOBThreadPoolThreadFactory() {
    return oob_thread_factory;
}

/** Sets the OOB pool's factory; applied immediately if the pool is a ThreadPoolExecutor. */
public void setOOBThreadPoolThreadFactory(ThreadFactory factory) {
    oob_thread_factory=factory;
    if(oob_thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)oob_thread_pool).setThreadFactory(factory);
}

/** @return the factory used by the timer thread pool. */
public ThreadFactory getTimerThreadFactory() {
    return timer_thread_factory;
}

/** Sets the timer's thread factory and pushes it into the running timer. */
public void setTimerThreadFactory(ThreadFactory factory) {
    timer_thread_factory=factory;
    timer.setThreadFactory(factory);
}

/** @return the shared timer (task scheduler) of this transport. */
public TimeScheduler getTimer() {return timer;}

/** @return the global factory for threads created outside of the pools. */
public ThreadFactory getThreadFactory() {
    return global_thread_factory;
}

/** Sets the global thread factory. */
public void setThreadFactory(ThreadFactory factory) {
    global_thread_factory=factory;
}
/**
 * Names the current thread. Valid values are "pcl":
 * p: include the previous (original) name, e.g. "Incoming thread-1", "UDP ucast receiver"
 * c: include the cluster name, e.g. "MyCluster"
 * l: include the local address of the current member, e.g. "192.168.5.1:5678"
 */
public String getThreadNamingPattern() {return thread_naming_pattern;}

// ---- JMX statistic getters -------------------------------------------
public long getNumMessagesSent() {return num_msgs_sent;}
public long getNumMessagesReceived() {return num_msgs_received;}
public long getNumBytesSent() {return num_bytes_sent;}
public long getNumBytesReceived() {return num_bytes_received;}

/** @return the bind address as a string, or "null" when not yet set. */
public String getBindAddress() {return bind_addr != null? bind_addr.toString() : "null";}

/** Sets the bind address from a host name or textual IP address. */
public void setBindAddress(String bind_addr) throws UnknownHostException {
    this.bind_addr=InetAddress.getByName(bind_addr);
}

public InetAddress getBindAddressAsInetAddress() {return bind_addr;}
public int getBindPort() {return bind_port;}
public void setBindPort(int port) {this.bind_port=port;}

/** @deprecated Use {@link #isReceiveOnAllInterfaces()} instead */
public boolean getBindToAllInterfaces() {return receive_on_all_interfaces;}
public void setBindToAllInterfaces(boolean flag) {this.receive_on_all_interfaces=flag;}

// ---- multicast interface / packet-handling flags ---------------------
public boolean isReceiveOnAllInterfaces() {return receive_on_all_interfaces;}
public List<NetworkInterface> getReceiveInterfaces() {return receive_interfaces;}
public boolean isSendOnAllInterfaces() {return send_on_all_interfaces;}
public List<NetworkInterface> getSendInterfaces() {return send_interfaces;}
public boolean isDiscardIncompatiblePackets() {return discard_incompatible_packets;}
public void setDiscardIncompatiblePackets(boolean flag) {discard_incompatible_packets=flag;}
public boolean isEnableBundling() {return enable_bundling;}
public void setEnableBundling(boolean flag) {enable_bundling=flag;}
public boolean isEnableUnicastBundling() {return enable_unicast_bundling;}
public void setEnableUnicastBundling(boolean enable_unicast_bundling) {this.enable_unicast_bundling=enable_unicast_bundling;}
public void setPortRange(int range) {this.port_range=range;}
public void setUseConcurrentStack(boolean flag) {use_concurrent_stack=flag;}
@ManagedAttribute
public String getLocalAddressAsString() {return local_addr != null? local_addr.toString() : "n/a";}

@ManagedAttribute
public boolean isOOBThreadPoolEnabled() { return oob_thread_pool_enabled; }

// NOTE(review): method name has a typo ("Defaul"); kept as-is for JMX compatibility
@ManagedAttribute
public boolean isDefaulThreadPoolEnabled() { return thread_pool_enabled; }

@ManagedAttribute(description="Maximum number of bytes for messages to be queued until they are sent")
public int getMaxBundleSize() {return max_bundle_size;}

/**
 * Sets the maximum number of bytes queued before a bundle is sent.
 * @throws IllegalArgumentException if size is not positive
 */
@ManagedAttribute(description="Maximum number of bytes for messages to be queued until they are sent",writable=true)
@Property(name="max_bundle_size")
public void setMaxBundleSize(int size) {
    if(size <= 0) {
        throw new IllegalArgumentException("max_bundle_size (" + size + ") is <= 0");
    }
    max_bundle_size=size;
}

@ManagedAttribute(description="Max number of milliseconds until queued messages are sent")
public long getMaxBundleTimeout() {return max_bundle_timeout;}

/**
 * Sets the maximum time queued messages wait before being sent.
 * @throws IllegalArgumentException if timeout is not positive
 */
@ManagedAttribute(description="Max number of milliseconds until queued messages are sent", writable=true)
@Property(name="max_bundle_timeout")
public void setMaxBundleTimeout(long timeout) {
    if(timeout <= 0) {
        throw new IllegalArgumentException("max_bundle_timeout of " + timeout + " is invalid");
    }
    max_bundle_timeout=timeout;
}

public Address getLocalAddress() {return local_addr;}
public String getChannelName() {return channel_name;}
public boolean isLoopback() {return loopback;}
public void setLoopback(boolean b) {loopback=b;}
public boolean isUseIncomingPacketHandler() {return use_incoming_packet_handler;}

/** @return the map of cluster name to the protocol above this (shared) transport. */
public ConcurrentMap<String,Protocol> getUpProtocols() {
    return up_prots;
}
// ---- OOB thread pool JMX accessors -----------------------------------
// All accessors return 0 / are no-ops when the pool is not a
// ThreadPoolExecutor (e.g. a DirectExecutor when the pool is disabled).

@ManagedAttribute
public int getOOBMinPoolSize() {
    return oob_thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)oob_thread_pool).getCorePoolSize() : 0;
}

@ManagedAttribute
public void setOOBMinPoolSize(int size) {
    if(oob_thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)oob_thread_pool).setCorePoolSize(size);
}

@ManagedAttribute
public int getOOBMaxPoolSize() {
    return oob_thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)oob_thread_pool).getMaximumPoolSize() : 0;
}

@ManagedAttribute
public void setOOBMaxPoolSize(int size) {
    if(oob_thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)oob_thread_pool).setMaximumPoolSize(size);
}

@ManagedAttribute
public int getOOBPoolSize() {
    return oob_thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)oob_thread_pool).getPoolSize() : 0;
}

@ManagedAttribute
public long getOOBKeepAliveTime() {
    return oob_thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)oob_thread_pool).getKeepAliveTime(TimeUnit.MILLISECONDS) : 0;
}

@ManagedAttribute
public void setOOBKeepAliveTime(long time) {
    if(oob_thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)oob_thread_pool).setKeepAliveTime(time, TimeUnit.MILLISECONDS);
}

public long getOOBMessages() {
    return num_oob_msgs_received;
}

@ManagedAttribute
public int getOOBQueueSize() {
    return oob_thread_pool_queue.size();
}

public int getOOBMaxQueueSize() {
    return oob_thread_pool_queue_max_size;
}

// ---- regular (incoming) thread pool JMX accessors --------------------

@ManagedAttribute
public int getIncomingMinPoolSize() {
    return thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)thread_pool).getCorePoolSize() : 0;
}

@ManagedAttribute
public void setIncomingMinPoolSize(int size) {
    if(thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)thread_pool).setCorePoolSize(size);
}

@ManagedAttribute
public int getIncomingMaxPoolSize() {
    return thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)thread_pool).getMaximumPoolSize() : 0;
}

@ManagedAttribute
public void setIncomingMaxPoolSize(int size) {
    if(thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)thread_pool).setMaximumPoolSize(size);
}

@ManagedAttribute
public int getIncomingPoolSize() {
    return thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)thread_pool).getPoolSize() : 0;
}

@ManagedAttribute
public long getIncomingKeepAliveTime() {
    return thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)thread_pool).getKeepAliveTime(TimeUnit.MILLISECONDS) : 0;
}

@ManagedAttribute
public void setIncomingKeepAliveTime(long time) {
    if(thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)thread_pool).setKeepAliveTime(time, TimeUnit.MILLISECONDS);
}

public long getIncomingMessages() {
    return num_incoming_msgs_received;
}

@ManagedAttribute
public int getIncomingQueueSize() {
    return thread_pool_queue.size();
}

public int getIncomingMaxQueueSize() {
    return thread_pool_queue_max_size;
}

/** Enables/disables warnings about messages received from different cluster groups. */
public void setLogDiscardMessages(boolean flag) {
    log_discard_msgs=flag;
}

public boolean getLogDiscardMessages() {
    return log_discard_msgs;
}
/**
 * Returns this transport's message/byte counters, merged into the map
 * produced by the superclass.
 *
 * @return a map of statistic name to value; never null.
 */
public Map<String,Object> dumpStats() {
    Map<String,Object> retval=super.dumpStats();
    if(retval == null)
        retval=new HashMap<String,Object>();
    // autoboxing replaces the deprecated explicit new Long(...) allocations
    retval.put("num_msgs_sent", num_msgs_sent);
    retval.put("num_msgs_received", num_msgs_received);
    retval.put("num_bytes_sent", num_bytes_sent);
    retval.put("num_bytes_received", num_bytes_received);
    return retval;
}
/**
 * Send to all members in the group. UDP would use an IP multicast message, whereas TCP would send N
 * messages, one for each member
 * @param data The data to be sent. This is not a copy, so don't modify it
 * @param offset
 * @param length
 * @throws Exception
 */
public abstract void sendToAllMembers(byte[] data, int offset, int length) throws Exception;

/**
 * Send a message to a single member. UDP would send a unicast datagram, whereas TCP would
 * use the connection to that one member.
 * @param dest Must be a non-null unicast address
 * @param data The data to be sent. This is not a copy, so don't modify it
 * @param offset
 * @param length
 * @throws Exception
 */
public abstract void sendToSingleMember(Address dest, byte[] data, int offset, int length) throws Exception;

/** @return transport-specific information; appended to diagnostic probe responses. */
public abstract String getInfo();

// NOTE(review): presumably invoked after unmarshalling a single message so subclasses
// can set transport-specific addresses on it - confirm against the subclasses.
public abstract void postUnmarshalling(Message msg, Address dest, Address src, boolean multicast);

// NOTE(review): presumably the list-variant of postUnmarshalling, applied to each
// message of a bundled message list - confirm against the subclasses.
public abstract void postUnmarshallingList(Message msg, Address dest, boolean multicast);
/**
 * Builds the header of a diagnostic response for the given channel: local
 * address, cluster name, version and current view, followed by the
 * transport-specific info from getInfo().
 */
private StringBuilder _getInfo(Channel ch) {
    Object addr=ch.getLocalAddress();
    String cluster=ch.getClusterName();
    StringBuilder info=new StringBuilder();
    info.append(addr).append(" (").append(cluster).append(") ").append("\n");
    info.append("local_addr=").append(addr).append("\n");
    info.append("group_name=").append(cluster).append("\n");
    info.append("version=").append(Version.description).append(", cvs=\"").append(Version.cvs).append("\"\n");
    info.append("view: ").append(ch.getView()).append('\n');
    info.append(getInfo());
    return info;
}
/**
 * Dispatches a diagnostic probe request: for a shared (singleton) transport
 * every stack above it is probed, otherwise only this transport's own stack.
 */
private void handleDiagnosticProbe(SocketAddress sender, DatagramSocket sock, String request) {
    if(!isSingleton()) {
        handleDiagnosticProbe(sender, sock, request, stack);
        return;
    }
    for(Protocol prot: up_prots.values())
        handleDiagnosticProbe(sender, sock, request, prot.getProtocolStack());
}
/**
 * Handles one diagnostic probe request against the given protocol stack and
 * sends the textual response back to the requester on the same socket.
 * A request starting with "query" may be followed by any of the section
 * tokens: "jmx" (channel statistics), "props" (protocol configuration),
 * "info" (channel info map) and "dump" (thread dump). Any error is logged
 * and swallowed so a bad probe never disturbs the transport.
 */
private void handleDiagnosticProbe(SocketAddress sender, DatagramSocket sock, String request, ProtocolStack stack) {
    try {
        StringTokenizer tok=new StringTokenizer(request);
        String req=tok.nextToken();
        StringBuilder info=new StringBuilder("n/a");
        if(req.trim().toLowerCase().startsWith("query")) {
            // collect the remaining tokens (the requested sections), lower-cased
            ArrayList<String> l=new ArrayList<String>(tok.countTokens());
            while(tok.hasMoreTokens())
                l.add(tok.nextToken().trim().toLowerCase());
            info=_getInfo(stack.getChannel());
            if(l.contains("jmx")) {
                // append the channel's statistics map
                Channel ch=stack.getChannel();
                if(ch != null) {
                    Map<String,Object> m=ch.dumpStats();
                    StringBuilder sb=new StringBuilder();
                    sb.append("stats:\n");
                    for(Iterator<Entry<String,Object>> it=m.entrySet().iterator(); it.hasNext();) {
                        sb.append(it.next()).append("\n");
                    }
                    info.append(sb);
                }
            }
            if(l.contains("props")) {
                // append the full protocol stack configuration
                String p=stack.printProtocolSpec(true);
                info.append("\nprops:\n").append(p);
            }
            if(l.contains("info")) {
                // append the channel's information map, one key/value per line
                Map<String, Object> tmp=stack.getChannel().getInfo();
                info.append("INFO:\n");
                for(Map.Entry<String,Object> entry: tmp.entrySet()) {
                    info.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
                }
            }
            if(l.contains("dump")) {
                // append a stack trace of all live threads
                info.append("\nstack trace:\n");
                info.append(Util.dumpThreads());
            }
        }
        byte[] diag_rsp=info.toString().getBytes();
        if(log.isDebugEnabled())
            log.debug("sending diag response to " + sender);
        sendResponse(sock, sender, diag_rsp);
    }
    catch(Throwable t) {
        if(log.isErrorEnabled())
            log.error("failed sending diag rsp to " + sender, t);
    }
}
/** Sends the given buffer as a single datagram back to the probe requester. */
private static void sendResponse(DatagramSocket sock, SocketAddress sender, byte[] buf) throws IOException {
    DatagramPacket rsp=new DatagramPacket(buf, 0, buf.length, sender);
    sock.send(rsp);
}
/* ------------------------------------------------------------------------------- */
/*------------------------------ Protocol interface ------------------------------ */
/**
 * Initializes the transport: creates thread factories, the timer, the marshalling
 * output stream, the OOB and regular thread pools, and (optionally) the persistent
 * port manager. Also pushes bind_addr up the stack as a CONFIG event.
 * Called once when the protocol stack is initialized.
 */
public void init() throws Exception {
    super.init();
    // Create the default thread factory
    global_thread_factory=new DefaultThreadFactory(Util.getGlobalThreadGroup(), "", false);
    // Create the timer and the associated thread factory - depends on singleton_name
    // timer_thread_factory=new DefaultThreadFactory(Util.getGlobalThreadGroup(), "Timer", true, true);
    timer_thread_factory=new LazyThreadFactory(Util.getGlobalThreadGroup(), "Timer", true, true);
    if(isSingleton()) {
        // a shared transport serves several clusters, so no single cluster name applies
        timer_thread_factory.setIncludeClusterName(false);
    }
    default_thread_factory=new DefaultThreadFactory(pool_thread_group, "Incoming", false, true);
    oob_thread_factory=new DefaultThreadFactory(pool_thread_group, "OOB", false, true);
    setInAllThreadFactories(channel_name, local_addr, thread_naming_pattern);
    timer=new TimeScheduler(timer_thread_factory, num_timer_threads);
    // fail fast on misconfigured rejection policies, before the pools are built
    verifyRejectionPolicy(oob_thread_pool_rejection_policy);
    verifyRejectionPolicy(thread_pool_rejection_policy);
    out_stream=new ExposedByteArrayOutputStream(INITIAL_BUFSIZE);
    dos=new ExposedDataOutputStream(out_stream);
    // ========================================== OOB thread pool ==============================
    if(oob_thread_pool_enabled) {
        if(oob_thread_pool_queue_enabled)
            oob_thread_pool_queue=new LinkedBlockingQueue<Runnable>(oob_thread_pool_queue_max_size);
        else
            oob_thread_pool_queue=new SynchronousQueue<Runnable>();
        oob_thread_pool=createThreadPool(oob_thread_pool_min_threads, oob_thread_pool_max_threads, oob_thread_pool_keep_alive_time,
                                         oob_thread_pool_rejection_policy, oob_thread_pool_queue, oob_thread_factory);
    }
    else { // otherwise use the caller's thread to unmarshal the byte buffer into a message
        oob_thread_pool=new DirectExecutor();
    }
    // ====================================== Regular thread pool ===========================
    if(thread_pool_enabled) {
        if(thread_pool_queue_enabled)
            thread_pool_queue=new LinkedBlockingQueue<Runnable>(thread_pool_queue_max_size);
        else
            thread_pool_queue=new SynchronousQueue<Runnable>();
        thread_pool=createThreadPool(thread_pool_min_threads, thread_pool_max_threads, thread_pool_keep_alive_time,
                                     thread_pool_rejection_policy, thread_pool_queue, default_thread_factory);
    }
    else { // otherwise use the caller's thread to unmarshal the byte buffer into a message
        thread_pool=new DirectExecutor();
    }
    if(persistent_ports){
        // remember bound ports across restarts
        pm = new PortsManager(pm_expiry_time,persistent_ports_file);
    }
    if(bind_addr != null) {
        // make the bind address known to the protocols above us
        Map<String, Object> m=new HashMap<String, Object>(1);
        m.put("bind_addr", bind_addr);
        up(new Event(Event.CONFIG, m));
    }
}
/**
 * Tears the transport down: stops the timer and shuts down the OOB and regular
 * thread pools. Counterpart of {@link #init()}.
 */
public void destroy() {
    super.destroy();
    if(timer != null) {
        try {
            timer.stop();
        }
        catch(InterruptedException e) {
            log.error("failed stopping the timer", e);
            // restore the interrupt status so callers further up can observe it
            // (the original code swallowed it after logging)
            Thread.currentThread().interrupt();
        }
    }
    // stop the thread pools; DirectExecutor instances need no shutdown, hence the type check
    if(oob_thread_pool instanceof ThreadPoolExecutor) {
        shutdownThreadPool(oob_thread_pool);
        oob_thread_pool=null;
    }
    if(thread_pool instanceof ThreadPoolExecutor) {
        shutdownThreadPool(thread_pool);
        thread_pool=null;
    }
}
/**
 * Creates the unicast and multicast sockets and starts the unicast and multicast
 * receiver threads. Also starts the diagnostics handler, the legacy (non-concurrent
 * stack) packet/message handler threads and the bundler, depending on configuration.
 *
 * @throws Exception if the timer has not been created (init() not run)
 */
public void start() throws Exception {
    if(timer == null)
        throw new Exception("timer is null");
    if(enable_diagnostics) {
        diag_handler=new DiagnosticsHandler();
        diag_handler.start();
    }
    // legacy path: only used when the concurrent stack is disabled
    if(use_incoming_packet_handler && !use_concurrent_stack) {
        incoming_packet_queue=new Queue();
        incoming_packet_handler=new IncomingPacketHandler();
        incoming_packet_handler.start();
    }
    if(loopback && !use_concurrent_stack) {
        incoming_msg_queue=new Queue();
        incoming_msg_handler=new IncomingMessageHandler();
        incoming_msg_handler.start();
    }
    if(enable_bundling) {
        bundler=new Bundler();
    }
    setInAllThreadFactories(channel_name, local_addr, thread_naming_pattern);
    sendUpLocalAddressEvent();
}
/**
 * Stops the diagnostics handler and the legacy incoming packet/message handler
 * threads. Counterpart of {@link #start()}.
 */
public void stop() {
    if(diag_handler != null) {
        diag_handler.stop();
        diag_handler=null;
    }
    // stop the incoming packet handler thread, if running
    if(incoming_packet_handler != null) {
        incoming_packet_handler.stop();
    }
    // stop the incoming message handler, if running
    if(incoming_msg_handler != null) {
        incoming_msg_handler.stop();
    }
}
/** Records one more connected channel on this (possibly shared) transport. */
protected void handleConnect() throws Exception {
    connect_count=connect_count + 1;
}
/** Records a channel disconnect; the counter never drops below zero. */
protected void handleDisconnect() {
    if(connect_count > 0)
        connect_count--;
}
/**
 * Returns the name under which this transport is shared between channels,
 * or {@code null}/empty if it is not shared.
 */
public String getSingletonName() {
    return singleton_name;
}
/** Whether this transport is shared by multiple channels (a non-empty singleton name is set). */
public boolean isSingleton() {
    if(singleton_name == null)
        return false;
    return singleton_name.length() > 0;
}
/**
 * Handles an event travelling up the stack. CONFIG events are both forwarded up
 * and applied locally (handleConfigEvent); all other events are just forwarded.
 * In singleton (shared transport) mode the event is fanned out to all attached
 * protocol stacks instead of a single up_prot.
 *
 * @param evt the event being sent up the stack
 * @return null for CONFIG and singleton fan-out; otherwise whatever up_prot returns
 */
public Object up(Event evt) {
    switch(evt.getType()) {
        case Event.CONFIG:
            // forward first, then apply the config to this transport
            if(isSingleton())
                passToAllUpProtocols(evt);
            else
                up_prot.up(evt);
            if(log.isDebugEnabled()) log.debug("received CONFIG event: " + evt.getArg());
            handleConfigEvent((Map<String,Object>)evt.getArg()); // unchecked cast: CONFIG args are maps by convention
            return null;
    }
    if(isSingleton()) {
        passToAllUpProtocols(evt);
        return null;
    }
    else
        return up_prot.up(evt);
}
/**
 * Called by the layer above this layer. For MSG events the message gets the
 * transport header, a source address, is optionally looped back to ourselves
 * (on a thread pool, so headers removed by upper layers don't corrupt the copy
 * still being marshalled) and finally sent over the wire. Non-MSG events are
 * delegated to {@link #handleDownEvent(Event)}.
 *
 * @param evt the event travelling down the stack
 * @return handleDownEvent's result for non-MSG events, otherwise null
 */
public Object down(Event evt) {
    if(evt.getType() != Event.MSG) { // unless it is a message handle it and respond
        return handleDownEvent(evt);
    }
    Message msg=(Message)evt.getArg();
    if(header != null) {
        // added patch by Roland Kurmann (March 20 2003)
        // msg.putHeader(name, new TpHeader(channel_name));
        msg.putHeaderIfAbsent(name, header);
    }
    setSourceAddress(msg); // very important !! listToBuffer() will fail with a null src address !!
    if(log.isTraceEnabled()) {
        log.trace("sending msg to " + msg.getDest() + ", src=" + msg.getSrc() + ", headers are " + msg.printHeaders());
    }
    // Don't send if destination is local address. Instead, switch dst and src and put in up_queue.
    // If multicast message, loopback a copy directly to us (but still multicast). Once we receive this,
    // we will discard our own multicast message
    Address dest=msg.getDest();
    boolean multicast=dest == null || dest.isMulticastAddress();
    if(loopback && (multicast || dest.equals(local_addr))) {
        // we *have* to make a copy, or else up_prot.up() might remove headers from msg which will then *not*
        // be available for marshalling further down (when sending the message)
        final Message copy=msg.copy();
        if(log.isTraceEnabled()) log.trace(new StringBuilder("looping back message ").append(copy));
        // up_prot.up(new Event(Event.MSG, copy));
        // changed to fix http://jira.jboss.com/jira/browse/JGRP-506
        // OOB messages loop back on the OOB pool so they are not delayed behind regular traffic
        Executor pool=msg.isFlagSet(Message.OOB)? oob_thread_pool : thread_pool;
        pool.execute(new Runnable() {
            public void run() {
                passMessageUp(copy, false);
            }
        });
        if(!multicast)
            return null; // unicast to self: nothing to put on the wire
    }
    try {
        send(msg, dest, multicast);
    }
    catch(InterruptedException interruptedEx) {
        Thread.currentThread().interrupt(); // let someone else handle the interrupt
    }
    catch(Throwable e) {
        if(log.isErrorEnabled()) {
            String dst=msg.getDest() == null? "null" : msg.getDest().toString();
            log.error("failed sending message to " + dst + " (" + msg.size() + " bytes)", e);
        }
    }
    return null;
}
/*--------------------------- End of Protocol interface -------------------------- */
/* ------------------------------ Private Methods -------------------------------- */
/**
 * Sets our own address as the message source, but only if no source is set yet.
 * We cannot unconditionally overwrite it, as we might be sending a message on
 * behalf of someone else (e.g. retransmission after the original sender crashed,
 * or returning unstable messages in a FLUSH protocol).
 */
private void setSourceAddress(Message msg) {
    if(msg.getSrc() != null)
        return; // sending on behalf of someone else - keep their address
    msg.setSrc(local_addr);
}
/**
 * Delivers a received message to the protocol(s) above. The transport header
 * identifies the destination cluster: in singleton mode the message is routed to
 * the matching attached stack; otherwise it is dropped (with a warning) if its
 * cluster name differs from ours and matching is requested. Messages without a
 * transport header are only delivered when we have no cluster name ourselves.
 *
 * @param msg the received message
 * @param perform_cluster_name_matching whether to drop messages from other clusters
 *        (false for locally looped-back messages, which are trusted)
 */
private void passMessageUp(Message msg, boolean perform_cluster_name_matching) {
    TpHeader hdr=(TpHeader)msg.getHeader(name); // replaced removeHeader() with getHeader()
    if(hdr == null) {
        if(channel_name == null) {
            // not yet connected to a cluster: deliver anyway
            Event evt=new Event(Event.MSG, msg);
            if(isSingleton()) {
                passMessageToAll(evt);
            }
            else {
                up_prot.up(evt);
            }
        }
        else {
            if(log.isErrorEnabled())
                log.error(new StringBuilder("message does not have a transport header, msg is ").append(msg).
                        append(", headers are ").append(msg.printHeaders()).append(", will be discarded"));
        }
        return;
    }
    String ch_name=hdr.channel_name;
    if(isSingleton()) {
        // shared transport: route by cluster name to the attached stack
        Protocol tmp_prot=up_prots.get(ch_name);
        if(tmp_prot != null) {
            Event evt=new Event(Event.MSG, msg);
            if(log.isTraceEnabled()) {
                StringBuilder sb=new StringBuilder("message is ").append(msg).append(", headers are ").append(msg.printHeaders());
                log.trace(sb);
            }
            tmp_prot.up(evt);
        }
        else {
            // we discard messages for a group we don't have. If we had a scenario with channel C1 and A,B on it,
            // and channel C2 and only A on it (asymmetric setup), then C2 would always log warnings that B was
            // not found (Jan 25 2008 (bela))
            // if(log.isWarnEnabled())
            // log.warn(new StringBuilder("discarded message from group \"").append(ch_name).
            // append("\" (our groups are ").append(up_prots.keySet()).append("). Sender was ").append(msg.getSrc()));
        }
    }
    else {
        // Discard if message's group name is not the same as our group name
        if(perform_cluster_name_matching && channel_name != null && !channel_name.equals(ch_name)) {
            if(log.isWarnEnabled() && log_discard_msgs)
                log.warn(new StringBuilder("discarded message from different group \"").append(ch_name).
                        append("\" (our group is \"").append(channel_name).append("\"). Sender was ").append(msg.getSrc()));
        }
        else {
            Event evt=new Event(Event.MSG, msg);
            if(log.isTraceEnabled()) {
                StringBuilder sb=new StringBuilder("message is ").append(msg).append(", headers are ").append(msg.printHeaders());
                log.trace(sb);
            }
            up_prot.up(evt);
        }
    }
}
/**
 * Fans an event out to every protocol stack attached to this shared transport.
 * A failure in one stack is logged and does not prevent delivery to the others.
 */
private void passMessageToAll(Event evt) {
    for(Protocol p: up_prots.values()) {
        try {
            p.up(evt);
        }
        catch(Exception ex) {
            if(log.isErrorEnabled())
                log.error("failure passing message up: message is " + evt.getArg(), ex);
        }
    }
}
/**
 * Subclasses must call this method when a unicast or multicast message has been received.
 * Declared final so subclasses cannot override this method.
 * Peeks at the OOB flag byte and dispatches the raw packet either to the concurrent
 * stack's thread pools, to the legacy packet queue, or inline on this thread.
 *
 * @param dest   destination address (null or multicast address for multicasts)
 * @param sender address the packet came from
 * @param data   raw packet bytes
 * @param offset start of the payload within data
 * @param length payload length
 */
protected final void receive(Address dest, Address sender, byte[] data, int offset, int length) {
    if(data == null) return;
    if(log.isTraceEnabled()){
        boolean mcast=dest == null || dest.isMulticastAddress();
        StringBuilder sb=new StringBuilder("received (");
        sb.append(mcast? "mcast) " : "ucast) ").append(length).append(" bytes from ").append(sender);
        log.trace(sb);
    }
    try {
        // determine whether OOB or not by looking at first byte of 'data'
        boolean oob=false;
        byte oob_flag=data[Global.SHORT_SIZE]; // we need to skip the first 2 bytes (version)
        if((oob_flag & OOB) == OOB)
            oob=true;
        if(use_concurrent_stack) {
            // OOB and regular messages use separate pools so OOB traffic is never queued behind regular traffic
            if(oob) {
                num_oob_msgs_received++;
                dispatchToThreadPool(oob_thread_pool, dest, sender, data, offset, length);
            }
            else {
                num_incoming_msgs_received++;
                dispatchToThreadPool(thread_pool, dest, sender, data, offset, length);
            }
        }
        else {
            if(use_incoming_packet_handler) {
                // copy the buffer: the caller may reuse 'data' for the next packet
                byte[] tmp=new byte[length];
                System.arraycopy(data, offset, tmp, 0, length);
                incoming_packet_queue.add(new IncomingPacket(dest, sender, tmp, 0, length));
            }
            else
                handleIncomingPacket(dest, sender, data, offset, length);
        }
    }
    catch(Throwable t) {
        if(log.isErrorEnabled())
            log.error(new StringBuilder("failed handling data from ").append(sender), t);
    }
}
/**
 * Hands a raw packet to the given pool as an IncomingPacket task. If the pool
 * executes on the caller's thread (DirectExecutor) the buffer can be used as-is;
 * otherwise it must be copied because the caller may reuse it.
 */
private void dispatchToThreadPool(Executor pool, Address dest, Address sender, byte[] data, int offset, int length) {
    if(pool instanceof DirectExecutor) {
        // executes on this thread - no buffer copy needed
        pool.execute(new IncomingPacket(dest, sender, data, offset, length));
        return;
    }
    byte[] copy=new byte[length];
    System.arraycopy(data, offset, copy, 0, length);
    pool.execute(new IncomingPacket(dest, sender, copy, 0, length));
}
/**
 * Processes a packet read from either the multicast or unicast socket. Needs to be synchronized because
 * mcast or unicast socket reads can be concurrent.
 * Correction (bela April 19 2005): we access no instance variables, all vars are allocated on the stack, so
 * this method should be reentrant: removed 'synchronized' keyword
 *
 * Checks the wire version, unmarshals either a single message or a bundled
 * message list, filters our own looped-back multicasts, and delivers the rest.
 */
private void handleIncomingPacket(Address dest, Address sender, byte[] data, int offset, int length) {
    Message msg=null;
    short version=0;
    boolean is_message_list, multicast;
    byte flags;
    List<Message> msgs;
    try {
        // in_stream/dis are shared instance fields, hence the synchronization
        synchronized(in_stream) {
            in_stream.setData(data, offset, length);
            try {
                version=dis.readShort();
            }
            catch(IOException ex) {
                if(discard_incompatible_packets)
                    return;
                throw ex;
            }
            if(Version.isBinaryCompatible(version) == false) {
                if(log.isWarnEnabled()) {
                    StringBuilder sb=new StringBuilder();
                    sb.append("packet from ").append(sender).append(" has different version (").append(Version.print(version));
                    sb.append(") from ours (").append(Version.printVersion()).append("). ");
                    if(discard_incompatible_packets)
                        sb.append("Packet is discarded");
                    else
                        sb.append("This may cause problems");
                    log.warn(sb);
                }
                if(discard_incompatible_packets)
                    return;
            }
            flags=dis.readByte();
            is_message_list=(flags & LIST) == LIST;       // bundled messages
            multicast=(flags & MULTICAST) == MULTICAST;
            if(is_message_list)
                msgs=readMessageList(dis, dest, multicast);
            else {
                msg=readMessage(dis, dest, sender, multicast);
                msgs=new LinkedList<Message>();
                msgs.add(msg);
            }
        }
        Address src;
        for(Iterator<Message> it=msgs.iterator(); it.hasNext();) {
            msg=it.next();
            src=msg.getSrc();
            if(loopback) {
                if(multicast && src != null && local_addr.equals(src)) { // discard own loopback multicast packets
                    it.remove();
                }
                // NOTE(review): with loopback enabled, non-own messages are not handled here;
                // they are expected to reach incoming_msg_queue below - confirm
            }
            else
                handleIncomingMessage(msg);
        }
        if(incoming_msg_queue != null && !msgs.isEmpty())
            incoming_msg_queue.addAll(msgs);
    }
    catch(Throwable t) {
        if(log.isErrorEnabled())
            log.error("failed unmarshalling message", t);
    }
}
/** Updates receive statistics (if enabled) and delivers the message up the stack. */
private void handleIncomingMessage(Message msg) {
    if(stats) {
        num_msgs_received++;
        num_bytes_received+=msg.getLength();
    }
    passMessageUp(msg, true); // true: drop messages from other clusters
}
/**
 * Internal method to serialize and send a message. This method is not reentrant:
 * out_stream/dos are shared, so marshalling is guarded by out_stream_lock.
 * Regular messages may be handed to the bundler instead of being sent directly.
 */
private void send(Message msg, Address dest, boolean multicast) throws Exception {
    // bundle only regular messages; send OOB messages directly
    if(enable_bundling && !msg.isFlagSet(Message.OOB)) {
        if(!enable_unicast_bundling && !multicast) {
            ; // don't bundle unicast msgs if enable_unicast_bundling is off (http://jira.jboss.com/jira/browse/JGRP-429)
        }
        else {
            bundler.send(msg, dest);
            return;
        }
    }
    out_stream_lock.lock();
    try {
        out_stream.reset();
        dos.reset();
        writeMessage(msg, dos, multicast);
        // wrap the stream's raw buffer directly - no copy; valid only while the lock is held
        Buffer buf=new Buffer(out_stream.getRawBuffer(), 0, out_stream.size());
        doSend(buf, dest, multicast);
    }
    finally {
        out_stream_lock.unlock();
    }
}
/**
 * Puts a marshalled buffer on the wire, updating send statistics if enabled.
 * Multicasts go to all members, everything else to the single destination.
 */
private void doSend(Buffer buf, Address dest, boolean multicast) throws Exception {
    if(stats) {
        num_msgs_sent++;
        num_bytes_sent+=buf.getLength();
    }
    byte[] b=buf.getBuf();
    int off=buf.getOffset();
    int len=buf.getLength();
    if(multicast)
        sendToAllMembers(b, off, len);
    else
        sendToSingleMember(dest, b, off, len);
}
/**
 * Marshals a single message: version (short), flag byte (MULTICAST/OOB bits),
 * then the message body. This method needs to be synchronized on out_stream
 * when it is called.
 *
 * @throws Exception if writing to the stream fails
 */
private static void writeMessage(Message msg, DataOutputStream dos, boolean multicast) throws Exception {
    dos.writeShort(Version.version); // write the version
    byte flags=0;
    if(multicast)
        flags|=MULTICAST;
    if(msg.isFlagSet(Message.OOB))
        flags|=OOB;
    dos.writeByte(flags);
    msg.writeTo(dos);
}
/**
 * Unmarshals a single message from the stream and lets the subclass post-process it.
 */
private Message readMessage(DataInputStream instream, Address dest, Address sender, boolean multicast) throws Exception {
    final Message msg=new Message(false); // don't create headers, readFrom() will do this
    msg.readFrom(instream);
    // hook for subclass-specific optimizations
    postUnmarshalling(msg, dest, sender, multicast);
    return msg;
}
/**
 * Marshals a bundled message list: version, flag byte (LIST, plus MULTICAST if
 * applicable), message count, the shared source address (written once, taken from
 * the first message), then each message body.
 */
private static void writeMessageList(List<Message> msgs, DataOutputStream dos, boolean multicast) throws Exception {
    dos.writeShort(Version.version);
    byte flags=0;
    flags+=LIST;
    if(multicast)
        flags+=MULTICAST;
    dos.writeByte(flags);
    int len=(msgs == null)? 0 : msgs.size();
    dos.writeInt(len);
    if(msgs == null)
        return;
    boolean src_written=false;
    for(Message m: msgs) {
        if(!src_written) {
            // all bundled messages share one source; write it only once
            Util.writeAddress(m.getSrc(), dos);
            src_written=true;
        }
        m.writeTo(dos);
    }
}
/**
 * Unmarshals a bundled message list written by writeMessageList(): count, shared
 * source address, then the messages. The shared source is stamped on every message.
 */
private List<Message> readMessageList(DataInputStream instream, Address dest, boolean multicast) throws Exception {
    final int len=instream.readInt();
    final List<Message> list=new ArrayList<Message>(len);
    final Address src=Util.readAddress(instream);
    for(int i=0; i < len; i++) {
        Message m=new Message(false); // don't create headers, readFrom() will do this
        m.readFrom(instream);
        postUnmarshallingList(m, dest, multicast); // subclass hook
        m.setSrc(src);
        list.add(m);
    }
    return list;
}
/**
 * Handles non-MSG events travelling down the stack: view changes update the
 * member list, CONNECT events bind the cluster name and thread names, DISCONNECT
 * reverses that, and CONFIG is applied to this transport.
 *
 * @return null; CONNECT returns early after connecting
 */
protected Object handleDownEvent(Event evt) {
    switch(evt.getType()) {
        case Event.TMP_VIEW:
        case Event.VIEW_CHANGE:
            synchronized(members) {
                view=(View)evt.getArg();
                members.clear();
                if(!isSingleton()) {
                    Vector<Address> tmpvec=view.getMembers();
                    members.addAll(tmpvec);
                }
                else {
                    // shared transport: aggregate the members of all attached clusters
                    for(Protocol prot: up_prots.values()) {
                        if(prot instanceof ProtocolAdapter) {
                            ProtocolAdapter ad=(ProtocolAdapter)prot;
                            List<Address> tmp=ad.getMembers();
                            members.addAll(tmp);
                        }
                    }
                }
            }
            break;
        case Event.CONNECT:
        case Event.CONNECT_WITH_STATE_TRANSFER:
            channel_name=(String)evt.getArg();
            header=new TpHeader(channel_name); // cached header stamped on every outgoing message
            setInAllThreadFactories(channel_name, local_addr, thread_naming_pattern);
            setThreadNames();
            try {
                handleConnect();
            }
            catch(Exception e) {
                // handleConnect() is checked in subclasses; callers of down() expect unchecked
                throw new RuntimeException(e);
            }
            return null;
        case Event.DISCONNECT:
            unsetThreadNames();
            handleDisconnect();
            break;
        case Event.CONFIG:
            if(log.isDebugEnabled()) log.debug("received CONFIG event: " + evt.getArg());
            handleConfigEvent((Map<String,Object>)evt.getArg()); // unchecked cast: CONFIG args are maps by convention
            break;
    }
    return null;
}
/** Renames the handler threads according to the current thread naming pattern. */
protected void setThreadNames() {
    if(incoming_packet_handler != null)
        global_thread_factory.renameThread(IncomingPacketHandler.THREAD_NAME, incoming_packet_handler.getThread());
    if(incoming_msg_handler != null)
        global_thread_factory.renameThread(IncomingMessageHandler.THREAD_NAME, incoming_msg_handler.getThread());
    if(diag_handler != null)
        global_thread_factory.renameThread(DiagnosticsHandler.THREAD_NAME, diag_handler.getThread());
}
/** Restores the handler threads' plain default names (undoes setThreadNames()). */
protected void unsetThreadNames() {
    if(incoming_packet_handler != null) {
        Thread t=incoming_packet_handler.getThread();
        if(t != null)
            t.setName(IncomingPacketHandler.THREAD_NAME);
    }
    if(incoming_msg_handler != null) {
        Thread t=incoming_msg_handler.getThread();
        if(t != null)
            t.setName(IncomingMessageHandler.THREAD_NAME);
    }
    if(diag_handler != null) {
        Thread t=diag_handler.getThread();
        if(t != null)
            t.setName(DiagnosticsHandler.THREAD_NAME);
    }
}
/**
 * Pushes the naming pattern, cluster name and local address into all of the
 * transport's thread factories. A shared (singleton) transport never includes a
 * cluster name, since it serves several clusters at once.
 */
private void setInAllThreadFactories(String cluster_name, Address local_address, String pattern) {
    final boolean is_shared_transport=isSingleton();
    final ThreadFactory[] factories={
        timer_thread_factory,
        default_thread_factory,
        oob_thread_factory,
        global_thread_factory
    };
    for(ThreadFactory factory: factories) {
        if(pattern != null) {
            factory.setPattern(pattern);
            if(is_shared_transport)
                factory.setIncludeClusterName(false);
        }
        // only set cluster name if we don't have a shared transport
        if(cluster_name != null && !is_shared_transport)
            factory.setClusterName(cluster_name);
        if(local_address != null)
            factory.setAddress(local_address.toString());
    }
}
/**
 * Applies a CONFIG map to this transport. Currently only "additional_data" is
 * recognized; it is stored and, for IpAddress local addresses, attached to them.
 */
protected void handleConfigEvent(Map<String,Object> map) {
    if(map == null)
        return;
    if(!map.containsKey("additional_data"))
        return;
    additional_data=(byte[])map.get("additional_data");
    if(local_addr instanceof IpAddress)
        ((IpAddress)local_addr).setAdditionalData(additional_data);
}
/**
 * Creates a bounded thread pool with the given sizing, queue, factory and
 * rejection policy. The policy name is matched case-insensitively to stay
 * consistent with {@link #verifyRejectionPolicy(String)}, which also accepts it
 * case-insensitively; a case-sensitive match here would let e.g. "Abort" pass
 * verification but silently fall back to the caller-runs default.
 *
 * @param rejection_policy "abort", "discard", "discardoldest" or anything else
 *        (including null) for the caller-runs default
 */
protected static ExecutorService createThreadPool(int min_threads, int max_threads, long keep_alive_time, String rejection_policy,
                                                  BlockingQueue<Runnable> queue, final ThreadFactory factory) {
    ThreadPoolExecutor pool=new ThreadManagerThreadPoolExecutor(min_threads, max_threads, keep_alive_time, TimeUnit.MILLISECONDS, queue);
    pool.setThreadFactory(factory);
    // default: run rejected tasks on the submitting thread
    RejectedExecutionHandler handler = new ThreadPoolExecutor.CallerRunsPolicy();
    if(rejection_policy != null) {
        if(rejection_policy.equalsIgnoreCase("abort"))
            handler = new ThreadPoolExecutor.AbortPolicy();
        else if(rejection_policy.equalsIgnoreCase("discard"))
            handler = new ThreadPoolExecutor.DiscardPolicy();
        else if(rejection_policy.equalsIgnoreCase("discardoldest"))
            handler = new ThreadPoolExecutor.DiscardOldestPolicy();
    }
    // wrapper suppresses rejections caused by pool shutdown
    pool.setRejectedExecutionHandler(new ShutdownRejectedExecutionHandler(handler));
    return pool;
}
/**
 * Shuts the given pool down immediately and waits briefly for termination.
 * Non-ExecutorService executors (e.g. DirectExecutor) are left untouched.
 */
private static void shutdownThreadPool(Executor thread_pool) {
    if(thread_pool instanceof ExecutorService) {
        ExecutorService service=(ExecutorService)thread_pool;
        service.shutdownNow();
        try {
            service.awaitTermination(Global.THREADPOOL_SHUTDOWN_WAIT_TIME, TimeUnit.MILLISECONDS);
        }
        catch(InterruptedException e) {
            // restore the interrupt status instead of swallowing it, so callers
            // further up the shutdown path can observe the interruption
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Validates a configured rejection policy name (case-insensitive).
 *
 * @throws Exception if the name is none of run/abort/discard/discardoldest
 */
private void verifyRejectionPolicy(String str) throws Exception {
    boolean known=str.equalsIgnoreCase("run")
            || str.equalsIgnoreCase("abort")
            || str.equalsIgnoreCase("discard")
            || str.equalsIgnoreCase("discardoldest");
    if(!known) {
        log.error("rejection policy of " + str + " is unknown");
        throw new Exception("Unknown rejection policy " + str);
    }
}
/**
 * Delivers an event to every attached protocol stack of a shared transport.
 * Failures are logged per stack and do not stop delivery to the remaining stacks.
 */
protected void passToAllUpProtocols(Event evt) {
    for(Protocol p: up_prots.values()) {
        try {
            p.up(evt);
        }
        catch(Exception e) {
            if(log.isErrorEnabled())
                log.error("failed passing up event " + evt, e);
        }
    }
}
/**
 * Announces our local address to the protocol(s) above via SET_LOCAL_ADDRESS.
 * In singleton mode every attached stack is notified, except dummy placeholders.
 */
public void sendUpLocalAddressEvent() {
    if(up_prot != null) {
        up(new Event(Event.SET_LOCAL_ADDRESS, local_addr));
        return;
    }
    for(Map.Entry<String,Protocol> entry: up_prots.entrySet()) {
        if(entry.getKey().startsWith(Global.DUMMY))
            continue; // skip placeholder stacks
        entry.getValue().up(new Event(Event.SET_LOCAL_ADDRESS, local_addr));
    }
}
/* ----------------------------- End of Private Methods ---------------------------------------- */
/* ----------------------------- Inner Classes ---------------------------------------- */
/**
 * A received raw packet plus its addressing, runnable on a thread pool: run()
 * checks the wire version, unmarshals single or bundled messages and delivers
 * them up the stack.
 */
class IncomingPacket implements Runnable {
    Address dest=null;    // destination (null / multicast address for multicasts)
    Address sender=null;  // where the packet came from
    byte[] buf;           // raw packet bytes
    int offset, length;   // payload window within buf

    IncomingPacket(Address dest, Address sender, byte[] buf, int offset, int length) {
        this.dest=dest;
        this.sender=sender;
        this.buf=buf;
        this.offset=offset;
        this.length=length;
    }

    /** Code copied from handleIncomingPacket */
    public void run() {
        short version=0;
        boolean is_message_list, multicast;
        byte flags;
        // local streams (unlike handleIncomingPacket's shared ones) - no synchronization needed
        ExposedByteArrayInputStream in_stream=null;
        DataInputStream dis=null;
        try {
            in_stream=new ExposedByteArrayInputStream(buf, offset, length);
            dis=new DataInputStream(in_stream);
            try {
                version=dis.readShort();
            }
            catch(IOException ex) {
                if(discard_incompatible_packets)
                    return;
                throw ex;
            }
            if(Version.isBinaryCompatible(version) == false) {
                if(log.isWarnEnabled()) {
                    StringBuilder sb=new StringBuilder();
                    sb.append("packet from ").append(sender).append(" has different version (").append(Version.print(version));
                    sb.append(") from ours (").append(Version.printVersion()).append("). ");
                    if(discard_incompatible_packets)
                        sb.append("Packet is discarded");
                    else
                        sb.append("This may cause problems");
                    log.warn(sb);
                }
                if(discard_incompatible_packets)
                    return;
            }
            flags=dis.readByte();
            is_message_list=(flags & LIST) == LIST;
            multicast=(flags & MULTICAST) == MULTICAST;
            if(is_message_list) { // used if message bundling is enabled
                List<Message> msgs=readMessageList(dis, dest, multicast);
                for(Message msg: msgs) {
                    if(msg.isFlagSet(Message.OOB)) {
                        // the bundler only accepts regular messages; an OOB here indicates a sender bug
                        log.warn("bundled message should not be marked as OOB");
                    }
                    handleMyMessage(msg, multicast);
                }
            }
            else {
                Message msg=readMessage(dis, dest, sender, multicast);
                handleMyMessage(msg, multicast);
            }
        }
        catch(Throwable t) {
            if(log.isErrorEnabled())
                log.error("failed handling incoming message", t);
        }
    }

    /** Updates stats, drops our own looped-back multicasts and delivers the rest up. */
    private void handleMyMessage(Message msg, boolean multicast) {
        if(stats) {
            num_msgs_received++;
            num_bytes_received+=msg.getLength();
        }
        Address src=msg.getSrc();
        if(loopback && multicast && src != null && src.equals(local_addr)) {
            return; // drop message that was already looped back and delivered
        }
        passMessageUp(msg, true);
    }
}
/**
 * This thread fetches byte buffers from the packet_queue, converts them into messages and passes them up
 * to the higher layer (done in handleIncomingUdpPacket()). Only used when the
 * concurrent stack is disabled.
 */
class IncomingPacketHandler implements Runnable {

    public static final String THREAD_NAME="IncomingPacketHandler";
    Thread t=null; // the handler thread; compared against in run() so a stale thread exits

    Thread getThread(){
        return t;
    }

    /** Starts the handler thread unless one is already running. */
    void start() {
        if(t == null || !t.isAlive()) {
            t=global_thread_factory.newThread(this, THREAD_NAME);
            t.setDaemon(true);
            t.start();
        }
    }

    /** Closes the queue (which unblocks run()) and waits briefly for the thread to die. */
    void stop() {
        incoming_packet_queue.close(true); // should terminate the packet_handler thread too
        if(t != null) {
            try {
                t.join(Global.THREAD_SHUTDOWN_WAIT_TIME);
            }
            catch(InterruptedException e) {
                Thread.currentThread().interrupt(); // set interrupt flag again
            }
        }
    }

    /** Drains the packet queue until it is closed or this thread is superseded. */
    public void run() {
        IncomingPacket entry;
        while(!incoming_packet_queue.closed() && Thread.currentThread().equals(t)) {
            try {
                entry=(IncomingPacket)incoming_packet_queue.remove();
                handleIncomingPacket(entry.dest, entry.sender, entry.buf, entry.offset, entry.length);
            }
            catch(QueueClosedException closed_ex) {
                break;
            }
            catch(Throwable ex) {
                // a bad packet must not kill the handler loop
                if(log.isErrorEnabled())
                    log.error("error processing incoming packet", ex);
            }
        }
        if(log.isTraceEnabled()) log.trace("incoming packet handler terminating");
    }
}
/**
 * Drains already-unmarshalled messages from incoming_msg_queue and delivers them
 * up the stack. Only used with loopback enabled and the concurrent stack disabled.
 */
class IncomingMessageHandler implements Runnable {

    public static final String THREAD_NAME = "IncomingMessageHandler";
    Thread t; // the handler thread; compared against in run() so a stale thread exits

    Thread getThread(){
        return t;
    }

    /** Starts the handler thread unless one is already running. */
    public void start() {
        if(t == null || !t.isAlive()) {
            t=global_thread_factory.newThread(this, THREAD_NAME);
            t.setDaemon(true);
            t.start();
        }
    }

    /** Closes the queue (which unblocks run()) and waits briefly for the thread to die. */
    public void stop() {
        incoming_msg_queue.close(true);
        if(t != null) {
            try {
                t.join(Global.THREAD_SHUTDOWN_WAIT_TIME);
            }
            catch(InterruptedException e) {
                Thread.currentThread().interrupt(); // set interrupt flag again
            }
        }
    }

    /** Drains the message queue until it is closed or this thread is superseded. */
    public void run() {
        Message msg;
        while(!incoming_msg_queue.closed() && Thread.currentThread().equals(t)) {
            try {
                msg=(Message)incoming_msg_queue.remove();
                handleIncomingMessage(msg);
            }
            catch(QueueClosedException closed_ex) {
                break;
            }
            catch(Throwable ex) {
                // a bad message must not kill the handler loop
                if(log.isErrorEnabled())
                    log.error("error processing incoming message", ex);
            }
        }
        if(log.isTraceEnabled()) log.trace("incoming message handler terminating");
    }
}
/**
 * Accumulates regular (non-OOB) messages per destination and flushes them as
 * bundled message lists, either when the accumulated size would exceed
 * max_bundle_size or when a scheduled BundlingTimer fires after max_bundle_timeout.
 * All mutable state is guarded by {@code lock}.
 */
private class Bundler {

    static final int MIN_NUMBER_OF_BUNDLING_TASKS=2;
    /** HashMap<Address, List<Message>>. Keys are destinations, values are lists of Messages */
    final Map<Address,List<Message>> msgs=new HashMap<Address,List<Message>>(36);
    @GuardedBy("lock")
    long count=0; // current number of bytes accumulated
    int num_msgs=0; // messages accumulated since the last flush (for trace logging)
    @GuardedBy("lock")
    int num_bundling_tasks=0; // number of BundlingTimers currently scheduled
    long last_bundle_time; // when the current accumulation round started
    final ReentrantLock lock=new ReentrantLock();
    final ExposedByteArrayOutputStream bundler_out_stream=new ExposedByteArrayOutputStream(INITIAL_BUFSIZE);
    final ExposedDataOutputStream bundler_dos=new ExposedDataOutputStream(bundler_out_stream);

    /**
     * Queues a message for bundling. If adding it would push the accumulated size
     * past max_bundle_size, the pending messages are flushed first. Also makes sure
     * at least one timer task is scheduled to flush on timeout.
     *
     * @throws Exception if the message alone exceeds max_bundle_size
     */
    private void send(Message msg, Address dest) throws Exception {
        long length=msg.size();
        checkLength(length);
        lock.lock();
        try {
            if(count + length >= max_bundle_size) {
                if(!msgs.isEmpty()) {
                    sendBundledMessages(msgs);
                }
            }
            addMessage(msg, dest);
            count+=length;
            if(num_bundling_tasks < MIN_NUMBER_OF_BUNDLING_TASKS) {
                num_bundling_tasks++;
                timer.schedule(new BundlingTimer(), max_bundle_timeout, TimeUnit.MILLISECONDS);
            }
        }
        finally {
            lock.unlock();
        }
    }

    /** Run with lock acquired */
    private void addMessage(Message msg, Address dest) { // no sync needed, always called with lock held
        if(msgs.isEmpty())
            last_bundle_time=System.currentTimeMillis(); // first message of a new round
        List<Message> tmp=msgs.get(dest);
        if(tmp == null) {
            tmp=new LinkedList<Message>();
            msgs.put(dest, tmp);
        }
        tmp.add(msg);
        num_msgs++;
    }

    /**
     * Sends all messages from the map, all messages for the same destination are bundled into 1 message.
     * This method may be called by timer and bundler concurrently
     * @param msgs
     */
    private void sendBundledMessages(final Map<Address,List<Message>> msgs) {
        boolean multicast;
        Buffer buffer;
        Address dst;
        if(log.isTraceEnabled()) {
            long stop=System.currentTimeMillis();
            double percentage=100.0 / max_bundle_size * count;
            StringBuilder sb=new StringBuilder("sending ").append(num_msgs).append(" msgs (");
            num_msgs=0;
            sb.append(count).append(" bytes (" + f.format(percentage) + "% of max_bundle_size)");
            if(last_bundle_time > 0) {
                sb.append(", collected in ").append(stop-last_bundle_time).append("ms) ");
            }
            sb.append(" to ").append(msgs.size()).append(" destination(s)");
            if(msgs.size() > 1) sb.append(" (dests=").append(msgs.keySet()).append(")");
            log.trace(sb);
        }
        for(Map.Entry<Address,List<Message>> entry: msgs.entrySet()) {
            List<Message> list=entry.getValue();
            if(list.isEmpty())
                continue;
            dst=entry.getKey();
            multicast=dst == null || dst.isMulticastAddress();
            try {
                // a send failure for one destination must not abort the others
                bundler_out_stream.reset();
                bundler_dos.reset();
                writeMessageList(list, bundler_dos, multicast); // flushes output stream when done
                buffer=new Buffer(bundler_out_stream.getRawBuffer(), 0, bundler_out_stream.size());
                doSend(buffer, dst, multicast);
            }
            catch(Throwable e) {
                if(log.isErrorEnabled()) log.error("exception sending msg: " + e.toString(), e.getCause());
            }
        }
        msgs.clear();
        count=0; // start a fresh accumulation round
    }

    /** Rejects messages that can never fit in a bundle. */
    private void checkLength(long len) throws Exception {
        if(len > max_bundle_size)
            throw new Exception("message size (" + len + ") is greater than max bundling size (" + max_bundle_size +
                    "). Set the fragmentation/bundle size in FRAG and TP correctly");
    }

    /** Timeout-driven flush: sends whatever has accumulated when it fires. */
    private class BundlingTimer implements Runnable {

        public void run() {
            lock.lock();
            try {
                if(!msgs.isEmpty()) {
                    sendBundledMessages(msgs);
                }
            }
            finally {
                num_bundling_tasks--; // allow a new timer to be scheduled
                lock.unlock();
            }
        }
    }
}
/**
 * Listens on a multicast diagnostics socket (joined on all available interfaces)
 * and answers probe requests via handleDiagnosticProbe().
 */
private class DiagnosticsHandler implements Runnable {

    public static final String THREAD_NAME = "DiagnosticsHandler";
    Thread thread=null; // receiver thread; compared against in run() so a stale thread exits
    MulticastSocket diag_sock=null;

    DiagnosticsHandler() {
    }

    Thread getThread(){
        return thread;
    }

    /** Creates the diagnostics socket, joins the diag group on all interfaces and starts the receiver thread. */
    void start() throws IOException {
        diag_sock=new MulticastSocket(diagnostics_port);
        // diag_sock=Util.createMulticastSocket(null, diagnostics_port, log);
        List<NetworkInterface> interfaces=Util.getAllAvailableInterfaces();
        bindToInterfaces(interfaces, diag_sock);
        if(thread == null || !thread.isAlive()) {
            thread=global_thread_factory.newThread(this, THREAD_NAME);
            thread.setDaemon(true);
            thread.start();
        }
    }

    /** Closes the socket (which unblocks receive()) and waits briefly for the thread to die. */
    void stop() {
        if(diag_sock != null)
            diag_sock.close();
        if(thread != null){
            try{
                thread.join(Global.THREAD_SHUTDOWN_WAIT_TIME);
            }
            catch(InterruptedException e){
                Thread.currentThread().interrupt(); // set interrupt flag
            }
        }
    }

    /** Receive loop: reads probe datagrams and hands them to handleDiagnosticProbe(). */
    public void run() {
        byte[] buf=new byte[1500]; // MTU on most LANs
        DatagramPacket packet;
        while(!diag_sock.isClosed() && Thread.currentThread().equals(thread)) {
            packet=new DatagramPacket(buf, 0, buf.length);
            try {
                diag_sock.receive(packet);
                // NOTE(review): this 3-arg call does not match the 4-arg
                // handleDiagnosticProbe(...) signature above - confirm which is current
                handleDiagnosticProbe(packet.getSocketAddress(), diag_sock,
                        new String(packet.getData(), packet.getOffset(), packet.getLength()));
            }
            catch(IOException e) {
                // socket closed during stop() - loop condition will end the thread
            }
        }
    }

    /** Joins the diagnostics multicast group on every interface that has at least one address. */
    private void bindToInterfaces(List<NetworkInterface> interfaces, MulticastSocket s) {
        SocketAddress group_addr=new InetSocketAddress(diagnostics_addr, diagnostics_port);
        for(Iterator<NetworkInterface> it=interfaces.iterator(); it.hasNext();) {
            NetworkInterface i=it.next();
            try {
                if (i.getInetAddresses().hasMoreElements()) { // fix for VM crash - suggested by [email protected]
                    s.joinGroup(group_addr, i);
                    if(log.isTraceEnabled())
                        log.trace("joined " + group_addr + " on " + i.getName());
                }
            }
            catch(IOException e) {
                log.warn("failed to join " + group_addr + " on " + i.getName() + ": " + e);
            }
        }
    }
}
/**
 * Glue protocol inserted between a shared (singleton) transport and each attached
 * cluster's stack. It stamps outgoing messages with the cluster's transport header,
 * tracks that cluster's membership and keeps a per-cluster thread factory.
 */
public static class ProtocolAdapter extends Protocol {

    final String cluster_name;               // cluster this adapter proxies for
    final String transport_name;             // name of the shared transport below
    final TpHeader header;                   // header stamped on every outgoing message
    final List<Address> members=new ArrayList<Address>(); // members of this cluster's current view
    final ThreadFactory factory;             // per-cluster thread factory

    public ProtocolAdapter(String cluster_name, String transport_name, Protocol up, Protocol down, String pattern, Address addr) {
        this.cluster_name=cluster_name;
        this.transport_name=transport_name;
        this.up_prot=up;
        this.down_prot=down;
        this.header=new TpHeader(cluster_name);
        this.factory=new DefaultThreadFactory(Util.getGlobalThreadGroup(), "", false);
        factory.setPattern(pattern);
        if(addr != null)
            factory.setAddress(addr.toString());
    }

    @ManagedAttribute(description="Name of the cluster to which this adapter proxies")
    public String getCluster_name() {
        return cluster_name;
    }

    @ManagedAttribute(description="Name of the transport")
    public String getTransport_name() {
        return transport_name;
    }

    /** Read-only view of this cluster's members. */
    public List<Address> getMembers() {
        return Collections.unmodifiableList(members);
    }

    public ThreadFactory getThreadFactory() {
        return factory;
    }

    /**
     * Down events: messages get the cluster's transport header, view changes
     * refresh the member list, connects set the factory's cluster name.
     * Everything is then forwarded to the transport below.
     */
    public Object down(Event evt) {
        switch(evt.getType()) {
            case Event.MSG:
                Message msg=(Message)evt.getArg();
                msg.putHeader(transport_name, header);
                break;
            case Event.VIEW_CHANGE:
                View view=(View)evt.getArg();
                Vector<Address> tmp=view.getMembers();
                members.clear();
                members.addAll(tmp);
                break;
            case Event.CONNECT:
            case Event.CONNECT_WITH_STATE_TRANSFER:
                factory.setClusterName((String)evt.getArg());
                break;
        }
        return down_prot.down(evt);
    }

    /** Up events: records the local address in the thread factory, then forwards. */
    public Object up(Event evt) {
        switch(evt.getType()) {
            case Event.SET_LOCAL_ADDRESS:
                Address addr=(Address)evt.getArg();
                if(addr != null)
                    factory.setAddress(addr.toString());
                break;
        }
        return up_prot.up(evt);
    }

    public String getName() {
        return "TP.ProtocolAdapter";
    }

    public String toString() {
        return cluster_name + " (" + transport_name + ")";
    }
}
}
package org.jgroups.protocols;
import org.jgroups.*;
import org.jgroups.annotations.*;
import org.jgroups.conf.PropertyConverters;
import org.jgroups.stack.IpAddress;
import org.jgroups.stack.Protocol;
import org.jgroups.stack.ProtocolStack;
import org.jgroups.util.*;
import org.jgroups.util.Queue;
import org.jgroups.util.ThreadFactory;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.*;
import java.text.NumberFormat;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.*;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.Lock;
/**
* Generic transport - specific implementations should extend this abstract class.
* Features which are provided to the subclasses include
* <ul>
* <li>version checking
* <li>marshalling and unmarshalling
* <li>message bundling (handling single messages, and message lists)
* <li>incoming packet handler
* <li>loopback
* </ul>
* A subclass has to override
* <ul>
* <li>{@link #sendToAllMembers(byte[], int, int)}
* <li>{@link #sendToSingleMember(org.jgroups.Address, byte[], int, int)}
* <li>{@link #init()}
* <li>{@link #start()}: subclasses <em>must</em> call super.start() <em>after</em> they initialize themselves
* (e.g., created their sockets).
* <li>{@link #stop()}: subclasses <em>must</em> call super.stop() after they deinitialized themselves
* <li>{@link #destroy()}
* </ul>
* The create() or start() method has to create a local address.<br>
* The {@link #receive(Address, Address, byte[], int, int)} method must
* be called by subclasses when a unicast or multicast message has been received.
* @author Bela Ban
* @version $Id: TP.java,v 1.227 2008/08/22 09:17:13 belaban Exp $
*/
@MBean(description="Transport protocol")
@DeprecatedProperty(names={"bind_to_all_interfaces", "use_outgoing_packet_handler"})
public abstract class TP extends Protocol {
// Flag bits carried in the marshalled packet header:
private static final byte LIST=1; // we have a list of messages rather than a single message when set
private static final byte MULTICAST=2; // message is a multicast (versus a unicast) message when set
private static final byte OOB=4; // message has OOB flag set (Message.OOB)
// Shared number formatter for statistics output; configured once in the static block
// (no digit grouping, at most 2 fraction digits).
private static NumberFormat f;
private static final int INITIAL_BUFSIZE=4095;
static {
    f=NumberFormat.getNumberInstance();
    f.setGroupingUsed(false);
    f.setMaximumFractionDigits(2);
}
// Reusable marshalling buffer and stream (created in init()); access is serialized
// via out_stream_lock.
private ExposedByteArrayOutputStream out_stream=null;
private ExposedDataOutputStream dos=null;
private final Lock out_stream_lock=new ReentrantLock();
/* ------------------------------------------ JMX and Properties ------------------------------------------ */
@ManagedAttribute
@Property(converter=PropertyConverters.BindAddress.class,
description="The interface (NIC) which should be used by this transport ")
protected InetAddress bind_addr=null;
@Property(description="Ignores all bind address parameters and let's the OS return the local host address. Default is false")
protected boolean use_local_host=false;
@ManagedAttribute
@Property(description=" If true, the transport should use all available interfaces to receive multicast messages. Default is false")
protected boolean receive_on_all_interfaces=false;
/**
* List<NetworkInterface> of interfaces to receive multicasts on. The
* multicast receive socket will listen on all of these interfaces. This is
* a comma-separated list of IP addresses or interface names. E.g.
* "192.168.5.1,eth1,127.0.0.1". Duplicates are discarded; we only bind to
* an interface once. If this property is set, it override
* receive_on_all_interfaces.
*/
@ManagedAttribute
@Property(converter=PropertyConverters.NetworkInterfaceList.class,
description="Comma delimited list of interfaces (IP addresses or interface names) to receive multicasts on")
protected List<NetworkInterface> receive_interfaces=null;
/**
* If true, the transport should use all available interfaces to send
* multicast messages. This means the same multicast message is sent N
* times, so use with care
*/
@ManagedAttribute
@Property(description=" If true, the transport should use all available interfaces to send multicast messages. Default is false")
protected boolean send_on_all_interfaces=false;
/**
* List<NetworkInterface> of interfaces to send multicasts on. The
* multicast send socket will send the same multicast message on all of
* these interfaces. This is a comma-separated list of IP addresses or
* interface names. E.g. "192.168.5.1,eth1,127.0.0.1". Duplicates are
* discarded. If this property is set, it override send_on_all_interfaces.
*/
@ManagedAttribute
@Property(converter=PropertyConverters.NetworkInterfaceList.class,
description="Comma delimited list of interfaces (IP addresses or interface names) to send multicasts on")
protected List<NetworkInterface> send_interfaces=null;
/**
* The port to which the transport binds. 0 means to bind to any (ephemeral)
* port
*/
@Property(name="start_port", deprecatedMessage="start_port is deprecated; use bind_port instead",
description="The port to which the transport binds. Default of 0 binds to any (ephemeral) port")
protected int bind_port=0;
@Property(name="end_port", deprecatedMessage="end_port is deprecated; use port_range instead")
protected int port_range=1; // 27-6-2003 bgooren, Only try one port by default
@Property(description="TODO")
protected boolean prevent_port_reuse=false;
/**
* If true, messages sent to self are treated specially: unicast messages
* are looped back immediately, multicast messages get a local copy first
* and - when the real copy arrives - it will be discarded. Useful for
* Window media (non)sense
*/
@ManagedAttribute(description="", writable=true)
@Property(description="Messages to self are looped back immediatelly if true. Default is false")
protected boolean loopback=false;
/**
* Discard packets with a different version. Usually minor version
* differences are okay. Setting this property to true means that we expect
* the exact same version on all incoming packets
*/
@ManagedAttribute(description="Discard packets with a different version", writable=true)
@Property(description="Discard packets with a different version if true. Default is false")
protected boolean discard_incompatible_packets=false;
/**
* Sometimes receivers are overloaded (they have to handle de-serialization
* etc). Packet handler is a separate thread taking care of
* de-serialization, receiver thread(s) simply put packet in queue and
* return immediately. Setting this to true adds one more thread
*/
@ManagedAttribute(description="Should additional thread be used for message deserialization", writable=true)
@Property(name="use_packet_handler",
deprecatedMessage="'use_packet_handler' is deprecated; use 'use_incoming_packet_handler' instead",
description="Should additional thread be used for message deserialization. Default is true")
protected boolean use_incoming_packet_handler=true;
@Property(description="Should concurrent stack with thread pools be used to deliver messages up the stack. Default is true")
protected boolean use_concurrent_stack=true;
@Property(description="Thread naming pattern for threads in this channel. Default is cl")
protected String thread_naming_pattern="cl";
@Property(name="oob_thread_pool.enabled",description="Switch for enabling thread pool for OOB messages. Default true")
protected boolean oob_thread_pool_enabled=true;
@ManagedAttribute(description="Minimum thread pool size for OOB messages. Default is 2")
@Property(name="oob_thread_pool.min_threads")
protected int oob_thread_pool_min_threads=2;
@ManagedAttribute(description="Maximum thread pool size for OOB messages. Default is 10")
@Property(name="oob_thread_pool.max_threads")
protected int oob_thread_pool_max_threads=10;
@ManagedAttribute(description="Timeout in milliseconds to remove idle thread from OOB pool. Default is 30000")
@Property(name="oob_thread_pool.keep_alive_time")
protected long oob_thread_pool_keep_alive_time=30000;
@ManagedAttribute(description="Use queue to enqueue incoming OOB messages. Default is true")
@Property(name="oob_thread_pool.queue_enabled",
description="Use queue to enqueue incoming OOB messages. Default is true")
protected boolean oob_thread_pool_queue_enabled=true;
@ManagedAttribute(description="Maximum queue size for incoming OOB messages. Default is 500")
@Property(name="oob_thread_pool.queue_max_size")
protected int oob_thread_pool_queue_max_size=500;
@ManagedAttribute
@Property(name="oob_thread_pool.rejection_policy",
description="Thread rejection policy. Possible values are Abort, Discard, DiscardOldest and Run. Default is Run")
String oob_thread_pool_rejection_policy="Run";
@ManagedAttribute(description="Minimum thread pool size for regular messages. Default is 2")
@Property(name="thread_pool.min_threads")
protected int thread_pool_min_threads=2;
@ManagedAttribute(description="Maximum thread pool size for regular messages. Default is 10")
@Property(name="thread_pool.max_threads")
protected int thread_pool_max_threads=10;
@ManagedAttribute(description="Timeout in milliseconds to remove idle thread from regular pool. Default is 30000")
@Property(name="thread_pool.keep_alive_time")
protected long thread_pool_keep_alive_time=30000;
@ManagedAttribute(description="Switch for enabling thread pool for regular messages. Default true")
@Property(name="thread_pool.enabled")
protected boolean thread_pool_enabled=true;
@ManagedAttribute(description="Use queue to enqueue incoming regular messages")
@Property(name="thread_pool.queue_enabled",
description="Use queue to enqueue incoming regular messages. Default is true")
protected boolean thread_pool_queue_enabled=true;
@ManagedAttribute(description="Maximum queue size for incoming OOB messages")
@Property(name="thread_pool.queue_max_size",
description="Maximum queue size for incoming OOB messages. Default is 500")
protected int thread_pool_queue_max_size=500;
@ManagedAttribute
@Property(name="thread_pool.rejection_policy",
description="Thread rejection policy. Possible values are Abort, Discard, DiscardOldest and Run Default is Run")
protected String thread_pool_rejection_policy="Run";
@ManagedAttribute(description="Number of threads to be used by the timer thread pool")
@Property(name="timer.num_threads",description="Number of threads to be used by the timer thread pool. Default is 4")
protected int num_timer_threads=4;
@ManagedAttribute(description="Enable bundling of smaller messages into bigger ones", writable=true)
@Property(description="Enable bundling of smaller messages into bigger ones. Default is false")
protected boolean enable_bundling=false;
/** Enable bundling for unicast messages. Ignored if enable_bundling is off */
@Property(description="Enable bundling of smaller messages into bigger ones for unicast messages. Default is true")
protected boolean enable_unicast_bundling=true;
@Property(description="Switch to enbale diagnostic probing. Default is true")
protected boolean enable_diagnostics=true;
@Property(description="Address for diagnostic probing. Default is 224.0.75.75")
protected String diagnostics_addr="224.0.75.75";
@Property(description="Port for diagnostic probing. Default is 7500")
protected int diagnostics_port=7500;
@Property(description="If assigned enable this transport to be a singleton (shared) transport")
protected String singleton_name=null;
@Property(description="Path to a file to store currently used ports on this machine.")
protected String persistent_ports_file=null;
@Property(name="ports_expiry_time",description="Timeout to expire ports used with PortManager. Default is 30000 msec")
protected long pm_expiry_time=30000L;
@Property(description="Switch to enable tracking of currently used ports on this machine. Default is false")
protected boolean persistent_ports=false;
/**
* Maximum number of bytes for messages to be queued until they are sent.
* This value needs to be smaller than the largest datagram packet size in
* case of UDP
*/
protected int max_bundle_size=64000;
/**
* Max number of milliseconds until queued messages are sent. Messages are
* sent when max_bundle_size or max_bundle_timeout has been exceeded
* (whichever occurs faster)
*/
protected long max_bundle_timeout=20;
/* --------------------------------------------- JMX ---------------------------------------------- */
@ManagedAttribute
protected long num_msgs_sent=0;
@ManagedAttribute
protected long num_msgs_received=0;
@ManagedAttribute
protected long num_bytes_sent=0;
@ManagedAttribute
protected long num_bytes_received=0;
/** The name of the group to which this member is connected */
@ManagedAttribute
protected String channel_name=null;
/**
* whether or not warnings about messages from different groups are logged -
* private flag, not for common use
*/
@ManagedAttribute(writable=true, description="whether or not warnings about messages from different groups are logged")
private boolean log_discard_msgs=true;
@ManagedAttribute
protected long num_oob_msgs_received=0;
@ManagedAttribute
protected long num_incoming_msgs_received=0;
/* --------------------------------------------- Fields ------------------------------------------------------ */
/** The address (host and port) of this member */
protected Address local_addr=null;
/** The members of this group (updated when a member joins or leaves) */
protected final HashSet<Address> members=new HashSet<Address>(11);
protected View view=null;
protected final ExposedByteArrayInputStream in_stream=new ExposedByteArrayInputStream(new byte[] { '0' });
protected final DataInputStream dis=new DataInputStream(in_stream);
/** Used by packet handler to store incoming DatagramPackets */
protected Queue incoming_packet_queue=null;
/**
* Dequeues DatagramPackets from packet_queue, unmarshalls them and calls
* <tt>handleIncomingUdpPacket()</tt>
*/
protected IncomingPacketHandler incoming_packet_handler=null;
/** Used by packet handler to store incoming Messages */
protected Queue incoming_msg_queue=null;
protected IncomingMessageHandler incoming_msg_handler;
protected ThreadGroup pool_thread_group=new ThreadGroup(Util.getGlobalThreadGroup(), "Thread Pools");
/**
* Keeps track of connects and disconnects, in order to start and stop
* threads
*/
protected int connect_count=0;
/**
* ================================== OOB thread pool ========================
*/
protected Executor oob_thread_pool;
/** Factory which is used by oob_thread_pool */
protected ThreadFactory oob_thread_factory=null;
/**
* Used if oob_thread_pool is a ThreadPoolExecutor and
* oob_thread_pool_queue_enabled is true
*/
protected BlockingQueue<Runnable> oob_thread_pool_queue=null;
/**
* ================================== Regular thread pool =======================
*/
/**
* The thread pool which handles unmarshalling, version checks and
* dispatching of regular messages
*/
protected Executor thread_pool;
/** Factory which is used by oob_thread_pool */
protected ThreadFactory default_thread_factory=null;
/**
* Used if thread_pool is a ThreadPoolExecutor and thread_pool_queue_enabled
* is true
*/
protected BlockingQueue<Runnable> thread_pool_queue=null;
/**
* ================================== Timer thread pool =========================
*/
protected TimeScheduler timer=null;
protected ThreadFactory timer_thread_factory;
/**
* =================================Default thread factory ========================
*/
/** Used by all threads created by JGroups outside of the thread pools */
protected ThreadFactory global_thread_factory=null;
/**
* If set it will be added to <tt>local_addr</tt>. Used to implement for
* example transport independent addresses
*/
protected byte[] additional_data=null;
private Bundler bundler=null;
private DiagnosticsHandler diag_handler=null;
/**
* If singleton_name is enabled, this map is used to de-multiplex incoming
* messages according to their cluster names (attached to the message by the
* transport anyway). The values are the next protocols above the
* transports.
*/
private final ConcurrentMap<String,Protocol> up_prots=new ConcurrentHashMap<String,Protocol>();
protected TpHeader header;
protected final String name=getName();
protected PortsManager pm=null;
/**
 * Creates the TP protocol, and initializes the state variables, does
 * however not start any sockets or threads.
 */
protected TP() {
    // Intentionally empty: sockets and threads are created in init()/start().
}
/**
 * Debug-only representation: the protocol name, plus the local address when known.
 */
public String toString() {
    if(local_addr == null)
        return name;
    return name + "(local address: " + local_addr + ')';
}
/** Resets all message/byte counters maintained by this transport to zero. */
public void resetStats() {
    num_msgs_sent=0;
    num_msgs_received=0;
    num_bytes_sent=0;
    num_bytes_received=0;
    num_oob_msgs_received=0;
    num_incoming_msgs_received=0;
}
// ---------------- Thread pool / thread factory accessors ----------------

// Enables or disables the bounded queue feeding the regular-message thread pool.
public void setThreadPoolQueueEnabled(boolean flag) {thread_pool_queue_enabled=flag;}
/** Returns the pool which delivers regular (non-OOB) messages up the stack. */
public Executor getDefaultThreadPool() {
    return thread_pool;
}
/** Replaces the regular-message pool; the previous pool (if any) is shut down first. */
public void setDefaultThreadPool(Executor thread_pool) {
    if(this.thread_pool != null)
        shutdownThreadPool(this.thread_pool);
    this.thread_pool=thread_pool;
}
/** Returns the factory used to create threads for the regular-message pool. */
public ThreadFactory getDefaultThreadPoolThreadFactory() {
    return default_thread_factory;
}
/** Sets the regular-pool factory; applied immediately when the pool is a ThreadPoolExecutor. */
public void setDefaultThreadPoolThreadFactory(ThreadFactory factory) {
    default_thread_factory=factory;
    if(thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)thread_pool).setThreadFactory(factory);
}
/** Returns the pool which delivers OOB (out-of-band) messages. */
public Executor getOOBThreadPool() {
    return oob_thread_pool;
}
/** Replaces the OOB pool; the previous pool (if any) is shut down first. */
public void setOOBThreadPool(Executor oob_thread_pool) {
    if(this.oob_thread_pool != null) {
        shutdownThreadPool(this.oob_thread_pool);
    }
    this.oob_thread_pool=oob_thread_pool;
}
/** Returns the factory used to create threads for the OOB pool. */
public ThreadFactory getOOBThreadPoolThreadFactory() {
    return oob_thread_factory;
}
/** Sets the OOB-pool factory; applied immediately when the pool is a ThreadPoolExecutor. */
public void setOOBThreadPoolThreadFactory(ThreadFactory factory) {
    oob_thread_factory=factory;
    if(oob_thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)oob_thread_pool).setThreadFactory(factory);
}
/** Returns the factory used by the timer thread pool. */
public ThreadFactory getTimerThreadFactory() {
    return timer_thread_factory;
}
// NOTE(review): assumes the timer has already been created in init(); calling this
// before init() would throw a NullPointerException — confirm this is intended.
public void setTimerThreadFactory(ThreadFactory factory) {
    timer_thread_factory=factory;
    timer.setThreadFactory(factory);
}
/** Returns the timer used for scheduled tasks (created in init()). */
public TimeScheduler getTimer() {return timer;}
/** Returns the global factory used for threads created outside the pools. */
public ThreadFactory getThreadFactory() {
    return global_thread_factory;
}
public void setThreadFactory(ThreadFactory factory) {
    global_thread_factory=factory;
}
/**
 * Names the current thread. Valid values are "pcl":
 * p: include the previous (original) name, e.g. "Incoming thread-1", "UDP ucast receiver"
 * c: include the cluster name, e.g. "MyCluster"
 * l: include the local address of the current member, e.g. "192.168.5.1:5678"
 */
public String getThreadNamingPattern() {return thread_naming_pattern;}
// ---------------- Statistics counters ----------------
public long getNumMessagesSent() {return num_msgs_sent;}
public long getNumMessagesReceived() {return num_msgs_received;}
public long getNumBytesSent() {return num_bytes_sent;}
public long getNumBytesReceived() {return num_bytes_received;}
// ---------------- Bind address / port and bundling configuration ----------------
/** Returns the bind address as a string, or "null" if none is configured. */
public String getBindAddress() {return bind_addr != null? bind_addr.toString() : "null";}
/** Resolves the given host name/IP and uses it as the interface to bind to. */
public void setBindAddress(String bind_addr) throws UnknownHostException {
    this.bind_addr=InetAddress.getByName(bind_addr);
}
public InetAddress getBindAddressAsInetAddress() {return bind_addr;}
public int getBindPort() {return bind_port;}
public void setBindPort(int port) {this.bind_port=port;}
/** @deprecated Use {@link #isReceiveOnAllInterfaces()} instead */
public boolean getBindToAllInterfaces() {return receive_on_all_interfaces;}
public void setBindToAllInterfaces(boolean flag) {this.receive_on_all_interfaces=flag;}
public boolean isReceiveOnAllInterfaces() {return receive_on_all_interfaces;}
public List<NetworkInterface> getReceiveInterfaces() {return receive_interfaces;}
public boolean isSendOnAllInterfaces() {return send_on_all_interfaces;}
public List<NetworkInterface> getSendInterfaces() {return send_interfaces;}
public boolean isDiscardIncompatiblePackets() {return discard_incompatible_packets;}
public void setDiscardIncompatiblePackets(boolean flag) {discard_incompatible_packets=flag;}
public boolean isEnableBundling() {return enable_bundling;}
public void setEnableBundling(boolean flag) {enable_bundling=flag;}
public boolean isEnableUnicastBundling() {return enable_unicast_bundling;}
public void setEnableUnicastBundling(boolean enable_unicast_bundling) {this.enable_unicast_bundling=enable_unicast_bundling;}
public void setPortRange(int range) {this.port_range=range;}
public void setUseConcurrentStack(boolean flag) {use_concurrent_stack=flag;}
@ManagedAttribute
public String getLocalAddressAsString() {return local_addr != null? local_addr.toString() : "n/a";}
@ManagedAttribute
public boolean isOOBThreadPoolEnabled() { return oob_thread_pool_enabled; }
// NOTE(review): "isDefaulThreadPoolEnabled" is missing a 't' ("Default"); renaming would
// change the exposed JMX attribute name, so the typo is left in place.
@ManagedAttribute
public boolean isDefaulThreadPoolEnabled() { return thread_pool_enabled; }
@ManagedAttribute(description="Maximum number of bytes for messages to be queued until they are sent")
public int getMaxBundleSize() {return max_bundle_size;}
@ManagedAttribute(description="Maximum number of bytes for messages to be queued until they are sent",writable=true)
@Property(name="max_bundle_size")
public void setMaxBundleSize(int size) {
    if(size <= 0) {
        throw new IllegalArgumentException("max_bundle_size (" + size + ") is <= 0");
    }
    max_bundle_size=size;
}
@ManagedAttribute(description="Max number of milliseconds until queued messages are sent")
public long getMaxBundleTimeout() {return max_bundle_timeout;}
@ManagedAttribute(description="Max number of milliseconds until queued messages are sent", writable=true)
@Property(name="max_bundle_timeout")
public void setMaxBundleTimeout(long timeout) {
    if(timeout <= 0) {
        throw new IllegalArgumentException("max_bundle_timeout of " + timeout + " is invalid");
    }
    max_bundle_timeout=timeout;
}
public Address getLocalAddress() {return local_addr;}
public String getChannelName() {return channel_name;}
public boolean isLoopback() {return loopback;}
public void setLoopback(boolean b) {loopback=b;}
public boolean isUseIncomingPacketHandler() {return use_incoming_packet_handler;}
/** Returns the cluster-name -> protocol-above-transport map used when this transport is shared (singleton). */
public ConcurrentMap<String,Protocol> getUpProtocols() {
    return up_prots;
}
// ---------------- JMX accessors for the OOB thread pool ----------------
// Getters return 0 and setters are no-ops when the pool is not a ThreadPoolExecutor
// (e.g. a DirectExecutor when oob_thread_pool.enabled=false).
@ManagedAttribute
public int getOOBMinPoolSize() {
    return oob_thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)oob_thread_pool).getCorePoolSize() : 0;
}
@ManagedAttribute
public void setOOBMinPoolSize(int size) {
    if(oob_thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)oob_thread_pool).setCorePoolSize(size);
}
@ManagedAttribute
public int getOOBMaxPoolSize() {
    return oob_thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)oob_thread_pool).getMaximumPoolSize() : 0;
}
@ManagedAttribute
public void setOOBMaxPoolSize(int size) {
    if(oob_thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)oob_thread_pool).setMaximumPoolSize(size);
}
@ManagedAttribute
public int getOOBPoolSize() {
    return oob_thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)oob_thread_pool).getPoolSize() : 0;
}
@ManagedAttribute
public long getOOBKeepAliveTime() {
    return oob_thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)oob_thread_pool).getKeepAliveTime(TimeUnit.MILLISECONDS) : 0;
}
@ManagedAttribute
public void setOOBKeepAliveTime(long time) {
    if(oob_thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)oob_thread_pool).setKeepAliveTime(time, TimeUnit.MILLISECONDS);
}
/** Number of OOB messages received so far. */
public long getOOBMessages() {
    return num_oob_msgs_received;
}
/** Current number of OOB messages waiting in the queue (0 if no queue is in use). */
@ManagedAttribute
public int getOOBQueueSize() {
    // oob_thread_pool_queue stays null when oob_thread_pool.enabled=false (init() only
    // creates a queue together with the pool); guard against NPE when queried via JMX.
    return oob_thread_pool_queue != null? oob_thread_pool_queue.size() : 0;
}
/** Configured capacity of the OOB message queue. */
public int getOOBMaxQueueSize() {
    return oob_thread_pool_queue_max_size;
}
// ---------------- JMX accessors for the regular-message thread pool ----------------
// Getters return 0 and setters are no-ops when the pool is not a ThreadPoolExecutor
// (e.g. a DirectExecutor when thread_pool.enabled=false).
@ManagedAttribute
public int getIncomingMinPoolSize() {
    return thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)thread_pool).getCorePoolSize() : 0;
}
@ManagedAttribute
public void setIncomingMinPoolSize(int size) {
    if(thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)thread_pool).setCorePoolSize(size);
}
@ManagedAttribute
public int getIncomingMaxPoolSize() {
    return thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)thread_pool).getMaximumPoolSize() : 0;
}
@ManagedAttribute
public void setIncomingMaxPoolSize(int size) {
    if(thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)thread_pool).setMaximumPoolSize(size);
}
@ManagedAttribute
public int getIncomingPoolSize() {
    return thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)thread_pool).getPoolSize() : 0;
}
@ManagedAttribute
public long getIncomingKeepAliveTime() {
    return thread_pool instanceof ThreadPoolExecutor? ((ThreadPoolExecutor)thread_pool).getKeepAliveTime(TimeUnit.MILLISECONDS) : 0;
}
@ManagedAttribute
public void setIncomingKeepAliveTime(long time) {
    if(thread_pool instanceof ThreadPoolExecutor)
        ((ThreadPoolExecutor)thread_pool).setKeepAliveTime(time, TimeUnit.MILLISECONDS);
}
/** Number of regular (non-OOB) messages received so far. */
public long getIncomingMessages() {
    return num_incoming_msgs_received;
}
/** Current number of regular messages waiting in the queue (0 if no queue is in use). */
@ManagedAttribute
public int getIncomingQueueSize() {
    // thread_pool_queue stays null when thread_pool.enabled=false (init() only creates
    // a queue together with the pool); guard against NPE when queried via JMX.
    return thread_pool_queue != null? thread_pool_queue.size() : 0;
}
/** Configured capacity of the regular-message queue. */
public int getIncomingMaxQueueSize() {
    return thread_pool_queue_max_size;
}
// Controls whether warnings about messages arriving from a different cluster are logged.
public void setLogDiscardMessages(boolean flag) {
    log_discard_msgs=flag;
}
/** @return true if warnings about messages from a different cluster are logged */
public boolean getLogDiscardMessages() {
    return log_discard_msgs;
}
/**
 * Collects the transport's message/byte counters into a map, merged with whatever the
 * superclass reports.
 * @return map of statistic name to value; never null
 */
public Map<String,Object> dumpStats() {
    Map<String,Object> retval=super.dumpStats();
    if(retval == null)
        retval=new HashMap<String,Object>();
    // Long.valueOf() uses the boxed-value cache; 'new Long(...)' allocates unconditionally
    retval.put("num_msgs_sent", Long.valueOf(num_msgs_sent));
    retval.put("num_msgs_received", Long.valueOf(num_msgs_received));
    retval.put("num_bytes_sent", Long.valueOf(num_bytes_sent));
    retval.put("num_bytes_received", Long.valueOf(num_bytes_received));
    return retval;
}
/**
 * Send to all members in the group. UDP would use an IP multicast message, whereas TCP would send N
 * messages, one for each member
 * @param data The data to be sent. This is not a copy, so don't modify it
 * @param offset offset into data at which the payload starts
 * @param length number of bytes to send
 * @throws Exception on transport-specific send failures
 */
public abstract void sendToAllMembers(byte[] data, int offset, int length) throws Exception;
/**
 * Send a unicast message to one single member.
 * @param dest Must be a non-null unicast address
 * @param data The data to be sent. This is not a copy, so don't modify it
 * @param offset offset into data at which the payload starts
 * @param length number of bytes to send
 * @throws Exception on transport-specific send failures
 */
public abstract void sendToSingleMember(Address dest, byte[] data, int offset, int length) throws Exception;
/** Returns transport-specific information (appended to diagnostic probe responses). */
public abstract String getInfo();
// Hooks invoked after a single message / a message list has been unmarshalled;
// presumably they set destination/source addresses on the message — confirm in subclasses.
public abstract void postUnmarshalling(Message msg, Address dest, Address src, boolean multicast);
public abstract void postUnmarshallingList(Message msg, Address dest, boolean multicast);
/**
 * Builds a human-readable info block for the given channel: local address, cluster name,
 * version and current view, followed by transport-specific details from getInfo().
 */
private StringBuilder _getInfo(Channel ch) {
    Address addr=ch.getLocalAddress();
    String cluster=ch.getClusterName();
    StringBuilder sb=new StringBuilder();
    sb.append(addr).append(" (").append(cluster).append(") ").append("\n");
    sb.append("local_addr=").append(addr).append("\n");
    sb.append("group_name=").append(cluster).append("\n");
    sb.append("version=").append(Version.description).append(", cvs=\"").append(Version.cvs).append("\"\n");
    sb.append("view: ").append(ch.getView()).append('\n');
    sb.append(getInfo());
    return sb;
}
/**
 * Entry point for a diagnostic probe request. For a shared (singleton) transport the
 * request is handled once per cluster stack sitting on top of this transport; otherwise
 * it is handled against this protocol's own stack.
 */
private void handleDiagnosticProbe(SocketAddress sender, DatagramSocket sock, String request) {
    if(!isSingleton()) {
        handleDiagnosticProbe(sender, sock, request, stack);
        return;
    }
    for(Protocol prot: up_prots.values())
        handleDiagnosticProbe(sender, sock, request, prot.getProtocolStack());
}
/**
 * Handles one diagnostic probe request against the given protocol stack and sends the
 * textual response back to the requester. Supported request form:
 * "query [jmx] [props] [info] [dump]".
 * All errors are caught and logged; no exception escapes to the receiver loop.
 */
private void handleDiagnosticProbe(SocketAddress sender, DatagramSocket sock, String request, ProtocolStack stack) {
    try {
        StringTokenizer tok=new StringTokenizer(request);
        String req=tok.nextToken();
        StringBuilder info=new StringBuilder("n/a");
        if(req.trim().toLowerCase().startsWith("query")) {
            ArrayList<String> l=new ArrayList<String>(tok.countTokens());
            while(tok.hasMoreTokens())
                l.add(tok.nextToken().trim().toLowerCase());
            // Fetch the channel once and null-check consistently: the original code
            // guarded only the "jmx" branch, so a stack without a channel NPE'd here
            // and in the "info" branch before any response could be sent.
            Channel ch=stack.getChannel();
            if(ch != null)
                info=_getInfo(ch);
            if(l.contains("jmx")) {
                if(ch != null) {
                    Map<String,Object> m=ch.dumpStats();
                    StringBuilder sb=new StringBuilder();
                    sb.append("stats:\n");
                    for(Iterator<Entry<String,Object>> it=m.entrySet().iterator(); it.hasNext();) {
                        sb.append(it.next()).append("\n");
                    }
                    info.append(sb);
                }
            }
            if(l.contains("props")) {
                String p=stack.printProtocolSpec(true);
                info.append("\nprops:\n").append(p);
            }
            if(l.contains("info") && ch != null) {
                Map<String, Object> tmp=ch.getInfo();
                info.append("INFO:\n");
                for(Map.Entry<String,Object> entry: tmp.entrySet()) {
                    info.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
                }
            }
            if(l.contains("dump")) {
                info.append("\nstack trace:\n");
                info.append(Util.dumpThreads());
            }
        }
        byte[] diag_rsp=info.toString().getBytes();
        if(log.isDebugEnabled())
            log.debug("sending diag response to " + sender);
        sendResponse(sock, sender, diag_rsp);
    }
    catch(Throwable t) {
        if(log.isErrorEnabled())
            log.error("failed sending diag rsp to " + sender, t);
    }
}
/**
 * Sends {@code buf} as a single datagram back to {@code sender} over the given socket.
 */
private static void sendResponse(DatagramSocket sock, SocketAddress sender, byte[] buf) throws IOException {
    DatagramPacket rsp=new DatagramPacket(buf, 0, buf.length, sender);
    sock.send(rsp);
}
/* ------------------------------------------------------------------------------- */
/*------------------------------ Protocol interface ------------------------------ */
/**
 * Initializes thread factories, the timer, the marshalling output stream and the OOB and
 * regular thread pools. No sockets are created here; sockets and receiver threads are
 * set up in start().
 * @throws Exception propagated from super.init()
 */
public void init() throws Exception {
    super.init();
    // Create the default thread factory
    global_thread_factory=new DefaultThreadFactory(Util.getGlobalThreadGroup(), "", false);
    // Create the timer and the associated thread factory - depends on singleton_name
    // timer_thread_factory=new DefaultThreadFactory(Util.getGlobalThreadGroup(), "Timer", true, true);
    timer_thread_factory=new LazyThreadFactory(Util.getGlobalThreadGroup(), "Timer", true, true);
    if(isSingleton()) {
        // a shared transport serves several clusters, so a single cluster name in timer
        // thread names would be misleading
        timer_thread_factory.setIncludeClusterName(false);
    }
    default_thread_factory=new DefaultThreadFactory(pool_thread_group, "Incoming", false, true);
    oob_thread_factory=new DefaultThreadFactory(pool_thread_group, "OOB", false, true);
    setInAllThreadFactories(channel_name, local_addr, thread_naming_pattern);
    timer=new TimeScheduler(timer_thread_factory, num_timer_threads);
    verifyRejectionPolicy(oob_thread_pool_rejection_policy);
    verifyRejectionPolicy(thread_pool_rejection_policy);
    out_stream=new ExposedByteArrayOutputStream(INITIAL_BUFSIZE);
    dos=new ExposedDataOutputStream(out_stream);
    // ========================================== OOB thread pool ==============================
    if(oob_thread_pool_enabled) {
        if(oob_thread_pool_queue_enabled)
            oob_thread_pool_queue=new LinkedBlockingQueue<Runnable>(oob_thread_pool_queue_max_size);
        else
            oob_thread_pool_queue=new SynchronousQueue<Runnable>();
        oob_thread_pool=createThreadPool(oob_thread_pool_min_threads, oob_thread_pool_max_threads, oob_thread_pool_keep_alive_time,
                                         oob_thread_pool_rejection_policy, oob_thread_pool_queue, oob_thread_factory);
    }
    else { // otherwise use the caller's thread to unmarshal the byte buffer into a message
        oob_thread_pool=new DirectExecutor();
    }
    // ====================================== Regular thread pool ===========================
    if(thread_pool_enabled) {
        if(thread_pool_queue_enabled)
            thread_pool_queue=new LinkedBlockingQueue<Runnable>(thread_pool_queue_max_size);
        else
            thread_pool_queue=new SynchronousQueue<Runnable>();
        thread_pool=createThreadPool(thread_pool_min_threads, thread_pool_max_threads, thread_pool_keep_alive_time,
                                     thread_pool_rejection_policy, thread_pool_queue, default_thread_factory);
    }
    else { // otherwise use the caller's thread to unmarshal the byte buffer into a message
        thread_pool=new DirectExecutor();
    }
    if(persistent_ports){
        pm = new PortsManager(pm_expiry_time,persistent_ports_file);
    }
    if(bind_addr != null) {
        Map<String, Object> m=new HashMap<String, Object>(1);
        m.put("bind_addr", bind_addr);
        // propagate the bind address up the stack so other protocols can use it
        up(new Event(Event.CONFIG, m));
    }
}
/**
 * Destroys the transport: stops the timer and shuts down the OOB and regular
 * thread pools (only if they are real pools; DirectExecutors need no shutdown).
 */
public void destroy() {
    super.destroy();
    if(timer != null) {
        try {
            timer.stop();
        }
        catch(InterruptedException e) {
            log.error("failed stopping the timer", e);
        }
    }
    // 3. Stop the thread pools
    if(oob_thread_pool instanceof ThreadPoolExecutor) {
        shutdownThreadPool(oob_thread_pool);
        oob_thread_pool=null;
    }
    if(thread_pool instanceof ThreadPoolExecutor) {
        shutdownThreadPool(thread_pool);
        thread_pool=null;
    }
}
/**
 * Creates the unicast and multicast sockets and starts the unicast and multicast
 * receiver threads. Also starts the diagnostics handler, the (legacy,
 * non-concurrent-stack) incoming packet/message handler threads, and the
 * bundler, depending on configuration.
 *
 * @throws Exception if the timer was not created by init()
 */
public void start() throws Exception {
    if(timer == null)
        throw new Exception("timer is null");
    if(enable_diagnostics) {
        diag_handler=new DiagnosticsHandler();
        diag_handler.start();
    }
    // packet/message handler threads are only used with the old (non-concurrent) stack
    if(use_incoming_packet_handler && !use_concurrent_stack) {
        incoming_packet_queue=new Queue();
        incoming_packet_handler=new IncomingPacketHandler();
        incoming_packet_handler.start();
    }
    if(loopback && !use_concurrent_stack) {
        incoming_msg_queue=new Queue();
        incoming_msg_handler=new IncomingMessageHandler();
        incoming_msg_handler.start();
    }
    if(enable_bundling) {
        bundler=new Bundler();
    }
    setInAllThreadFactories(channel_name, local_addr, thread_naming_pattern);
    sendUpLocalAddressEvent();
}
/**
 * Stops the diagnostics handler and the legacy incoming packet/message
 * handler threads (if they were started).
 */
public void stop() {
    if(diag_handler != null) {
        diag_handler.stop();
        diag_handler=null;
    }
    // 1. Stop the incoming packet handler thread
    if(incoming_packet_handler != null)
        incoming_packet_handler.stop();
    // 2. Stop the incoming message handler
    if(incoming_msg_handler != null)
        incoming_msg_handler.stop();
}
/** Increments the connect counter; invoked when a CONNECT event travels down. */
protected void handleConnect() throws Exception {
    connect_count++;
}
/** Decrements the connect counter, clamped so it never goes below zero. */
protected void handleDisconnect() {
    connect_count=Math.max(0, connect_count -1);
}
/** Returns the name under which this transport is shared, or null/empty if not shared. */
public String getSingletonName() {
    return singleton_name;
}
/** A transport is a singleton (shared by several channels) iff singleton_name is a non-empty string. */
public boolean isSingleton(){
    return singleton_name != null && singleton_name.length() >0;
}
/**
 * Handles an event travelling up the stack. CONFIG events are both forwarded
 * and consumed locally via handleConfigEvent(); all other events are simply
 * forwarded to the protocol above (or to all protocols above, for a shared
 * transport).
 *
 * @param evt the event being sent up from the stack
 */
public Object up(Event evt) {
    switch(evt.getType()) {
    case Event.CONFIG:
        if(isSingleton())
            passToAllUpProtocols(evt);
        else
            up_prot.up(evt);
        if(log.isDebugEnabled()) log.debug("received CONFIG event: " + evt.getArg());
        handleConfigEvent((Map<String,Object>)evt.getArg()); // unchecked cast: CONFIG events carry a Map arg
        return null;
    }
    if(isSingleton()) {
        passToAllUpProtocols(evt);
        return null;
    }
    else
        return up_prot.up(evt);
}
/**
 * Called by the layer above this layer. Non-MSG events are dispatched to
 * handleDownEvent(). For messages: the transport header is added, the source
 * address stamped, an optional loopback copy dispatched to a thread pool
 * (JGRP-506), and the message finally serialized and sent.
 * Usually we just put this Message into the send queue and let one or more
 * worker threads handle it.
 */
public Object down(Event evt) {
    if(evt.getType() != Event.MSG) { // unless it is a message handle it and respond
        return handleDownEvent(evt);
    }
    Message msg=(Message)evt.getArg();
    if(header != null) {
        // added patch by Roland Kurmann (March 20 2003)
        // msg.putHeader(name, new TpHeader(channel_name));
        msg.putHeaderIfAbsent(name, header);
    }
    setSourceAddress(msg); // very important !! listToBuffer() will fail with a null src address !!
    if(log.isTraceEnabled()) {
        log.trace("sending msg to " + msg.getDest() + ", src=" + msg.getSrc() + ", headers are " + msg.printHeaders());
    }
    // Don't send if destination is local address. Instead, switch dst and src and put in up_queue.
    // If multicast message, loopback a copy directly to us (but still multicast). Once we receive this,
    // we will discard our own multicast message
    Address dest=msg.getDest();
    boolean multicast=dest == null || dest.isMulticastAddress();
    if(loopback && (multicast || dest.equals(local_addr))) {
        // we *have* to make a copy, or else up_prot.up() might remove headers from msg which will then *not*
        // be available for marshalling further down (when sending the message)
        final Message copy=msg.copy();
        if(log.isTraceEnabled()) log.trace(new StringBuilder("looping back message ").append(copy));
        // up_prot.up(new Event(Event.MSG, copy));
        // changed to fix http://jira.jboss.com/jira/browse/JGRP-506
        Executor pool=msg.isFlagSet(Message.OOB)? oob_thread_pool : thread_pool;
        pool.execute(new Runnable() {
            public void run() {
                passMessageUp(copy, false);
            }
        });
        if(!multicast)
            return null; // unicast to self: fully handled by the loopback above, nothing to put on the wire
    }
    try {
        send(msg, dest, multicast);
    }
    catch(InterruptedException interruptedEx) {
        Thread.currentThread().interrupt(); // let someone else handle the interrupt
    }
    catch(Throwable e) {
        if(log.isErrorEnabled()) {
            String dst=msg.getDest() == null? "null" : msg.getDest().toString();
            log.error("failed sending message to " + dst + " (" + msg.size() + " bytes)", e);
        }
    }
    return null;
}
/*--------------------------- End of Protocol interface -------------------------- */
/* ------------------------------ Private Methods -------------------------------- */
/**
* If the sender is null, set our own address. We cannot just go ahead and set the address
* anyway, as we might be sending a message on behalf of someone else ! E.g. in case of
* retransmission, when the original sender has crashed, or in a FLUSH protocol when we
* have to return all unstable messages with the FLUSH_OK response.
*/
/** Stamps our own address as the message source iff no source is set yet. */
private void setSourceAddress(Message msg) {
    if(msg.getSrc() != null)
        return; // already set, e.g. when sending on behalf of another (crashed) member
    msg.setSrc(local_addr);
}
/**
 * Delivers a received message to the protocol(s) above. For a shared transport
 * the message is routed to the protocol registered for the cluster name carried
 * in its transport header; for a regular transport the cluster name is
 * (optionally) matched against ours and mismatches are discarded.
 *
 * @param msg the received message
 * @param perform_cluster_name_matching whether to discard messages whose
 *        header cluster name differs from channel_name (loopback copies skip this)
 */
private void passMessageUp(Message msg, boolean perform_cluster_name_matching) {
    TpHeader hdr=(TpHeader)msg.getHeader(name); // replaced removeHeader() with getHeader()
    if(hdr == null) {
        // no transport header: only acceptable when we are not connected to a cluster yet
        if(channel_name == null) {
            Event evt=new Event(Event.MSG, msg);
            if(isSingleton()) {
                passMessageToAll(evt);
            }
            else {
                up_prot.up(evt);
            }
        }
        else {
            if(log.isErrorEnabled())
                log.error(new StringBuilder("message does not have a transport header, msg is ").append(msg).
                        append(", headers are ").append(msg.printHeaders()).append(", will be discarded"));
        }
        return;
    }
    String ch_name=hdr.channel_name;
    if(isSingleton()) {
        Protocol tmp_prot=up_prots.get(ch_name);
        if(tmp_prot != null) {
            Event evt=new Event(Event.MSG, msg);
            if(log.isTraceEnabled()) {
                StringBuilder sb=new StringBuilder("message is ").append(msg).append(", headers are ").append(msg.printHeaders());
                log.trace(sb);
            }
            tmp_prot.up(evt);
        }
        else {
            // we discard messages for a group we don't have. If we had a scenario with channel C1 and A,B on it,
            // and channel C2 and only A on it (asymmetric setup), then C2 would always log warnings that B was
            // not found (Jan 25 2008 (bela))
            // if(log.isWarnEnabled())
            // log.warn(new StringBuilder("discarded message from group \"").append(ch_name).
            // append("\" (our groups are ").append(up_prots.keySet()).append("). Sender was ").append(msg.getSrc()));
        }
    }
    else {
        // Discard if message's group name is not the same as our group name
        if(perform_cluster_name_matching && channel_name != null && !channel_name.equals(ch_name)) {
            if(log.isWarnEnabled() && log_discard_msgs)
                log.warn(new StringBuilder("discarded message from different group \"").append(ch_name).
                        append("\" (our group is \"").append(channel_name).append("\"). Sender was ").append(msg.getSrc()));
        }
        else {
            Event evt=new Event(Event.MSG, msg);
            if(log.isTraceEnabled()) {
                StringBuilder sb=new StringBuilder("message is ").append(msg).append(", headers are ").append(msg.printHeaders());
                log.trace(sb);
            }
            up_prot.up(evt);
        }
    }
}
/**
 * Passes the given event to every protocol on top of this shared transport.
 * A failure in one protocol is logged and does not prevent delivery to the rest.
 */
private void passMessageToAll(Event evt) {
    for(Protocol tmp_prot: up_prots.values()) {
        try {
            tmp_prot.up(evt);
        }
        catch(Exception ex) {
            if(log.isErrorEnabled())
                log.error("failure passing message up: message is " + evt.getArg(), ex);
        }
    }
}
/**
 * Subclasses must call this method when a unicast or multicast message has been received.
 * Declared final so subclasses cannot override this method.
 * Peeks at the OOB flag in the raw buffer and dispatches to the OOB or regular
 * thread pool (concurrent stack), or to the packet queue / inline handler
 * (legacy stack).
 *
 * @param dest destination address of the packet (null presumably means multicast — see mcast check below)
 * @param sender address the packet came from
 * @param data raw packet bytes; may be reused by the caller after this returns
 * @param offset offset of the payload within data
 * @param length payload length
 */
protected final void receive(Address dest, Address sender, byte[] data, int offset, int length) {
    if(data == null) return;
    if(log.isTraceEnabled()){
        boolean mcast=dest == null || dest.isMulticastAddress();
        StringBuilder sb=new StringBuilder("received (");
        sb.append(mcast? "mcast) " : "ucast) ").append(length).append(" bytes from ").append(sender);
        log.trace(sb);
    }
    try {
        // determine whether OOB or not by looking at first byte of 'data'
        boolean oob=false;
        byte oob_flag=data[Global.SHORT_SIZE]; // we need to skip the first 2 bytes (version)
        if((oob_flag & OOB) == OOB)
            oob=true;
        if(use_concurrent_stack) {
            if(oob) {
                num_oob_msgs_received++;
                dispatchToThreadPool(oob_thread_pool, dest, sender, data, offset, length);
            }
            else {
                num_incoming_msgs_received++;
                dispatchToThreadPool(thread_pool, dest, sender, data, offset, length);
            }
        }
        else {
            if(use_incoming_packet_handler) {
                // copy the buffer: the caller's receive buffer will be reused
                byte[] tmp=new byte[length];
                System.arraycopy(data, offset, tmp, 0, length);
                incoming_packet_queue.add(new IncomingPacket(dest, sender, tmp, 0, length));
            }
            else
                handleIncomingPacket(dest, sender, data, offset, length);
        }
    }
    catch(Throwable t) {
        if(log.isErrorEnabled())
            log.error(new StringBuilder("failed handling data from ").append(sender), t);
    }
}
/**
 * Hands a received packet to the given executor. A DirectExecutor runs the
 * packet on the caller's thread, so the buffer can be used as-is; a real pool
 * gets a copy of the relevant bytes, since the receive buffer will be reused.
 */
private void dispatchToThreadPool(Executor pool, Address dest, Address sender, byte[] data, int offset, int length) {
    final boolean runs_on_caller_thread=pool instanceof DirectExecutor;
    if(runs_on_caller_thread) {
        // we don't make a copy of the buffer if we execute on this thread
        pool.execute(new IncomingPacket(dest, sender, data, offset, length));
        return;
    }
    byte[] copy=new byte[length];
    System.arraycopy(data, offset, copy, 0, length);
    pool.execute(new IncomingPacket(dest, sender, copy, 0, length));
}
/**
 * Processes a packet read from either the multicast or unicast socket:
 * verifies the version, reads the flags byte, then unmarshals either a single
 * message or a bundled message list and dispatches / queues the result.
 * Correction (bela April 19 2005): we access no instance variables, all vars are allocated on the stack, so
 * this method should be reentrant: removed 'synchronized' keyword (the shared
 * in_stream/dis pair is still guarded below, as mcast and ucast socket reads
 * can be concurrent).
 */
private void handleIncomingPacket(Address dest, Address sender, byte[] data, int offset, int length) {
    Message msg=null;
    short version=0;
    boolean is_message_list, multicast;
    byte flags;
    List<Message> msgs;
    try {
        synchronized(in_stream) { // in_stream/dis are shared by concurrent receiver threads
            in_stream.setData(data, offset, length);
            try {
                version=dis.readShort();
            }
            catch(IOException ex) {
                if(discard_incompatible_packets)
                    return;
                throw ex;
            }
            if(Version.isBinaryCompatible(version) == false) {
                if(log.isWarnEnabled()) {
                    StringBuilder sb=new StringBuilder();
                    sb.append("packet from ").append(sender).append(" has different version (").append(Version.print(version));
                    sb.append(") from ours (").append(Version.printVersion()).append("). ");
                    if(discard_incompatible_packets)
                        sb.append("Packet is discarded");
                    else
                        sb.append("This may cause problems");
                    log.warn(sb);
                }
                if(discard_incompatible_packets)
                    return;
            }
            flags=dis.readByte();
            is_message_list=(flags & LIST) == LIST;
            multicast=(flags & MULTICAST) == MULTICAST;
            if(is_message_list)
                msgs=readMessageList(dis, dest, multicast);
            else {
                // single message: wrap it in a list so the code below is uniform
                msg=readMessage(dis, dest, sender, multicast);
                msgs=new LinkedList<Message>();
                msgs.add(msg);
            }
        }
        Address src;
        for(Iterator<Message> it=msgs.iterator(); it.hasNext();) {
            msg=it.next();
            src=msg.getSrc();
            if(loopback) {
                if(multicast && src != null && local_addr.equals(src)) { // discard own loopback multicast packets
                    it.remove();
                }
            }
            else
                handleIncomingMessage(msg);
        }
        // with loopback enabled, surviving messages go through the incoming message queue
        if(incoming_msg_queue != null && !msgs.isEmpty())
            incoming_msg_queue.addAll(msgs);
    }
    catch(Throwable t) {
        if(log.isErrorEnabled())
            log.error("failed unmarshalling message", t);
    }
}
/** Updates receive statistics (if enabled) and passes the message up with cluster-name matching. */
private void handleIncomingMessage(Message msg) {
    if(stats) {
        num_msgs_received++;
        num_bytes_received+=msg.getLength();
    }
    passMessageUp(msg, true);
}
/** Internal method to serialize and send a message. This method is not reentrant:
 * out_stream/dos are shared, so exclusive access is enforced via out_stream_lock.
 * Regular (non-OOB) messages may be handed to the bundler instead of being sent directly. */
private void send(Message msg, Address dest, boolean multicast) throws Exception {
    // bundle only regular messages; send OOB messages directly
    if(enable_bundling && !msg.isFlagSet(Message.OOB)) {
        if(!enable_unicast_bundling && !multicast) {
            ; // don't bundle unicast msgs if enable_unicast_bundling is off (http://jira.jboss.com/jira/browse/JGRP-429)
        }
        else {
            bundler.send(msg, dest);
            return;
        }
    }
    out_stream_lock.lock();
    try {
        out_stream.reset();
        dos.reset();
        writeMessage(msg, dos, multicast);
        Buffer buf=new Buffer(out_stream.getRawBuffer(), 0, out_stream.size());
        doSend(buf, dest, multicast);
    }
    finally {
        out_stream_lock.unlock();
    }
}
/**
 * Updates send statistics (if enabled) and hands the serialized buffer to the
 * subclass transport: to all members for multicast, to a single member otherwise.
 */
private void doSend(Buffer buf, Address dest, boolean multicast) throws Exception {
    if(stats) {
        num_msgs_sent++;
        num_bytes_sent+=buf.getLength();
    }
    if(multicast) {
        sendToAllMembers(buf.getBuf(), buf.getOffset(), buf.getLength());
    }
    else {
        sendToSingleMember(dest, buf.getBuf(), buf.getOffset(), buf.getLength());
    }
}
/**
 * Serializes a single message: writes the version short, then a flags byte
 * (MULTICAST and/or OOB), then the message itself.
 * Callers must guarantee exclusive access to the stream behind {@code dos}
 * (send() holds out_stream_lock while calling this).
 *
 * @param msg the message to serialize
 * @param dos the output stream to write to
 * @param multicast whether the message is a multicast (sets the MULTICAST flag)
 * @throws Exception if writing fails
 */
private static void writeMessage(Message msg, DataOutputStream dos, boolean multicast) throws Exception {
    byte flags=0;
    dos.writeShort(Version.version); // write the version
    if(multicast)
        flags+=MULTICAST;
    if(msg.isFlagSet(Message.OOB))
        flags+=OOB;
    dos.writeByte(flags);
    msg.writeTo(dos);
}
/**
 * Unmarshals a single message from the stream and lets the subclass post-process
 * it (e.g. fill in addresses) via postUnmarshalling().
 */
private Message readMessage(DataInputStream instream, Address dest, Address sender, boolean multicast) throws Exception {
    Message msg=new Message(false); // don't create headers, readFrom() will do this
    msg.readFrom(instream);
    postUnmarshalling(msg, dest, sender, multicast); // allows for optimization by subclass
    return msg;
}
/**
 * Serializes a bundled message list: version short, flags byte (LIST and
 * optionally MULTICAST), the list length, then the shared source address
 * (written once, before the first message) followed by each message.
 */
private static void writeMessageList(List<Message> msgs, DataOutputStream dos, boolean multicast) throws Exception {
    Address src;
    byte flags=0;
    int len=msgs != null? msgs.size() : 0;
    boolean src_written=false;
    dos.writeShort(Version.version);
    flags+=LIST;
    if(multicast)
        flags+=MULTICAST;
    dos.writeByte(flags);
    dos.writeInt(len);
    if(msgs != null) {
        for(Message msg: msgs) {
            src=msg.getSrc();
            if(!src_written) {
                // all messages in a bundle share the same source: write it only once
                Util.writeAddress(src, dos);
                src_written=true;
            }
            msg.writeTo(dos);
        }
    }
}
/**
 * Unmarshals a bundled message list (inverse of writeMessageList()): reads the
 * length and the shared source address, then each message, stamping the shared
 * source onto every one.
 */
private List<Message> readMessageList(DataInputStream instream, Address dest, boolean multicast) throws Exception {
    List<Message> list=new LinkedList<Message>();
    int len;
    Message msg;
    Address src;
    len=instream.readInt();
    src=Util.readAddress(instream);
    for(int i=0; i < len; i++) {
        msg=new Message(false); // don't create headers, readFrom() will do this
        msg.readFrom(instream);
        postUnmarshallingList(msg, dest, multicast);
        msg.setSrc(src);
        list.add(msg);
    }
    return list;
}
/**
 * Handles non-message events travelling down: view changes (updates the member
 * list), connect/disconnect (sets/unsets cluster name, header and thread names)
 * and CONFIG events.
 *
 * @param evt the event to handle
 * @return always null (down events produce no result here)
 */
protected Object handleDownEvent(Event evt) {
    switch(evt.getType()) {
    case Event.TMP_VIEW:
    case Event.VIEW_CHANGE:
        synchronized(members) {
            view=(View)evt.getArg();
            members.clear();
            if(!isSingleton()) {
                Vector<Address> tmpvec=view.getMembers();
                members.addAll(tmpvec);
            }
            else {
                // shared transport: aggregate the members of all clusters on top of us
                for(Protocol prot: up_prots.values()) {
                    if(prot instanceof ProtocolAdapter) {
                        ProtocolAdapter ad=(ProtocolAdapter)prot;
                        List<Address> tmp=ad.getMembers();
                        members.addAll(tmp);
                    }
                }
            }
        }
        break;
    case Event.CONNECT:
    case Event.CONNECT_WITH_STATE_TRANSFER:
        channel_name=(String)evt.getArg();
        header=new TpHeader(channel_name);
        setInAllThreadFactories(channel_name, local_addr, thread_naming_pattern);
        setThreadNames();
        try {
            handleConnect();
        }
        catch(Exception e) {
            // handleConnect() is declared to throw, but callers of down() don't expect checked exceptions
            throw new RuntimeException(e);
        }
        return null;
    case Event.DISCONNECT:
        unsetThreadNames();
        handleDisconnect();
        break;
    case Event.CONFIG:
        if(log.isDebugEnabled()) log.debug("received CONFIG event: " + evt.getArg());
        handleConfigEvent((Map<String,Object>)evt.getArg());
        break;
    }
    return null;
}
/** Renames the handler threads (packet, message, diagnostics) via the global thread factory,
 * typically after connecting so the names carry cluster/address info. */
protected void setThreadNames() {
    if(incoming_packet_handler != null){
        global_thread_factory.renameThread(IncomingPacketHandler.THREAD_NAME, incoming_packet_handler.getThread());
    }
    if(incoming_msg_handler != null) {
        global_thread_factory.renameThread(IncomingMessageHandler.THREAD_NAME, incoming_msg_handler.getThread());
    }
    if(diag_handler != null) {
        global_thread_factory.renameThread(DiagnosticsHandler.THREAD_NAME, diag_handler.getThread());
    }
}
/** Restores the handler threads' plain base names (inverse of setThreadNames()). */
protected void unsetThreadNames() {
    if(incoming_packet_handler != null && incoming_packet_handler.getThread() != null)
        incoming_packet_handler.getThread().setName(IncomingPacketHandler.THREAD_NAME);
    if(incoming_msg_handler != null && incoming_msg_handler.getThread() != null)
        incoming_msg_handler.getThread().setName(IncomingMessageHandler.THREAD_NAME);
    if(diag_handler != null && diag_handler.getThread() != null)
        diag_handler.getThread().setName(DiagnosticsHandler.THREAD_NAME);
}
/**
 * Propagates the naming pattern, cluster name and local address to all thread
 * factories. For a shared transport the cluster name is omitted from thread
 * names, since the transport serves multiple clusters.
 */
private void setInAllThreadFactories(String cluster_name, Address local_address, String pattern) {
    ThreadFactory[] factories= { timer_thread_factory,
            default_thread_factory,
            oob_thread_factory,
            global_thread_factory };
    boolean is_shared_transport=isSingleton();
    for(ThreadFactory factory:factories) {
        if(pattern != null) {
            factory.setPattern(pattern);
            if(is_shared_transport)
                factory.setIncludeClusterName(false);
        }
        if(cluster_name != null && !is_shared_transport) // only set cluster name if we don't have a shared transport
            factory.setClusterName(cluster_name);
        if(local_address != null)
            factory.setAddress(local_address.toString());
    }
}
/**
 * Applies a CONFIG map. Currently only "additional_data" is honored: it is
 * stored and, for IpAddress local addresses, attached to the address itself.
 */
protected void handleConfigEvent(Map<String,Object> map) {
    if(map == null) return;
    if(map.containsKey("additional_data")) {
        additional_data=(byte[])map.get("additional_data");
        if(local_addr instanceof IpAddress)
            ((IpAddress)local_addr).setAdditionalData(additional_data);
    }
}
/**
 * Creates a thread pool with the given sizing, keep-alive time, queue and
 * thread factory. The rejection policy name is matched case-insensitively,
 * consistent with verifyRejectionPolicy(); null or unknown names fall back to
 * the caller-runs policy. The chosen handler is wrapped in a
 * ShutdownRejectedExecutionHandler.
 *
 * @param min_threads core pool size
 * @param max_threads maximum pool size
 * @param keep_alive_time idle thread keep-alive, in milliseconds
 * @param rejection_policy "abort", "discard", "discardoldest" or anything else for caller-runs
 * @param queue the work queue to use
 * @param factory the thread factory for pool threads
 * @return the configured pool
 */
protected static ExecutorService createThreadPool(int min_threads, int max_threads, long keep_alive_time, String rejection_policy,
                                                  BlockingQueue<Runnable> queue, final ThreadFactory factory) {
    ThreadPoolExecutor pool=new ThreadManagerThreadPoolExecutor(min_threads, max_threads, keep_alive_time, TimeUnit.MILLISECONDS, queue);
    pool.setThreadFactory(factory);
    //default
    RejectedExecutionHandler handler = new ThreadPoolExecutor.CallerRunsPolicy();
    if(rejection_policy != null) {
        // match case-insensitively: verifyRejectionPolicy() accepts e.g. "Abort", so a
        // case-sensitive equals() here would silently fall back to caller-runs
        if(rejection_policy.equalsIgnoreCase("abort"))
            handler = new ThreadPoolExecutor.AbortPolicy();
        else if(rejection_policy.equalsIgnoreCase("discard"))
            handler = new ThreadPoolExecutor.DiscardPolicy();
        else if(rejection_policy.equalsIgnoreCase("discardoldest"))
            handler = new ThreadPoolExecutor.DiscardOldestPolicy();
    }
    pool.setRejectedExecutionHandler(new ShutdownRejectedExecutionHandler(handler));
    return pool;
}
/**
 * Shuts down the given pool (if it is an ExecutorService) and waits a bounded
 * time for its termination. If the wait is interrupted, the interrupt status
 * is restored so callers up the stack can still observe it.
 */
private static void shutdownThreadPool(Executor thread_pool) {
    if(thread_pool instanceof ExecutorService) {
        ExecutorService service=(ExecutorService)thread_pool;
        service.shutdownNow();
        try {
            service.awaitTermination(Global.THREADPOOL_SHUTDOWN_WAIT_TIME, TimeUnit.MILLISECONDS);
        }
        catch(InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag instead of swallowing it
        }
    }
}
/**
 * Validates a rejection policy name (case-insensitive); accepted values are
 * "run", "abort", "discard" and "discardoldest".
 *
 * @throws Exception if the name is not one of the known policies
 */
private void verifyRejectionPolicy(String str) throws Exception{
    boolean known=str.equalsIgnoreCase("run")
            || str.equalsIgnoreCase("abort")
            || str.equalsIgnoreCase("discard")
            || str.equalsIgnoreCase("discardoldest");
    if(known)
        return;
    log.error("rejection policy of " + str + " is unknown");
    throw new Exception("Unknown rejection policy " + str);
}
/**
 * Forwards the given event to every protocol on top of this shared transport.
 * A failure in one protocol is logged and does not stop delivery to the others.
 */
protected void passToAllUpProtocols(Event evt) {
    for(Iterator<Protocol> it=up_prots.values().iterator(); it.hasNext();) {
        Protocol above=it.next();
        try {
            above.up(evt);
        }
        catch(Exception e) {
            if(log.isErrorEnabled())
                log.error("failed passing up event " + evt, e);
        }
    }
}
/**
 * Sends a SET_LOCAL_ADDRESS event up the stack: to the single protocol above
 * for a regular transport, or to every registered (non-dummy) protocol for a
 * shared transport.
 */
public void sendUpLocalAddressEvent() {
    if(up_prot != null)
        up(new Event(Event.SET_LOCAL_ADDRESS, local_addr));
    else {
        for(Map.Entry<String,Protocol> entry: up_prots.entrySet()) {
            String tmp=entry.getKey();
            if(tmp.startsWith(Global.DUMMY))
                continue;
            Protocol prot=entry.getValue();
            prot.up(new Event(Event.SET_LOCAL_ADDRESS, local_addr));
        }
    }
}
/* ----------------------------- End of Private Methods ---------------------------------------- */
/* ----------------------------- Inner Classes ---------------------------------------- */
/**
 * A received raw packet plus its addressing info, runnable on a thread pool.
 * run() unmarshals the buffer (version check, flags, single message or bundled
 * list) and passes the resulting message(s) up the stack.
 */
class IncomingPacket implements Runnable {
    Address dest=null;      // destination address of the packet
    Address sender=null;    // where the packet came from
    byte[] buf;             // raw packet bytes (may be a private copy, see dispatchToThreadPool())
    int offset, length;     // payload window within buf
    IncomingPacket(Address dest, Address sender, byte[] buf, int offset, int length) {
        this.dest=dest;
        this.sender=sender;
        this.buf=buf;
        this.offset=offset;
        this.length=length;
    }
    /** Code copied from handleIncomingPacket */
    public void run() {
        short version=0;
        boolean is_message_list, multicast;
        byte flags;
        ExposedByteArrayInputStream in_stream=null;
        DataInputStream dis=null;
        try {
            // local streams: unlike handleIncomingPacket(), no shared-stream locking needed
            in_stream=new ExposedByteArrayInputStream(buf, offset, length);
            dis=new DataInputStream(in_stream);
            try {
                version=dis.readShort();
            }
            catch(IOException ex) {
                if(discard_incompatible_packets)
                    return;
                throw ex;
            }
            if(Version.isBinaryCompatible(version) == false) {
                if(log.isWarnEnabled()) {
                    StringBuilder sb=new StringBuilder();
                    sb.append("packet from ").append(sender).append(" has different version (").append(Version.print(version));
                    sb.append(") from ours (").append(Version.printVersion()).append("). ");
                    if(discard_incompatible_packets)
                        sb.append("Packet is discarded");
                    else
                        sb.append("This may cause problems");
                    log.warn(sb);
                }
                if(discard_incompatible_packets)
                    return;
            }
            flags=dis.readByte();
            is_message_list=(flags & LIST) == LIST;
            multicast=(flags & MULTICAST) == MULTICAST;
            if(is_message_list) { // used if message bundling is enabled
                List<Message> msgs=readMessageList(dis, dest, multicast);
                for(Message msg: msgs) {
                    if(msg.isFlagSet(Message.OOB)) {
                        log.warn("bundled message should not be marked as OOB");
                    }
                    handleMyMessage(msg, multicast);
                }
            }
            else {
                Message msg=readMessage(dis, dest, sender, multicast);
                handleMyMessage(msg, multicast);
            }
        }
        catch(Throwable t) {
            if(log.isErrorEnabled())
                log.error("failed handling incoming message", t);
        }
    }
    /** Updates stats, drops our own looped-back multicasts, and passes the message up. */
    private void handleMyMessage(Message msg, boolean multicast) {
        if(stats) {
            num_msgs_received++;
            num_bytes_received+=msg.getLength();
        }
        Address src=msg.getSrc();
        if(loopback && multicast && src != null && src.equals(local_addr)) {
            return; // drop message that was already looped back and delivered
        }
        passMessageUp(msg, true);
    }
}
/**
 * This thread fetches byte buffers from the packet_queue, converts them into messages and passes them up
 * to the higher layer (done in handleIncomingUdpPacket()). Only used with the
 * legacy (non-concurrent) stack.
 */
class IncomingPacketHandler implements Runnable {
    public static final String THREAD_NAME="IncomingPacketHandler";
    Thread t=null; // the handler thread; null until start()
    Thread getThread(){
        return t;
    }
    /** Starts the handler thread (idempotent: only if none is alive). */
    void start() {
        if(t == null || !t.isAlive()) {
            t=global_thread_factory.newThread(this, THREAD_NAME);
            t.setDaemon(true);
            t.start();
        }
    }
    /** Closes the queue (which terminates run()) and waits briefly for the thread. */
    void stop() {
        incoming_packet_queue.close(true); // should terminate the packet_handler thread too
        if(t != null) {
            try {
                t.join(Global.THREAD_SHUTDOWN_WAIT_TIME);
            }
            catch(InterruptedException e) {
                Thread.currentThread().interrupt(); // set interrupt flag again
            }
        }
    }
    public void run() {
        IncomingPacket entry;
        // the thread-identity check lets a superseded thread exit after a restart
        while(!incoming_packet_queue.closed() && Thread.currentThread().equals(t)) {
            try {
                entry=(IncomingPacket)incoming_packet_queue.remove();
                handleIncomingPacket(entry.dest, entry.sender, entry.buf, entry.offset, entry.length);
            }
            catch(QueueClosedException closed_ex) {
                break;
            }
            catch(Throwable ex) {
                if(log.isErrorEnabled())
                    log.error("error processing incoming packet", ex);
            }
        }
        if(log.isTraceEnabled()) log.trace("incoming packet handler terminating");
    }
}
/**
 * Drains the incoming message queue (filled by handleIncomingPacket() when
 * loopback is enabled on the legacy stack) and delivers each message via
 * handleIncomingMessage().
 */
class IncomingMessageHandler implements Runnable {
    public static final String THREAD_NAME = "IncomingMessageHandler";
    Thread t; // the handler thread; null until start()
    Thread getThread(){
        return t;
    }
    /** Starts the handler thread (idempotent: only if none is alive). */
    public void start() {
        if(t == null || !t.isAlive()) {
            t=global_thread_factory.newThread(this, THREAD_NAME);
            t.setDaemon(true);
            t.start();
        }
    }
    /** Closes the queue (which terminates run()) and waits briefly for the thread. */
    public void stop() {
        incoming_msg_queue.close(true);
        if(t != null) {
            try {
                t.join(Global.THREAD_SHUTDOWN_WAIT_TIME);
            }
            catch(InterruptedException e) {
                Thread.currentThread().interrupt(); // set interrupt flag again
            }
        }
    }
    public void run() {
        Message msg;
        // the thread-identity check lets a superseded thread exit after a restart
        while(!incoming_msg_queue.closed() && Thread.currentThread().equals(t)) {
            try {
                msg=(Message)incoming_msg_queue.remove();
                handleIncomingMessage(msg);
            }
            catch(QueueClosedException closed_ex) {
                break;
            }
            catch(Throwable ex) {
                if(log.isErrorEnabled())
                    log.error("error processing incoming message", ex);
            }
        }
        if(log.isTraceEnabled()) log.trace("incoming message handler terminating");
    }
}
/**
 * Accumulates outgoing regular messages per destination and flushes them as
 * bundled message lists, either when the accumulated size would exceed
 * max_bundle_size or when a scheduled BundlingTimer fires after
 * max_bundle_timeout. All mutable state is guarded by {@code lock}.
 */
private class Bundler {
    static final int MIN_NUMBER_OF_BUNDLING_TASKS=2;
    /** HashMap<Address, List<Message>>. Keys are destinations, values are lists of Messages */
    final Map<Address,List<Message>> msgs=new HashMap<Address,List<Message>>(36);
    @GuardedBy("lock")
    long count=0; // current number of bytes accumulated
    int num_msgs=0; // number of messages accumulated (reset when trace-logged)
    @GuardedBy("lock")
    int num_bundling_tasks=0; // number of BundlingTimer tasks currently scheduled
    long last_bundle_time; // when the current bundle started accumulating (for trace logging)
    final ReentrantLock lock=new ReentrantLock();
    // private marshalling streams, separate from the transport's out_stream/dos
    final ExposedByteArrayOutputStream bundler_out_stream=new ExposedByteArrayOutputStream(INITIAL_BUFSIZE);
    final ExposedDataOutputStream bundler_dos=new ExposedDataOutputStream(bundler_out_stream);
    /**
     * Queues a message for bundling. Flushes the pending bundle first if adding
     * this message would exceed max_bundle_size, and makes sure at least one
     * timeout-based flush task is scheduled.
     *
     * @throws Exception if the single message is already larger than max_bundle_size
     */
    private void send(Message msg, Address dest) throws Exception {
        long length=msg.size();
        checkLength(length);
        lock.lock();
        try {
            if(count + length >= max_bundle_size) {
                if(!msgs.isEmpty()) {
                    sendBundledMessages(msgs);
                }
            }
            addMessage(msg, dest);
            count+=length;
            if(num_bundling_tasks < MIN_NUMBER_OF_BUNDLING_TASKS) {
                num_bundling_tasks++;
                timer.schedule(new BundlingTimer(), max_bundle_timeout, TimeUnit.MILLISECONDS);
            }
        }
        finally {
            lock.unlock();
        }
    }
    /** Run with lock acquired */
    private void addMessage(Message msg, Address dest) { // no sync needed, always called with lock held
        if(msgs.isEmpty())
            last_bundle_time=System.currentTimeMillis();
        List<Message> tmp=msgs.get(dest);
        if(tmp == null) {
            tmp=new LinkedList<Message>();
            msgs.put(dest, tmp);
        }
        tmp.add(msg);
        num_msgs++;
    }
    /**
     * Sends all messages from the map, all messages for the same destination are bundled into 1 message.
     * This method may be called by timer and bundler concurrently
     * (both callers hold {@code lock} when invoking it).
     * @param msgs
     */
    private void sendBundledMessages(final Map<Address,List<Message>> msgs) {
        boolean multicast;
        Buffer buffer;
        Address dst;
        if(log.isTraceEnabled()) {
            long stop=System.currentTimeMillis();
            double percentage=100.0 / max_bundle_size * count;
            StringBuilder sb=new StringBuilder("sending ").append(num_msgs).append(" msgs (");
            num_msgs=0;
            sb.append(count).append(" bytes (" + f.format(percentage) + "% of max_bundle_size)");
            if(last_bundle_time > 0) {
                sb.append(", collected in ").append(stop-last_bundle_time).append("ms) ");
            }
            sb.append(" to ").append(msgs.size()).append(" destination(s)");
            if(msgs.size() > 1) sb.append(" (dests=").append(msgs.keySet()).append(")");
            log.trace(sb);
        }
        for(Map.Entry<Address,List<Message>> entry: msgs.entrySet()) {
            List<Message> list=entry.getValue();
            if(list.isEmpty())
                continue;
            dst=entry.getKey();
            multicast=dst == null || dst.isMulticastAddress();
            try {
                bundler_out_stream.reset();
                bundler_dos.reset();
                writeMessageList(list, bundler_dos, multicast); // flushes output stream when done
                buffer=new Buffer(bundler_out_stream.getRawBuffer(), 0, bundler_out_stream.size());
                doSend(buffer, dst, multicast);
            }
            catch(Throwable e) {
                if(log.isErrorEnabled()) log.error("exception sending msg: " + e.toString(), e.getCause());
            }
        }
        msgs.clear();
        count=0;
    }
    /** Rejects messages that can never fit into a bundle on their own. */
    private void checkLength(long len) throws Exception {
        if(len > max_bundle_size)
            throw new Exception("message size (" + len + ") is greater than max bundling size (" + max_bundle_size +
                    "). Set the fragmentation/bundle size in FRAG and TP correctly");
    }
    /** Timeout-driven flush: sends whatever has accumulated when the bundle timeout fires. */
    private class BundlingTimer implements Runnable {
        public void run() {
            lock.lock();
            try {
                if(!msgs.isEmpty()) {
                    sendBundledMessages(msgs);
                }
            }
            finally {
                num_bundling_tasks--;
                lock.unlock();
            }
        }
    }
}
/**
 * Listens on a multicast socket (joined on all available interfaces) for
 * diagnostic probe datagrams and hands each received probe to
 * handleDiagnosticProbe().
 */
private class DiagnosticsHandler implements Runnable {
    public static final String THREAD_NAME = "DiagnosticsHandler";
    Thread thread=null;           // receiver thread; null until start()
    MulticastSocket diag_sock=null; // socket the probes arrive on
    DiagnosticsHandler() {
    }
    Thread getThread(){
        return thread;
    }
    /** Opens the diagnostics socket, joins the diagnostics group on all interfaces and starts the receiver thread. */
    void start() throws IOException {
        diag_sock=new MulticastSocket(diagnostics_port);
        // diag_sock=Util.createMulticastSocket(null, diagnostics_port, log);
        List<NetworkInterface> interfaces=Util.getAllAvailableInterfaces();
        bindToInterfaces(interfaces, diag_sock);
        if(thread == null || !thread.isAlive()) {
            thread=global_thread_factory.newThread(this, THREAD_NAME);
            thread.setDaemon(true);
            thread.start();
        }
    }
    /** Closes the socket (which terminates run()) and waits briefly for the thread. */
    void stop() {
        if(diag_sock != null)
            diag_sock.close();
        if(thread != null){
            try{
                thread.join(Global.THREAD_SHUTDOWN_WAIT_TIME);
            }
            catch(InterruptedException e){
                Thread.currentThread().interrupt(); // set interrupt flag
            }
        }
    }
    public void run() {
        byte[] buf=new byte[1500]; // MTU on most LANs
        DatagramPacket packet;
        while(!diag_sock.isClosed() && Thread.currentThread().equals(thread)) {
            packet=new DatagramPacket(buf, 0, buf.length);
            try {
                diag_sock.receive(packet);
                handleDiagnosticProbe(packet.getSocketAddress(), diag_sock,
                        new String(packet.getData(), packet.getOffset(), packet.getLength()));
            }
            catch(IOException e) {
                // expected when stop() closes the socket; loop condition will exit
            }
        }
    }
    /** Joins the diagnostics multicast group on every interface that has at least one address. */
    private void bindToInterfaces(List<NetworkInterface> interfaces, MulticastSocket s) {
        SocketAddress group_addr=new InetSocketAddress(diagnostics_addr, diagnostics_port);
        for(Iterator<NetworkInterface> it=interfaces.iterator(); it.hasNext();) {
            NetworkInterface i=it.next();
            try {
                if (i.getInetAddresses().hasMoreElements()) { // fix for VM crash - suggested by [email protected]
                    s.joinGroup(group_addr, i);
                    if(log.isTraceEnabled())
                        log.trace("joined " + group_addr + " on " + i.getName());
                }
            }
            catch(IOException e) {
                log.warn("failed to join " + group_addr + " on " + i.getName() + ": " + e);
            }
        }
    }
}
/**
 * Per-cluster adapter sitting between a shared (singleton) transport and the
 * protocols of one cluster: stamps outgoing messages with this cluster's
 * transport header, tracks this cluster's membership, and keeps its thread
 * factory's cluster name / address up to date.
 */
public static class ProtocolAdapter extends Protocol {
    final String cluster_name;        // cluster this adapter proxies for
    final String transport_name;      // name of the shared transport below
    final TpHeader header;            // header stamped on every outgoing message
    final List<Address> members=new ArrayList<Address>(); // current members of this cluster's view
    final ThreadFactory factory;      // thread factory dedicated to this cluster
    public ProtocolAdapter(String cluster_name, String transport_name, Protocol up, Protocol down, String pattern, Address addr) {
        this.cluster_name=cluster_name;
        this.transport_name=transport_name;
        this.up_prot=up;
        this.down_prot=down;
        this.header=new TpHeader(cluster_name);
        this.factory=new DefaultThreadFactory(Util.getGlobalThreadGroup(), "", false);
        factory.setPattern(pattern);
        if(addr != null)
            factory.setAddress(addr.toString());
    }
    @ManagedAttribute(description="Name of the cluster to which this adapter proxies")
    public String getCluster_name() {
        return cluster_name;
    }
    @ManagedAttribute(description="Name of the transport")
    public String getTransport_name() {
        return transport_name;
    }
    /** Returns an unmodifiable view of this cluster's current members. */
    public List<Address> getMembers() {
        return Collections.unmodifiableList(members);
    }
    public ThreadFactory getThreadFactory() {
        return factory;
    }
    public Object down(Event evt) {
        switch(evt.getType()) {
        case Event.MSG:
            // tag the message with this cluster's header so the shared transport can route replies
            Message msg=(Message)evt.getArg();
            msg.putHeader(transport_name, header);
            break;
        case Event.VIEW_CHANGE:
            View view=(View)evt.getArg();
            Vector<Address> tmp=view.getMembers();
            members.clear();
            members.addAll(tmp);
            break;
        case Event.CONNECT:
        case Event.CONNECT_WITH_STATE_TRANSFER:
            factory.setClusterName((String)evt.getArg());
            break;
        }
        return down_prot.down(evt);
    }
    public Object up(Event evt) {
        switch(evt.getType()) {
        case Event.SET_LOCAL_ADDRESS:
            Address addr=(Address)evt.getArg();
            if(addr != null)
                factory.setAddress(addr.toString());
            break;
        }
        return up_prot.up(evt);
    }
    public String getName() {
        return "TP.ProtocolAdapter";
    }
    public String toString() {
        return cluster_name + " (" + transport_name + ")";
    }
}
}
| - deprecated send_on_all_interfaces and send_interfaces: bad option anyway which generates a lot of traffic. Use IP bonding or something similar if this is needed
| src/org/jgroups/protocols/TP.java | - deprecated send_on_all_interfaces and send_interfaces: bad option anyway which generates a lot of traffic. Use IP bonding or something similar if this is needed |
|
Java | apache-2.0 | 0329dc8bdd4a6002479242ee2ead2b30099a84d4 | 0 | soulwarelabs/jParley-Core | /*
* Project: jParley-Core
* Outline: jParley framework core components
*
* File: StandardSubroutine.java
* Folder: /.../com/soulwarelabs/jparley/core
* Revision: 1.06, 16 April 2014
* Created: 10 March 2014
* Author: Ilya Gubarev
*
* Copyright (c) 2014 Soulware Labs, Ltd.
* Contact information is available at http://www.soulwarelabs.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.soulwarelabs.jparley.core;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.SQLException;
import com.soulwarelabs.jcommons.Box;
import com.soulwarelabs.jparley.Converter;
import com.soulwarelabs.jparley.Subroutine;
import com.soulwarelabs.jparley.utility.Manager;
import com.soulwarelabs.jparley.utility.Parameter;
import com.soulwarelabs.jparley.utility.Statement;
/**
* Standard SQL stored subroutine.
*
* @see Subroutine
*
* @since v1.0
*
* @author Ilya Gubarev
* @version 16 April 2014
*/
public abstract class StandardSubroutine implements Serializable, Subroutine {

    private String name;
    private Manager manager;
    private Interceptor postInterceptor;
    private Interceptor preInterceptor;

    /**
     * Creates a new SQL subroutine with no workflow interceptors.
     *
     * @param name subroutine name.
     *
     * @since v1.0
     */
    public StandardSubroutine(String name) {
        this(name, null, null);
    }

    /**
     * Creates a new SQL subroutine.
     *
     * @param name subroutine name.
     * @param preInterceptor pre-execution interceptor (may be null).
     * @param postInterceptor post-execution interceptor (may be null).
     *
     * @since v1.0
     */
    public StandardSubroutine(String name, Interceptor preInterceptor,
            Interceptor postInterceptor) {
        this.name = name;
        this.manager = new Manager();
        this.postInterceptor = postInterceptor;
        this.preInterceptor = preInterceptor;
    }

    /**
     * Gets the subroutine name.
     *
     * @return subroutine name.
     *
     * @since v1.0
     */
    public String getName() {
        return name;
    }

    /**
     * Sets a new subroutine name.
     *
     * @param name subroutine name.
     *
     * @since v1.0
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Gets SQL workflow post-execution interceptor.
     *
     * @return SQL workflow interceptor.
     *
     * @see Interceptor
     *
     * @since v1.0
     */
    public Interceptor getPostInterceptor() {
        return postInterceptor;
    }

    /**
     * Sets a new SQL workflow post-execution interceptor.
     *
     * @param postInterceptor SQL workflow interceptor.
     *
     * @see Interceptor
     *
     * @since v1.0
     */
    public void setPostInterceptor(Interceptor postInterceptor) {
        this.postInterceptor = postInterceptor;
    }

    /**
     * Gets SQL workflow pre-execution interceptor.
     *
     * @return SQL workflow interceptor.
     *
     * @see Interceptor
     *
     * @since v1.0
     */
    public Interceptor getPreInterceptor() {
        return preInterceptor;
    }

    /**
     * Sets a new SQL workflow pre-execution interceptor.
     *
     * @param preInterceptor SQL workflow interceptor.
     *
     * @see Interceptor
     *
     * @since v1.0
     */
    public void setPreInterceptor(Interceptor preInterceptor) {
        this.preInterceptor = preInterceptor;
    }

    @Override
    public void execute(Connection connection) throws SQLException {
        before(connection);
        String sql = createSql(getName(), manager.getTotal());
        Statement statement = new Statement(connection.prepareCall(sql));
        manager.setupAll(connection, statement);
        statement.execute();
        manager.parseAll(connection, statement);
        after(connection);
    }

    @Override
    public void in(int index, Box<?> value) {
        input(index, value, null, null);
    }

    @Override
    public void in(int index, Object value) {
        input(index, new Box<Object>(value), null, null);
    }

    @Override
    public void in(String name, Box<?> value) {
        input(name, value, null, null);
    }

    @Override
    public void in(String name, Object value) {
        input(name, new Box<Object>(value), null, null);
    }

    @Override
    public void in(int index, Box<?> value, Integer type) {
        input(index, value, type, null);
    }

    @Override
    public void in(int index, Object value, Integer type) {
        input(index, new Box<Object>(value), type, null);
    }

    @Override
    public void in(String name, Box<?> value, Integer type) {
        input(name, value, type, null);
    }

    @Override
    public void in(String name, Object value, Integer type) {
        input(name, new Box<Object>(value), type, null);
    }

    @Override
    public void in(int index, Box<?> value, Converter encoder) {
        input(index, value, null, encoder);
    }

    @Override
    public void in(int index, Object value, Converter encoder) {
        input(index, new Box<Object>(value), null, encoder);
    }

    @Override
    public void in(String name, Box<?> value, Converter encoder) {
        input(name, value, null, encoder);
    }

    @Override
    public void in(String name, Object value, Converter encoder) {
        input(name, new Box<Object>(value), null, encoder);
    }

    /**
     * Prints the subroutine and its registered parameters as text.
     *
     * @return text view of the subroutine.
     *
     * @since v1.0
     */
    public String print() {
        // BUGFIX: a null Interviewer used to be dereferenced inside
        // interview(), so print()/toString() always threw a
        // NullPointerException. interview() now renders a default textual
        // form when no interviewer is supplied.
        return String.format("%s {%s}", getName(), interview(null));
    }

    @Override
    public void reset() {
        manager.removeAll();
    }

    @Override
    public String toString(){
        return print();
    }

    // Runs the post-execution interceptor, if one is configured.
    protected void after(Connection connection) throws SQLException {
        intercept(connection, getPostInterceptor());
    }

    // Runs the pre-execution interceptor, if one is configured.
    protected void before(Connection connection) throws SQLException {
        intercept(connection, getPreInterceptor());
    }

    /**
     * Creates SQL statement text for this subroutine.
     *
     * @param name subroutine name.
     * @param parametersNumber total number of registered parameters.
     * @return SQL statement text.
     *
     * @since v1.0
     */
    protected abstract String createSql(String name, int parametersNumber);

    protected void input(int index, Box<?> value, Integer type,
            Converter encoder) {
        manager.in(index, value, type, encoder);
    }

    protected void input(String name, Box<?> value, Integer type,
            Converter encoder) {
        manager.in(name, value, type, encoder);
    }

    /**
     * Interviews all registered parameters.
     *
     * @param interviewer parameter visitor, or null to request a default
     *        "key = value" rendering of the parameters.
     * @return the interviewer's text result, or the default rendering when
     *         the interviewer is null.
     *
     * @since v1.0
     */
    protected String interview(Interviewer interviewer) {
        StringBuilder fallback = (interviewer == null) ? new StringBuilder() : null;
        for (Object key : manager.getKeys()) {
            Parameter parameter = manager.getParameter(key);
            Box<?> input = parameter.getInput();
            Box<Object> output = parameter.getOutput();
            String struct = parameter.getStruct();
            Integer type = parameter.getType();
            if (interviewer == null) {
                if (fallback.length() > 0) {
                    fallback.append(", ");
                }
                // Show the input value if present, otherwise the output box.
                fallback.append(key).append(" = ");
                fallback.append(input != null ? input : output);
            } else if (key instanceof Integer) {
                interviewer.perform((Integer) key, input, output, type, struct);
            } else {
                interviewer.perform((String) key, input, output, type, struct);
            }
        }
        return (interviewer == null) ? fallback.toString() : interviewer.toString();
    }

    protected Box<Object> output(int index, int type, String struct,
            Converter decoder) {
        return manager.out(index, type, struct, decoder);
    }

    protected Box<Object> output(String name, int type, String struct,
            Converter decoder) {
        return manager.out(name, type, struct, decoder);
    }

    protected void remove(int index) {
        manager.remove(index);
    }

    protected void remove(String name) {
        manager.remove(name);
    }

    private void intercept(Connection connection, Interceptor interceptor)
            throws SQLException {
        if (interceptor != null) {
            interceptor.perform(connection);
        }
    }
}
| src/main/java/com/soulwarelabs/jparley/core/StandardSubroutine.java | /*
* Project: jParley-Core
* Outline: jParley framework core components
*
* File: StandardSubroutine.java
* Folder: /.../com/soulwarelabs/jparley/core
* Revision: 1.06, 16 April 2014
* Created: 10 March 2014
* Author: Ilya Gubarev
*
* Copyright (c) 2014 Soulware Labs, Ltd.
* Contact information is available at http://www.soulwarelabs.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.soulwarelabs.jparley.core;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.SQLException;
import com.soulwarelabs.jcommons.Box;
import com.soulwarelabs.jparley.Converter;
import com.soulwarelabs.jparley.Subroutine;
import com.soulwarelabs.jparley.utility.Manager;
import com.soulwarelabs.jparley.utility.Parameter;
import com.soulwarelabs.jparley.utility.Statement;
/**
* Standard SQL stored subroutine.
*
* @see Subroutine
*
* @since v1.0
*
* @author Ilya Gubarev
* @version 16 April 2014
*/
public abstract class StandardSubroutine implements Serializable, Subroutine {

    private String name;
    private Manager manager;
    // NOTE: the private "interviewer" field was removed; it was always null
    // and its only use (in print()) caused an unconditional
    // NullPointerException.
    private Interceptor postInterceptor;
    private Interceptor preInterceptor;

    /**
     * Creates a new SQL subroutine with no workflow interceptors.
     *
     * @param name subroutine name.
     *
     * @since v1.0
     */
    public StandardSubroutine(String name) {
        this(name, null, null);
    }

    /**
     * Creates a new SQL subroutine.
     *
     * @param name subroutine name.
     * @param preInterceptor pre-execution interceptor (may be null).
     * @param postInterceptor post-execution interceptor (may be null).
     *
     * @since v1.0
     */
    public StandardSubroutine(String name, Interceptor preInterceptor,
            Interceptor postInterceptor) {
        this.name = name;
        this.manager = new Manager();
        this.postInterceptor = postInterceptor;
        this.preInterceptor = preInterceptor;
    }

    /**
     * Gets the subroutine name.
     *
     * @return subroutine name.
     *
     * @since v1.0
     */
    public String getName() {
        return name;
    }

    /**
     * Sets a new subroutine name.
     *
     * @param name subroutine name.
     *
     * @since v1.0
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Gets SQL workflow post-execution interceptor.
     *
     * @return SQL workflow interceptor.
     *
     * @see Interceptor
     *
     * @since v1.0
     */
    public Interceptor getPostInterceptor() {
        return postInterceptor;
    }

    /**
     * Sets a new SQL workflow post-execution interceptor.
     *
     * @param postInterceptor SQL workflow interceptor.
     *
     * @see Interceptor
     *
     * @since v1.0
     */
    public void setPostInterceptor(Interceptor postInterceptor) {
        this.postInterceptor = postInterceptor;
    }

    /**
     * Gets SQL workflow pre-execution interceptor.
     *
     * @return SQL workflow interceptor.
     *
     * @see Interceptor
     *
     * @since v1.0
     */
    public Interceptor getPreInterceptor() {
        return preInterceptor;
    }

    /**
     * Sets a new SQL workflow pre-execution interceptor.
     *
     * @param preInterceptor SQL workflow interceptor.
     *
     * @see Interceptor
     *
     * @since v1.0
     */
    public void setPreInterceptor(Interceptor preInterceptor) {
        this.preInterceptor = preInterceptor;
    }

    @Override
    public void execute(Connection connection) throws SQLException {
        before(connection);
        String sql = createSql(getName(), manager.getTotal());
        Statement statement = new Statement(connection.prepareCall(sql));
        manager.setupAll(connection, statement);
        statement.execute();
        manager.parseAll(connection, statement);
        after(connection);
    }

    @Override
    public void in(int index, Box<?> value) {
        input(index, value, null, null);
    }

    @Override
    public void in(int index, Object value) {
        input(index, new Box<Object>(value), null, null);
    }

    @Override
    public void in(String name, Box<?> value) {
        input(name, value, null, null);
    }

    @Override
    public void in(String name, Object value) {
        input(name, new Box<Object>(value), null, null);
    }

    @Override
    public void in(int index, Box<?> value, Integer type) {
        input(index, value, type, null);
    }

    @Override
    public void in(int index, Object value, Integer type) {
        input(index, new Box<Object>(value), type, null);
    }

    @Override
    public void in(String name, Box<?> value, Integer type) {
        input(name, value, type, null);
    }

    @Override
    public void in(String name, Object value, Integer type) {
        input(name, new Box<Object>(value), type, null);
    }

    @Override
    public void in(int index, Box<?> value, Converter encoder) {
        input(index, value, null, encoder);
    }

    @Override
    public void in(int index, Object value, Converter encoder) {
        input(index, new Box<Object>(value), null, encoder);
    }

    @Override
    public void in(String name, Box<?> value, Converter encoder) {
        input(name, value, null, encoder);
    }

    @Override
    public void in(String name, Object value, Converter encoder) {
        input(name, new Box<Object>(value), null, encoder);
    }

    /**
     * Prints the subroutine and its registered parameters as text.
     *
     * @return text view of the subroutine.
     *
     * @since v1.0
     */
    public String print() {
        // BUGFIX: this used to pass the always-null "interviewer" field into
        // interview(), which dereferenced it and threw a
        // NullPointerException. interview() now renders a default textual
        // form when no interviewer is supplied.
        return String.format("%s {%s}", getName(), interview(null));
    }

    @Override
    public void reset() {
        manager.removeAll();
    }

    @Override
    public String toString(){
        return print();
    }

    // Runs the post-execution interceptor, if one is configured.
    protected void after(Connection connection) throws SQLException {
        intercept(connection, getPostInterceptor());
    }

    // Runs the pre-execution interceptor, if one is configured.
    protected void before(Connection connection) throws SQLException {
        intercept(connection, getPreInterceptor());
    }

    /**
     * Creates SQL statement text for this subroutine.
     *
     * @param name subroutine name.
     * @param parametersNumber total number of registered parameters.
     * @return SQL statement text.
     *
     * @since v1.0
     */
    protected abstract String createSql(String name, int parametersNumber);

    protected void input(int index, Box<?> value, Integer type,
            Converter encoder) {
        manager.in(index, value, type, encoder);
    }

    protected void input(String name, Box<?> value, Integer type,
            Converter encoder) {
        manager.in(name, value, type, encoder);
    }

    /**
     * Interviews all registered parameters.
     *
     * @param interviewer parameter visitor, or null to request a default
     *        "key = value" rendering of the parameters.
     * @return the interviewer's text result, or the default rendering when
     *         the interviewer is null.
     *
     * @since v1.0
     */
    protected String interview(Interviewer interviewer) {
        StringBuilder fallback = (interviewer == null) ? new StringBuilder() : null;
        for (Object key : manager.getKeys()) {
            Parameter parameter = manager.getParameter(key);
            Box<?> input = parameter.getInput();
            Box<Object> output = parameter.getOutput();
            String struct = parameter.getStruct();
            Integer type = parameter.getType();
            if (interviewer == null) {
                if (fallback.length() > 0) {
                    fallback.append(", ");
                }
                // Show the input value if present, otherwise the output box.
                fallback.append(key).append(" = ");
                fallback.append(input != null ? input : output);
            } else if (key instanceof Integer) {
                interviewer.perform((Integer) key, input, output, type, struct);
            } else {
                interviewer.perform((String) key, input, output, type, struct);
            }
        }
        return (interviewer == null) ? fallback.toString() : interviewer.toString();
    }

    protected Box<Object> output(int index, int type, String struct,
            Converter decoder) {
        return manager.out(index, type, struct, decoder);
    }

    protected Box<Object> output(String name, int type, String struct,
            Converter decoder) {
        return manager.out(name, type, struct, decoder);
    }

    protected void remove(int index) {
        manager.remove(index);
    }

    protected void remove(String name) {
        manager.remove(name);
    }

    private void intercept(Connection connection, Interceptor interceptor)
            throws SQLException {
        if (interceptor != null) {
            interceptor.perform(connection);
        }
    }
}
| StandardSubroutine .print() implementation updated
| src/main/java/com/soulwarelabs/jparley/core/StandardSubroutine.java | StandardSubroutine .print() implementation updated |
|
Java | apache-2.0 | 6f15ab2bc33ffb26a541254b27a3d5252db48a59 | 0 | codehaus-plexus/plexus-utils,codehaus-plexus/plexus-utils,codehaus-plexus/plexus-utils | package org.codehaus.plexus.util.cli;
/*
* Copyright The Codehaus Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/***************************************************************************************************
* CruiseControl, a Continuous Integration Toolkit Copyright (c) 2001-2003, ThoughtWorks, Inc. 651 W
* Washington Ave. Suite 500 Chicago, IL 60661 USA All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met: + Redistributions of source code must retain the
* above copyright notice, this list of conditions and the following disclaimer. + Redistributions
* in binary form must reproduce the above copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other materials provided with the distribution. +
* Neither the name of ThoughtWorks, Inc., CruiseControl, nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**************************************************************************************************/
/*
* ====================================================================
* Copyright 2003-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
import org.codehaus.plexus.util.Os;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.cli.shell.BourneShell;
import org.codehaus.plexus.util.cli.shell.CmdShell;
import org.codehaus.plexus.util.cli.shell.CommandShell;
import org.codehaus.plexus.util.cli.shell.Shell;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Vector;
/**
* <p>Commandline objects help handling command lines specifying processes to execute.</p>
*
* <p>The class can be used to define a command line as nested elements or as a helper to define a command line by an
* application.</p>
*
* <code>
* <someelement><br>
* <acommandline executable="/executable/to/run"><br>
* <argument value="argument 1" /><br>
* <argument line="argument_1 argument_2 argument_3" /><br>
* <argument value="argument 4" /><br>
* </acommandline><br>
* </someelement><br>
* </code>
*
* <p>The element <code>someelement</code> must provide a method <code>createAcommandline</code> which returns an instance
* of this class.</p>
*
* @author [email protected]
* @author <a href="mailto:[email protected]">Stefan Bodewig</a>
*/
public class Commandline
    implements Cloneable
{
    /**
     * @deprecated Use {@link org.codehaus.plexus.util.Os} class instead.
     */
    protected static final String OS_NAME = "os.name";

    /**
     * @deprecated Use {@link org.codehaus.plexus.util.Os} class instead.
     */
    protected static final String WINDOWS = "Windows";

    protected Vector<Arg> arguments = new Vector<Arg>();

    // protected Vector envVars = new Vector();
    // synchronized added to preserve synchronize of Vector class
    protected Map<String, String> envVars = Collections.synchronizedMap( new LinkedHashMap<String, String>() );

    // Lazily-initialized pseudo process id; see getPid().
    private long pid = -1;

    private Shell shell;

    /**
     * @deprecated Use {@link Commandline#setExecutable(String)} instead.
     */
    protected String executable;

    /**
     * @deprecated Use {@link Commandline#setWorkingDirectory(File)} or {@link Commandline#setWorkingDirectory(String)}
     *             instead.
     */
    private File workingDir;

    /**
     * Create a new command line object. Shell is autodetected from operating system Shell usage is only desirable when
     * generating code for remote execution.
     *
     * @param toProcess command line to parse (POSIX sh quoting rules)
     */
    public Commandline( String toProcess, Shell shell )
    {
        this.shell = shell;

        String[] tmp = new String[0];
        try
        {
            tmp = CommandLineUtils.translateCommandline( toProcess );
        }
        catch ( Exception e )
        {
            System.err.println( "Error translating Commandline." );
        }

        if ( ( tmp != null ) && ( tmp.length > 0 ) )
        {
            setExecutable( tmp[0] );

            for ( int i = 1; i < tmp.length; i++ )
            {
                createArgument().setValue( tmp[i] );
            }
        }
    }

    /**
     * Create a new command line object. Shell is autodetected from operating system Shell usage is only desirable when
     * generating code for remote execution.
     */
    public Commandline( Shell shell )
    {
        this.shell = shell;
    }

    /**
     * Create a new command line object, given a command following POSIX sh quoting rules
     *
     * @param toProcess command line to parse
     */
    public Commandline( String toProcess )
    {
        setDefaultShell();

        String[] tmp = new String[0];
        try
        {
            tmp = CommandLineUtils.translateCommandline( toProcess );
        }
        catch ( Exception e )
        {
            System.err.println( "Error translating Commandline." );
        }

        if ( ( tmp != null ) && ( tmp.length > 0 ) )
        {
            setExecutable( tmp[0] );

            for ( int i = 1; i < tmp.length; i++ )
            {
                createArgument().setValue( tmp[i] );
            }
        }
    }

    /**
     * Create a new command line object.
     */
    public Commandline()
    {
        setDefaultShell();
    }

    /**
     * Returns the pseudo process id of this command line, defaulting to the current time in milliseconds when none has
     * been assigned.
     */
    public long getPid()
    {
        if ( pid == -1 )
        {
            // Direct assignment; the former Long.parseLong( String.valueOf( ... ) )
            // round-trip was a no-op on a long value.
            pid = System.currentTimeMillis();
        }

        return pid;
    }

    /**
     * Sets the pseudo process id of this command line.
     */
    public void setPid( long pid )
    {
        this.pid = pid;
    }

    /**
     * Class to keep track of the position of an Argument.
     */
    // <p>This class is there to support the srcfile and targetfile
    // elements of &lt;execon&gt; and &lt;transform&gt; - don't know
    // whether there might be additional use cases.</p> --SB
    public class Marker
    {
        private int position;

        private int realPos = -1;

        Marker( int position )
        {
            this.position = position;
        }

        /**
         * <p>Return the number of arguments that preceded this marker.</p>
         *
         * <p>The name of the executable - if set - is counted as the very first argument.</p>
         */
        public int getPosition()
        {
            if ( realPos == -1 )
            {
                realPos = ( getLiteralExecutable() == null ? 0 : 1 );

                // An Arg may expand into several parts; count them all.
                for ( int i = 0; i < position; i++ )
                {
                    Arg arg = (Arg) arguments.elementAt( i );

                    realPos += arg.getParts().length;
                }
            }

            return realPos;
        }
    }

    /**
     * <p>
     * Sets the shell or command-line interpreter for the detected operating system, and the shell arguments.
     * </p>
     */
    private void setDefaultShell()
    {
        // If this is windows set the shell to command.com or cmd.exe with correct arguments.
        if ( Os.isFamily( Os.FAMILY_WINDOWS ) )
        {
            if ( Os.isFamily( Os.FAMILY_WIN9X ) )
            {
                setShell( new CommandShell() );
            }
            else
            {
                setShell( new CmdShell() );
            }
        }
        else
        {
            setShell( new BourneShell() );
        }
    }

    /**
     * <p>Creates an argument object.</p>
     *
     * <p>Each commandline object has at most one instance of the argument class. This method calls
     * <code>this.createArgument(false)</code>.</p>
     *
     * @return the argument object.
     * @see #createArgument(boolean)
     * @deprecated Use {@link Commandline#createArg()} instead
     */
    public Argument createArgument()
    {
        return this.createArgument( false );
    }

    /**
     * <p>Creates an argument object and adds it to our list of args.</p>
     *
     * <p>Each commandline object has at most one instance of the argument class.</p>
     *
     * @param insertAtStart if true, the argument is inserted at the beginning of the list of args, otherwise it is
     *            appended.
     * @deprecated Use {@link Commandline#createArg(boolean)} instead
     */
    public Argument createArgument( boolean insertAtStart )
    {
        Argument argument = new Argument();

        if ( insertAtStart )
        {
            arguments.insertElementAt( argument, 0 );
        }
        else
        {
            arguments.addElement( argument );
        }

        return argument;
    }

    /**
     * <p>Creates an argument object.</p>
     *
     * <p>Each commandline object has at most one instance of the argument class. This method calls
     * <code>this.createArgument(false)</code>.</p>
     *
     * @return the argument object.
     * @see #createArgument(boolean)
     */
    public Arg createArg()
    {
        return this.createArg( false );
    }

    /**
     * <p>Creates an argument object and adds it to our list of args.</p>
     *
     * <p>Each commandline object has at most one instance of the argument class.</p>
     *
     * @param insertAtStart if true, the argument is inserted at the beginning of the list of args, otherwise it is
     *            appended.
     */
    public Arg createArg( boolean insertAtStart )
    {
        Arg argument = new Argument();

        if ( insertAtStart )
        {
            arguments.insertElementAt( argument, 0 );
        }
        else
        {
            arguments.addElement( argument );
        }

        return argument;
    }

    /**
     * Adds an argument object to our list of args.
     *
     * @see #addArg(Arg,boolean)
     */
    public void addArg( Arg argument )
    {
        this.addArg( argument, false );
    }

    /**
     * Adds an argument object to our list of args.
     *
     * @param insertAtStart if true, the argument is inserted at the beginning of the list of args, otherwise it is
     *            appended.
     */
    public void addArg( Arg argument, boolean insertAtStart )
    {
        if ( insertAtStart )
        {
            arguments.insertElementAt( argument, 0 );
        }
        else
        {
            arguments.addElement( argument );
        }
    }

    /**
     * Sets the executable to run.
     */
    public void setExecutable( String executable )
    {
        // Mirror into the shell and into the deprecated compat field.
        shell.setExecutable( executable );

        this.executable = executable;
    }

    /**
     * @return Executable to be run, as a literal string (no shell quoting/munging)
     */
    public String getLiteralExecutable()
    {
        return executable;
    }

    /**
     * Return an executable name, quoted for shell use. Shell usage is only desirable when generating code for remote
     * execution.
     *
     * @return Executable to be run, quoted for shell interpretation
     */
    public String getExecutable()
    {
        String exec = shell.getExecutable();

        if ( exec == null )
        {
            exec = executable;
        }

        return exec;
    }

    public void addArguments( String[] line )
    {
        for ( String aLine : line )
        {
            createArgument().setValue( aLine );
        }
    }

    /**
     * Add an environment variable
     */
    public void addEnvironment( String name, String value )
    {
        // envVars.add( name + "=" + value );
        envVars.put( name, value );
    }

    /**
     * Add system environment variables
     */
    public void addSystemEnvironment()
        throws Exception
    {
        Properties systemEnvVars = CommandLineUtils.getSystemEnvVars();

        for ( Object o : systemEnvVars.keySet() )
        {
            String key = (String) o;
            // Explicitly-set variables win over inherited ones.
            if ( !envVars.containsKey( key ) )
            {
                addEnvironment( key, systemEnvVars.getProperty( key ) );
            }
        }
    }

    /**
     * Return the list of environment variables
     */
    public String[] getEnvironmentVariables()
        throws CommandLineException
    {
        try
        {
            addSystemEnvironment();
        }
        catch ( Exception e )
        {
            throw new CommandLineException( "Error setting up environmental variables", e );
        }

        String[] environmentVars = new String[envVars.size()];
        int i = 0;
        for ( Object o : envVars.keySet() )
        {
            String name = (String) o;
            String value = envVars.get( name );
            environmentVars[i] = name + "=" + value;
            i++;
        }
        return environmentVars;
    }

    /**
     * Returns the executable and all defined arguments.<br>
     * For Windows Family, {@link Commandline#getShellCommandline()} is returned
     */
    public String[] getCommandline()
    {
        if ( Os.isFamily( Os.FAMILY_WINDOWS ) )
        {
            return getShellCommandline();
        }

        return getRawCommandline();
    }

    /**
     * Returns the executable and all defined arguments.<br>
     *
     */
    public String[] getRawCommandline()
    {
        final String[] args = getArguments();
        String executable = getLiteralExecutable();

        if ( executable == null )
        {
            return args;
        }
        final String[] result = new String[args.length + 1];
        result[0] = executable;
        System.arraycopy( args, 0, result, 1, args.length );
        return result;
    }

    /**
     * Returns the shell, executable and all defined arguments. Shell usage is only desirable when generating code for
     * remote execution.
     */
    public String[] getShellCommandline()
    {
        // TODO: Provided only for backward compat. with <= 1.4
        verifyShellState();

        return (String[]) getShell().getShellCommandLine( getArguments() ).toArray( new String[0] );
    }

    /**
     * Returns all arguments defined by <code>addLine</code>, <code>addValue</code> or the argument object.
     */
    public String[] getArguments()
    {
        Vector<String> result = new Vector<String>( arguments.size() * 2 );
        for ( int i = 0; i < arguments.size(); i++ )
        {
            Arg arg = arguments.elementAt( i );
            String[] s = arg.getParts();
            if ( s != null )
            {
                for ( String value : s )
                {
                    result.addElement( value );
                }
            }
        }

        String[] res = new String[result.size()];
        result.copyInto( res );
        return res;
    }

    public String toString()
    {
        return StringUtils.join( getShellCommandline(), " " );
    }

    /** @return the total number of elements in the command line (executable plus arguments). */
    public int size()
    {
        return getCommandline().length;
    }

    public Object clone()
    {
        // NOTE(review): only shell, executable, working directory and
        // arguments are copied -- envVars and pid are not; confirm this is
        // the intended contract before relying on clones for execution.
        Commandline c = new Commandline( (Shell) shell.clone() );
        c.executable = executable;
        c.workingDir = workingDir;
        c.addArguments( getArguments() );
        return c;
    }

    /**
     * Clear out the whole command line.
     */
    public void clear()
    {
        executable = null;
        workingDir = null;
        shell.setExecutable( null );
        shell.clearArguments();
        arguments.removeAllElements();
    }

    /**
     * Clear out the arguments but leave the executable in place for another operation.
     */
    public void clearArgs()
    {
        arguments.removeAllElements();
    }

    /**
     * <p>Return a marker.</p>
     *
     * <p>This marker can be used to locate a position on the commandline - to insert something for example - when all
     * parameters have been set.
     * </p>
     */
    public Marker createMarker()
    {
        return new Marker( arguments.size() );
    }

    /**
     * Sets execution directory.
     */
    public void setWorkingDirectory( String path )
    {
        shell.setWorkingDirectory( path );

        workingDir = new File( path );
    }

    /**
     * Sets execution directory.
     */
    public void setWorkingDirectory( File workingDirectory )
    {
        shell.setWorkingDirectory( workingDirectory );

        workingDir = workingDirectory;
    }

    public File getWorkingDirectory()
    {
        File workDir = shell.getWorkingDirectory();

        if ( workDir == null )
        {
            workDir = workingDir;
        }

        return workDir;
    }

    /**
     * Executes the command.
     */
    public Process execute()
        throws CommandLineException
    {
        // TODO: Provided only for backward compat. with <= 1.4
        verifyShellState();

        Process process;

        // addEnvironment( "MAVEN_TEST_ENVAR", "MAVEN_TEST_ENVAR_VALUE" );

        String[] environment = getEnvironmentVariables();

        File workingDir = shell.getWorkingDirectory();

        try
        {
            if ( workingDir == null )
            {
                process = Runtime.getRuntime().exec( getCommandline(), environment, workingDir );
            }
            else
            {
                if ( !workingDir.exists() )
                {
                    throw new CommandLineException( "Working directory \"" + workingDir.getPath()
                        + "\" does not exist!" );
                }
                else if ( !workingDir.isDirectory() )
                {
                    throw new CommandLineException( "Path \"" + workingDir.getPath()
                        + "\" does not specify a directory." );
                }

                process = Runtime.getRuntime().exec( getCommandline(), environment, workingDir );
            }
        }
        catch ( IOException ex )
        {
            throw new CommandLineException( "Error while executing process.", ex );
        }

        return process;
    }

    /**
     * @deprecated Remove once backward compat with plexus-utils <= 1.4 is no longer a consideration
     */
    private void verifyShellState()
    {
        if ( shell.getWorkingDirectory() == null )
        {
            shell.setWorkingDirectory( workingDir );
        }

        if ( shell.getOriginalExecutable() == null )
        {
            shell.setExecutable( executable );
        }
    }

    public Properties getSystemEnvVars()
        throws Exception
    {
        return CommandLineUtils.getSystemEnvVars();
    }

    /**
     * Allows to set the shell to be used in this command line. Shell usage is only desirable when generating code for
     * remote execution.
     *
     * @param shell the shell implementation to use
     * @since 1.2
     */
    public void setShell( Shell shell )
    {
        this.shell = shell;
    }

    /**
     * Get the shell to be used in this command line. Shell usage is only desirable when generating code for remote
     * execution.
     *
     * @since 1.2
     */
    public Shell getShell()
    {
        return shell;
    }

    /**
     * @deprecated Use {@link CommandLineUtils#translateCommandline(String)} instead.
     */
    public static String[] translateCommandline( String toProcess )
        throws Exception
    {
        return CommandLineUtils.translateCommandline( toProcess );
    }

    /**
     * @deprecated Use {@link CommandLineUtils#quote(String)} instead.
     */
    public static String quoteArgument( String argument )
        throws CommandLineException
    {
        return CommandLineUtils.quote( argument );
    }

    /**
     * @deprecated Use {@link CommandLineUtils#toString(String[])} instead.
     */
    public static String toString( String[] line )
    {
        return CommandLineUtils.toString( line );
    }

    public static class Argument
        implements Arg
    {
        private String[] parts;

        /*
         * (non-Javadoc)
         * @see org.codehaus.plexus.util.cli.Argument#setValue(java.lang.String)
         */
        public void setValue( String value )
        {
            if ( value != null )
            {
                parts = new String[] { value };
            }
        }

        /*
         * (non-Javadoc)
         * @see org.codehaus.plexus.util.cli.Argument#setLine(java.lang.String)
         */
        public void setLine( String line )
        {
            if ( line == null )
            {
                return;
            }
            try
            {
                parts = CommandLineUtils.translateCommandline( line );
            }
            catch ( Exception e )
            {
                System.err.println( "Error translating Commandline." );
            }
        }

        /*
         * (non-Javadoc)
         * @see org.codehaus.plexus.util.cli.Argument#setFile(java.io.File)
         */
        public void setFile( File value )
        {
            parts = new String[] { value.getAbsolutePath() };
        }

        /*
         * (non-Javadoc)
         * @see org.codehaus.plexus.util.cli.Argument#getParts()
         */
        public String[] getParts()
        {
            return parts;
        }
    }
}
| src/main/java/org/codehaus/plexus/util/cli/Commandline.java | package org.codehaus.plexus.util.cli;
/*
* Copyright The Codehaus Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/***************************************************************************************************
* CruiseControl, a Continuous Integration Toolkit Copyright (c) 2001-2003, ThoughtWorks, Inc. 651 W
* Washington Ave. Suite 500 Chicago, IL 60661 USA All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met: + Redistributions of source code must retain the
* above copyright notice, this list of conditions and the following disclaimer. + Redistributions
* in binary form must reproduce the above copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other materials provided with the distribution. +
* Neither the name of ThoughtWorks, Inc., CruiseControl, nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**************************************************************************************************/
/*
* ====================================================================
* Copyright 2003-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
import org.codehaus.plexus.util.Os;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.cli.shell.BourneShell;
import org.codehaus.plexus.util.cli.shell.CmdShell;
import org.codehaus.plexus.util.cli.shell.CommandShell;
import org.codehaus.plexus.util.cli.shell.Shell;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Vector;
/**
* <p>Commandline objects help handling command lines specifying processes to execute.</p>
*
* <p>The class can be used to define a command line as nested elements or as a helper to define a command line by an
* application.</p>
*
* <code>
* <someelement><br>
* <acommandline executable="/executable/to/run"><br>
* <argument value="argument 1" /><br>
* <argument line="argument_1 argument_2 argument_3" /><br>
* <argument value="argument 4" /><br>
* </acommandline><br>
* </someelement><br>
* </code>
*
* <p>The element <code>someelement</code> must provide a method <code>createAcommandline</code> which returns an instance
* of this class.</p>
*
* @author [email protected]
* @author <a href="mailto:[email protected]">Stefan Bodewig</a>
*/
public class Commandline
    implements Cloneable
{
    /**
     * @deprecated Use {@link org.codehaus.plexus.util.Os} class instead.
     */
    protected static final String OS_NAME = "os.name";

    /**
     * @deprecated Use {@link org.codehaus.plexus.util.Os} class instead.
     */
    protected static final String WINDOWS = "Windows";

    /** Ordered list of arguments; each {@link Arg} may expand into several command-line parts. */
    protected Vector<Arg> arguments = new Vector<Arg>();

    // protected Vector envVars = new Vector();
    // synchronized added to preserve synchronize of Vector class
    protected Map<String, String> envVars = Collections.synchronizedMap( new LinkedHashMap<String, String>() );

    // Lazily initialized pseudo process id; -1 means "not assigned yet".
    private long pid = -1;

    private Shell shell;

    /**
     * @deprecated Use {@link Commandline#setExecutable(String)} instead.
     */
    protected String executable;

    /**
     * @deprecated Use {@link Commandline#setWorkingDirectory(File)} or {@link Commandline#setWorkingDirectory(String)}
     * instead.
     */
    private File workingDir;

    /**
     * Create a new command line object, parsing <code>toProcess</code> following POSIX sh quoting rules. Shell usage
     * is only desirable when generating code for remote execution.
     *
     * @param toProcess command line to parse; the first token becomes the executable, the rest become arguments
     * @param shell shell to use for quoting and launching
     */
    public Commandline( String toProcess, Shell shell )
    {
        this.shell = shell;
        parseAndAddCommandLine( toProcess );
    }

    /**
     * Create a new, empty command line object with the given shell. Shell usage is only desirable when generating
     * code for remote execution.
     */
    public Commandline( Shell shell )
    {
        this.shell = shell;
    }

    /**
     * Create a new command line object, given a command following POSIX sh quoting rules. The shell is autodetected
     * from the operating system.
     *
     * @param toProcess command line to parse; the first token becomes the executable, the rest become arguments
     */
    public Commandline( String toProcess )
    {
        setDefaultShell();
        parseAndAddCommandLine( toProcess );
    }

    /**
     * Create a new, empty command line object. The shell is autodetected from the operating system.
     */
    public Commandline()
    {
        setDefaultShell();
    }

    /**
     * Parses <code>toProcess</code> and installs the first token as the executable and the remaining tokens as
     * arguments. A translation failure is reported on stderr and leaves this command line unchanged, mirroring the
     * historical behavior of the two parsing constructors this helper was extracted from.
     */
    private void parseAndAddCommandLine( String toProcess )
    {
        String[] tmp = new String[0];
        try
        {
            tmp = CommandLineUtils.translateCommandline( toProcess );
        }
        catch ( Exception e )
        {
            System.err.println( "Error translating Commandline." );
        }
        if ( ( tmp != null ) && ( tmp.length > 0 ) )
        {
            setExecutable( tmp[0] );
            for ( int i = 1; i < tmp.length; i++ )
            {
                createArgument().setValue( tmp[i] );
            }
        }
    }

    /**
     * Returns a pseudo process id for this command line, assigning one from the current clock on first use.
     */
    public long getPid()
    {
        if ( pid == -1 )
        {
            // Use the clock value directly; the old code round-tripped it through
            // String.valueOf/Long.parseLong for no effect.
            pid = System.currentTimeMillis();
        }
        return pid;
    }

    public void setPid( long pid )
    {
        this.pid = pid;
    }

    /**
     * Class to keep track of the position of an Argument.
     */
    // <p>This class is there to support the srcfile and targetfile
    // elements of <execon> and <transform> - don't know
    // whether there might be additional use cases.</p> --SB
    public class Marker
    {
        private int position;

        // Cached expanded position; computed lazily because argument parts may change until queried.
        private int realPos = -1;

        Marker( int position )
        {
            this.position = position;
        }

        /**
         * <p>Return the number of arguments that preceded this marker.</p>
         *
         * <p>The name of the executable - if set - is counted as the very first argument.</p>
         */
        public int getPosition()
        {
            if ( realPos == -1 )
            {
                realPos = ( getLiteralExecutable() == null ? 0 : 1 );
                for ( int i = 0; i < position; i++ )
                {
                    Arg arg = (Arg) arguments.elementAt( i );
                    realPos += arg.getParts().length;
                }
            }
            return realPos;
        }
    }

    /**
     * <p>
     * Sets the shell or command-line interpreter for the detected operating system, and the shell arguments.
     * </p>
     */
    private void setDefaultShell()
    {
        // If this is windows set the shell to command.com or cmd.exe with correct arguments.
        if ( Os.isFamily( Os.FAMILY_WINDOWS ) )
        {
            if ( Os.isFamily( Os.FAMILY_WIN9X ) )
            {
                setShell( new CommandShell() );
            }
            else
            {
                setShell( new CmdShell() );
            }
        }
        else
        {
            setShell( new BourneShell() );
        }
    }

    /**
     * <p>Creates an argument object.</p>
     *
     * <p>Each commandline object has at most one instance of the argument class. This method calls
     * <code>this.createArgument(false)</code>.</p>
     *
     * @return the argument object.
     * @see #createArgument(boolean)
     * @deprecated Use {@link Commandline#createArg()} instead
     */
    public Argument createArgument()
    {
        return this.createArgument( false );
    }

    /**
     * <p>Creates an argument object and adds it to our list of args.</p>
     *
     * <p>Each commandline object has at most one instance of the argument class.</p>
     *
     * @param insertAtStart if true, the argument is inserted at the beginning of the list of args, otherwise it is
     *            appended.
     * @deprecated Use {@link Commandline#createArg(boolean)} instead
     */
    public Argument createArgument( boolean insertAtStart )
    {
        Argument argument = new Argument();
        if ( insertAtStart )
        {
            arguments.insertElementAt( argument, 0 );
        }
        else
        {
            arguments.addElement( argument );
        }
        return argument;
    }

    /**
     * <p>Creates an argument object.</p>
     *
     * <p>Each commandline object has at most one instance of the argument class. This method calls
     * <code>this.createArg(false)</code>.</p>
     *
     * @return the argument object.
     * @see #createArg(boolean)
     */
    public Arg createArg()
    {
        return this.createArg( false );
    }

    /**
     * <p>Creates an argument object and adds it to our list of args.</p>
     *
     * <p>Each commandline object has at most one instance of the argument class.</p>
     *
     * @param insertAtStart if true, the argument is inserted at the beginning of the list of args, otherwise it is
     *            appended.
     */
    public Arg createArg( boolean insertAtStart )
    {
        Arg argument = new Argument();
        if ( insertAtStart )
        {
            arguments.insertElementAt( argument, 0 );
        }
        else
        {
            arguments.addElement( argument );
        }
        return argument;
    }

    /**
     * Adds an argument object to our list of args.
     *
     * @see #addArg(Arg,boolean)
     */
    public void addArg( Arg argument )
    {
        this.addArg( argument, false );
    }

    /**
     * Adds an argument object to our list of args.
     *
     * @param insertAtStart if true, the argument is inserted at the beginning of the list of args, otherwise it is
     *            appended.
     */
    public void addArg( Arg argument, boolean insertAtStart )
    {
        if ( insertAtStart )
        {
            arguments.insertElementAt( argument, 0 );
        }
        else
        {
            arguments.addElement( argument );
        }
    }

    /**
     * Sets the executable to run. The value is mirrored into the shell and into the deprecated
     * {@link #executable} field for backward compatibility.
     */
    public void setExecutable( String executable )
    {
        shell.setExecutable( executable );
        this.executable = executable;
    }

    /**
     * @return Executable to be run, as a literal string (no shell quoting/munging)
     */
    public String getLiteralExecutable()
    {
        return executable;
    }

    /**
     * Return an executable name, quoted for shell use. Shell usage is only desirable when generating code for remote
     * execution.
     *
     * @return Executable to be run, quoted for shell interpretation
     */
    public String getExecutable()
    {
        String exec = shell.getExecutable();
        if ( exec == null )
        {
            exec = executable;
        }
        return exec;
    }

    /**
     * Appends each entry of <code>line</code> as a single argument value.
     */
    public void addArguments( String[] line )
    {
        for ( String aLine : line )
        {
            createArgument().setValue( aLine );
        }
    }

    /**
     * Add an environment variable
     */
    public void addEnvironment( String name, String value )
    {
        // envVars.add( name + "=" + value );
        envVars.put( name, value );
    }

    /**
     * Add system environment variables. Variables already set on this command line are not overwritten.
     */
    public void addSystemEnvironment()
        throws Exception
    {
        Properties systemEnvVars = CommandLineUtils.getSystemEnvVars();
        for ( Object o : systemEnvVars.keySet() )
        {
            String key = (String) o;
            if ( !envVars.containsKey( key ) )
            {
                addEnvironment( key, systemEnvVars.getProperty( key ) );
            }
        }
    }

    /**
     * Return the list of environment variables as <code>NAME=value</code> strings, with the system environment
     * merged in.
     */
    public String[] getEnvironmentVariables()
        throws CommandLineException
    {
        try
        {
            addSystemEnvironment();
        }
        catch ( Exception e )
        {
            throw new CommandLineException( "Error setting up environmental variables", e );
        }
        String[] environmentVars = new String[envVars.size()];
        int i = 0;
        // Iterate entries instead of keySet()+get() to avoid a second lookup per variable.
        for ( Map.Entry<String, String> entry : envVars.entrySet() )
        {
            environmentVars[i] = entry.getKey() + "=" + entry.getValue();
            i++;
        }
        return environmentVars;
    }

    /**
     * Returns the literal executable and all defined arguments, with no shell quoting or shell wrapper applied,
     * independent of the operating system. Useful when callers (e.g. unit tests) must not depend on the
     * platform-specific behavior of {@link #getCommandline()}.
     *
     * @return the raw argument vector; just the arguments when no executable is set
     */
    public String[] getRawCommandline()
    {
        final String[] args = getArguments();
        String executable = getLiteralExecutable();
        if ( executable == null )
        {
            return args;
        }
        final String[] result = new String[args.length + 1];
        result[0] = executable;
        System.arraycopy( args, 0, result, 1, args.length );
        return result;
    }

    /**
     * Returns the executable and all defined arguments.<br>
     * For Windows Family, {@link Commandline#getShellCommandline()} is returned
     */
    public String[] getCommandline()
    {
        if ( Os.isFamily( Os.FAMILY_WINDOWS ) )
        {
            return getShellCommandline();
        }
        return getRawCommandline();
    }

    /**
     * Returns the shell, executable and all defined arguments. Shell usage is only desirable when generating code for
     * remote execution.
     */
    public String[] getShellCommandline()
    {
        // TODO: Provided only for backward compat. with <= 1.4
        verifyShellState();
        return (String[]) getShell().getShellCommandLine( getArguments() ).toArray( new String[0] );
    }

    /**
     * Returns all arguments defined by <code>addLine</code>, <code>addValue</code> or the argument object.
     */
    public String[] getArguments()
    {
        Vector<String> result = new Vector<String>( arguments.size() * 2 );
        for ( int i = 0; i < arguments.size(); i++ )
        {
            Arg arg = arguments.elementAt( i );
            String[] s = arg.getParts();
            // An Arg whose value/line/file was never set yields null parts; skip it.
            if ( s != null )
            {
                for ( String value : s )
                {
                    result.addElement( value );
                }
            }
        }
        String[] res = new String[result.size()];
        result.copyInto( res );
        return res;
    }

    @Override
    public String toString()
    {
        return StringUtils.join( getShellCommandline(), " " );
    }

    /**
     * @return the number of entries in {@link #getCommandline()}
     */
    public int size()
    {
        return getCommandline().length;
    }

    @Override
    public Object clone()
    {
        Commandline c = new Commandline( (Shell) shell.clone() );
        c.executable = executable;
        c.workingDir = workingDir;
        c.addArguments( getArguments() );
        return c;
    }

    /**
     * Clear out the whole command line.
     */
    public void clear()
    {
        executable = null;
        workingDir = null;
        shell.setExecutable( null );
        shell.clearArguments();
        arguments.removeAllElements();
    }

    /**
     * Clear out the arguments but leave the executable in place for another operation.
     */
    public void clearArgs()
    {
        arguments.removeAllElements();
    }

    /**
     * <p>Return a marker.</p>
     *
     * <p>This marker can be used to locate a position on the commandline - to insert something for example - when all
     * parameters have been set.
     * </p>
     */
    public Marker createMarker()
    {
        return new Marker( arguments.size() );
    }

    /**
     * Sets execution directory.
     */
    public void setWorkingDirectory( String path )
    {
        shell.setWorkingDirectory( path );
        workingDir = new File( path );
    }

    /**
     * Sets execution directory.
     */
    public void setWorkingDirectory( File workingDirectory )
    {
        shell.setWorkingDirectory( workingDirectory );
        workingDir = workingDirectory;
    }

    public File getWorkingDirectory()
    {
        File workDir = shell.getWorkingDirectory();
        if ( workDir == null )
        {
            workDir = workingDir;
        }
        return workDir;
    }

    /**
     * Executes the command.
     *
     * @return the started {@link Process}
     * @throws CommandLineException if the working directory is invalid or the process cannot be started
     */
    public Process execute()
        throws CommandLineException
    {
        // TODO: Provided only for backward compat. with <= 1.4
        verifyShellState();

        Process process;

        // addEnvironment( "MAVEN_TEST_ENVAR", "MAVEN_TEST_ENVAR_VALUE" );

        String[] environment = getEnvironmentVariables();

        File workingDir = shell.getWorkingDirectory();

        try
        {
            if ( workingDir == null )
            {
                process = Runtime.getRuntime().exec( getCommandline(), environment, workingDir );
            }
            else
            {
                if ( !workingDir.exists() )
                {
                    throw new CommandLineException( "Working directory \"" + workingDir.getPath()
                        + "\" does not exist!" );
                }
                else if ( !workingDir.isDirectory() )
                {
                    throw new CommandLineException( "Path \"" + workingDir.getPath()
                        + "\" does not specify a directory." );
                }

                process = Runtime.getRuntime().exec( getCommandline(), environment, workingDir );
            }
        }
        catch ( IOException ex )
        {
            throw new CommandLineException( "Error while executing process.", ex );
        }

        return process;
    }

    /**
     * @deprecated Remove once backward compat with plexus-utils <= 1.4 is no longer a consideration
     */
    private void verifyShellState()
    {
        if ( shell.getWorkingDirectory() == null )
        {
            shell.setWorkingDirectory( workingDir );
        }

        if ( shell.getOriginalExecutable() == null )
        {
            shell.setExecutable( executable );
        }
    }

    public Properties getSystemEnvVars()
        throws Exception
    {
        return CommandLineUtils.getSystemEnvVars();
    }

    /**
     * Allows to set the shell to be used in this command line. Shell usage is only desirable when generating code for
     * remote execution.
     *
     * @param shell
     * @since 1.2
     */
    public void setShell( Shell shell )
    {
        this.shell = shell;
    }

    /**
     * Get the shell to be used in this command line. Shell usage is only desirable when generating code for remote
     * execution.
     *
     * @since 1.2
     */
    public Shell getShell()
    {
        return shell;
    }

    /**
     * @deprecated Use {@link CommandLineUtils#translateCommandline(String)} instead.
     */
    public static String[] translateCommandline( String toProcess )
        throws Exception
    {
        return CommandLineUtils.translateCommandline( toProcess );
    }

    /**
     * @deprecated Use {@link CommandLineUtils#quote(String)} instead.
     */
    public static String quoteArgument( String argument )
        throws CommandLineException
    {
        return CommandLineUtils.quote( argument );
    }

    /**
     * @deprecated Use {@link CommandLineUtils#toString(String[])} instead.
     */
    public static String toString( String[] line )
    {
        return CommandLineUtils.toString( line );
    }

    /**
     * Default {@link Arg} implementation: holds the parts an argument expands to on the command line.
     */
    public static class Argument
        implements Arg
    {
        private String[] parts;

        /*
         * (non-Javadoc)
         * @see org.codehaus.plexus.util.cli.Argument#setValue(java.lang.String)
         */
        public void setValue( String value )
        {
            if ( value != null )
            {
                parts = new String[] { value };
            }
        }

        /*
         * (non-Javadoc)
         * @see org.codehaus.plexus.util.cli.Argument#setLine(java.lang.String)
         */
        public void setLine( String line )
        {
            if ( line == null )
            {
                return;
            }
            try
            {
                parts = CommandLineUtils.translateCommandline( line );
            }
            catch ( Exception e )
            {
                System.err.println( "Error translating Commandline." );
            }
        }

        /*
         * (non-Javadoc)
         * @see org.codehaus.plexus.util.cli.Argument#setFile(java.io.File)
         */
        public void setFile( File value )
        {
            parts = new String[] { value.getAbsolutePath() };
        }

        /*
         * (non-Javadoc)
         * @see org.codehaus.plexus.util.cli.Argument#getParts()
         */
        public String[] getParts()
        {
            return parts;
        }
    }
}
| Add public String[] getRawCommandline() to avoid OS dependency code in
unit tests | src/main/java/org/codehaus/plexus/util/cli/Commandline.java | Add public String[] getRawCommandline() to avoid OS dependency code in unit tests |
|
Java | bsd-3-clause | b6e63ff080ecd77cceca12466414646fe5e1f98a | 0 | jongo216/timestamp | /** Copyright (c) 2014, Group D in course TNM082
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**/
package com.example.timestamp;
import java.util.ArrayList;
import java.util.GregorianCalendar;
import java.util.List;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.os.SystemClock;
import android.support.v4.app.Fragment;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.*;
import android.view.View.MeasureSpec;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.example.timestamp.model.DB;
import com.example.timestamp.model.Project;
import com.example.timestamp.model.SettingsManager;
import com.example.timestamp.model.TimePost;
/**
 * Start screen fragment: lets the user pick the active project from a spinner,
 * check in/out of a work session via the chronometer button, and page through
 * statistics fragments in a ViewPager. Persistent state (current project, timer
 * running flag, start time) lives in SettingsManager; time posts go to DB.
 */
public class Start extends Fragment{

	// Instance variables
	//final Context context = this;
	// Project names shown in the spinner; the last entry is the "add project" action.
	public String[] projectsMenuString; // = {"Projekt 1", "Projekt 2", "Nytt projekt"};
	// Database ids parallel to projectsMenuString; -1 marks the "add project" row.
	public int[] projectMenuIds;
	private ArrayList<Project> projects;

	private LinearLayout imgButton;
	private Spinner spinnerProjectView;
	private View rootView;
	private Chronometer chronometer;
	private ViewPager statsViewPager;
	private MyAdapter statsPagerAdapter;
	private TextView textView;
	private FragmentManager statsFragmentManager;
	//private FragmentActivity parentActivity;
	private static DB db;

	@Override //mother of all inits!
	public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
		rootView = inflater.inflate(R.layout.activity_start, container, false);
		//parentActivity = getActivity();

		//Link to xml objects
		chronometer = (Chronometer)rootView.findViewById(R.id.chronometer);
		chronometer.setVisibility(View.GONE);
		textView = (TextView)rootView.findViewById(R.id.textStamplaIn);
		imgButton = (LinearLayout) rootView.findViewById(R.id.btnCheckIn);
		spinnerProjectView = (Spinner) rootView.findViewById(R.id.projects_menu_spinner2);
		statsViewPager = (ViewPager) rootView.findViewById(R.id.statsViewPager);

		Log.d("Activityinfo: ", "Activity of Start: " + getActivity().toString());
		db = new DB(getActivity());

		// Init order matters: views must be bound (above) before these read/update them.
		initTimer();
		initProjectSpinner();
		initTimerButton();
		initStats();
		dbButtonListener(); //Button is just for debug and not visible anyways. But i leave this ftm.

		return rootView;
	}

	/*public void onActivityCreated (Bundle savedInstanceState) {
		super.onActivityCreated(savedInstanceState);
	}*/

	/**
	 * Populates the project spinner from the database, pre-selects the currently
	 * active project, and installs the selection listener. Selecting the final
	 * row (id -1) launches CreateNewProject instead of switching projects.
	 */
	private void initProjectSpinner(){
		int selectedRow = 0;
		int currentProject = SettingsManager.getCurrentProjectId(getActivity());

		//Fetch projects froom data base and use them to create arrays
		projects = db.getAllProjects();
		projectsMenuString = new String[projects.size() + 1];
		projectMenuIds = new int[projects.size()+1];

		//Check if there are any projects
		//if there are not, direct the user
		//to create a new project
		if(db.projectsEmpty()){
			//create new project
			Intent intent = new Intent(getActivity(), CreateNewProject.class);
			intent.putExtra(Constants.PROJECT_ID, 0); //Optional parameters
			startActivity(intent);
		}

		for (int n = 0; n < projects.size(); n++)
		{
			projectsMenuString[n] = projects.get(n).getName();
			projectMenuIds[n] = projects.get(n).getId();

			// Remember the row of the active project so the spinner starts on it.
			if (currentProject == projectMenuIds[n])
				selectedRow = n;
		}

		// Extra last row acts as the "add project" command; id -1 flags it.
		projectsMenuString[projects.size()]= getString(R.string.add_project);
		projectMenuIds[projects.size()] = -1;

		// Adapter backing the project spinner; takes the names from the
		// menu-item string array built above.
		ArrayAdapter<String> adapter = new ArrayAdapter<String>(getActivity(),
				android.R.layout.simple_spinner_dropdown_item, projectsMenuString){

			// Style for the spinner itself: sets the text size and centers the text.
			public View getView(int position, View convertView,ViewGroup parent) {
				View v = super.getView(position, convertView, parent);
				((TextView) v).setGravity(Gravity.CENTER);
				((TextView) v).setTextColor(Color.WHITE);
				((TextView) v).setTextSize(25);
				return v;
			}

			// Style for the dropdown menu shown below the spinner.
			public View getDropDownView(int position, View convertView,ViewGroup parent) {
				View v = super.getDropDownView(position, convertView,parent);
				((TextView) v).setGravity(Gravity.CENTER);
				((TextView) v).setTextColor(Color.WHITE);
				((TextView) v).setBackgroundColor(Color.BLACK);
				((TextView) v).setTextSize(18);
				return v;
			}
		};

		// The spinner uses the items from the adapter chosen above.
		spinnerProjectView.setAdapter(adapter);
		spinnerProjectView.setSelection(selectedRow);

		//Set action listener for the spinner
		spinnerProjectView.setOnItemSelectedListener(new OnItemSelectedListener(){

			@Override
			public void onItemSelected(AdapterView<?> parent, View view,
					int pos, long id) {
				// TODO Auto-generated method stub
				if(projectMenuIds[pos] != -1){
					SettingsManager.setCurrentProjectId(projectMenuIds[pos], getActivity());
					updateStats();
				}else{
					// Create a new project
					Intent intent = new Intent(getActivity(), CreateNewProject.class);
					intent.putExtra(Constants.PROJECT_ID, 0); //Optional parameters
					startActivity(intent);
				}
			}

			@Override
			public void onNothingSelected(AdapterView<?> arg0) {
				// TODO Auto-generated method stub
			}
		});
		//spinnerListener();
	}

	/**
	 * Wires the statistics ViewPager to a pager adapter built on this
	 * fragment's child fragment manager.
	 */
	private void initStats() {
		statsFragmentManager = getChildFragmentManager();
		statsPagerAdapter = new MyAdapter(statsFragmentManager);
		//statsPagerAdapter.instantiateItem(statsViewPager, 0); //Not needed
		// Keep all stats pages alive off-screen so updateStats() can reach them.
		statsViewPager.setOffscreenPageLimit(5);
		statsViewPager.setAdapter(statsPagerAdapter);
		//statsViewPager.measure(MeasureSpec.AT_MOST, MeasureSpec.AT_MOST);
	}

	/**
	 * Pager adapter serving the three statistics pages:
	 * 0 = bar chart, 1 = burn-down, 2 = summary.
	 */
	public static class MyAdapter extends FragmentPagerAdapter {
		public MyAdapter(FragmentManager fm) {
			super(fm);
		}

		@Override
		public int getCount() {
			return 3;
		}

		@Override
		public Fragment getItem(int position) {
			switch (position) {
			case 0:
				StatsBarChartFragment barChart = new StatsBarChartFragment();
				//barChart.setRetainInstance(true);
				//barChart.setDB(db);
				return barChart;
			case 1:
				return new StatsBurnDownFragment();
			case 2:
				return new StatsSummaryFragment();
			}
			// Fallback for out-of-range positions; not expected given getCount() == 3.
			return new StatsSummaryFragment();
		}
	}

	/**
	 * Asks every attached statistics fragment to refresh itself.
	 * NOTE(review): assumes every child fragment implements UpdateableStatistics —
	 * the unchecked cast would throw otherwise; confirm against the adapter above.
	 */
	private void updateStats() {
		List<Fragment> fragments = statsFragmentManager.getFragments();

		if (fragments != null)
			for (int n = 0; n < fragments.size(); n++)
				((UpdateableStatistics)fragments.get(n)).update();
	}

	/**
	 * Restores the check-in button and chronometer to match the persisted timer
	 * state: if a session is running, resumes the chronometer from the stored
	 * start time; otherwise shows the idle (white) button.
	 */
	public void initTimer(){
		boolean timerRunning;
		timerRunning = SettingsManager.getIsTimerRunning(getActivity());

		if (timerRunning)
		{
			GregorianCalendar startTime = SettingsManager.getStartTime(getActivity());
			GregorianCalendar currentTime = new GregorianCalendar();

			imgButton.setBackground(getResources().getDrawable(R.drawable.checkinbutton_green) );
			// Rebase the chronometer so it shows the elapsed time since check-in.
			chronometer.setBase(SystemClock.elapsedRealtime() - currentTime.getTimeInMillis() + startTime.getTimeInMillis());
			chronometer.start();
			chronometer.setVisibility(View.VISIBLE);
			textView.setVisibility(View.GONE);
		}
		else imgButton.setBackground(getResources().getDrawable(R.drawable.checkinbutton_white) );
	}

	/**
	 * Installs the click handler that toggles the work session: checking out
	 * stops the chronometer and stores a TimePost; checking in records the
	 * start time and starts the chronometer from zero.
	 */
	public void initTimerButton(){
		imgButton.setOnClickListener(new OnClickListener(){
			public void onClick(View arg0){
				boolean timerRunning = SettingsManager.getIsTimerRunning(getActivity());
				if(timerRunning){
					// Check out: stop the clock and persist the finished session.
					imgButton.setBackground(getResources().getDrawable(R.drawable.checkinbutton_white) );
					SettingsManager.setIsTimerRunning(false, getActivity());
					chronometer.stop();
					textView.setVisibility(View.VISIBLE);
					chronometer.setVisibility(View.GONE);

					TimePost p = new TimePost();
					p.setProjectId(SettingsManager.getCurrentProjectId(getActivity()));
					p.setStartTime(SettingsManager.getStartTime(getActivity()));
					p.setEndTimeNow();
					//DB db = new DB(getActivity());
					db.set(p);
				}
				else{
					// Check in: restart the chronometer and remember the start time.
					imgButton.setBackground(getResources().getDrawable(R.drawable.checkinbutton_green) );
					chronometer.setBase(SystemClock.elapsedRealtime());
					chronometer.start();
					chronometer.setVisibility(View.VISIBLE);
					textView.setVisibility(View.GONE);

					SettingsManager.setIsTimerRunning(true, getActivity());
					SettingsManager.setStartTime(new GregorianCalendar(), getActivity());
				}
			}
		});
	}

	//database testing!
	// Debug-only: dumps all projects in a Toast when the (hidden) Projects button is tapped.
	public void dbButtonListener(){
		//projects
		Button projectsBtn = (Button) rootView.findViewById(R.id.Projects);
		projectsBtn.setOnClickListener(new OnClickListener(){
			public void onClick(View arg0){
				//DB db = new DB(getActivity());
				ArrayList<Project> projects = db.getAllProjects();
				String text = "";
				for(int i = 0; i < projects.size(); ++i){
					//buggs with printStart/EndTime
					text = text + "Project name: " + projects.get(i).getName() +
							" \tOwner: " + projects.get(i).getOwner() +
							" \tCustomer: " + projects.get(i).getCustomer() + "\n";
				}
				Toast.makeText(getActivity(), text, Toast.LENGTH_LONG).show();
			}
		});
	}

	@Override
	public void onResume()
	{
		super.onResume();
		//updateStats();
		// Re-sync timer UI and project list in case they changed while paused.
		initTimer();
		initProjectSpinner();
	}

	//OLD METHOD. DEPRECATED?
	/*public void startTime(View view){
		AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(getActivity());

		// set title
		alertDialogBuilder.setTitle("Timestamp");

		// set dialog message
		alertDialogBuilder
		.setMessage("You have now checked in!")
		.setCancelable(false)
		.setPositiveButton("Cancel",new DialogInterface.OnClickListener() {
			public void onClick(DialogInterface dialog,int id) {
				// if this button is clicked, close
				// current activity
				getActivity().finish();
			}
		})
		.setNegativeButton("Okay",new DialogInterface.OnClickListener() {
			public void onClick(DialogInterface dialog,int id) {
				// if this button is clicked, just close
				// the dialog box and do nothing
				dialog.cancel();
			}
		});

		// create alert dialog
		AlertDialog alertDialog = alertDialogBuilder.create();

		// show it
		alertDialog.show();
	}*/
}
| src/com/example/timestamp/Start.java | /** Copyright (c) 2014, Group D in course TNM082
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**/
package com.example.timestamp;
import java.util.ArrayList;
import java.util.GregorianCalendar;
import java.util.List;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.os.SystemClock;
import android.support.v4.app.Fragment;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.*;
import android.view.View.MeasureSpec;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.example.timestamp.model.DB;
import com.example.timestamp.model.Project;
import com.example.timestamp.model.SettingsManager;
import com.example.timestamp.model.TimePost;
public class Start extends Fragment{
// Instansvariabler
//final Context context = this;
public String[] projectsMenuString; // = {"Projekt 1", "Projekt 2", "Nytt projekt"};
public int[] projectMenuIds;
private ArrayList<Project> projects;
private LinearLayout imgButton;
private Spinner spinnerProjectView;
private View rootView;
private Chronometer chronometer;
private ViewPager statsViewPager;
private MyAdapter statsPagerAdapter;
private TextView textView;
private FragmentManager statsFragmentManager;
//private FragmentActivity parentActivity;
private static DB db;
@Override //mother of all inits!
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
rootView = inflater.inflate(R.layout.activity_start, container, false);
//parentActivity = getActivity();
//Link to xml objects
chronometer = (Chronometer)rootView.findViewById(R.id.chronometer);
chronometer.setVisibility(View.GONE);
textView = (TextView)rootView.findViewById(R.id.textStamplaIn);
imgButton = (LinearLayout) rootView.findViewById(R.id.btnCheckIn);
spinnerProjectView = (Spinner) rootView.findViewById(R.id.projects_menu_spinner2);
statsViewPager = (ViewPager) rootView.findViewById(R.id.statsViewPager);
Log.d("Activityinfo: ", "Activity of Start: " + getActivity().toString());
db = new DB(getActivity());
initTimer();
initProjectSpinner();
initTimerButton();
initStats();
dbButtonListener(); //Button is just for debug and not visible anyways. But i leave this ftm.
return rootView;
}
/*public void onActivityCreated (Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}*/
/**
 * Populates the project spinner from the database and wires its selection handling.
 * The last row is a synthetic "add project" entry (id -1) that launches
 * CreateNewProject; selecting a real project persists it as the current project
 * and refreshes the statistics pages.
 */
private void initProjectSpinner(){
    int selectedRow = 0;
    int currentProject = SettingsManager.getCurrentProjectId(getActivity());
    //Fetch projects from the database and use them to create arrays
    projects = db.getAllProjects();
    // One extra slot each for the trailing "add project" entry.
    projectsMenuString = new String[projects.size() + 1];
    projectMenuIds = new int[projects.size()+1];
    //Check if there are any projects
    //if there are not, direct the user
    //to create a new project
    if(db.projectsEmpty()){
        //create new project
        Intent intent = new Intent(getActivity(), CreateNewProject.class);
        intent.putExtra(Constants.PROJECT_ID, 0); //Optional parameters
        startActivity(intent);
    }
    for (int n = 0; n < projects.size(); n++)
    {
        projectsMenuString[n] = projects.get(n).getName();
        projectMenuIds[n] = projects.get(n).getId();
        // Remember the row of the currently selected project so the spinner can preselect it.
        if (currentProject == projectMenuIds[n])
            selectedRow = n;
    }
    projectsMenuString[projects.size()]= getString(R.string.add_project);
    projectMenuIds[projects.size()] = -1; // sentinel id for the "add project" row
    // Adapter backed by the project-name array; the anonymous subclass restyles the rows.
    // (original comments translated from Swedish)
    ArrayAdapter<String> adapter = new ArrayAdapter<String>(getActivity(),
            android.R.layout.simple_spinner_dropdown_item, projectsMenuString){
        // Style for the collapsed spinner view: centered, white, large text.
        public View getView(int position, View convertView,ViewGroup parent) {
            View v = super.getView(position, convertView, parent);
            ((TextView) v).setGravity(Gravity.CENTER);
            ((TextView) v).setTextColor(Color.WHITE);
            ((TextView) v).setTextSize(25);
            return v;
        }
        // Style for the dropdown rows shown below the spinner.
        public View getDropDownView(int position, View convertView,ViewGroup parent) {
            View v = super.getDropDownView(position, convertView,parent);
            ((TextView) v).setGravity(Gravity.CENTER);
            ((TextView) v).setTextColor(Color.WHITE);
            ((TextView) v).setBackgroundColor(Color.BLACK);
            ((TextView) v).setTextSize(18);
            return v;
        }
    };
    // The spinner takes its items from the adapter built above.
    spinnerProjectView.setAdapter(adapter);
    spinnerProjectView.setSelection(selectedRow);
    //Set action listener for the spinner
    spinnerProjectView.setOnItemSelectedListener(new OnItemSelectedListener(){
        @Override
        public void onItemSelected(AdapterView<?> parent, View view,
                int pos, long id) {
            if(projectMenuIds[pos] != -1){
                // A real project: persist the choice and refresh the statistics pages.
                SettingsManager.setCurrentProjectId(projectMenuIds[pos], getActivity());
                updateStats();
            }else{
                // Sentinel row selected: create a new project.
                Intent intent = new Intent(getActivity(), CreateNewProject.class);
                intent.putExtra(Constants.PROJECT_ID, 0); //Optional parameters
                startActivity(intent);
            }
        }
        @Override
        public void onNothingSelected(AdapterView<?> arg0) {
            // Intentionally empty: nothing to do when the selection is cleared.
        }
    });
    //spinnerListener();
}
/**
 * Sets up the statistics ViewPager with a MyAdapter bound to this fragment's
 * child fragment manager.
 * NOTE(review): the off-screen page limit of 5 exceeds the adapter's 3 pages —
 * presumably to keep every page instantiated for updateStats(); confirm.
 */
private void initStats() {
    statsFragmentManager = getChildFragmentManager();
    statsPagerAdapter = new MyAdapter(statsFragmentManager);
    //statsPagerAdapter.instantiateItem(statsViewPager, 0); //Not needed
    statsViewPager.setOffscreenPageLimit(5);
    statsViewPager.setAdapter(statsPagerAdapter);
    //statsViewPager.measure(MeasureSpec.AT_MOST, MeasureSpec.AT_MOST);
}
/**
 * Pager adapter supplying the three statistics pages: bar chart, burn-down
 * and summary. A fresh fragment instance is created for each request.
 */
public static class MyAdapter extends FragmentPagerAdapter {
    /** Number of statistics pages served by this adapter. */
    private static final int PAGE_COUNT = 3;

    public MyAdapter(FragmentManager fm) {
        super(fm);
    }

    @Override
    public int getCount() {
        return PAGE_COUNT;
    }

    /**
     * Returns a freshly constructed fragment for the requested page.
     * Any unexpected position falls back to the summary page, matching the
     * previous behavior.
     */
    @Override
    public Fragment getItem(int position) {
        switch (position) {
            case 0:
                return new StatsBarChartFragment();
            case 1:
                return new StatsBurnDownFragment();
            case 2:
            default:
                return new StatsSummaryFragment();
        }
    }
}
/**
 * Pushes a refresh to every child statistics page that supports updating.
 * Fragments that are null or do not implement UpdateableStatistics are
 * skipped instead of triggering a ClassCastException (the child fragment
 * manager may hold fragments other than the stats pages).
 */
private void updateStats() {
    List<Fragment> fragments = statsFragmentManager.getFragments();
    if (fragments == null) {
        return;
    }
    for (Fragment fragment : fragments) {
        // instanceof is false for null entries, so both cases are guarded here.
        if (fragment instanceof UpdateableStatistics) {
            ((UpdateableStatistics) fragment).update();
        }
    }
}
/**
 * Restores the timer UI from persisted state: if a check-in is running, the
 * chronometer resumes from the stored start time and the button turns green;
 * otherwise the idle (white) button is shown.
 */
public void initTimer(){
    boolean timerRunning;
    timerRunning = SettingsManager.getIsTimerRunning(getActivity());
    if (timerRunning)
    {
        GregorianCalendar startTime = SettingsManager.getStartTime(getActivity());
        GregorianCalendar currentTime = new GregorianCalendar();
        imgButton.setBackground(getResources().getDrawable(R.drawable.checkinbutton_green) );
        // Rebase the chronometer so it displays the time elapsed since the persisted start.
        chronometer.setBase(SystemClock.elapsedRealtime() - currentTime.getTimeInMillis() + startTime.getTimeInMillis());
        chronometer.start();
        chronometer.setVisibility(View.VISIBLE);
        textView.setVisibility(View.GONE);
    }
    else imgButton.setBackground(getResources().getDrawable(R.drawable.checkinbutton_white) );
}
/**
 * Wires the check-in/check-out toggle button. Checking out stops the
 * chronometer and stores a TimePost for the current project; checking in
 * records the start time and starts the chronometer.
 */
public void initTimerButton(){
    imgButton.setOnClickListener(new OnClickListener(){
        public void onClick(View arg0){
            boolean timerRunning = SettingsManager.getIsTimerRunning(getActivity());
            if(timerRunning){
                // Check out: stop the UI timer and persist the completed time post.
                imgButton.setBackground(getResources().getDrawable(R.drawable.checkinbutton_white) );
                SettingsManager.setIsTimerRunning(false, getActivity());
                chronometer.stop();
                textView.setVisibility(View.VISIBLE);
                chronometer.setVisibility(View.GONE);
                TimePost p = new TimePost();
                p.setProjectId(SettingsManager.getCurrentProjectId(getActivity()));
                p.setStartTime(SettingsManager.getStartTime(getActivity()));
                p.setEndTimeNow();
                //DB db = new DB(getActivity());
                db.set(p);
            }
            else{
                // Check in: remember the start time and show the running chronometer.
                imgButton.setBackground(getResources().getDrawable(R.drawable.checkinbutton_green) );
                chronometer.setBase(SystemClock.elapsedRealtime());
                chronometer.start();
                chronometer.setVisibility(View.VISIBLE);
                textView.setVisibility(View.GONE);
                SettingsManager.setIsTimerRunning(true, getActivity());
                SettingsManager.setStartTime(new GregorianCalendar(), getActivity());
            }
        }
    });
}
//database testing!
/**
 * Debug-only listener: dumps every project from the database into a toast.
 * The comment on the original button says it is invisible in production; kept
 * for manual testing.
 */
public void dbButtonListener(){
    //projects
    Button projectsBtn = (Button) rootView.findViewById(R.id.Projects);
    projectsBtn.setOnClickListener(new OnClickListener(){
        public void onClick(View arg0){
            //DB db = new DB(getActivity());
            ArrayList<Project> projects = db.getAllProjects();
            // Build the dump with a StringBuilder instead of repeated String concatenation.
            StringBuilder text = new StringBuilder();
            for (Project project : projects) {
                //buggs with printStart/EndTime
                text.append("Project name: ").append(project.getName())
                    .append(" \tOwner: ").append(project.getOwner())
                    .append(" \tCustomer: ").append(project.getCustomer())
                    .append("\n");
            }
            Toast.makeText(getActivity(), text.toString(), Toast.LENGTH_LONG).show();
        }
    });
}
/**
 * Re-syncs the timer UI and the project spinner with persisted state whenever
 * the fragment returns to the foreground (e.g. after CreateNewProject).
 */
@Override
public void onResume()
{
    super.onResume();
    //updateStats();
    initTimer();
    initProjectSpinner();
}
//OLD METHOD. DEPRECATED?
/*public void startTime(View view){
AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(getActivity());
// set title
alertDialogBuilder.setTitle("Timestamp");
// set dialog message
alertDialogBuilder
.setMessage("You have now checked in!")
.setCancelable(false)
.setPositiveButton("Cancel",new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,int id) {
// if this button is clicked, close
// current activity
getActivity().finish();
}
})
.setNegativeButton("Okay",new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,int id) {
// if this button is clicked, just close
// the dialog box and do nothing
dialog.cancel();
}
});
// create alert dialog
AlertDialog alertDialog = alertDialogBuilder.create();
// show it
alertDialog.show();
}*/
}
| Changing clock text color to black
| src/com/example/timestamp/Start.java | Changing clock text color to black |
|
Java | bsd-3-clause | 1b60bf3c19a2b647cdead90d0a7b3feaa7655bda | 0 | BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex,BaseXdb/basex | package org.basex.query.func.crypto;
import java.security.*;
import java.util.*;
import java.security.cert.X509Certificate;
import javax.xml.crypto.*;
import javax.xml.crypto.dsig.*;
import javax.xml.crypto.dsig.keyinfo.*;
/**
 * Extracts a key from a given {@link KeyInfo} object.
 *
 * @author BaseX Team 2005-21, BSD License
 * @author Lukas Kircher
 */
final class MyKeySelector extends KeySelector {
  /**
   * Wrapper for KeySelector results.
   *
   * @author BaseX Team 2005-21, BSD License
   * @author Lukas Kircher
   */
  private static class MyKeySelectorResult implements KeySelectorResult {
    /** Key. */
    private final Key pk;

    /**
     * Constructor.
     * @param key key
     */
    MyKeySelectorResult(final PublicKey key) {
      pk = key;
    }

    @Override
    public Key getKey() {
      return pk;
    }
  }

  @Override
  public KeySelectorResult select(final KeyInfo ki, final Purpose p, final AlgorithmMethod m,
      final XMLCryptoContext c) throws KeySelectorException {
    if(ki == null) throw new KeySelectorException("KeyInfo is null");

    final SignatureMethod sm = (SignatureMethod) m;
    for(final Object content : ki.getContent()) {
      final PublicKey pk = publicKeyOf((XMLStructure) content);
      if(pk == null) continue;
      // accept the key only if its algorithm matches the signature algorithm
      final String sa = sm.getAlgorithm();
      final String ka = pk.getAlgorithm();
      if("DSA".equalsIgnoreCase(ka) && "http://www.w3.org/2000/09/xmldsig#dsa-sha1".equals(sa) ||
         "RSA".equalsIgnoreCase(ka) && "http://www.w3.org/2000/09/xmldsig#rsa-sha1".equals(sa)) {
        return new MyKeySelectorResult(pk);
      }
    }
    throw new KeySelectorException("No KeyValue element found");
  }

  /**
   * Derives a public key from a single KeyInfo child structure.
   * @param structure KeyInfo content entry
   * @return public key of a KeyValue entry, the public key of the last
   *   certificate of an X509Data entry, or {@code null} otherwise
   * @throws KeySelectorException if a key value cannot be decoded
   */
  private static PublicKey publicKeyOf(final XMLStructure structure) throws KeySelectorException {
    if(structure instanceof KeyValue) {
      try {
        return ((KeyValue) structure).getPublicKey();
      } catch(final KeyException ex) {
        throw new KeySelectorException(ex);
      }
    }
    if(structure instanceof X509Data) {
      PublicKey pk = null;
      for(final Object data : ((X509Data) structure).getContent()) {
        if(data instanceof X509Certificate) pk = ((X509Certificate) data).getPublicKey();
      }
      return pk;
    }
    return null;
  }
}
import java.security.*;
import java.util.*;
import javax.security.cert.*;
import javax.security.cert.Certificate;
import javax.xml.crypto.*;
import javax.xml.crypto.dsig.*;
import javax.xml.crypto.dsig.keyinfo.*;
/**
* Extracts a key from a given {@link KeyInfo} object.
*
* @author BaseX Team 2005-21, BSD License
* @author Lukas Kircher
*/
@SuppressWarnings("deprecation")
final class MyKeySelector extends KeySelector {
/**
* Wrapper for KeySelector results.
*
* @author BaseX Team 2005-21, BSD License
* @author Lukas Kircher
*/
private static class MyKeySelectorResult implements KeySelectorResult {
/** Key. */
private final Key pk;
@Override
public Key getKey() {
return pk;
}
/**
* Constructor.
* @param key key
*/
MyKeySelectorResult(final PublicKey key) {
pk = key;
}
}
@Override
public KeySelectorResult select(final KeyInfo ki, final Purpose p, final AlgorithmMethod m,
final XMLCryptoContext c) throws KeySelectorException {
if(ki == null) throw new KeySelectorException("KeyInfo is null");
final SignatureMethod sm = (SignatureMethod) m;
final List<?> list = ki.getContent();
for(final Object l : list) {
final XMLStructure s = (XMLStructure) l;
PublicKey pk = null;
if(s instanceof KeyValue) {
try {
pk = ((KeyValue) s).getPublicKey();
} catch(final KeyException ke) {
throw new KeySelectorException(ke);
}
} else if(s instanceof X509Data) {
for(final Object d : ((X509Data) s).getContent()) {
if(d instanceof X509Certificate) {
pk = ((Certificate) d).getPublicKey();
}
}
}
if(pk != null) {
final String sa = sm.getAlgorithm();
final String ka = pk.getAlgorithm();
if("DSA".equalsIgnoreCase(ka) && "http://www.w3.org/2000/09/xmldsig#dsa-sha1".equals(sa) ||
"RSA".equalsIgnoreCase(ka) && "http://www.w3.org/2000/09/xmldsig#rsa-sha1".equals(sa)) {
return new MyKeySelectorResult(pk);
}
}
}
throw new KeySelectorException("No KeyValue element found");
}
} | [MOD] XQuery, validate:signature: JDK11 upgrade | basex-core/src/main/java/org/basex/query/func/crypto/MyKeySelector.java | [MOD] XQuery, validate:signature: JDK11 upgrade |
|
Java | isc | 65d5860fd9f61580553fe0b09d1e8ce5962b2fac | 0 | nwillc/almost-functional | /*
* Copyright (c) 2015, [email protected]
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package almost.functional.reactive;
import almost.functional.Consumer;
import almost.functional.Optional;
import org.junit.Test;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static org.assertj.core.api.Assertions.assertThat;
@SuppressWarnings("unchecked")
// Unit tests for BroadcastObserver: fan-out of next/error/completed events to
// registered consumers, the fluent registration API, the terminal semantics of
// completed(), and tolerance of consumer exceptions.
public class BroadcastObserverTest {
    // The add*Consumer methods must return the observer itself for chaining.
    @Test
    public void testFluidApiReturns() throws Exception {
        BroadcastObserver<Boolean> test = new BroadcastObserver<Boolean>();
        assertThat(test).isNotNull();
        assertThat(test.addNextConsumer(new BooleanConsumer())).isEqualTo(test);
        assertThat(test.addErrorConsumer(new ErrorConsumer())).isEqualTo(test);
        assertThat(test.addCompletedConsumer(new BooleanConsumer())).isEqualTo(test);
    }
    // Consumers supplied through the Optional-based constructor receive next events.
    @Test
    public void shouldAllowSimplifiedConstructor() throws Exception {
        BooleanConsumer booleanConsumer = new BooleanConsumer();
        BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>(
                Optional.<Consumer<Boolean>>of(booleanConsumer),
                Optional.<Consumer<Throwable>>empty(),
                Optional.<Consumer<Boolean>>empty()
        );
        assertThat(booleanConsumer.flag.get()).isFalse();
        broadcastObserver.next(true);
        assertThat(booleanConsumer.flag.get()).isTrue();
    }
    // completed() is delivered to completed-consumers.
    @Test
    public void shouldBeCompleted() throws Exception {
        BooleanConsumer consumer = new BooleanConsumer();
        BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>();
        broadcastObserver.addCompletedConsumer(consumer);
        assertThat(consumer.flag.get()).isFalse();
        broadcastObserver.completed(true);
        assertThat(consumer.flag.get()).isTrue();
    }
    // next() is delivered to next-consumers.
    @Test
    public void shouldNext() throws Exception {
        BooleanConsumer consumer = new BooleanConsumer();
        BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>();
        broadcastObserver.addNextConsumer(consumer);
        assertThat(consumer.flag.get()).isFalse();
        broadcastObserver.next(true);
        assertThat(consumer.flag.get()).isTrue();
    }
    // error() is delivered to error-consumers.
    @Test
    public void shouldError() throws Exception {
        ErrorConsumer errorConsumer = new ErrorConsumer();
        BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>();
        broadcastObserver.addErrorConsumer(errorConsumer);
        assertThat(errorConsumer.error.get()).isNull();
        broadcastObserver.error(new NullPointerException());
        assertThat(errorConsumer.error.get()).isInstanceOf(NullPointerException.class);
    }
    // After completed() fires, further next/error/completed events must be ignored.
    @Test
    public void shouldRespectCompleted() throws Exception {
        BooleanConsumer next = new BooleanConsumer();
        BooleanConsumer completed = new BooleanConsumer();
        ErrorConsumer error = new ErrorConsumer();
        BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>(Optional.of(next),
                Optional.of(error),
                Optional.of(completed));
        assertThat(next.flag.get()).isFalse();
        assertThat(completed.flag.get()).isFalse();
        assertThat(error.error.get()).isNull();
        broadcastObserver.next(true);
        assertThat(next.flag.get()).isTrue();
        broadcastObserver.next(false);
        assertThat(next.flag.get()).isFalse();
        broadcastObserver.error(new NullPointerException());
        assertThat(error.error.get()).isInstanceOf(NullPointerException.class);
        broadcastObserver.error(null);
        assertThat(error.error.get()).isNull();
        completed.flag.set(true);
        broadcastObserver.completed(false);
        assertThat(completed.flag.get()).isFalse();
        // Should ignore further messages since completed
        broadcastObserver.completed(true);
        assertThat(completed.flag.get()).isFalse();
        broadcastObserver.next(true);
        assertThat(next.flag.get()).isFalse();
        broadcastObserver.error(new NullPointerException());
        assertThat(error.error.get()).isNull();
    }
    // Multiple consumers registered for the same event all receive it.
    @Test
    public void shouldDoMultiples() throws Exception {
        BooleanConsumer one = new BooleanConsumer();
        BooleanConsumer two = new BooleanConsumer();
        BooleanConsumer three = new BooleanConsumer();
        BooleanConsumer four = new BooleanConsumer();
        BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>();
        broadcastObserver.addNextConsumer(one, two);
        broadcastObserver.addCompletedConsumer(three, four);
        assertThat(one.flag.get()).isFalse();
        assertThat(two.flag.get()).isFalse();
        assertThat(three.flag.get()).isFalse();
        assertThat(four.flag.get()).isFalse();
        broadcastObserver.next(true);
        assertThat(one.flag.get()).isTrue();
        assertThat(two.flag.get()).isTrue();
        assertThat(three.flag.get()).isFalse();
        assertThat(four.flag.get()).isFalse();
        broadcastObserver.next(false);
        assertThat(one.flag.get()).isFalse();
        assertThat(two.flag.get()).isFalse();
        assertThat(three.flag.get()).isFalse();
        assertThat(four.flag.get()).isFalse();
        broadcastObserver.completed(true);
        assertThat(one.flag.get()).isFalse();
        assertThat(two.flag.get()).isFalse();
        assertThat(three.flag.get()).isTrue();
        assertThat(four.flag.get()).isTrue();
    }
    // A consumer that throws must not propagate the exception to the caller of next().
    @Test
    public void shouldIgnoreExceptions() throws Exception {
        Consumer<Boolean> exceptionThrower = new Consumer<Boolean>() {
            @Override
            public void accept(Boolean aBoolean) {
                throw new IllegalArgumentException();
            }
        };
        BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>(Optional.of(exceptionThrower),
                Optional.<Consumer<Throwable>>empty(),
                Optional.<Consumer<Boolean>>empty());
        broadcastObserver.next(true);
    }
    // Test helper: records the last Boolean it received.
    private static class BooleanConsumer implements Consumer<Boolean> {
        final AtomicBoolean flag = new AtomicBoolean(false);
        @Override
        public void accept(Boolean aBoolean) {
            flag.set(aBoolean);
        }
    }
    // Test helper: records the last Throwable it received.
    private static class ErrorConsumer implements Consumer<Throwable> {
        final AtomicReference<Throwable> error = new AtomicReference<Throwable>(null);
        @Override
        public void accept(Throwable throwable) {
            error.set(throwable);
        }
    }
}
| src/test/java/almost/functional/reactive/BroadcastObserverTest.java | /*
* Copyright (c) 2015, [email protected]
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package almost.functional.reactive;
import almost.functional.Consumer;
import almost.functional.Optional;
import org.junit.Test;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static org.assertj.core.api.Assertions.assertThat;
@SuppressWarnings("unchecked")
public class BroadcastObserverTest {
@Test
public void shouldAllowSimplifiedConstructor() throws Exception {
BooleanConsumer booleanConsumer = new BooleanConsumer();
BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>(
Optional.<Consumer<Boolean>>of(booleanConsumer),
Optional.<Consumer<Throwable>>empty(),
Optional.<Consumer<Boolean>>empty()
);
assertThat(booleanConsumer.flag.get()).isFalse();
broadcastObserver.next(true);
assertThat(booleanConsumer.flag.get()).isTrue();
}
@Test
public void shouldBeCompleted() throws Exception {
BooleanConsumer consumer = new BooleanConsumer();
BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>();
broadcastObserver.addCompletedConsumer(consumer);
assertThat(consumer.flag.get()).isFalse();
broadcastObserver.completed(true);
assertThat(consumer.flag.get()).isTrue();
}
@Test
public void shouldNext() throws Exception {
BooleanConsumer consumer = new BooleanConsumer();
BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>();
broadcastObserver.addNextConsumer(consumer);
assertThat(consumer.flag.get()).isFalse();
broadcastObserver.next(true);
assertThat(consumer.flag.get()).isTrue();
}
@Test
public void shouldError() throws Exception {
ErrorConsumer errorConsumer = new ErrorConsumer();
BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>();
broadcastObserver.addErrorConsumer(errorConsumer);
assertThat(errorConsumer.error.get()).isNull();
broadcastObserver.error(new NullPointerException());
assertThat(errorConsumer.error.get()).isInstanceOf(NullPointerException.class);
}
@Test
public void shouldRespectCompleted() throws Exception {
BooleanConsumer next = new BooleanConsumer();
BooleanConsumer completed = new BooleanConsumer();
ErrorConsumer error = new ErrorConsumer();
BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>(Optional.of(next),
Optional.of(error),
Optional.of(completed));
assertThat(next.flag.get()).isFalse();
assertThat(completed.flag.get()).isFalse();
assertThat(error.error.get()).isNull();
broadcastObserver.next(true);
assertThat(next.flag.get()).isTrue();
broadcastObserver.next(false);
assertThat(next.flag.get()).isFalse();
broadcastObserver.error(new NullPointerException());
assertThat(error.error.get()).isInstanceOf(NullPointerException.class);
broadcastObserver.error(null);
assertThat(error.error.get()).isNull();
completed.flag.set(true);
broadcastObserver.completed(false);
assertThat(completed.flag.get()).isFalse();
// Should ignore further messages since completed
broadcastObserver.completed(true);
assertThat(completed.flag.get()).isFalse();
broadcastObserver.next(true);
assertThat(next.flag.get()).isFalse();
broadcastObserver.error(new NullPointerException());
assertThat(error.error.get()).isNull();
}
@Test
public void shouldDoMultiples() throws Exception {
BooleanConsumer one = new BooleanConsumer();
BooleanConsumer two = new BooleanConsumer();
BooleanConsumer three = new BooleanConsumer();
BooleanConsumer four = new BooleanConsumer();
BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>();
broadcastObserver.addNextConsumer(one, two);
broadcastObserver.addCompletedConsumer(three, four);
assertThat(one.flag.get()).isFalse();
assertThat(two.flag.get()).isFalse();
assertThat(three.flag.get()).isFalse();
assertThat(four.flag.get()).isFalse();
broadcastObserver.next(true);
assertThat(one.flag.get()).isTrue();
assertThat(two.flag.get()).isTrue();
assertThat(three.flag.get()).isFalse();
assertThat(four.flag.get()).isFalse();
broadcastObserver.next(false);
assertThat(one.flag.get()).isFalse();
assertThat(two.flag.get()).isFalse();
assertThat(three.flag.get()).isFalse();
assertThat(four.flag.get()).isFalse();
broadcastObserver.completed(true);
assertThat(one.flag.get()).isFalse();
assertThat(two.flag.get()).isFalse();
assertThat(three.flag.get()).isTrue();
assertThat(four.flag.get()).isTrue();
}
@Test
public void shouldIgnoreExceptions() throws Exception {
Consumer<Boolean> exceptionThrower = new Consumer<Boolean>() {
@Override
public void accept(Boolean aBoolean) {
throw new IllegalArgumentException();
}
};
BroadcastObserver<Boolean> broadcastObserver = new BroadcastObserver<Boolean>(Optional.of(exceptionThrower),
Optional.<Consumer<Throwable>>empty(),
Optional.<Consumer<Boolean>>empty());
broadcastObserver.next(true);
}
private static class BooleanConsumer implements Consumer<Boolean> {
final AtomicBoolean flag = new AtomicBoolean(false);
@Override
public void accept(Boolean aBoolean) {
flag.set(aBoolean);
}
}
private static class ErrorConsumer implements Consumer<Throwable> {
final AtomicReference<Throwable> error = new AtomicReference<Throwable>(null);
@Override
public void accept(Throwable throwable) {
error.set(throwable);
}
}
}
| Increased test coverage
| src/test/java/almost/functional/reactive/BroadcastObserverTest.java | Increased test coverage |
|
Java | mit | e01c11cf006abb9eab52edd26d6e1f0cde58eac8 | 0 | JoelJ/LinkedArrayList | package com.joelj.collections;
/**
* Simple Pair class.
* Has two values: first and second that are individually typed.
*
* User: Joel Johnson
* Date: 2/16/13
* Time: 4:58 PM
*/
public class Pair<FIRST, SECOND> {
private final FIRST first;
private final SECOND second;
public static <FIRST, SECOND> Pair of(FIRST first, SECOND second) {
return new Pair<FIRST, SECOND>(first, second);
}
private Pair(FIRST first, SECOND second) {
this.first = first;
this.second = second;
}
public FIRST getFirst() {
return first;
}
public SECOND getSecond() {
return second;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof Pair)) return false;
Pair pair = (Pair) o;
return !(first != null ? !first.equals(pair.first) : pair.first != null) && !(second != null ? !second.equals(pair.second) : pair.second != null);
}
@Override
public int hashCode() {
int result = first != null ? first.hashCode() : 0;
result = 31 * result + (second != null ? second.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "{" + first + "," + second + '}';
}
}
| src/main/java/com/joelj/collections/Pair.java | package com.joelj.collections;
/**
* User: Joel Johnson
* Date: 2/16/13
* Time: 4:58 PM
*/
public class Pair<FIRST, SECOND> {
private final FIRST first;
private final SECOND second;
public static <FIRST, SECOND> Pair of(FIRST first, SECOND second) {
return new Pair<FIRST, SECOND>(first, second);
}
private Pair(FIRST first, SECOND second) {
this.first = first;
this.second = second;
}
public FIRST getFirst() {
return first;
}
public SECOND getSecond() {
return second;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof Pair)) return false;
Pair pair = (Pair) o;
return !(first != null ? !first.equals(pair.first) : pair.first != null) && !(second != null ? !second.equals(pair.second) : pair.second != null);
}
@Override
public int hashCode() {
int result = first != null ? first.hashCode() : 0;
result = 31 * result + (second != null ? second.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "{" + first + "," + second + '}';
}
}
| adding javadoc
| src/main/java/com/joelj/collections/Pair.java | adding javadoc |
|
Java | mit | f27918d4bdfcd7e7c8cf9464e174dfba3830aa5a | 0 | kamontat/CheckIDNumberA,kamontat/CheckIDNumberA,kamontat/CheckIDNumberA | package com.kamontat.checkidnumber.view.fragment;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import com.kamontat.checkidnumber.R;
/**
 * Fragment showing an ID-number input field with a validation button.
 * Listeners may be registered before the view exists; they are attached in
 * {@link #onCreateView}.
 *
 * @author kamontat
 * @version 1.0
 * @since Thu 11/May/2017 - 10:22 PM
 */
public class InputFragment extends Fragment {
    // Informational message view (bound in onCreateView, not otherwise used here).
    private TextView message;
    private EditText input;
    private Button button;
    // Listeners supplied by the host before the view is created.
    private View.OnClickListener clickListener;
    private TextWatcher watcher;

    public InputFragment() {
        // Required empty public constructor
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the layout for this fragment and attach any listeners registered earlier.
        View view = inflater.inflate(R.layout.fragment_input, container, false);
        message = (TextView) view.findViewById(R.id.input_message);
        input = (EditText) view.findViewById(R.id.input_id_number);
        if (watcher != null) input.addTextChangedListener(watcher);
        button = (Button) view.findViewById(R.id.input_btn);
        if (clickListener != null) button.setOnClickListener(clickListener);
        return view;
    }

    // Current text of the input field.
    public String getInput() {
        return input.getText().toString();
    }

    public void setTextColor(int id) {
        input.setTextColor(id);
    }

    // NOTE(review): listeners set after onCreateView has run are never attached —
    // confirm that callers always register them before the fragment's view is created.
    public void setInputListener(TextWatcher watcher) {
        this.watcher = watcher;
    }

    public void setButton(View.OnClickListener clickListener) {
        this.clickListener = clickListener;
    }

    public void setButtonEnable(boolean b) {
        button.setEnabled(b);
    }

    // Requests focus on the input field and pops up the soft keyboard.
    public void showKeyboard(Activity activity) {
        if (input.requestFocus()) {
            InputMethodManager imm = (InputMethodManager) activity.getSystemService(Context.INPUT_METHOD_SERVICE);
            imm.showSoftInput(input, InputMethodManager.SHOW_IMPLICIT);
        }
    }

    public void clearText() {
        input.setText("");
    }
}
| app/src/main/java/com/kamontat/checkidnumber/view/fragment/InputFragment.java | package com.kamontat.checkidnumber.view.fragment;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import com.kamontat.checkidnumber.R;
/**
* @author kamontat
* @version 1.0
* @since Thu 11/May/2017 - 10:22 PM
*/
public class InputFragment extends Fragment {
private TextView message;
private EditText input;
private Button button;
public InputFragment() {
// Required empty public constructor
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
// Inflate the layout for this fragment
View view = inflater.inflate(R.layout.fragment_input, container, false);
message = (TextView) view.findViewById(R.id.input_message);
input = (EditText) view.findViewById(R.id.input_id_number);
button = (Button) view.findViewById(R.id.input_btn);
return view;
}
}
| Update with main
| app/src/main/java/com/kamontat/checkidnumber/view/fragment/InputFragment.java | Update with main |
|
Java | mit | edb1c4b3b2e27e7aeffa6507699388b44ff7b25c | 0 | yonglehou/moco,ForrestVV/moco,zhigang1992/moco,jasonchaffee/moco,zhigang1992/moco,ruiruiguo/moco,dreamhead/moco,sandyway/moco,ysjian/moco,cgourlay/moco,yonglehou/moco,dreamhead/moco,ForrestAlfred/moco,ForrestVV/moco,jasonchaffee/moco,sandyway/moco,ruiruiguo/moco,LiuShulong/moco,ForrestAlfred/moco,ysjian/moco,cgourlay/moco,LiuShulong/moco | package com.github.dreamhead.moco.resource.reader;
import com.github.dreamhead.moco.Request;
import com.github.dreamhead.moco.RequestExtractor;
import com.google.common.base.Optional;
/**
 * Template variable backed by a request extractor: resolves its value from
 * the incoming request at render time.
 */
public class ExtractorVariable<T> implements Variable {
    private final RequestExtractor<T> extractor;

    public ExtractorVariable(RequestExtractor<T> extractor) {
        this.extractor = extractor;
    }

    /**
     * Extracts the value for the given request. Returns null when nothing
     * could be extracted; a single-element string array collapses to its only
     * element, a longer string array is passed through unchanged, and any
     * other value is stringified.
     */
    @Override
    public Object toTemplateVariable(Request request) {
        Optional<T> extracted = extractor.extract(request);
        if (!extracted.isPresent()) {
            return null;
        }
        T value = extracted.get();
        if (!(value instanceof String[])) {
            return value.toString();
        }
        String[] values = (String[]) value;
        return values.length == 1 ? values[0] : values;
    }
}
| moco-core/src/main/java/com/github/dreamhead/moco/resource/reader/ExtractorVariable.java | package com.github.dreamhead.moco.resource.reader;
import com.github.dreamhead.moco.Request;
import com.github.dreamhead.moco.RequestExtractor;
import com.google.common.base.Optional;
public class ExtractorVariable<T> implements Variable {
private final RequestExtractor<T> extractor;
public ExtractorVariable(RequestExtractor<T> extractor) {
this.extractor = extractor;
}
@Override
public Object toTemplateVariable(Request request) {
Optional<T> extractContent = extractor.extract(request);
if (!extractContent.isPresent()) {
return null;
}
T target = extractContent.get();
if (target instanceof String[]) {
String[] contents = (String[])target;
return contents[0];
}
return target.toString();
}
}
| left string collection to template
| moco-core/src/main/java/com/github/dreamhead/moco/resource/reader/ExtractorVariable.java | left string collection to template |
|
Java | mit | 62b0142b33a138f5aa6527f26eed5d9d2f7ca678 | 0 | Backendless/Android-SDK,Backendless/Android-SDK,gwokudasam/Android-SDK | /*
* ********************************************************************************************************************
* <p/>
* BACKENDLESS.COM CONFIDENTIAL
* <p/>
* ********************************************************************************************************************
* <p/>
* Copyright 2012 BACKENDLESS.COM. All Rights Reserved.
* <p/>
* NOTICE: All information contained herein is, and remains the property of Backendless.com and its suppliers,
* if any. The intellectual and technical concepts contained herein are proprietary to Backendless.com and its
* suppliers and may be covered by U.S. and Foreign Patents, patents in process, and are protected by trade secret
* or copyright law. Dissemination of this information or reproduction of this material is strictly forbidden
* unless prior written permission is obtained from Backendless.com.
* <p/>
* ********************************************************************************************************************
*/
package com.backendless;
import java.io.IOException;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnPreparedListener;
import android.view.SurfaceHolder;
import com.backendless.exceptions.BackendlessException;
import com.backendless.media.DisplayOrientation;
import com.backendless.media.Session;
import com.backendless.media.SessionBuilder;
import com.backendless.media.StreamProtocolType;
import com.backendless.media.StreamVideoQuality;
import com.backendless.media.StreamType;
import com.backendless.media.audio.AudioQuality;
import com.backendless.media.gl.SurfaceView;
import com.backendless.media.rtsp.RtspClient;
import com.backendless.media.video.VideoQuality;
/**
 * Singleton facade over Backendless media streaming on Android. It wires the device
 * camera/microphone into an RTSP publishing pipeline ({@link Session} + {@link RtspClient})
 * and plays live or recorded streams back through an Android {@link MediaPlayer}.
 * <p>
 * One of the {@code configureForPublish(..)} / {@code configureForPlay(..)} methods must be
 * invoked first; every other operation guards against a missing session/client/player and
 * throws {@link BackendlessException} otherwise.
 */
public final class Media
{
    // FIXME(review): "( true ) ? .." is a hard-coded debug switch; the production host
    // "media.backendless.com" branch is currently unreachable.
    private final static String WOWZA_SERVER_IP = ( true ) ? "10.0.1.48" : "media.backendless.com";
    // Wowza application names: live publishing/playback vs. video-on-demand playback.
    private final static String WOWZA_SERVER_LIVE_APP_NAME = "mediaAppLive";
    private final static String WOWZA_SERVER_VOD_APP_NAME = "mediaAppVod";
    private final static Integer WOWZA_SERVER_PORT = 1935;
    private final static String RTSP_PROTOCOL = StreamProtocolType.RTSP.getValue();
    private final static String HLS_PROTOCOL = StreamProtocolType.HLS.getValue();
    // Server-side layout: <appId>/files/media/<streamName>[/playlist.m3u8 for HLS].
    private static final String MEDIA_FILES_LOCATION = "/files/media/";
    private static final String HLS_PLAYLIST_CONSTANT = "/playlist.m3u8";

    private RtspClient rtspClient;            // publishing client; set by configureForPublish(..)
    private Session session;                  // capture session; set by configureForPublish(..)
    private MediaPlayer mediaPlayer;          // playback player; set by configureForPlay(..)
    private StreamProtocolType protocolType;  // playback protocol; treated as RTSP when null

    private static final Media instance = new Media();

    /** Package-private accessor for the singleton. */
    static Media getInstance()
    {
        return instance;
    }

    /** Toggles the camera flash on the active capture session. */
    public void toggleFlash()
    {
        checkSessionIsNull();
        session.toggleFlash();
    }

    /**
     * Reads the session's current video quality and maps it back onto a
     * {@link StreamVideoQuality} value via its "WxH, Nfps, B Kbps" string form.
     */
    public StreamVideoQuality getStreamQuality()
    {
        checkSessionIsNull();
        VideoQuality videoQuality = session.getVideoTrack().getVideoQuality();
        int width = videoQuality.resX;
        int height = videoQuality.resY;
        int framerate = videoQuality.framerate;
        int bitrate = videoQuality.bitrate;
        StreamVideoQuality streamQuality = StreamVideoQuality.getFromString( width + "x" + height + ", " + framerate + "fps, " + bitrate
            + " Kbps" );
        return streamQuality;
    }

    /** Applies the given quality to the session; silently ignores a null argument. */
    public void setVideoQuality( StreamVideoQuality streamQuality )
    {
        checkSessionIsNull();
        if( streamQuality == null )
        {
            return;
        }
        VideoQuality videoQuality = convertVideoQuality( streamQuality );
        session.setVideoQuality( videoQuality );
    }

    /**
     * Parses a StreamVideoQuality enum string ("WxH .. fps .. Kbps") into a VideoQuality.
     * NOTE(review): the matcher.find() result is ignored — a non-matching enum string
    * would make group(..) throw IllegalStateException; confirm all enum values match.
     */
    private VideoQuality convertVideoQuality( StreamVideoQuality streamQuality )
    {
        Pattern pattern = Pattern.compile( "(\\d+)x(\\d+)\\D+(\\d+)\\D+(\\d+)" );
        Matcher matcher = pattern.matcher( streamQuality.getValue() );
        matcher.find();
        int width = Integer.parseInt( matcher.group( 1 ) );
        int height = Integer.parseInt( matcher.group( 2 ) );
        int framerate = Integer.parseInt( matcher.group( 3 ) );
        // Enum string carries Kbps; VideoQuality expects the raw bitrate, hence * 1000.
        int bitrate = Integer.parseInt( matcher.group( 4 ) ) * 1000;
        VideoQuality videoQuality = new VideoQuality( width, height, framerate, bitrate );
        return videoQuality;
    }

    /** Applies audio quality; units presumably Hz / bits-per-second — confirm against AudioQuality. */
    public void setAudioQuality( int sampleRate, int bitRate )
    {
        checkSessionIsNull();
        session.setAudioQuality( new AudioQuality( sampleRate, bitRate ) );
    }

    /** Switches between front and back camera on the capture session. */
    public void switchCamera()
    {
        checkSessionIsNull();
        session.switchCamera();
    }

    /** Starts the camera preview (does not start streaming). */
    public void startPreview()
    {
        checkSessionIsNull();
        session.startPreview();
    }

    /** Stops the camera preview. */
    public void stopPreview()
    {
        checkSessionIsNull();
        session.stopPreview();
    }

    /** Stops the outgoing RTSP stream without releasing the client. */
    public void stopClientStream()
    {
        checkRtspClientIsNull();
        rtspClient.stopStream();
    }

    /** Releases the capture session's resources. */
    public void releaseSession()
    {
        checkSessionIsNull();
        session.release();
    }

    /** Releases the RTSP client's resources. */
    public void releaseClient()
    {
        checkRtspClientIsNull();
        rtspClient.release();
    }

    /**
     * Prepares the publishing pipeline (session + RTSP client).
     * <p>
     * default video quality to 176x144 20fps 500Kbps<br/>
     * default audio quality to 8000 sampleRate 32000 bitRate
     * </p>
     * NOTE(review): the context is cast to Session.Callback and RtspClient.Callback
     * (see getSession/getRtspClient) — a Context not implementing both throws
     * ClassCastException.
     */
    public void configureForPublish( Context context, SurfaceView mSurfaceView, DisplayOrientation orientation )
    {
        session = getSession( context, mSurfaceView, orientation.getValue() );
        rtspClient = getRtspClient( context, session );
    }

    /**
     * Variant of {@link #configureForPublish(Context, SurfaceView, DisplayOrientation)}
     * with an explicit session callback (the context still must implement
     * RtspClient.Callback — see getRtspClient).
     */
    public void configureForPublish( Context context, Session.Callback callback, SurfaceView mSurfaceView, DisplayOrientation orientation )
    {
        session = getSession( context, callback, mSurfaceView, orientation.getValue() );
        rtspClient = getRtspClient( context, session );
    }

    /**
     * StreamProtocolType sets to default value - RTSP
     *
     * @param mSurfaceHolder
     */
    public void configureForPlay( SurfaceHolder mSurfaceHolder )
    {
        configureForPlay( mSurfaceHolder, StreamProtocolType.RTSP );
    }

    /** Prepares the playback side: a MediaPlayer rendering to the holder, auto-starting once prepared. */
    public void configureForPlay( SurfaceHolder mSurfaceHolder, StreamProtocolType protocolType )
    {
        this.protocolType = protocolType;
        mediaPlayer = new MediaPlayer();
        mediaPlayer.setDisplay( mSurfaceHolder );
        mediaPlayer.setAudioStreamType( AudioManager.STREAM_MUSIC );
        mediaPlayer.setOnPreparedListener( new OnPreparedListener() {
            @Override
            public void onPrepared( MediaPlayer mp )
            {
                mp.start();
            }
        } );
    }

    /** Toggles publishing of a live stream that is also recorded server-side. */
    public void publishRecordOrStop( String tube, String streamName )
    {
        publishStreamOrStop( tube, streamName, StreamType.LIVE_RECORDING );
    }

    /** Toggles publishing of a live-only stream. */
    public void publishLiveOrStop( String tube, String streamName )
    {
        publishStreamOrStop( tube, streamName, StreamType.LIVE );
    }

    /**
     * Toggles playback of a live stream.
     * NOTE(review): passes StreamType.RECORDING, which playStreamOrStop maps to the
     * "playLive" operation and the live Wowza app — the StreamType naming looks
     * inverted; confirm the enum semantics.
     */
    public void playLiveOrStop( String tube, String streamName ) throws IllegalArgumentException, SecurityException,
        IllegalStateException, IOException
    {
        playStreamOrStop( tube, streamName, StreamType.RECORDING );
    }

    /** Toggles playback of a previously recorded (VOD) stream. */
    public void playRecordOrStop( String tube, String streamName ) throws IllegalArgumentException, SecurityException,
        IllegalStateException, IOException
    {
        playStreamOrStop( tube, streamName, StreamType.AVAILABLE );
    }

    /**
     * Normalizes tube/stream names (empty -> "default"; dots in the stream name become
     * '_' since it ends up as a server-side file name), builds the connect query string
     * and toggles the RTSP stream.
     */
    private void publishStreamOrStop( String tube, String streamName, StreamType streamType )
    {
        checkSessionIsNull();
        checkRtspClientIsNull();
        // Playback and publishing share the device; stop any playback in progress.
        if( mediaPlayer != null )
        {
            mediaPlayer.reset();
        }
        if( streamName == null || streamName.isEmpty() )
        {
            streamName = "default";
        }
        else
        {
            streamName = streamName.trim().replace( '.', '_' );
        }
        if( tube == null || tube.isEmpty() )
        {
            tube = "default";
        }
        else
        {
            tube = tube.trim();
        }
        String operationType = getOperationType( streamType );
        String params = getConnectParams( tube, operationType, streamName );
        startOrStopStream( rtspClient, streamName, params );
    }

    /** @throws BackendlessException when configureForPlay(..) has not been invoked. */
    private void checkPlayerIsNull()
    {
        if( mediaPlayer == null )
        {
            throw new BackendlessException( "Player client is null. Method configure( .. ) must be invoked" );
        }
    }

    /** @throws BackendlessException when configureForPublish(..) has not been invoked. */
    private void checkSessionIsNull()
    {
        if( session == null )
        {
            throw new BackendlessException( "Session client is null. Method configure( .. ) must be invoked" );
        }
    }

    /** @throws BackendlessException when configureForPublish(..) has not been invoked. */
    private void checkRtspClientIsNull()
    {
        if( rtspClient == null )
        {
            throw new BackendlessException( "Streaming client is null. Method configure( .. ) must be invoked" );
        }
    }

    /** Maps the publish stream type to the server-side operation name. */
    private String getOperationType( StreamType streamType )
    {
        return ( streamType == StreamType.LIVE ) ? "publishLive" : "publishRecorded";
    }

    /**
     * Toggles playback: stops and resets when already playing; otherwise builds the
     * stream URL (protocol + Wowza host/app + stream path + connect params) and starts
     * the MediaPlayer. RECORDING selects the live app / "playLive" operation, anything
     * else the VOD app / "playRecorded" — see the note on playLiveOrStop.
     */
    private void playStreamOrStop( String tube, String streamName, StreamType streamType ) throws IllegalArgumentException,
        SecurityException, IllegalStateException, IOException
    {
        checkPlayerIsNull();
        if( streamName == null || streamName.isEmpty() )
        {
            streamName = "default";
        }
        else
        {
            streamName = streamName.trim().replace( '.', '_' );
        }
        if( tube == null || tube.isEmpty() )
        {
            tube = "default";
        }
        else
        {
            tube = tube.trim();
        }
        if( mediaPlayer.isPlaying() )
        {
            mediaPlayer.stop();
            mediaPlayer.reset();
        }
        else
        {
            // Free the camera preview before playback starts.
            if( session != null )
            {
                session.stopPreview();
            }
            mediaPlayer.reset();
            if( protocolType == null )
            {
                protocolType = StreamProtocolType.RTSP;
            }
            String protocol = getProtocol( protocolType );
            String operationType = ( streamType == StreamType.RECORDING ) ? "playLive" : "playRecorded";
            String wowzaAddress = WOWZA_SERVER_IP + ":" + WOWZA_SERVER_PORT + "/"
                + ( ( streamType == StreamType.RECORDING ) ? WOWZA_SERVER_LIVE_APP_NAME : WOWZA_SERVER_VOD_APP_NAME ) + "/_definst_/";
            String params = getConnectParams( tube, operationType, streamName );
            String streamPath = getStreamName( streamName, protocolType );
            String url = protocol + wowzaAddress + streamPath + params;
            mediaPlayer.setDataSource( url );
            mediaPlayer.prepare();
            mediaPlayer.start();
        }
    }

    /** Builds the server-relative stream path; HLS additionally needs the playlist suffix. */
    private String getStreamName( String fileName, StreamProtocolType protocol )
    {
        String subDir = Backendless.getApplicationId().toLowerCase() + MEDIA_FILES_LOCATION;
        String hlsAdditionalParameter = ( protocol == StreamProtocolType.HLS ) ? HLS_PLAYLIST_CONSTANT : "";
        return subDir + fileName + hlsAdditionalParameter;
    }

    /** Returns the URL scheme prefix; only RTSP and HLS are supported. */
    private String getProtocol( StreamProtocolType streamProtocolType )
    {
        if( streamProtocolType.equals( StreamProtocolType.RTSP ) )
        {
            return RTSP_PROTOCOL;
        }
        if( streamProtocolType.equals( StreamProtocolType.HLS ) )
        {
            return HLS_PROTOCOL;
        }
        throw new BackendlessException( "Backendless Android SDK not supported protocol type '" + streamProtocolType + "'" );
    }

    /** Builds an AAC/H.264 capture session; the context itself is used as the session callback. */
    private Session getSession( Context context, SurfaceView mSurfaceView, int orientation )
    {
        Session mSession = SessionBuilder.getInstance().setContext( context ).setAudioEncoder( SessionBuilder.AUDIO_AAC )
            .setVideoEncoder( SessionBuilder.VIDEO_H264 ).setSurfaceView( mSurfaceView ).setPreviewOrientation( orientation )
            .setCallback( (Session.Callback) context ).build();
        return mSession;
    }

    /** Builds an AAC/H.264 capture session with an explicit callback. */
    private Session getSession( Context context, Session.Callback callback, SurfaceView mSurfaceView, int orientation )
    {
        Session mSession = SessionBuilder.getInstance().setContext( context ).setAudioEncoder( SessionBuilder.AUDIO_AAC )
            .setVideoEncoder( SessionBuilder.VIDEO_H264 ).setSurfaceView( mSurfaceView ).setPreviewOrientation( orientation )
            .setCallback( callback ).build();
        return mSession;
    }

    /**
     * Builds the connect query string (application-id, version, identity, tube,
     * operationType, streamName). Identity is the logged-in user's token, falling
     * back to the "user-token" request header when no user is logged in.
     */
    private String getConnectParams( String tube, String operationType, String streamName )
    {
        String paramsToSend;
        BackendlessUser currentUser = Backendless.UserService.CurrentUser();
        Object identity = currentUser != null ? currentUser.getProperty( "user-token" ) : null;
        HashMap<String, String> map = new HashMap<String, String>();
        map.putAll( HeadersManager.getInstance().getHeaders() );
        map.put( "identity", identity != null ? identity.toString() : map.get( "user-token" ) );
        paramsToSend = "?application-id=" + map.get( "application-id" ) + "&version=" + Backendless.getVersion() + "&identity="
            + map.get( "identity" ) + "&tube=" + tube + "&operationType=" + operationType + "&streamName=" + streamName;
        return paramsToSend;
    }

    // Connects/disconnects to the RTSP server and starts/stops the stream
    private void startOrStopStream( RtspClient rtspClient, String streamName, String params )
    {
        if( !rtspClient.isStreaming() )
        {
            rtspClient.setServerAddress( WOWZA_SERVER_IP, WOWZA_SERVER_PORT );
            rtspClient.setStreamPath( "/" + WOWZA_SERVER_LIVE_APP_NAME + "/" + streamName + params );
            rtspClient.startStream();
        }
        else
        {
            // Stops the stream and disconnects from the RTSP server
            rtspClient.stopStream();
        }
    }

    /** Lazily configures the RTSP client; the context must implement RtspClient.Callback. */
    private RtspClient getRtspClient( Context context, Session mSession )
    {
        // Configures the RTSP client
        if( rtspClient == null )
        {
            rtspClient = new RtspClient();
            rtspClient.setSession( mSession );
            rtspClient.setCallback( (RtspClient.Callback) context );
        }
        return rtspClient;
    }

    public StreamProtocolType getProtocolType()
    {
        return protocolType;
    }

    public void setProtocolType( StreamProtocolType protocolType )
    {
        this.protocolType = protocolType;
    }
}
| src/com/backendless/Media.java | /*
* ********************************************************************************************************************
* <p/>
* BACKENDLESS.COM CONFIDENTIAL
* <p/>
* ********************************************************************************************************************
* <p/>
* Copyright 2012 BACKENDLESS.COM. All Rights Reserved.
* <p/>
* NOTICE: All information contained herein is, and remains the property of Backendless.com and its suppliers,
* if any. The intellectual and technical concepts contained herein are proprietary to Backendless.com and its
* suppliers and may be covered by U.S. and Foreign Patents, patents in process, and are protected by trade secret
* or copyright law. Dissemination of this information or reproduction of this material is strictly forbidden
* unless prior written permission is obtained from Backendless.com.
* <p/>
* ********************************************************************************************************************
*/
package com.backendless;
import java.io.IOException;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnPreparedListener;
import android.view.SurfaceHolder;
import com.backendless.exceptions.BackendlessException;
import com.backendless.media.DisplayOrientation;
import com.backendless.media.Session;
import com.backendless.media.SessionBuilder;
import com.backendless.media.StreamProtocolType;
import com.backendless.media.StreamVideoQuality;
import com.backendless.media.StreamType;
import com.backendless.media.audio.AudioQuality;
import com.backendless.media.gl.SurfaceView;
import com.backendless.media.rtsp.RtspClient;
import com.backendless.media.video.VideoQuality;
/**
 * Singleton facade over Backendless media streaming on Android: publishes the device
 * camera/microphone over RTSP ({@link Session} + {@link RtspClient}) and plays live or
 * recorded streams back through an Android {@link MediaPlayer}.
 * <p>
 * configureForPublish(..) / configureForPlay(..) must be invoked first; other methods
 * guard against a missing session/client/player and throw {@link BackendlessException}.
 */
public final class Media
{
    // FIXME(review): "( true ) ? .." is a hard-coded debug switch; the production host
    // "media.backendless.com" branch is currently unreachable.
    private final static String WOWZA_SERVER_IP = ( true ) ? "10.0.1.48" : "media.backendless.com";
    // Wowza application names: live publishing/playback vs. video-on-demand playback.
    private final static String WOWZA_SERVER_LIVE_APP_NAME = "mediaAppLive";
    private final static String WOWZA_SERVER_VOD_APP_NAME = "mediaAppVod";
    private final static Integer WOWZA_SERVER_PORT = 1935;
    private final static String RTSP_PROTOCOL = StreamProtocolType.RTSP.getValue();
    private final static String HLS_PROTOCOL = StreamProtocolType.HLS.getValue();
    // Server-side layout: <appId>/files/media/<streamName>[/playlist.m3u8 for HLS].
    private static final String MEDIA_FILES_LOCATION = "/files/media/";
    private static final String HLS_PLAYLIST_CONSTANT = "/playlist.m3u8";

    private RtspClient rtspClient;            // publishing client; set by configureForPublish(..)
    private Session session;                  // capture session; set by configureForPublish(..)
    private MediaPlayer mediaPlayer;          // playback player; set by configureForPlay(..)
    private StreamProtocolType protocolType;  // playback protocol; treated as RTSP when null

    private static final Media instance = new Media();

    /** Package-private accessor for the singleton. */
    static Media getInstance()
    {
        return instance;
    }

    /** Toggles the camera flash on the active capture session. */
    public void toggleFlash()
    {
        checkSessionIsNull();
        session.toggleFlash();
    }

    /**
     * Reads the session's current video quality and maps it back onto a
     * {@link StreamVideoQuality} value via its string form.
     */
    public StreamVideoQuality getStreamQuality()
    {
        checkSessionIsNull();
        // enum string format: "176x144, 30 fps, 170 Kbps"
        VideoQuality videoQuality = session.getVideoTrack().getVideoQuality();
        int width = videoQuality.resX;
        int height = videoQuality.resY;
        int framerate = videoQuality.framerate;
        int bitrate = videoQuality.bitrate;
        StreamVideoQuality streamQuality = StreamVideoQuality.getFromString( width + "x" + height + ", " + framerate + "fps, " + bitrate
            + " Kbps" );
        return streamQuality;
    }

    /** Applies the given quality to the session; silently ignores a null argument. */
    public void setVideoQuality( StreamVideoQuality streamQuality )
    {
        checkSessionIsNull();
        if( streamQuality == null )
        {
            return;
        }
        VideoQuality videoQuality = convertVideoQuality( streamQuality );
        session.setVideoQuality( videoQuality );
    }

    /**
     * Parses a StreamVideoQuality enum string ("WxH .. fps .. Kbps") into a VideoQuality.
     * NOTE(review): the matcher.find() result is ignored — a non-matching enum string
     * would make group(..) throw IllegalStateException; confirm all enum values match.
     */
    private VideoQuality convertVideoQuality( StreamVideoQuality streamQuality )
    {
        Pattern pattern = Pattern.compile( "(\\d+)x(\\d+)\\D+(\\d+)\\D+(\\d+)" );
        Matcher matcher = pattern.matcher( streamQuality.getValue() );
        matcher.find();
        int width = Integer.parseInt( matcher.group( 1 ) );
        int height = Integer.parseInt( matcher.group( 2 ) );
        int framerate = Integer.parseInt( matcher.group( 3 ) );
        // Enum string carries Kbps; VideoQuality expects the raw bitrate, hence * 1000.
        int bitrate = Integer.parseInt( matcher.group( 4 ) ) * 1000;
        VideoQuality videoQuality = new VideoQuality( width, height, framerate, bitrate );
        return videoQuality;
    }

    /** Applies audio quality; units presumably Hz / bits-per-second — confirm against AudioQuality. */
    public void setAudioQuality( int sampleRate, int bitRate )
    {
        checkSessionIsNull();
        session.setAudioQuality( new AudioQuality( sampleRate, bitRate ) );
    }

    /** Switches between front and back camera on the capture session. */
    public void switchCamera()
    {
        checkSessionIsNull();
        session.switchCamera();
    }

    /** Starts the camera preview (does not start streaming). */
    public void startPreview()
    {
        checkSessionIsNull();
        session.startPreview();
    }

    /** Stops the camera preview. */
    public void stopPreview()
    {
        checkSessionIsNull();
        session.stopPreview();
    }

    /** Stops the outgoing RTSP stream without releasing the client. */
    public void stopClientStream()
    {
        checkRtspClientIsNull();
        rtspClient.stopStream();
    }

    /** Releases the capture session's resources. */
    public void releaseSession()
    {
        checkSessionIsNull();
        session.release();
    }

    /** Releases the RTSP client's resources. */
    public void releaseClient()
    {
        checkRtspClientIsNull();
        rtspClient.release();
    }

    /**
     * Prepares the publishing pipeline (session + RTSP client).
     * <p>
     * default video quality to 176x144 20fps 500Kbps<br/>
     * default audio quality to 8000 sampleRate 32000 bitRate
     * </p>
     * NOTE(review): the context is cast to Session.Callback and RtspClient.Callback
     * (see getSession/getRtspClient) — a Context not implementing both throws
     * ClassCastException.
     */
    public void configureForPublish( Context context, SurfaceView mSurfaceView, DisplayOrientation orientation )
    {
        session = getSession( context, mSurfaceView, orientation.getValue() );
        rtspClient = getRtspClient( context, session );
    }

    /**
     * StreamProtocolType sets to default value - RTSP
     *
     * @param mSurfaceHolder
     */
    public void configureForPlay( SurfaceHolder mSurfaceHolder )
    {
        configureForPlay( mSurfaceHolder, StreamProtocolType.RTSP );
    }

    /** Prepares the playback side: a MediaPlayer rendering to the holder, auto-starting once prepared. */
    public void configureForPlay( SurfaceHolder mSurfaceHolder, StreamProtocolType protocolType )
    {
        this.protocolType = protocolType;
        mediaPlayer = new MediaPlayer();
        mediaPlayer.setDisplay( mSurfaceHolder );
        mediaPlayer.setAudioStreamType( AudioManager.STREAM_MUSIC );
        mediaPlayer.setOnPreparedListener( new OnPreparedListener() {
            @Override
            public void onPrepared( MediaPlayer mp )
            {
                mp.start();
            }
        } );
    }

    /** Toggles publishing of a live stream that is also recorded server-side. */
    public void publishRecordOrStop( String tube, String streamName )
    {
        publishStreamOrStop( tube, streamName, StreamType.LIVE_RECORDING );
    }

    /** Toggles publishing of a live-only stream. */
    public void publishLiveOrStop( String tube, String streamName )
    {
        publishStreamOrStop( tube, streamName, StreamType.LIVE );
    }

    /**
     * Toggles playback of a live stream.
     * NOTE(review): passes StreamType.RECORDING, which playStreamOrStop maps to the
     * "playLive" operation and the live Wowza app — the StreamType naming looks
     * inverted; confirm the enum semantics.
     */
    public void playLiveOrStop( String tube, String streamName ) throws IllegalArgumentException, SecurityException,
        IllegalStateException, IOException
    {
        playStreamOrStop( tube, streamName, StreamType.RECORDING );
    }

    /** Toggles playback of a previously recorded (VOD) stream. */
    public void playRecordOrStop( String tube, String streamName ) throws IllegalArgumentException, SecurityException,
        IllegalStateException, IOException
    {
        playStreamOrStop( tube, streamName, StreamType.AVAILABLE );
    }

    /**
     * Normalizes tube/stream names (empty -> "default"; dots in the stream name become
     * '_' since it ends up as a server-side file name), builds the connect query string
     * and toggles the RTSP stream.
     */
    private void publishStreamOrStop( String tube, String streamName, StreamType streamType )
    {
        checkSessionIsNull();
        checkRtspClientIsNull();
        // Playback and publishing share the device; stop any playback in progress.
        if( mediaPlayer != null )
        {
            mediaPlayer.reset();
        }
        if( streamName == null || streamName.isEmpty() )
        {
            streamName = "default";
        }
        else
        {
            streamName = streamName.trim().replace( '.', '_' );
        }
        if( tube == null || tube.isEmpty() )
        {
            tube = "default";
        }
        else
        {
            tube = tube.trim();
        }
        String operationType = getOperationType( streamType );
        String params = getConnectParams( tube, operationType, streamName );
        startOrStopStream( rtspClient, streamName, params );
    }

    /** @throws BackendlessException when configureForPlay(..) has not been invoked. */
    private void checkPlayerIsNull()
    {
        if( mediaPlayer == null )
        {
            throw new BackendlessException( "Player client is null. Method configure( .. ) must be invoked" );
        }
    }

    /** @throws BackendlessException when configureForPublish(..) has not been invoked. */
    private void checkSessionIsNull()
    {
        if( session == null )
        {
            throw new BackendlessException( "Session client is null. Method configure( .. ) must be invoked" );
        }
    }

    /** @throws BackendlessException when configureForPublish(..) has not been invoked. */
    private void checkRtspClientIsNull()
    {
        if( rtspClient == null )
        {
            throw new BackendlessException( "Streaming client is null. Method configure( .. ) must be invoked" );
        }
    }

    /** Maps the publish stream type to the server-side operation name. */
    private String getOperationType( StreamType streamType )
    {
        return ( streamType == StreamType.LIVE ) ? "publishLive" : "publishRecorded";
    }

    /**
     * Toggles playback: stops and resets when already playing; otherwise builds the
     * stream URL (protocol + Wowza host/app + stream path + connect params) and starts
     * the MediaPlayer. RECORDING selects the live app / "playLive" operation, anything
     * else the VOD app / "playRecorded" — see the note on playLiveOrStop.
     */
    private void playStreamOrStop( String tube, String streamName, StreamType streamType ) throws IllegalArgumentException,
        SecurityException, IllegalStateException, IOException
    {
        checkPlayerIsNull();
        if( streamName == null || streamName.isEmpty() )
        {
            streamName = "default";
        }
        else
        {
            streamName = streamName.trim().replace( '.', '_' );
        }
        if( tube == null || tube.isEmpty() )
        {
            tube = "default";
        }
        else
        {
            tube = tube.trim();
        }
        if( mediaPlayer.isPlaying() )
        {
            mediaPlayer.stop();
            mediaPlayer.reset();
        }
        else
        {
            // Free the camera preview before playback starts.
            if( session != null )
            {
                session.stopPreview();
            }
            mediaPlayer.reset();
            if( protocolType == null )
            {
                protocolType = StreamProtocolType.RTSP;
            }
            String protocol = getProtocol( protocolType );
            String operationType = ( streamType == StreamType.RECORDING ) ? "playLive" : "playRecorded";
            String wowzaAddress = WOWZA_SERVER_IP + ":" + WOWZA_SERVER_PORT + "/"
                + ( ( streamType == StreamType.RECORDING ) ? WOWZA_SERVER_LIVE_APP_NAME : WOWZA_SERVER_VOD_APP_NAME ) + "/_definst_/";
            String params = getConnectParams( tube, operationType, streamName );
            String streamPath = getStreamName( streamName, protocolType );
            String url = protocol + wowzaAddress + streamPath + params;
            mediaPlayer.setDataSource( url );
            mediaPlayer.prepare();
            mediaPlayer.start();
        }
    }

    /** Builds the server-relative stream path; HLS additionally needs the playlist suffix. */
    private String getStreamName( String fileName, StreamProtocolType protocol )
    {
        String subDir = Backendless.getApplicationId().toLowerCase() + MEDIA_FILES_LOCATION;
        String hlsAdditionalParameter = ( protocol == StreamProtocolType.HLS ) ? HLS_PLAYLIST_CONSTANT : "";
        return subDir + fileName + hlsAdditionalParameter;
    }

    /** Returns the URL scheme prefix; only RTSP and HLS are supported. */
    private String getProtocol( StreamProtocolType streamProtocolType )
    {
        if( streamProtocolType.equals( StreamProtocolType.RTSP ) )
        {
            return RTSP_PROTOCOL;
        }
        if( streamProtocolType.equals( StreamProtocolType.HLS ) )
        {
            return HLS_PROTOCOL;
        }
        throw new BackendlessException( "Backendless Android SDK not supported protocol type '" + streamProtocolType + "'" );
    }

    /** Builds an AAC/H.264 capture session; the context itself is used as the session callback. */
    private Session getSession( Context context, SurfaceView mSurfaceView, int orientation )
    {
        Session mSession = SessionBuilder.getInstance().setContext( context ).setAudioEncoder( SessionBuilder.AUDIO_AAC )
            .setVideoEncoder( SessionBuilder.VIDEO_H264 ).setSurfaceView( mSurfaceView ).setPreviewOrientation( orientation )
            .setCallback( (Session.Callback) context ).build();
        return mSession;
    }

    /**
     * Builds the connect query string (application-id, version, identity, tube,
     * operationType, streamName). Identity is the logged-in user's token, falling
     * back to the "user-token" request header when no user is logged in.
     */
    private String getConnectParams( String tube, String operationType, String streamName )
    {
        String paramsToSend;
        BackendlessUser currentUser = Backendless.UserService.CurrentUser();
        Object identity = currentUser != null ? currentUser.getProperty( "user-token" ) : null;
        HashMap<String, String> map = new HashMap<String, String>();
        map.putAll( HeadersManager.getInstance().getHeaders() );
        map.put( "identity", identity != null ? identity.toString() : map.get( "user-token" ) );
        paramsToSend = "?application-id=" + map.get( "application-id" ) + "&version=" + Backendless.getVersion() + "&identity="
            + map.get( "identity" ) + "&tube=" + tube + "&operationType=" + operationType + "&streamName=" + streamName;
        return paramsToSend;
    }

    // Connects/disconnects to the RTSP server and starts/stops the stream
    private void startOrStopStream( RtspClient rtspClient, String streamName, String params )
    {
        if( !rtspClient.isStreaming() )
        {
            rtspClient.setServerAddress( WOWZA_SERVER_IP, WOWZA_SERVER_PORT );
            rtspClient.setStreamPath( "/" + WOWZA_SERVER_LIVE_APP_NAME + "/" + streamName + params );
            rtspClient.startStream();
        }
        else
        {
            // Stops the stream and disconnects from the RTSP server
            rtspClient.stopStream();
        }
    }

    /** Lazily configures the RTSP client; the context must implement RtspClient.Callback. */
    private RtspClient getRtspClient( Context context, Session mSession )
    {
        // Configures the RTSP client
        if( rtspClient == null )
        {
            rtspClient = new RtspClient();
            rtspClient.setSession( mSession );
            rtspClient.setCallback( (RtspClient.Callback) context );
        }
        return rtspClient;
    }

    public StreamProtocolType getProtocolType()
    {
        return protocolType;
    }

    public void setProtocolType( StreamProtocolType protocolType )
    {
        this.protocolType = protocolType;
    }
}
| Add capability separate Activity and Session callback
remove old StramQuality | src/com/backendless/Media.java | Add capability separate Activity and Session callback remove old StramQuality |
|
Java | mit | 390babd1fc1d4ccf0d659adec7dac9b0a24798e1 | 0 | JPMoresmau/sqlg,pietermartin/sqlg,pietermartin/sqlg,pietermartin/sqlg,JPMoresmau/sqlg,JPMoresmau/sqlg,pietermartin/sqlg | package org.umlg.sqlg.strategy;
import com.tinkerpop.gremlin.process.Step;
import com.tinkerpop.gremlin.process.Traversal;
import com.tinkerpop.gremlin.process.TraversalStrategy;
import com.tinkerpop.gremlin.process.graph.step.filter.IntervalStep;
import com.tinkerpop.gremlin.process.graph.step.sideEffect.IdentityStep;
import com.tinkerpop.gremlin.process.util.EmptyStep;
import com.tinkerpop.gremlin.process.util.TraversalHelper;
import com.tinkerpop.gremlin.structure.Vertex;
import org.umlg.sqlg.process.graph.util.SqlgHasStep;
import org.umlg.sqlg.process.graph.util.SqlgVertexStep;
import org.umlg.sqlg.structure.SqlgGraphStep;
import java.util.HashSet;
import java.util.Set;
/**
* Date: 2014/07/12
* Time: 5:45 AM
*/
/**
 * Traversal strategy that folds has()/interval() steps immediately following a
 * {@link SqlgGraphStep} (or a vertex-returning {@link SqlgVertexStep}) into that step's
 * hasContainers, so the predicates can be evaluated by the backing SQL query instead of
 * in memory. Labeled steps and any non-filter step end the fold.
 */
public class SqlGGraphStepStrategy implements TraversalStrategy.NoDependencies {

    private static final SqlGGraphStepStrategy INSTANCE = new SqlGGraphStepStrategy();

    private SqlGGraphStepStrategy() {
    }

    public void apply(final Traversal traversal) {
        // Fold filters trailing the initial graph step; these are removed immediately.
        if (traversal.getSteps().get(0) instanceof SqlgGraphStep) {
            final SqlgGraphStep graphStep = (SqlgGraphStep) traversal.getSteps().get(0);
            Step step = graphStep.getNextStep();
            while (step != EmptyStep.instance() && !TraversalHelper.isLabeled(step)) {
                if (step instanceof SqlgHasStep) {
                    graphStep.hasContainers.add(((SqlgHasStep) step).getHasContainer());
                    TraversalHelper.removeStep(step, traversal);
                } else if (step instanceof IntervalStep) {
                    graphStep.hasContainers.add(((IntervalStep) step).startContainer);
                    graphStep.hasContainers.add(((IntervalStep) step).endContainer);
                    TraversalHelper.removeStep(step, traversal);
                } else if (!(step instanceof IdentityStep)) {
                    // Any other step ends the foldable prefix.
                    break;
                }
                step = step.getNextStep();
            }
        }
        //TODO do has on edges
        // Fold filters trailing vertex steps; removal is deferred until after the scan
        // so the step list is not mutated while being iterated.
        final Set<Step> foldedSteps = new HashSet<>();
        for (Object candidate : traversal.getSteps()) {
            if (!(candidate instanceof SqlgVertexStep)
                    || !Vertex.class.isAssignableFrom(((SqlgVertexStep) candidate).returnClass)) {
                continue;
            }
            final SqlgVertexStep vertexStep = (SqlgVertexStep) candidate;
            Step step = vertexStep.getNextStep();
            while (step != EmptyStep.instance() && !TraversalHelper.isLabeled(step)) {
                if (step instanceof SqlgHasStep) {
                    vertexStep.hasContainers.add(((SqlgHasStep) step).getHasContainer());
                    foldedSteps.add(step);
                } else if (step instanceof IntervalStep) {
                    vertexStep.hasContainers.add(((IntervalStep) step).startContainer);
                    vertexStep.hasContainers.add(((IntervalStep) step).endContainer);
                    foldedSteps.add(step);
                } else if (!(step instanceof IdentityStep)) {
                    break;
                }
                step = step.getNextStep();
            }
        }
        for (Step folded : foldedSteps) {
            TraversalHelper.removeStep(folded, traversal);
        }
    }

    public static SqlGGraphStepStrategy instance() {
        return INSTANCE;
    }
}
| sqlg-core/src/main/java/org/umlg/sqlg/strategy/SqlGGraphStepStrategy.java | package org.umlg.sqlg.strategy;
import com.tinkerpop.gremlin.process.Step;
import com.tinkerpop.gremlin.process.Traversal;
import com.tinkerpop.gremlin.process.TraversalStrategy;
import com.tinkerpop.gremlin.process.graph.step.filter.IntervalStep;
import com.tinkerpop.gremlin.process.graph.step.util.IdentityStep;
import com.tinkerpop.gremlin.process.util.EmptyStep;
import com.tinkerpop.gremlin.process.util.TraversalHelper;
import com.tinkerpop.gremlin.structure.Vertex;
import org.umlg.sqlg.process.graph.util.SqlgHasStep;
import org.umlg.sqlg.process.graph.util.SqlgVertexStep;
import org.umlg.sqlg.structure.SqlgGraphStep;
import org.umlg.sqlg.structure.SqlgVertex;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
* Date: 2014/07/12
* Time: 5:45 AM
*/
/**
 * Traversal strategy that folds has()/interval() steps immediately following a
 * {@link SqlgGraphStep} (or a vertex-returning {@link SqlgVertexStep}) into that step's
 * hasContainers, so the predicates can be evaluated by the backing SQL query instead of
 * in memory. Labeled steps and any non-filter step end the fold.
 */
public class SqlGGraphStepStrategy implements TraversalStrategy.NoDependencies {

    private static final SqlGGraphStepStrategy INSTANCE = new SqlGGraphStepStrategy();

    private SqlGGraphStepStrategy() {
    }

    public void apply(final Traversal traversal) {
        // Fold filters trailing the initial graph step; these are removed immediately.
        if (traversal.getSteps().get(0) instanceof SqlgGraphStep) {
            final SqlgGraphStep graphStep = (SqlgGraphStep) traversal.getSteps().get(0);
            Step step = graphStep.getNextStep();
            while (step != EmptyStep.instance() && !TraversalHelper.isLabeled(step)) {
                if (step instanceof SqlgHasStep) {
                    graphStep.hasContainers.add(((SqlgHasStep) step).getHasContainer());
                    TraversalHelper.removeStep(step, traversal);
                } else if (step instanceof IntervalStep) {
                    graphStep.hasContainers.add(((IntervalStep) step).startContainer);
                    graphStep.hasContainers.add(((IntervalStep) step).endContainer);
                    TraversalHelper.removeStep(step, traversal);
                } else if (!(step instanceof IdentityStep)) {
                    // Any other step ends the foldable prefix.
                    break;
                }
                step = step.getNextStep();
            }
        }
        //TODO do has on edges
        // Fold filters trailing vertex steps; removal is deferred until after the scan
        // so the step list is not mutated while being iterated.
        final Set<Step> foldedSteps = new HashSet<>();
        for (Object candidate : traversal.getSteps()) {
            if (!(candidate instanceof SqlgVertexStep)
                    || !Vertex.class.isAssignableFrom(((SqlgVertexStep) candidate).returnClass)) {
                continue;
            }
            final SqlgVertexStep vertexStep = (SqlgVertexStep) candidate;
            Step step = vertexStep.getNextStep();
            while (step != EmptyStep.instance() && !TraversalHelper.isLabeled(step)) {
                if (step instanceof SqlgHasStep) {
                    vertexStep.hasContainers.add(((SqlgHasStep) step).getHasContainer());
                    foldedSteps.add(step);
                } else if (step instanceof IntervalStep) {
                    vertexStep.hasContainers.add(((IntervalStep) step).startContainer);
                    vertexStep.hasContainers.add(((IntervalStep) step).endContainer);
                    foldedSteps.add(step);
                } else if (!(step instanceof IdentityStep)) {
                    break;
                }
                step = step.getNextStep();
            }
        }
        for (Step folded : foldedSteps) {
            TraversalHelper.removeStep(folded, traversal);
        }
    }

    public static SqlGGraphStepStrategy instance() {
        return INSTANCE;
    }
}
| make compile to tp3
| sqlg-core/src/main/java/org/umlg/sqlg/strategy/SqlGGraphStepStrategy.java | make compile to tp3 |
|
Java | mit | 87de4a0d8586e4a95e525f10c48e0db856a63f85 | 0 | GluuFederation/oxCore | /*
* oxCore is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2020, Gluu
*/
package org.gluu.service.external;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import org.gluu.model.SimpleCustomProperty;
import org.gluu.model.custom.script.CustomScriptType;
import org.gluu.model.custom.script.conf.CustomScriptConfiguration;
import org.gluu.model.custom.script.type.persistence.PersistenceType;
import org.gluu.persist.PersistenceEntryManager;
import org.gluu.persist.exception.extension.PersistenceExtension;
import org.gluu.service.custom.script.ExternalScriptService;
import org.gluu.service.external.context.PersistenceExternalContext;
/**
* Provides factory methods needed to create persistence extension
*
* @author Yuriy Movchan Date: 06/04/2020
*/
@ApplicationScoped
public class ExternalPersistenceExtensionService extends ExternalScriptService {

    private static final long serialVersionUID = 5466361778036208685L;

    @Inject
    private Instance<PersistenceEntryManager> persistenceEntryManagerInstance;

    @Inject
    private Instance<List<PersistenceEntryManager>> persistenceEntryManagerListInstance;

    public ExternalPersistenceExtensionService() {
        super(CustomScriptType.PERSISTENCE_EXTENSION);
    }

    /** Re-runs the after-create hook on every known entry manager when scripts are reloaded. */
    @Override
    protected void reloadExternal() {
        for (PersistenceEntryManager entryManager : persistenceEntryManagerInstance) {
            executePersistenceExtensionAfterCreate(null, entryManager);
        }
        for (List<PersistenceEntryManager> entryManagers : persistenceEntryManagerListInstance) {
            for (PersistenceEntryManager entryManager : entryManagers) {
                executePersistenceExtensionAfterCreate(null, entryManager);
            }
        }
    }

    /**
     * Fires the script's onAfterCreate hook for the given entry manager and installs the
     * script as its persistence extension. No-op when the script type is disabled.
     */
    public void executePersistenceExtensionAfterCreate(Properties connectionProperties, PersistenceEntryManager persistenceEntryManager) {
        if (!isEnabled()) {
            return;
        }
        PersistenceExternalContext context = new PersistenceExternalContext();
        context.setConnectionProperties(connectionProperties);
        context.setPersistenceEntryManager(persistenceEntryManager);
        executeExternalOnAfterCreateMethod(context);
        setPersistenceExtension(persistenceEntryManager);
    }

    /** Fires the script's onAfterDestroy hook for the given entry manager. No-op when disabled. */
    public void executePersistenceExtensionAfterDestroy(PersistenceEntryManager persistenceEntryManager) {
        if (!isEnabled()) {
            return;
        }
        PersistenceExternalContext context = new PersistenceExternalContext();
        context.setPersistenceEntryManager(persistenceEntryManager);
        executeExternalOnAfterDestroyMethod(context);
    }

    /**
     * Installs the default script as the manager's persistence extension, or clears it
     * (null) when the script type is disabled.
     */
    public void setPersistenceExtension(PersistenceEntryManager persistenceEntryManager) {
        PersistenceExtension persistenceExtension;
        if (isEnabled()) {
            persistenceExtension = (PersistenceExtension) this.defaultExternalCustomScript.getExternalType();
        } else {
            persistenceExtension = null;
        }
        persistenceEntryManager.setPersistenceExtension(persistenceExtension);
    }

    /** Runs onAfterCreate of the default external script. */
    public void executeExternalOnAfterCreateMethod(PersistenceExternalContext context) {
        executeExternalOnAfterCreateMethod(this.defaultExternalCustomScript, context);
    }

    /** Invokes the script's onAfterCreate; failures are logged and recorded on the script. */
    public void executeExternalOnAfterCreateMethod(CustomScriptConfiguration customScriptConfiguration, PersistenceExternalContext context) {
        try {
            log.debug("Executing python 'onAfterCreate' method");
            PersistenceType persistenceType = (PersistenceType) customScriptConfiguration.getExternalType();
            Map<String, SimpleCustomProperty> configurationAttributes = customScriptConfiguration.getConfigurationAttributes();
            persistenceType.onAfterCreate(context, configurationAttributes);
        } catch (Exception ex) {
            log.error(ex.getMessage(), ex);
            saveScriptError(customScriptConfiguration.getCustomScript(), ex);
        }
    }

    /** Runs onAfterDestroy of the default external script. */
    public void executeExternalOnAfterDestroyMethod(PersistenceExternalContext context) {
        executeExternalOnAfterDestroyMethod(this.defaultExternalCustomScript, context);
    }

    /** Invokes the script's onAfterDestroy; failures are logged and recorded on the script. */
    public void executeExternalOnAfterDestroyMethod(CustomScriptConfiguration customScriptConfiguration, PersistenceExternalContext context) {
        try {
            log.debug("Executing python 'onAfterDestroy' method");
            PersistenceType persistenceType = (PersistenceType) customScriptConfiguration.getExternalType();
            Map<String, SimpleCustomProperty> configurationAttributes = customScriptConfiguration.getConfigurationAttributes();
            persistenceType.onAfterDestroy(context, configurationAttributes);
        } catch (Exception ex) {
            log.error(ex.getMessage(), ex);
            saveScriptError(customScriptConfiguration.getCustomScript(), ex);
        }
    }
}
| oxService/src/main/java/org/gluu/service/external/ExternalPersistenceExtensionService.java | /*
* oxCore is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2020, Gluu
*/
package org.gluu.service.external;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import org.gluu.model.SimpleCustomProperty;
import org.gluu.model.custom.script.CustomScriptType;
import org.gluu.model.custom.script.conf.CustomScriptConfiguration;
import org.gluu.model.custom.script.type.persistence.PersistenceType;
import org.gluu.persist.PersistenceEntryManager;
import org.gluu.persist.exception.extension.PersistenceExtension;
import org.gluu.service.custom.script.ExternalScriptService;
import org.gluu.service.external.context.PersistenceExternalContext;
/**
* Provides factory methods needed to create persistence extension
*
* @author Yuriy Movchan Date: 06/04/2020
*/
@ApplicationScoped
public class ExternalPersistenceExtensionService extends ExternalScriptService {
    private static final long serialVersionUID = 5466361778036208685L;
    // All individually published PersistenceEntryManager beans.
    @Inject
    private Instance<PersistenceEntryManager> persistenceEntryManagerInstance;
    // All published lists of PersistenceEntryManager beans.
    @Inject
    private Instance<List<PersistenceEntryManager>> persistenceEntryManagerListInstance;
    public ExternalPersistenceExtensionService() {
        super(CustomScriptType.PERSISTENCE_EXTENSION);
    }
    @Override
    protected void reloadExternal() {
        // Resolve the extension from the default script only when scripts are
        // enabled; otherwise null is propagated, clearing any previous extension.
        PersistenceExtension persistenceExtension = null;
        if (isEnabled()) {
            persistenceExtension = (PersistenceExtension) this.defaultExternalCustomScript.getExternalType();
        }
        // Push the (possibly null) extension into every known entry manager.
        for (Iterator<PersistenceEntryManager> it = persistenceEntryManagerInstance.iterator(); it.hasNext();) {
            PersistenceEntryManager persistenceEntryManager = it.next();
            persistenceEntryManager.setPersistenceExtension(persistenceExtension);
        }
        // Also cover entry managers that are published as lists.
        for (Iterator<List<PersistenceEntryManager>> it = persistenceEntryManagerListInstance.iterator(); it.hasNext();) {
            List<PersistenceEntryManager> persistenceEntryManagerList = it.next();
            for (PersistenceEntryManager persistenceEntryManager: persistenceEntryManagerList) {
                persistenceEntryManager.setPersistenceExtension(persistenceExtension);
            }
        }
    }
    /**
     * Installs the persistence extension from the default custom script into the
     * given entry manager; installs null when scripts are disabled.
     */
    public void setPersistenceExtension(PersistenceEntryManager persistenceEntryManager) {
        PersistenceExtension persistenceExtension = null;
        if (isEnabled()) {
            persistenceExtension = (PersistenceExtension) this.defaultExternalCustomScript.getExternalType();
        }
        persistenceEntryManager.setPersistenceExtension(persistenceExtension);
    }
    /** Invokes 'onAfterCreate' using the default custom script configuration. */
    public void executeExternalOnAfterCreateMethod(PersistenceExternalContext context) {
        executeExternalOnAfterCreateMethod(this.defaultExternalCustomScript, context);
    }
    /**
     * Invokes the script's 'onAfterCreate' method; failures are logged and
     * recorded against the script rather than propagated.
     */
    public void executeExternalOnAfterCreateMethod(CustomScriptConfiguration customScriptConfiguration, PersistenceExternalContext context) {
        try {
            log.debug("Executing python 'onAfterCreate' method");
            PersistenceType persistenceType = (PersistenceType) customScriptConfiguration.getExternalType();
            Map<String, SimpleCustomProperty> configurationAttributes = customScriptConfiguration.getConfigurationAttributes();
            persistenceType.onAfterCreate(context, configurationAttributes);
        } catch (Exception ex) {
            log.error(ex.getMessage(), ex);
            saveScriptError(customScriptConfiguration.getCustomScript(), ex);
        }
    }
    /** Invokes 'onAfterDestroy' using the default custom script configuration. */
    public void executeExternalOnAfterDestroyMethod(PersistenceExternalContext context) {
        executeExternalOnAfterDestroyMethod(this.defaultExternalCustomScript, context);
    }
    /**
     * Invokes the script's 'onAfterDestroy' method; failures are logged and
     * recorded against the script rather than propagated.
     */
    public void executeExternalOnAfterDestroyMethod(CustomScriptConfiguration customScriptConfiguration, PersistenceExternalContext context) {
        try {
            log.debug("Executing python 'onAfterDestroy' method");
            PersistenceType persistenceType = (PersistenceType) customScriptConfiguration.getExternalType();
            Map<String, SimpleCustomProperty> configurationAttributes = customScriptConfiguration.getConfigurationAttributes();
            persistenceType.onAfterDestroy(context, configurationAttributes);
        } catch (Exception ex) {
            log.error(ex.getMessage(), ex);
            saveScriptError(customScriptConfiguration.getCustomScript(), ex);
        }
    }
}
| Allow to reuse after create/destroy methods in persistence script | oxService/src/main/java/org/gluu/service/external/ExternalPersistenceExtensionService.java | Allow to reuse after create/destroy methods in persistence script |
|
Java | mit | a4717e67d8260556b014aa9de36c293489d8e758 | 0 | peichhorn/tinyaudioplayer | /*
* Copyright 2011 Philipp Eichhorn.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package de.fips.plugin.tinyaudioplayer.audio;
import static de.fips.plugin.tinyaudioplayer.audio.PlaylistItemTag.playlistItemTag;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;
import lombok.NoArgsConstructor;
import lombok.VisibleForTesting;
import org.tritonus.share.sampled.file.TAudioFileFormat;
/**
 * Factory that inspects an audio file and extracts its meta data (title,
 * artist, bit rate, play time, ...) into a {@link PlaylistItemTag}.
 * Format-specific extraction is delegated to the {@link IPlaylistItemTagBuilder}
 * registered for the file's audio type ("mp3", "ogg", "flac").
 */
@NoArgsConstructor()
public class PlaylistItemTagFactory {
    /** Registered tag builders, keyed by lower-case audio file type. */
    private final static Map<String, IPlaylistItemTagBuilder> builder = new HashMap<String, IPlaylistItemTagBuilder>();
    static {
        builder.put("mp3", new MpegPlaylistItemTagBuilder());
        builder.put("ogg", new OggPlaylistItemTagBuilder());
        builder.put("flac", new FlacPlaylistItemTagBuilder());
    }

    /**
     * Creates a {@link PlaylistItemTag} for the audio file at the given URI.
     *
     * @param uri location of the audio file
     * @return the extracted tag, or {@code null} if the file type is
     *         unsupported or the file could not be read
     */
    public PlaylistItemTag formURI(final URI uri) {
        PlaylistItemTag tag = null;
        try {
            AudioFileFormat aff = AudioSystem.getAudioFileFormat(new File(uri));
            final String type = aff.getType().toString().toLowerCase();
            final IPlaylistItemTagBuilder tagBuilder = builder.get(type);
            if (tagBuilder != null) {
                tag = tagBuilder.fromAudioFileFormat(aff);
            }
        } catch (IllegalArgumentException ignore) {
            // File(URI) preconditions did not hold
        } catch (UnsupportedAudioFileException ignore) {
            // unknown format: fall through and return null
        } catch (IOException ignore) {
            // unreadable file: fall through and return null
        }
        return tag;
    }

    /** Extracts tag data from the Vorbis-comment properties of an Ogg stream. */
    @VisibleForTesting static class OggPlaylistItemTagBuilder implements IPlaylistItemTagBuilder {
        @Override
        public PlaylistItemTag fromAudioFileFormat(final AudioFileFormat aff) {
            final PlaylistItemTag.$OptionalDef builder = playlistItemTag();
            // Only TAudioFileFormat (tritonus) exposes the property map.
            if (aff instanceof TAudioFileFormat) {
                final Map<?, ?> props = ((TAudioFileFormat) aff).properties();
                Object currentValue = props.get("ogg.channels");
                if (currentValue != null) {
                    builder.channels((Integer) currentValue);
                }
                currentValue = props.get("ogg.frequency.hz");
                if (currentValue != null) {
                    builder.samplingRate((Integer) currentValue);
                }
                currentValue = props.get("ogg.bitrate.nominal.bps");
                if (currentValue != null) {
                    builder.bitRate((Integer) currentValue);
                }
                currentValue = props.get("title");
                if (currentValue != null) {
                    builder.title((String) currentValue);
                }
                currentValue = props.get("author");
                if (currentValue != null) {
                    builder.artist((String) currentValue);
                }
                currentValue = props.get("album");
                if (currentValue != null) {
                    builder.album((String) currentValue);
                }
                currentValue = props.get("year");
                if (currentValue != null) {
                    builder.year((String) currentValue);
                }
                currentValue = props.get("duration");
                if (currentValue != null) {
                    // property is in microseconds; tag stores seconds
                    builder.playTime(((Long) currentValue) / 1000000L);
                }
                currentValue = props.get("ogg.comment.genre");
                if (currentValue != null) {
                    builder.genre((String) currentValue);
                }
                currentValue = props.get("ogg.comment.track");
                if (currentValue != null) {
                    try {
                        // Integer.valueOf instead of the deprecated new Integer(String)
                        builder.track(Integer.valueOf((String) currentValue));
                    } catch (NumberFormatException ignore) {
                        // non-numeric track number: leave unset
                    }
                }
            }
            return builder.build();
        }
    }

    /** Extracts tag data from the ID3 properties of an MPEG (mp3) stream. */
    @VisibleForTesting static class MpegPlaylistItemTagBuilder implements IPlaylistItemTagBuilder {
        @Override
        public PlaylistItemTag fromAudioFileFormat(final AudioFileFormat aff) {
            final PlaylistItemTag.$OptionalDef builder = playlistItemTag();
            // Only TAudioFileFormat (tritonus) exposes the property map.
            if (aff instanceof TAudioFileFormat) {
                final Map<?, ?> props = ((TAudioFileFormat) aff).properties();
                Object currentValue = props.get("mp3.channels");
                if (currentValue != null) {
                    builder.channels((Integer) currentValue);
                }
                currentValue = props.get("mp3.frequency.hz");
                if (currentValue != null) {
                    builder.samplingRate((Integer) currentValue);
                }
                currentValue = props.get("mp3.bitrate.nominal.bps");
                if (currentValue != null) {
                    builder.bitRate((Integer) currentValue);
                }
                currentValue = props.get("title");
                if (currentValue != null) {
                    builder.title((String) currentValue);
                }
                currentValue = props.get("author");
                if (currentValue != null) {
                    builder.artist((String) currentValue);
                }
                currentValue = props.get("album");
                if (currentValue != null) {
                    builder.album((String) currentValue);
                }
                currentValue = props.get("year");
                if (currentValue != null) {
                    builder.year((String) currentValue);
                }
                currentValue = props.get("duration");
                if (currentValue != null) {
                    // property is in microseconds; tag stores seconds
                    builder.playTime(((Long) currentValue) / 1000000L);
                }
                currentValue = props.get("mp3.id3tag.genre");
                if (currentValue != null) {
                    // ID3 genres may be numeric references like "(17)"; decode each
                    // parenthesized token via the ID3TAG_GENRES lookup table.
                    final StringBuilder genre = new StringBuilder();
                    final Pattern pattern = Pattern.compile("[^()]+");
                    final Matcher matcher = pattern.matcher((String) currentValue);
                    while (matcher.find()) {
                        final String id3tagGenre = matcher.group();
                        try {
                            // parseInt avoids the needless box/unbox of Integer.valueOf
                            int index = Integer.parseInt(id3tagGenre);
                            genre.append(index < ID3TAG_GENRES.length ? ID3TAG_GENRES[index] : "Custom");
                        } catch (NumberFormatException e) {
                            // not a numeric reference: the token is the genre text itself
                            genre.append(id3tagGenre);
                        }
                    }
                    builder.genre(genre.toString());
                }
                currentValue = props.get("mp3.id3tag.track");
                if (currentValue != null) {
                    try {
                        builder.track(Integer.valueOf((String) currentValue));
                    } catch (NumberFormatException ignore) {
                        // non-numeric track number: leave unset
                    }
                }
            }
            return builder.build();
        }
    }

    /** Builds a minimal tag (channels/rate/bits) from the raw FLAC audio format. */
    private static class FlacPlaylistItemTagBuilder implements IPlaylistItemTagBuilder {
        @Override
        public PlaylistItemTag fromAudioFileFormat(final AudioFileFormat aff) {
            final AudioFormat af = aff.getFormat();
            return playlistItemTag() //
                    .channels(af.getChannels()) //
                    .samplingRate((int) af.getSampleRate()) //
                    .bitRate(af.getSampleSizeInBits()).build();
        }
    }

    /** Strategy for converting an {@link AudioFileFormat} into a tag. */
    private static interface IPlaylistItemTagBuilder {
        public PlaylistItemTag fromAudioFileFormat(final AudioFileFormat aff);
    }

    // http://www.id3.org/id3v2.3.0#head-129376727ebe5309c1de1888987d070288d7c7e7
    private final static String[] ID3TAG_GENRES = new String[] { "Blues", "Classic Rock", "Country", "Dance", "Disco", "Funk", "Grunge", "Hip-Hop", "Jazz", "Metal", "New Age",
            "Oldies", "Other", "Pop", "R&B", "Rap", "Reggae", "Rock", "Techno", "Industrial", "Alternative", "Ska", "Death Metal", "Pranks", "Soundtrack", "Euro-Techno",
            "Ambient", "Trip-Hop", "Vocal", "Jazz+Funk", "Fusion", "Trance", "Classical", "Instrumental", "Acid", "House", "Game", "Sound Clip", "Gospel", "Noise", "AlternRock",
            "Bass", "Soul", "Punk", "Space", "Meditative", "Instrumental Pop", "Instrumental Rock", "Ethnic", "Gothic", "Darkwave", "Techno-Industrial", "Electronic", "Pop-Folk",
            "Eurodance", "Dream", "Southern Rock", "Comedy", "Cult", "Gangsta", "Top 40", "Christian Rap", "Pop/Funk", "Jungle", "Native American", "Cabaret", "New Wave",
            "Psychadelic", "Rave", "Showtunes", "Trailer", "Lo-Fi", "Tribal", "Acid Punk", "Acid Jazz", "Polka", "Retro", "Musical", "Rock & Roll", "Hard Rock",
            // These were made up by the authors of Winamp but backported into the ID3 spec
            "Folk", "Folk-Rock", "National Folk", "Swing", "Fast Fusion", "Bebob", "Latin", "Revival", "Celtic", "Bluegrass", "Avantgarde", "Gothic Rock", "Progressive Rock",
            "Psychedelic Rock", "Symphonic Rock", "Slow Rock", "Big Band", "Chorus", "Easy Listening", "Acoustic", "Humour", "Speech", "Chanson", "Opera", "Chamber Music",
            "Sonata", "Symphony", "Booty Bass", "Primus", "Porn Groove", "Satire", "Slow Jam", "Club", "Tango", "Samba", "Folklore", "Ballad", "Power Ballad", "Rhythmic Soul",
            "Freestyle", "Duet", "Punk Rock", "Drum Solo", "A capella", "Euro-House", "Dance Hall",
            // These were also invented by the Winamp folks but ignored by the ID3 authors.
            "Goa", "Drum & Bass", "Club-House", "Hardcore", "Terror", "Indie", "BritPop", "Negerpunk", "Polsk Punk", "Beat", "Christian Gangsta Rap", "Heavy Metal", "Black Metal",
            "Crossover", "Contemporary Christian", "Christian Rock", "Merengue", "Salsa", "Thrash Metal", "Anime", "Jpop", "Synthpop" };
}
| src/main/de/fips/plugin/tinyaudioplayer/audio/PlaylistItemTagFactory.java | /*
* Copyright 2011 Philipp Eichhorn.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package de.fips.plugin.tinyaudioplayer.audio;
import static de.fips.plugin.tinyaudioplayer.audio.PlaylistItemTag.playlistItemTag;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;
import lombok.NoArgsConstructor;
import org.tritonus.share.sampled.file.TAudioFileFormat;
@NoArgsConstructor()
public class PlaylistItemTagFactory {
    // Registered tag builders, keyed by lower-case audio file type.
    private final static Map<String, IPlaylistItemTagBuilder> builder = new HashMap<String, IPlaylistItemTagBuilder>();
    static {
        builder.put("mp3", new MpegPlaylistItemTagBuilder());
        builder.put("ogg", new OggPlaylistItemTagBuilder());
        builder.put("flac", new FlacPlaylistItemTagBuilder());
    }
    /**
     * Creates a {@link PlaylistItemTag} for the audio file at the given URI,
     * or returns null when the type is unsupported or the file is unreadable.
     */
    public PlaylistItemTag formURI(final URI uri) {
        PlaylistItemTag tag = null;
        try {
            AudioFileFormat aff = AudioSystem.getAudioFileFormat(new File(uri));
            final String type = aff.getType().toString().toLowerCase();
            final IPlaylistItemTagBuilder tagBuilder = builder.get(type);
            if (tagBuilder != null) {
                tag = tagBuilder.fromAudioFileFormat(aff);
            }
        } catch (IllegalArgumentException ignore) {
            // File(URI) preconditions did not hold
        } catch (UnsupportedAudioFileException ignore) {
        } catch (IOException ignore) {
        }
        return tag;
    }
    /** Extracts tag data from the Vorbis-comment properties of an Ogg stream. */
    private static class OggPlaylistItemTagBuilder implements IPlaylistItemTagBuilder {
        @Override
        public PlaylistItemTag fromAudioFileFormat(final AudioFileFormat aff) {
            final PlaylistItemTag.$OptionalDef builder = playlistItemTag();
            // Only TAudioFileFormat (tritonus) exposes the property map.
            if (aff instanceof TAudioFileFormat) {
                final Map<?, ?> props = ((TAudioFileFormat) aff).properties();
                Object currentValue = props.get("ogg.channels");
                if (currentValue != null) {
                    builder.channels((Integer) currentValue);
                }
                currentValue = props.get("ogg.frequency.hz");
                if (currentValue != null) {
                    builder.samplingRate((Integer) currentValue);
                }
                currentValue = props.get("ogg.bitrate.nominal.bps");
                if (currentValue != null) {
                    builder.bitRate((Integer) currentValue);
                }
                currentValue = props.get("title");
                if (currentValue != null) {
                    builder.title((String) currentValue);
                }
                currentValue = props.get("author");
                if (currentValue != null) {
                    builder.artist((String) currentValue);
                }
                currentValue = props.get("album");
                if (currentValue != null) {
                    builder.album((String) currentValue);
                }
                currentValue = props.get("year");
                if (currentValue != null) {
                    builder.year((String) currentValue);
                }
                currentValue = props.get("duration");
                if (currentValue != null) {
                    // property is in microseconds; tag stores seconds
                    builder.playTime(((Long) currentValue) / 1000000L);
                }
                currentValue = props.get("ogg.comment.genre");
                if (currentValue != null) {
                    builder.genre((String) currentValue);
                }
                currentValue = props.get("ogg.comment.track");
                if (currentValue != null) {
                    try {
                        // NOTE(review): new Integer(String) is a deprecated boxing
                        // constructor; Integer.valueOf would be preferred.
                        builder.track(new Integer((String) currentValue));
                    } catch (NumberFormatException ignore) {
                    }
                }
            }
            return builder.build();
        }
    }
    /** Extracts tag data from the ID3 properties of an MPEG (mp3) stream. */
    private static class MpegPlaylistItemTagBuilder implements IPlaylistItemTagBuilder {
        @Override
        public PlaylistItemTag fromAudioFileFormat(final AudioFileFormat aff) {
            final PlaylistItemTag.$OptionalDef builder = playlistItemTag();
            // Only TAudioFileFormat (tritonus) exposes the property map.
            if (aff instanceof TAudioFileFormat) {
                final Map<?, ?> props = ((TAudioFileFormat) aff).properties();
                Object currentValue = props.get("mp3.channels");
                if (currentValue != null) {
                    builder.channels((Integer) currentValue);
                }
                currentValue = props.get("mp3.frequency.hz");
                if (currentValue != null) {
                    builder.samplingRate((Integer) currentValue);
                }
                currentValue = props.get("mp3.bitrate.nominal.bps");
                if (currentValue != null) {
                    builder.bitRate((Integer) currentValue);
                }
                currentValue = props.get("title");
                if (currentValue != null) {
                    builder.title((String) currentValue);
                }
                currentValue = props.get("author");
                if (currentValue != null) {
                    builder.artist((String) currentValue);
                }
                currentValue = props.get("album");
                if (currentValue != null) {
                    builder.album((String) currentValue);
                }
                currentValue = props.get("year");
                if (currentValue != null) {
                    builder.year((String) currentValue);
                }
                currentValue = props.get("duration");
                if (currentValue != null) {
                    // property is in microseconds; tag stores seconds
                    builder.playTime(((Long) currentValue) / 1000000L);
                }
                currentValue = props.get("mp3.id3tag.genre");
                if (currentValue != null) {
                    // ID3 genres may be numeric references like "(17)"; decode each
                    // parenthesized token via the ID3TAG_GENRES lookup table.
                    final StringBuilder genre = new StringBuilder();
                    final Pattern pattern = Pattern.compile("[^()]+");
                    final Matcher matcher = pattern.matcher((String) currentValue);
                    while (matcher.find()) {
                        final String id3tagGenre = matcher.group();
                        try {
                            // NOTE(review): an index >= ID3TAG_GENRES.length throws
                            // ArrayIndexOutOfBoundsException here, which is NOT caught
                            // (only NumberFormatException is) — bounds check missing.
                            genre.append(ID3TAG_GENRES[Integer.valueOf(id3tagGenre)]);
                        } catch (NumberFormatException e) {
                            genre.append(id3tagGenre);
                        }
                    }
                    builder.genre(genre.toString());
                }
                currentValue = props.get("mp3.id3tag.track");
                if (currentValue != null) {
                    try {
                        builder.track(Integer.valueOf((String) currentValue));
                    } catch (NumberFormatException ignore) {
                    }
                }
            }
            return builder.build();
        }
    }
    /** Builds a minimal tag (channels/rate/bits) from the raw FLAC audio format. */
    private static class FlacPlaylistItemTagBuilder implements IPlaylistItemTagBuilder {
        @Override
        public PlaylistItemTag fromAudioFileFormat(final AudioFileFormat aff) {
            final AudioFormat af = aff.getFormat();
            return playlistItemTag() //
                    .channels(af.getChannels()) //
                    .samplingRate((int) af.getSampleRate()) //
                    .bitRate(af.getSampleSizeInBits()).build();
        }
    }
    /** Strategy for converting an {@link AudioFileFormat} into a tag. */
    private static interface IPlaylistItemTagBuilder {
        public PlaylistItemTag fromAudioFileFormat(final AudioFileFormat aff);
    }
    // http://www.id3.org/id3v2.3.0#head-129376727ebe5309c1de1888987d070288d7c7e7
    private final static String[] ID3TAG_GENRES = new String[] { "Blues", "Classic Rock", "Country", "Dance", "Disco", "Funk", "Grunge", "Hip-Hop", "Jazz", "Metal", "New Age",
            "Oldies", "Other", "Pop", "R&B", "Rap", "Reggae", "Rock", "Techno", "Industrial", "Alternative", "Ska", "Death Metal", "Pranks", "Soundtrack", "Euro-Techno",
            "Ambient", "Trip-Hop", "Vocal", "Jazz+Funk", "Fusion", "Trance", "Classical", "Instrumental", "Acid", "House", "Game", "Sound Clip", "Gospel", "Noise", "AlternRock",
            "Bass", "Soul", "Punk", "Space", "Meditative", "Instrumental Pop", "Instrumental Rock", "Ethnic", "Gothic", "Darkwave", "Techno-Industrial", "Electronic", "Pop-Folk",
            "Eurodance", "Dream", "Southern Rock", "Comedy", "Cult", "Gangsta", "Top 40", "Christian Rap", "Pop/Funk", "Jungle", "Native American", "Cabaret", "New Wave",
            "Psychadelic", "Rave", "Showtunes", "Trailer", "Lo-Fi", "Tribal", "Acid Punk", "Acid Jazz", "Polka", "Retro", "Musical", "Rock & Roll", "Hard Rock",
            // These were made up by the authors of Winamp but backported into the ID3 spec
            "Folk", "Folk-Rock", "National Folk", "Swing", "Fast Fusion", "Bebob", "Latin", "Revival", "Celtic", "Bluegrass", "Avantgarde", "Gothic Rock", "Progressive Rock",
            "Psychedelic Rock", "Symphonic Rock", "Slow Rock", "Big Band", "Chorus", "Easy Listening", "Acoustic", "Humour", "Speech", "Chanson", "Opera", "Chamber Music",
            "Sonata", "Symphony", "Booty Bass", "Primus", "Porn Groove", "Satire", "Slow Jam", "Club", "Tango", "Samba", "Folklore", "Ballad", "Power Ballad", "Rhythmic Soul",
            "Freestyle", "Duet", "Punk Rock", "Drum Solo", "A capella", "Euro-House", "Dance Hall",
            // These were also invented by the Winamp folks but ignored by the ID3 authors.
            "Goa", "Drum & Bass", "Club-House", "Hardcore", "Terror", "Indie", "BritPop", "Negerpunk", "Polsk Punk", "Beat", "Christian Gangsta Rap", "Heavy Metal", "Black Metal",
            "Crossover", "Contemporary Christian", "Christian Rock", "Merengue", "Salsa", "Thrash Metal", "Anime", "Jpop", "Synthpop" };
}
| cleanup
| src/main/de/fips/plugin/tinyaudioplayer/audio/PlaylistItemTagFactory.java | cleanup |
|
Java | agpl-3.0 | 0b40ea851e9e1b1787ecfdf7a8fe33d4e0709e1f | 0 | jtricker/IRIS,ssasikumar86/IRIS,andrewmcguinness/IRIS,andrewmcguinness/IRIS,asooben/IRIS,ritumalhotra8/IRIS,asooben/IRIS,ssasikumar86/IRIS,ritumalhotra8/IRIS,jtricker/IRIS,ritumalhotra8/IRIS,sathishchet/IRIS,ssethupathi/IRIS,temenostech/IRIS,ssethupathi/IRIS,MohamedNazir/IRIS,sathishchet/IRIS,MohamedNazir/IRIS,junejosheeraz/IRIS,temenostech/IRIS,asooben/IRIS,junejosheeraz/IRIS,ssethupathi/IRIS,temenostech/IRIS,ssasikumar86/IRIS,andrewmcguinness/IRIS,sathishchet/IRIS,MohamedNazir/IRIS,junejosheeraz/IRIS,jtricker/IRIS | package com.temenos.interaction.springdsl;
/*
* #%L
* interaction-springdsl
* %%
* Copyright (C) 2012 - 2014 Temenos Holdings N.V.
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
import java.io.File;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.context.support.FileSystemXmlApplicationContext;
import com.temenos.interaction.core.hypermedia.Event;
import com.temenos.interaction.core.hypermedia.MethodNotAllowedException;
import com.temenos.interaction.core.hypermedia.PathTree;
import com.temenos.interaction.core.hypermedia.ResourceState;
import com.temenos.interaction.core.hypermedia.ResourceStateProvider;
import com.temenos.interaction.core.resource.ConfigLoader;
public class SpringDSLResourceStateProvider implements ResourceStateProvider, DynamicRegistrationResourceStateProvider {
private final Logger logger = LoggerFactory.getLogger(SpringDSLResourceStateProvider.class);
private ConcurrentMap<String, ResourceState> resources = new ConcurrentHashMap<String, ResourceState>();
protected StateRegisteration stateRegisteration;
private ConfigLoader configLoader = new ConfigLoader();
/**
* Map of ResourceState bean names, to paths.
*/
protected Properties beanMap;
protected boolean initialised = false;
/**
* Map of paths to state names
*/
protected Map<String, Set<String>> resourceStatesByPath = new HashMap<String, Set<String>>();
/**
* Map of request to state names
*/
protected Map<String, String> resourceStatesByRequest = new HashMap<String, String>();
/**
* Map of resource methods where state name is the key
*/
protected Map<String, Set<String>> resourceMethodsByState = new HashMap<String, Set<String>>();
/**
* Map to a resource path where the state name is the key
*/
protected Map<String, String> resourcePathsByState = new HashMap<String, String>();
PathTree pathTree = new PathTree();
    /** Default constructor; the bean map must be supplied later via {@link #setResourceMap(Properties)}. */
    public SpringDSLResourceStateProvider() {}
    /**
     * @param beanMap map of ResourceState bean names to "METHOD[,METHOD...] /path" bindings
     */
    public SpringDSLResourceStateProvider(Properties beanMap) {
        this.beanMap = beanMap;
    }
    /** Replaces the map of ResourceState bean names to "METHOD[,METHOD...] /path" bindings. */
    public void setResourceMap(Properties beanMap) {
        this.beanMap = beanMap;
    }
    /** Injects an alternative {@link ConfigLoader}; optional — a default instance is used otherwise. */
    @Autowired(required = false)
    public void setConfigLoader(ConfigLoader configLoader) {
        this.configLoader = configLoader;
    }
protected void initialise() {
if (initialised)
return;
for (Object stateObj : beanMap.keySet()) {
storeState(stateObj, null);
}
initialised = true;
}
protected void storeState(Object stateObj, String binding) {
String stateName = stateObj.toString();
// binding is [GET,PUT /thePath]
if (binding == null){
binding = beanMap.getProperty(stateName);
}
// split into methods and path
String[] strs = binding.split(" ");
String methodPart = strs[0];
String path = strs[1];
// methods
String[] methodsStrs = methodPart.split(",");
// path
resourcePathsByState.put(stateName, path);
// methods
Set<String> methodSet = resourceMethodsByState.get(stateName);
if (methodSet == null) {
methodSet = new HashSet<String>();
}
for(String methodStr: methodsStrs) {
methodSet.add(methodStr);
pathTree.put(path, methodStr, stateName);
}
resourceMethodsByState.put(stateName, methodSet);
for (String method : methodSet) {
String request = method + " " + path;
logger.debug("Binding ["+stateName+"] to ["+request+"]");
String found = resourceStatesByRequest.get(request);
if (found != null) {
logger.error("Multiple states bound to the same request ["+request+"], overriding ["+found+"] with ["+stateName+"]");
}
resourceStatesByRequest.put(request, stateName);
}
Set<String> stateNames = resourceStatesByPath.get(path);
if (stateNames == null) {
stateNames = new HashSet<String>();
}
stateNames.add(stateName);
resourceStatesByPath.put(path, stateNames);
}
public void addState(String stateObj, Properties properties) {
if (initialised) {
String stateName = stateObj.toString();
// binding is [GET,PUT /thePath]
String binding = properties.getProperty(stateName);
// split into methods and path
String[] strs = binding.split(" ");
String methodPart = strs[0];
String path = strs[1];
// methods
String[] methods = methodPart.split(",");
logger.info("Attempting to register state: " + stateName + " methods: " + methods + " path: " + path);
// preemptive loading
ResourceState state = getResourceState(stateName);
if (state != null){
storeState(stateName, binding);
Set<String> methodSet = new HashSet<String>();
for(String methodStr: methods) {
methodSet.add(methodStr);
}
}
}
}
    /** Removes the named resource state from the in-memory cache so it is re-loaded on next access. */
    public void unload(String name) {
        resources.remove(name);
    }
    /** @return true if the named resource state is currently held in the in-memory cache */
    @Override
    public boolean isLoaded(String name) {
        return resources.containsKey(name);
    }
@Override
public ResourceState getResourceState(String resourceStateName) {
ResourceState result = null;
try {
if (resourceStateName != null) {
// Try to retrieve the resource state
result = resources.get(resourceStateName);
if (result == null) {
// Resource state has not already been loaded so attempt to load it
ResourceStateLoad newState = new ResourceStateLoad(resourceStateName);
newState.load();
if ( newState.isLoaded() ) {
result = newState.loaded();
} else {
logger.error( newState.toString() );
}
}
}
} catch (BeansException e) {
logger.error("Failed to load ["+resourceStateName+"]", e);
}
return result;
}
@Override
public ResourceState determineState(Event event, String resourcePath) {
initialise();
String request = event.getMethod() + " " + resourcePath;
String stateName = resourceStatesByRequest.get(request);
if (stateName != null){
logger.debug("Found state ["+stateName+"] for ["+request+"]");
return getResourceState(stateName);
}else{
logger.warn("NOT Found state ["+stateName+"] for ["+request+"]");
return null;
}
}
    /** @return map of resource path to the set of state names bound to that path */
    @Override
    public Map<String, Set<String>> getResourceStatesByPath() {
        initialise();
        return resourceStatesByPath;
    }
    /** @return map of state name to the set of HTTP methods bound to that state */
    public Map<String, Set<String>> getResourceMethodsByState() {
        initialise();
        return resourceMethodsByState;
    }
    /** @return map of state name to the resource path it is bound to */
    public Map<String, String> getResourcePathsByState() {
        initialise();
        return resourcePathsByState;
    }
    // NOTE(review): the beanMap parameter is ignored — this returns the
    // provider-wide map after initialisation; confirm this is intentional.
    protected Map<String, Set<String>> getResourceStatesByPath(Properties beanMap) {
        initialise();
        return resourceStatesByPath;
    }
    /** Sets the callback used to register newly discovered states with the runtime. */
    @Override
    public void setStateRegisteration(StateRegisteration registerState) {
        this.stateRegisteration = registerState;
    }
/** Load a Resource State from the appropriate location (file or classpath).
* There are likely to be several possibilities for where the requested
* resource could be: this tracks all of them so that they can be logged
* if the resource is not found.
*/
private class ResourceStateLoad {
    // Name of the resource state this load attempt targets.
    private String state;
    // Locations tried so far, recorded for the diagnostics in toString().
    private List<String> attempts = new ArrayList<String>(2);
    // Location the bean XML was actually found at, if any.
    private String foundFile;
    // The loaded state; null until load() succeeds.
    private ResourceState result;
    /** Define a resource state to load. Must call load() to actually load it. */
    public ResourceStateLoad( String resourceStateName ) {
        state = resourceStateName;
    }
    /** Was the load operation successful?
     */
    public boolean isLoaded() {
        return ( result != null );
    }
    /** Get the Resource State from a successful load
     */
    public ResourceState loaded() {
        return result;
    }
    /** Description of the state of this load, intended for logging
     */
    public String toString() {
        if ( isLoaded() ) {
            return "Loaded Resource State " + state + " from " + foundFile;
        } else if (attempts.size()==0) {
            return "Not-loaded Resource State " + state;
        } else if (foundFile != null ) {
            return "State " + state + " not found in " + foundFile;
        }
        // Not found anywhere: list every location that was attempted.
        StringBuilder msg = new StringBuilder( "Failed to load resource state " );
        msg.append( state );
        msg.append( ". Attempted to load from " );
        for ( int i = 0 ; i < attempts.size() ; ++i ) {
            if ( i > 0 ) msg.append(", ");
            msg.append("[");
            msg.append(attempts.get(i));
            msg.append("]");
        }
        return msg.toString();
    }
    /** Load the configured resource state.
     * Use this method only once.
     * call isLoaded() to discover success or failure
     */
    public void load() {
        // check that this has not been called before
        if ( attempts.size() > 0 )
            throw new IllegalStateException( "repeated call to load()" );
        String tmpResourceStateName = state;
        String tmpResourceName = tmpResourceStateName;
        // Only the portion of the state name before the first '-' selects the PRD file.
        if(tmpResourceName.contains("-")) {
            tmpResourceName = tmpResourceName.substring(0, tmpResourceName.indexOf("-"));
        }
        String beanXml = "IRIS-" + tmpResourceName + "-PRD.xml";
        // Attempt to create Spring context based on current resource filename pattern
        ApplicationContext context = createApplicationContext(beanXml);
        if (context == null) {
            // Failed to create Spring context using current resource filename pattern so use old pattern
            int pos = tmpResourceName.lastIndexOf("_");
            if (pos > 3){
                // Fallback: strip the final '_' segment and retry with that file name.
                tmpResourceName = tmpResourceName.substring(0, pos);
                beanXml = "IRIS-" + tmpResourceName + "-PRD.xml";
                context = createApplicationContext(beanXml);
                if (context != null) {
                    // Successfully created Spring context using old resource filename pattern
                    // Convert resource state name to old resource name format
                    pos = tmpResourceStateName.lastIndexOf("-");
                    if (pos < 0){
                        // No '-' present: replace the last '_' with '-' to form the old-style bean id.
                        pos = tmpResourceStateName.lastIndexOf("_");
                        if (pos > 0){
                            tmpResourceStateName = tmpResourceStateName.substring(0, pos) + "-" + tmpResourceStateName.substring(pos+1);
                        }
                    }
                }
            }
        }
        if(context != null) {
            result = loadAllResourceStatesFromFile(context, tmpResourceStateName);
        }
    }
    // Pulls every ResourceState bean out of the context, caches them all in the
    // outer class's 'resources' map, and returns the one matching resourceState
    // (or null if the context does not define it).
    private ResourceState loadAllResourceStatesFromFile(ApplicationContext context, String resourceState) {
        Map<String,ResourceState> tmpResources = context.getBeansOfType(ResourceState.class);
        // Save all the loaded resources into the main resource state cache
        resources.putAll(tmpResources);
        ResourceState result = null;
        if(tmpResources.containsKey(resourceState)) {
            result = tmpResources.get(resourceState);
        }
        return result;
    }
    /**
     * Tries the classpath when no IRIS config directory is configured,
     * otherwise the file system under that directory. Each location tried is
     * recorded in 'attempts'.
     *
     * @param beanXml the filename to locate
     * @return a Spring ApplicationContext
     */
    private ApplicationContext createApplicationContext(String beanXml) {
        ApplicationContext result = null;
        String irisResourceDirPath = configLoader.getIrisConfigDirPath();
        if(irisResourceDirPath == null) {
            // Try and load the resource from the classpath
            String description = "classpath:" + beanXml;
            attempts.add(description);
            result = new ClassPathXmlApplicationContext(new String[] {beanXml});
            if ( result != null ) foundFile = description;
        } else {
            // Try and load the resource from the file system as a resource directory has been specified
            File irisResourceDir = new File(irisResourceDirPath);
            if(irisResourceDir.exists() && irisResourceDir.isDirectory()) {
                File file = new File(irisResourceDir, beanXml);
                String path = "";
                try {
                    path = file.toURL().toString();
                } catch (MalformedURLException e) {
                    logger.error("Failed to load IRIS PRD file: " + file.getAbsolutePath(), e);
                }
                attempts.add(path);
                if(file.exists()) {
                    // Only attempt to create an application context if the file exists
                    foundFile = path;
                    result = new FileSystemXmlApplicationContext( new String[] { path });
                }
            } else {
                logger.error("The path for IRIS generated files directory (parameter 'com.temenos.interaction.config')," +
                        " is pointing to a file or doesn't exists: " + irisResourceDir.getAbsolutePath());
            }
        }
        return result;
    }
}
/**
 * Looks up the resource state bound to the given HTTP method and URL.
 *
 * @return the resource state, or null if no state is registered for the path
 * @throws MethodNotAllowedException if the path is registered but not for this method
 */
@Override
public ResourceState getResourceState(String httpMethod, String url) throws MethodNotAllowedException {
    String resourceStateId = getResourceStateId(httpMethod, url);
    if(resourceStateId == null) {
        // NOTE(review): getResourceStateId only returns null when pathTree has
        // no entry for the url, so this inner throw appears unreachable unless
        // a subclass overrides getResourceStateId — confirm before removing.
        if(pathTree.get(url) != null) {
            Set<String> allowedMethods = pathTree.get(url).keySet();
            throw new MethodNotAllowedException(allowedMethods);
        } else {
            return null;
        }
    }
    return getResourceState(resourceStateId);
}
/**
 * Resolves the id of the resource state bound to the given HTTP method and URL.
 *
 * @param httpMethod the HTTP method of the request, e.g. "GET"
 * @param url the request path to resolve
 * @return the resource state id, or null when no state is registered for the path
 * @throws MethodNotAllowedException if the path is registered but not for this
 *         method; the exception carries the methods the path does support
 */
public String getResourceStateId(String httpMethod, String url) throws MethodNotAllowedException {
    initialise();
    Map<String,String> methodToState = pathTree.get(url);
    if (methodToState == null) {
        // Unknown path.
        return null;
    }
    String resourceStateId = methodToState.get(httpMethod);
    if (resourceStateId == null) {
        // Path exists but not for this method. (The original re-queried
        // pathTree.get(url) here, but methodToState IS that same non-null
        // lookup result, so the redundant calls are removed.)
        throw new MethodNotAllowedException(methodToState.keySet());
    }
    return resourceStateId;
}
} | interaction-dsl/interaction-springdsl/src/main/java/com/temenos/interaction/springdsl/SpringDSLResourceStateProvider.java | package com.temenos.interaction.springdsl;
/*
* #%L
* interaction-springdsl
* %%
* Copyright (C) 2012 - 2014 Temenos Holdings N.V.
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
import java.io.File;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.context.support.FileSystemXmlApplicationContext;
import com.temenos.interaction.core.hypermedia.Event;
import com.temenos.interaction.core.hypermedia.MethodNotAllowedException;
import com.temenos.interaction.core.hypermedia.PathTree;
import com.temenos.interaction.core.hypermedia.ResourceState;
import com.temenos.interaction.core.hypermedia.ResourceStateProvider;
import com.temenos.interaction.core.resource.ConfigLoader;
public class SpringDSLResourceStateProvider implements ResourceStateProvider, DynamicRegistrationResourceStateProvider {
    private final Logger logger = LoggerFactory.getLogger(SpringDSLResourceStateProvider.class);
    // Cache of resource states that have already been loaded, keyed by state name.
    private ConcurrentMap<String, ResourceState> resources = new ConcurrentHashMap<String, ResourceState>();
    protected StateRegisteration stateRegisteration;
    private ConfigLoader configLoader = new ConfigLoader();
    /**
     * Map of ResourceState bean names, to paths.
     */
    protected Properties beanMap;
    // Guards one-time population of the lookup maps from beanMap.
    protected boolean initialised = false;
    /**
     * Map of paths to state names
     */
    protected Map<String, Set<String>> resourceStatesByPath = new HashMap<String, Set<String>>();
    /**
     * Map of request to state names
     */
    protected Map<String, String> resourceStatesByRequest = new HashMap<String, String>();
    /**
     * Map of resource methods where state name is the key
     */
    protected Map<String, Set<String>> resourceMethodsByState = new HashMap<String, Set<String>>();
    /**
     * Map to a resource path where the state name is the key
     */
    protected Map<String, String> resourcePathsByState = new HashMap<String, String>();
    // Path -> (HTTP method -> state name) lookup structure.
    PathTree pathTree = new PathTree();
    public SpringDSLResourceStateProvider() {}
    public SpringDSLResourceStateProvider(Properties beanMap) {
        this.beanMap = beanMap;
    }
    /** Sets the state-name-to-binding properties consulted by initialise(). */
    public void setResourceMap(Properties beanMap) {
        this.beanMap = beanMap;
    }
    @Autowired(required = false)
    public void setConfigLoader(ConfigLoader configLoader) {
        this.configLoader = configLoader;
    }
    /** Populates the lookup maps from beanMap on first use; later calls are no-ops. */
    protected void initialise() {
        if (initialised)
            return;
        for (Object stateObj : beanMap.keySet()) {
            storeState(stateObj, null);
        }
        initialised = true;
    }
    /**
     * Records one state binding in every lookup map.
     *
     * @param stateObj the state name (its toString() is used as the key)
     * @param binding the binding string, e.g. "GET,PUT /thePath"; when null it
     *        is read from beanMap under the state name
     */
    protected void storeState(Object stateObj, String binding) {
        String stateName = stateObj.toString();
        // binding is [GET,PUT /thePath]
        if (binding == null){
            binding = beanMap.getProperty(stateName);
        }
        // split into methods and path
        String[] strs = binding.split(" ");
        String methodPart = strs[0];
        String path = strs[1];
        // methods
        String[] methodsStrs = methodPart.split(",");
        // path
        resourcePathsByState.put(stateName, path);
        // methods
        Set<String> methodSet = resourceMethodsByState.get(stateName);
        if (methodSet == null) {
            methodSet = new HashSet<String>();
        }
        for(String methodStr: methodsStrs) {
            methodSet.add(methodStr);
            pathTree.put(path, methodStr, stateName);
        }
        resourceMethodsByState.put(stateName, methodSet);
        for (String method : methodSet) {
            String request = method + " " + path;
            logger.debug("Binding ["+stateName+"] to ["+request+"]");
            String found = resourceStatesByRequest.get(request);
            if (found != null) {
                logger.error("Multiple states bound to the same request ["+request+"], overriding ["+found+"] with ["+stateName+"]");
            }
            resourceStatesByRequest.put(request, stateName);
        }
        Set<String> stateNames = resourceStatesByPath.get(path);
        if (stateNames == null) {
            stateNames = new HashSet<String>();
        }
        stateNames.add(stateName);
        resourceStatesByPath.put(path, stateNames);
    }
    /**
     * Registers an additional state after initialisation. The state is only
     * stored when it can be pre-loaded via getResourceState.
     * NOTE(review): the local methodSet built at the end is never used, and
     * the info log concatenates the 'methods' array directly (prints its
     * toString(), not its contents) — confirm both are intentional.
     */
    public void addState(String stateObj, Properties properties) {
        if (initialised) {
            String stateName = stateObj.toString();
            // binding is [GET,PUT /thePath]
            String binding = properties.getProperty(stateName);
            // split into methods and path
            String[] strs = binding.split(" ");
            String methodPart = strs[0];
            String path = strs[1];
            // methods
            String[] methods = methodPart.split(",");
            logger.info("Attempting to register state: " + stateName + " methods: " + methods + " path: " + path);
            // preemptive loading
            ResourceState state = getResourceState(stateName);
            if (state != null){
                storeState(stateName, binding);
                Set<String> methodSet = new HashSet<String>();
                for(String methodStr: methods) {
                    methodSet.add(methodStr);
                }
            }
        }
    }
    /** Evicts a state from the cache so it is reloaded on next access. */
    public void unload(String name) {
        resources.remove(name);
    }
    @Override
    public boolean isLoaded(String name) {
        return resources.containsKey(name);
    }
    /**
     * Resolves a resource state by name, serving it from the cache when
     * possible and otherwise loading it from its bean XML.
     *
     * @return the state, or null when the name is null or the load fails
     */
    @Override
    public ResourceState getResourceState(String resourceStateName) {
        ResourceState result = null;
        try {
            if (resourceStateName != null) {
                // Try to retrieve the resource state
                result = resources.get(resourceStateName);
                if (result == null) {
                    // Resource state has not already been loaded so attempt to load it
                    ResourceStateLoad newState = new ResourceStateLoad(resourceStateName);
                    newState.load();
                    if ( newState.isLoaded() ) {
                        result = newState.loaded();
                    } else {
                        logger.error( newState.toString() );
                    }
                }
            }
        } catch (BeansException e) {
            logger.error("Failed to load ["+resourceStateName+"]", e);
        }
        return result;
    }
    /**
     * Maps an event (HTTP method) plus resource path to its resource state,
     * or returns null when no state is bound to that request.
     */
    @Override
    public ResourceState determineState(Event event, String resourcePath) {
        initialise();
        String request = event.getMethod() + " " + resourcePath;
        String stateName = resourceStatesByRequest.get(request);
        if (stateName != null){
            logger.debug("Found state ["+stateName+"] for ["+request+"]");
            return getResourceState(stateName);
        }else{
            logger.warn("NOT Found state ["+stateName+"] for ["+request+"]");
            return null;
        }
    }
    /**
     * @return map of resource path to the names of the states bound to that path
     */
    @Override
    public Map<String, Set<String>> getResourceStatesByPath() {
        initialise();
        return resourceStatesByPath;
    }
    /**
     * @return map of state name to the set of HTTP methods bound to that state
     */
    public Map<String, Set<String>> getResourceMethodsByState() {
        initialise();
        return resourceMethodsByState;
    }
    /**
     * @return map of state name to the resource path it is bound to
     */
    public Map<String, String> getResourcePathsByState() {
        initialise();
        return resourcePathsByState;
    }
    /**
     * @param beanMap NOTE(review): never read — the same map as the no-arg
     *        overload is returned. Confirm callers expect this.
     * @return map of resource path to the names of the states bound to that path
     */
    protected Map<String, Set<String>> getResourceStatesByPath(Properties beanMap) {
        initialise();
        return resourceStatesByPath;
    }
    /** Stores the StateRegisteration collaborator used by this provider. */
    @Override
    public void setStateRegisteration(StateRegisteration registerState) {
        this.stateRegisteration = registerState;
    }
    /** Load a Resource State from the appropriate location (file or classpath).
     * There are likely to be several possibilities for where the requested
     * resource could be: this tracks all of them so that they can be logged
     * if the resource is not found.
     */
    private class ResourceStateLoad {
        // Name of the resource state this load attempt targets.
        private String state;
        // Locations tried so far, recorded for the diagnostics in toString().
        private List<String> attempts = new ArrayList<String>(2);
        // Location the bean XML was actually found at, if any.
        private String foundFile;
        // The loaded state; null until load() succeeds.
        private ResourceState result;
        /** Define a resource state to load. Must call load() to actually load it. */
        public ResourceStateLoad( String resourceStateName ) {
            state = resourceStateName;
        }
        /** Was the load operation successful?
         */
        public boolean isLoaded() {
            return ( result != null );
        }
        /** Get the Resource State from a successful load
         */
        public ResourceState loaded() {
            return result;
        }
        /** Description of the state of this load, intended for logging
         */
        public String toString() {
            if ( isLoaded() ) {
                return "Loaded Resource State " + state + " from " + foundFile;
            } else if (attempts.size()==0) {
                return "Not-loaded Resource State " + state;
            } else if (foundFile != null ) {
                return "State " + state + " not found in " + foundFile;
            }
            // Not found anywhere: list every location that was attempted.
            StringBuilder msg = new StringBuilder( "Failed to load resource state " );
            msg.append( state );
            msg.append( ". Attempted to load from " );
            for ( int i = 0 ; i < attempts.size() ; ++i ) {
                if ( i > 0 ) msg.append(", ");
                msg.append("[");
                msg.append(attempts.get(i));
                msg.append("]");
            }
            return msg.toString();
        }
        /** Load the configured resource state.
         * Use this method only once.
         * call isLoaded() to discover success or failure
         */
        public void load() {
            // check that this has not been called before
            if ( attempts.size() > 0 )
                throw new IllegalStateException( "repeated call to load()" );
            String tmpResourceStateName = state;
            String tmpResourceName = tmpResourceStateName;
            // Only the portion of the state name before the first '-' names the PRD file.
            if(tmpResourceName.contains("-")) {
                tmpResourceName = tmpResourceName.substring(0, tmpResourceName.indexOf("-"));
            }
            String beanXml = "IRIS-" + tmpResourceName + "-PRD.xml";
            // Attempt to create Spring context based on current resource filename pattern
            ApplicationContext context = createApplicationContext(beanXml);
            if (context == null) {
                // Failed to create Spring context using current resource filename pattern so use old pattern
                int pos = tmpResourceName.lastIndexOf("_");
                if (pos > 3){
                    // Fallback: strip the final '_' segment and retry with that file name.
                    tmpResourceName = tmpResourceName.substring(0, pos);
                    beanXml = "IRIS-" + tmpResourceName + "-PRD.xml";
                    context = createApplicationContext(beanXml);
                    if (context != null) {
                        // Successfully created Spring context using old resource filename pattern
                        // Convert resource state name to old resource name format
                        pos = tmpResourceStateName.lastIndexOf("-");
                        if (pos < 0){
                            // No '-' present: replace the last '_' with '-' to form the old-style bean id.
                            pos = tmpResourceStateName.lastIndexOf("_");
                            if (pos > 0){
                                tmpResourceStateName = tmpResourceStateName.substring(0, pos) + "-" + tmpResourceStateName.substring(pos+1);
                            }
                        }
                    }
                }
            }
            if(context != null) {
                result = loadAllResourceStatesFromFile(context, tmpResourceStateName);
            }
        }
        // Pulls every ResourceState bean out of the context, caches them all in
        // the outer class's 'resources' map, and returns the one matching
        // resourceState (or null if the context does not define it).
        private ResourceState loadAllResourceStatesFromFile(ApplicationContext context, String resourceState) {
            Map<String,ResourceState> tmpResources = context.getBeansOfType(ResourceState.class);
            // Save all the loaded resources into the main resource state cache
            resources.putAll(tmpResources);
            ResourceState result = null;
            if(tmpResources.containsKey(resourceState)) {
                result = tmpResources.get(resourceState);
            }
            return result;
        }
        /**
         * Tries the classpath when no IRIS config directory is configured,
         * otherwise the file system under that directory. Each location tried
         * is recorded in 'attempts'.
         *
         * @param beanXml the filename to locate
         * @return a Spring ApplicationContext
         */
        private ApplicationContext createApplicationContext(String beanXml) {
            ApplicationContext result = null;
            String irisResourceDirPath = configLoader.getIrisConfigDirPath();
            if(irisResourceDirPath == null) {
                // Try and load the resource from the classpath
                String description = "classpath:" + beanXml;
                attempts.add(description);
                result = new ClassPathXmlApplicationContext(new String[] {beanXml});
                if ( result != null ) foundFile = description;
            } else {
                // Try and load the resource from the file system as a resource directory has been specified
                File irisResourceDir = new File(irisResourceDirPath);
                if(irisResourceDir.exists() && irisResourceDir.isDirectory()) {
                    File file = new File(irisResourceDir, beanXml);
                    String path = "";
                    try {
                        path = file.toURL().toString();
                    } catch (MalformedURLException e) {
                        logger.error("Failed to load IRIS PRD file: " + file.getAbsolutePath(), e);
                    }
                    attempts.add(path);
                    if(file.exists()) {
                        // Only attempt to create an application context if the file exists
                        foundFile = path;
                        result = new FileSystemXmlApplicationContext( new String[] { path });
                    }
                }
                // NOTE(review): when the configured directory does not exist,
                // this silently returns null with no log — confirm intended.
            }
            return result;
        }
    }
    /**
     * Looks up the resource state bound to the given HTTP method and URL.
     *
     * @return the resource state, or null if no state is registered for the path
     * @throws MethodNotAllowedException if the path is registered but not for this method
     */
    @Override
    public ResourceState getResourceState(String httpMethod, String url) throws MethodNotAllowedException {
        String resourceStateId = getResourceStateId(httpMethod, url);
        if(resourceStateId == null) {
            // NOTE(review): getResourceStateId only returns null when pathTree
            // has no entry for the url, so this inner throw appears unreachable
            // unless a subclass overrides getResourceStateId.
            if(pathTree.get(url) != null) {
                Set<String> allowedMethods = pathTree.get(url).keySet();
                throw new MethodNotAllowedException(allowedMethods);
            } else {
                return null;
            }
        }
        return getResourceState(resourceStateId);
    }
    /**
     * Resolves the id of the resource state bound to the given HTTP method and URL.
     *
     * @return the state id, or null when the path is unknown
     * @throws MethodNotAllowedException if the path exists but not for this method
     */
    public String getResourceStateId(String httpMethod, String url) throws MethodNotAllowedException {
        Map<String,String> methodToState = null;
        initialise();
        methodToState = pathTree.get(url);
        String resourceStateId = null;
        if(methodToState != null) {
            resourceStateId = methodToState.get(httpMethod);
            if(resourceStateId == null) {
                // NOTE(review): pathTree.get(url) is re-queried here although
                // methodToState already holds that same non-null result.
                if(pathTree.get(url) != null) {
                    Set<String> allowedMethods = pathTree.get(url).keySet();
                    throw new MethodNotAllowedException(allowedMethods);
                }
            }
        } else {
            return null;
        }
        return resourceStateId;
    }
}
| interaction-dsl/interaction-springdsl/src/main/java/com/temenos/interaction/springdsl/SpringDSLResourceStateProvider.java | 1851274: Message showing an absolute path for IRIS generated directory |
|
Java | lgpl-2.1 | 24aaa4b5aba73cd41bf58413fffd618a18b77200 | 0 | zebrafishmine/intermine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,justincc/intermine,elsiklab/intermine,tomck/intermine,elsiklab/intermine,zebrafishmine/intermine,justincc/intermine,justincc/intermine,elsiklab/intermine,drhee/toxoMine,JoeCarlson/intermine,drhee/toxoMine,justincc/intermine,drhee/toxoMine,JoeCarlson/intermine,tomck/intermine,julie-sullivan/phytomine,Arabidopsis-Information-Portal/intermine,julie-sullivan/phytomine,justincc/intermine,kimrutherford/intermine,JoeCarlson/intermine,JoeCarlson/intermine,zebrafishmine/intermine,tomck/intermine,julie-sullivan/phytomine,zebrafishmine/intermine,kimrutherford/intermine,joshkh/intermine,zebrafishmine/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,drhee/toxoMine,drhee/toxoMine,joshkh/intermine,elsiklab/intermine,zebrafishmine/intermine,elsiklab/intermine,kimrutherford/intermine,JoeCarlson/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,tomck/intermine,tomck/intermine,drhee/toxoMine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,kimrutherford/intermine,kimrutherford/intermine,tomck/intermine,kimrutherford/intermine,JoeCarlson/intermine,elsiklab/intermine,elsiklab/intermine,joshkh/intermine,justincc/intermine,julie-sullivan/phytomine,elsiklab/intermine,kimrutherford/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,justincc/intermine,julie-sullivan/phytomine,justincc/intermine,tomck/intermine,julie-sullivan/phytomine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,zebrafishmine/intermine,joshkh/intermine,JoeCarlson/intermine,JoeCarlson/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,JoeCarlson/intermine,kimrutherford/intermine,julie-sullivan/phytomine,drhee/toxoMine,kimrutherford/intermine,joshkh/intermine,tomck/interm
ine | package org.flymine.web.results;
/*
* Copyright (C) 2002-2003 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;
import org.apache.struts.actions.LookupDispatchAction;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
/**
* Implementation of <strong>LookupDispatchAction</strong>. Changes the
* size of the results displayed.
*
* @author Andrew Varley
*/
public class ChangeResultsSizeAction extends LookupDispatchAction
{
    // Session attribute key under which the DisplayableResults table is stored.
    protected static final String DISPLAYABLERESULTS_NAME = "resultsTable";
    /**
     * Change the number of results displayed per page. Reads the new page
     * size from the submitted ChangeResultsForm and repositions the table on
     * the page containing the current start item.
     *
     * @param mapping The ActionMapping used to select this instance
     * @param form The optional ActionForm bean for this request (if any)
     * @param request The HTTP request we are processing
     * @param response The HTTP response we are creating
     * @return an ActionForward object defining where control goes next
     *
     * @exception ServletException if a servlet error occurs
     */
    public ActionForward changePageSize(ActionMapping mapping, ActionForm form,
                                        HttpServletRequest request, HttpServletResponse response)
        throws ServletException {
        HttpSession session = request.getSession();
        DisplayableResults dr = (DisplayableResults) session.getAttribute(DISPLAYABLERESULTS_NAME);
        ChangeResultsForm changeResultsForm = (ChangeResultsForm) form;
        dr.setPageSize(Integer.parseInt(changeResultsForm.getPageSize()));
        // Need to set the start so that we are on the page containing the current start item
        dr.setStart((dr.getStart() / dr.getPageSize()) * dr.getPageSize());
        return mapping.findForward("results");
    }
    /**
     * Save the current collection. Currently a no-op: it simply forwards
     * back to the results page.
     *
     * @param mapping The ActionMapping used to select this instance
     * @param form The optional ActionForm bean for this request (if any)
     * @param request The HTTP request we are processing
     * @param response The HTTP response we are creating
     * @return an ActionForward object defining where control goes next
     *
     * @exception ServletException if a servlet error occurs
     */
    public ActionForward saveCollection(ActionMapping mapping, ActionForm form,
                                        HttpServletRequest request, HttpServletResponse response)
        throws ServletException {
        return mapping.findForward("results");
    }
    /**
     * Distributes the actions to the necessary methods, by providing a Map from action to
     * the name of a method.
     *
     * @return a Map
     */
    protected Map getKeyMethodMap() {
        Map map = new HashMap();
        map.put("button.change", "changePageSize");
        map.put("button.save", "saveCollection");
        return map;
    }
}
| intermine/src/java/org/intermine/web/results/ChangeResultsSizeAction.java | package org.flymine.web.results;
/*
* Copyright (C) 2002-2003 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;
import org.apache.struts.actions.LookupDispatchAction;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
/**
* Implementation of <strong>LookupDispatchAction</strong>. Changes the
* size of the results displayed.
*
* @author Andrew Varley
*/
public class ChangeResultsSizeAction extends LookupDispatchAction
{
    // Session attribute key under which the DisplayableResults table is stored.
    protected static final String DISPLAYABLERESULTS_NAME = "resultsTable";
    /**
     * Change the number of results displayed per page. Reads the new page
     * size from the submitted ChangeResultsForm and repositions the table on
     * the page containing the current start item.
     *
     * @param mapping The ActionMapping used to select this instance
     * @param form The optional ActionForm bean for this request (if any)
     * @param request The HTTP request we are processing
     * @param response The HTTP response we are creating
     * @return an ActionForward object defining where control goes next
     *
     * @exception ServletException if a servlet error occurs
     */
    public ActionForward changePageSize(ActionMapping mapping, ActionForm form,
                                        HttpServletRequest request, HttpServletResponse response)
        throws ServletException {
        HttpSession session = request.getSession();
        DisplayableResults dr = (DisplayableResults) session.getAttribute(DISPLAYABLERESULTS_NAME);
        ChangeResultsForm changeResultsForm = (ChangeResultsForm) form;
        dr.setPageSize(Integer.parseInt(changeResultsForm.getPageSize()));
        // Need to set the start so that we are on the page containing the current start item
        dr.setStart((dr.getStart() / dr.getPageSize()) * dr.getPageSize());
        return mapping.findForward("results");
    }
    /**
     * Distributes the actions to the necessary methods, by providing a Map from action to
     * the name of a method.
     *
     * @return a Map
     */
    protected Map getKeyMethodMap() {
        Map map = new HashMap();
        map.put("button.change", "changePageSize");
        return map;
    }
}
| Added saveCollection() method (currently does nothing)
| intermine/src/java/org/intermine/web/results/ChangeResultsSizeAction.java | Added saveCollection() method (currently does nothing) |
|
Java | apache-2.0 | 2493f8bd5a75c7deef2756ff619e380609414e43 | 0 | tpb1908/AndroidProjectsClient,tpb1908/AndroidProjectsClient,tpb1908/AndroidProjectsClient | package com.tpb.projects.project;
import android.content.ClipData;
import android.graphics.drawable.Drawable;
import android.support.v7.widget.CardView;
import android.support.v7.widget.RecyclerView;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.DragEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ProgressBar;
import com.commonsware.cwac.anddown.AndDown;
import com.tpb.projects.R;
import com.tpb.projects.data.Loader;
import com.tpb.projects.data.models.Card;
import com.tpb.projects.data.models.Issue;
import org.sufficientlysecure.htmltextview.HtmlHttpImageGetter;
import org.sufficientlysecure.htmltextview.HtmlTextView;
import java.util.ArrayList;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by theo on 20/12/16.
*/
class CardAdapter extends RecyclerView.Adapter<CardAdapter.CardHolder> {
private static final String TAG = CardAdapter.class.getSimpleName();
private ArrayList<Card> mCards = new ArrayList<>();
private AndDown md = new AndDown();
private ColumnFragment mParent;
CardAdapter(ColumnFragment parent) {
mParent = parent;
}
void setCards(ArrayList<Card> cards) {
mCards = cards;
notifyDataSetChanged();
}
private void addCard(Card card) {
mCards.add(card);
notifyItemInserted(mCards.size());
}
private void addCard(int pos, Card card) {
Log.i(TAG, "addCard: Card being added to " + pos);
mCards.add(pos, card);
notifyItemInserted(pos);
}
private void removeCard(Card card) {
mCards.remove(card);
notifyDataSetChanged();
}
private void moveCard(int oldPos, int newPos) {
final Card card = mCards.get(oldPos);
mCards.remove(oldPos);
mCards.add(newPos, card);
notifyItemMoved(oldPos, newPos);
}
private int indexOf(int cardId) {
for(int i = 0; i < mCards.size(); i++) {
if(mCards.get(i).getId() == cardId) return i;
}
return -1;
}
private ArrayList<Card> getCards() {
return mCards;
}
@Override
public CardHolder onCreateViewHolder(ViewGroup parent, int viewType) {
return new CardHolder(LayoutInflater.from(parent.getContext()).inflate(R.layout.viewholder_card, parent, false));
}
@Override
public void onBindViewHolder(CardHolder holder, int position) {
final int pos = holder.getAdapterPosition();
holder.mCardView.setTag(mCards.get(pos).getId());
holder.mCardView.setOnLongClickListener(view -> {
final ClipData data = ClipData.newPlainText("", "");
final View.DragShadowBuilder shadowBuilder = new View.DragShadowBuilder(view);
view.startDrag(data, shadowBuilder, view, 0);
view.setVisibility(View.INVISIBLE);
return true;
});
holder.mCardView.setOnDragListener(new DragListener());
if(mCards.get(pos).requiresLoadingFromIssue()) {
holder.mSpinner.setVisibility(View.VISIBLE);
mParent.loadIssue(new Loader.IssueLoader() {
@Override
public void issueLoaded(Issue issue) {
mCards.get(pos).setRequiresLoadingFromIssue(false);
mCards.get(pos).setNote(issue.getTitle());
holder.mSpinner.setVisibility(View.INVISIBLE);
notifyItemChanged(pos);
}
@Override
public void loadError() {
}
}, mCards.get(pos).getIssueId());
} else {
holder.mMarkDown.setHtml(
md.markdownToHtml(
mCards.get(holder.getAdapterPosition()).getNote()
),
new HtmlHttpImageGetter(holder.mMarkDown)
);
}
}
@Override
public int getItemCount() {
return mCards.size();
}
class CardHolder extends RecyclerView.ViewHolder {
@BindView(R.id.card_markdown) HtmlTextView mMarkDown;
@BindView(R.id.card_issue_progress) ProgressBar mSpinner;
@BindView(R.id.viewholder_card) CardView mCardView;
CardHolder(View view) {
super(view);
ButterKnife.bind(this, view);
}
}
class DragListener implements View.OnDragListener {
boolean isDropped = false;
private DisplayMetrics metrics;
private Drawable selectedBG;
private int accent;
DragListener() {
metrics = new DisplayMetrics();
accent = mParent.getContext().getResources().getColor(R.color.colorAccent);
}
@Override
public boolean onDrag(View view, DragEvent event) {
final int action = event.getAction();
//FIXME When things are moved the tags must be changed
switch(action) {
case DragEvent.ACTION_DRAG_LOCATION:
mParent.getActivity().getWindowManager().getDefaultDisplay().getMetrics(metrics);
if(event.getX() / metrics.widthPixels > 0.85f) {
((ProjectActivity) mParent.getActivity()).dragRight();
} else if(event.getX() / metrics.widthPixels < 0.15f) {
((ProjectActivity) mParent.getActivity()).dragLeft();
}
break;
case DragEvent.ACTION_DROP:
isDropped = true;
int sourcePosition, targetPosition = -1;
final View sourceView = (View) event.getLocalState();
view.setVisibility(View.VISIBLE);
final RecyclerView target;
final RecyclerView source = (RecyclerView) sourceView.getParent();
final CardAdapter sourceAdapter = (CardAdapter) source.getAdapter();
sourcePosition = sourceAdapter.indexOf((int) sourceView.getTag());
final Card card = sourceAdapter.getCards().get(sourcePosition);
if(view.getId() == R.id.viewholder_card) {
target = (RecyclerView) view.getParent();
} else {
target = (RecyclerView) view;
}
final CardAdapter targetAdapter = (CardAdapter) target.getAdapter();
if(view.getId() == R.id.viewholder_card) {
targetPosition = targetAdapter.indexOf((int) view.getTag());
//TODO get y positions of each view and decide on which side to add the card
if(source != target) {
if(targetPosition >= 0) {
Log.i(TAG, "onDrag: Adding to position " + targetPosition);
targetAdapter.addCard(targetPosition, card);
} else {
targetAdapter.addCard(card);
}
sourceAdapter.removeCard(card);
} else { //We are moving a card
sourceAdapter.moveCard(sourcePosition, targetPosition);
}
} else if(view.getId() == R.id.column_recycler && ((RecyclerView) view).getAdapter().getItemCount() == 0) {
Log.i(TAG, "onDrag: Drop on the recycler");
sourceAdapter.removeCard(card);
targetAdapter.addCard(card);
}
view.setBackground(selectedBG);
break;
case DragEvent.ACTION_DRAG_ENTERED:
// Log.i(TAG, "onDrag: Drag entered");
if(view.getId() == R.id.viewholder_card
|| (view.getId() == R.id.column_recycler && ((RecyclerView) view).getAdapter().getItemCount() == 0)) {
selectedBG = view.getBackground();
view.setBackgroundColor(accent);
}
//This is when we have entered another view
break;
case DragEvent.ACTION_DRAG_EXITED:
Log.i(TAG, "onDrag: Drag exited");
view.setBackground(selectedBG);
//This is when we have exited another view
break;
default:
break;
}
if (!isDropped) {
View vw = (View) event.getLocalState();
vw.setVisibility(View.VISIBLE);
}
return true;
}
}
}
| app/src/main/java/com/tpb/projects/project/CardAdapter.java | package com.tpb.projects.project;
import android.content.ClipData;
import android.graphics.drawable.Drawable;
import android.support.v7.widget.CardView;
import android.support.v7.widget.RecyclerView;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.DragEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ProgressBar;
import com.commonsware.cwac.anddown.AndDown;
import com.tpb.projects.R;
import com.tpb.projects.data.Loader;
import com.tpb.projects.data.models.Card;
import com.tpb.projects.data.models.Issue;
import org.sufficientlysecure.htmltextview.HtmlHttpImageGetter;
import org.sufficientlysecure.htmltextview.HtmlTextView;
import java.util.ArrayList;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * Adapter displaying the {@link Card}s of a single project column, and implementing
 * drag-and-drop of cards within and between columns.
 *
 * Fix for the FIXME in {@code onDrag}: previously each card view's tag stored the
 * adapter position captured at bind time, which goes stale as soon as cards are
 * added, removed or moved without a rebind. Tags now store card ids, and positions
 * are recomputed at drop time via {@link #indexOf(int)}.
 *
 * Created by theo on 20/12/16.
 */
class CardAdapter extends RecyclerView.Adapter<CardAdapter.CardHolder> {
    private static final String TAG = CardAdapter.class.getSimpleName();

    private ArrayList<Card> mCards = new ArrayList<>();
    private AndDown md = new AndDown(); // Markdown -> HTML renderer
    private ColumnFragment mParent;

    CardAdapter(ColumnFragment parent) {
        mParent = parent;
    }

    /** Replaces the backing list and redraws the column. */
    void setCards(ArrayList<Card> cards) {
        mCards = cards;
        notifyDataSetChanged();
    }

    /** Appends a card to the end of the column. */
    void addCard(Card card) {
        mCards.add(card);
        notifyDataSetChanged();
    }

    /** Inserts a card at the given position. */
    void addCard(int position, Card card) {
        mCards.add(position, card);
        notifyDataSetChanged();
    }

    /** Removes a card (by equality) from the column, if present. */
    void removeCard(Card card) {
        mCards.remove(card);
        notifyDataSetChanged();
    }

    /** Moves a card from one position to another within this column. */
    void moveCard(int oldPosition, int newPosition) {
        final Card card = mCards.remove(oldPosition);
        mCards.add(newPosition, card);
        notifyDataSetChanged();
    }

    /**
     * Returns the current position of the card with the given id, or -1 if it is not
     * in this adapter. Positions are always recomputed from the id rather than read
     * from a possibly stale view tag.
     */
    int indexOf(int cardId) {
        for(int i = 0; i < mCards.size(); i++) {
            if(mCards.get(i).getId() == cardId) {
                return i;
            }
        }
        return -1;
    }

    ArrayList<Card> getCards() {
        return mCards;
    }

    @Override
    public CardHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        return new CardHolder(LayoutInflater.from(parent.getContext()).inflate(R.layout.viewholder_card, parent, false));
    }

    @Override
    public void onBindViewHolder(CardHolder holder, int position) {
        final int pos = holder.getAdapterPosition();
        // Tag the view with the card id, not the position (see class javadoc).
        // NOTE(review): assumes Card ids fit in an int — the tag is read back with an
        // (int) cast in DragListener. Confirm against the Card model.
        holder.mCardView.setTag(mCards.get(pos).getId());
        holder.mCardView.setOnLongClickListener(view -> {
            // Start the drag with an empty clip; the view itself is the local state
            final ClipData data = ClipData.newPlainText("", "");
            final View.DragShadowBuilder shadowBuilder = new View.DragShadowBuilder(view);
            view.startDrag(data, shadowBuilder, view, 0);
            view.setVisibility(View.INVISIBLE);
            return true;
        });
        holder.mCardView.setOnDragListener(new DragListener());
        if(mCards.get(pos).requiresLoadingFromIssue()) {
            // The card text must be fetched from its linked issue first
            holder.mSpinner.setVisibility(View.VISIBLE);
            mParent.loadIssue(new Loader.IssueLoader() {
                @Override
                public void issueLoaded(Issue issue) {
                    // NOTE(review): `pos` was captured at bind time and may be stale by
                    // the time the issue loads — TODO confirm / resolve by id instead.
                    mCards.get(pos).setRequiresLoadingFromIssue(false);
                    mCards.get(pos).setNote(issue.getTitle());
                    holder.mSpinner.setVisibility(View.INVISIBLE);
                    notifyItemChanged(pos);
                }

                @Override
                public void loadError() {
                }
            }, mCards.get(pos).getIssueId());
        } else {
            holder.mMarkDown.setHtml(
                    md.markdownToHtml(
                            mCards.get(holder.getAdapterPosition()).getNote()
                    ),
                    new HtmlHttpImageGetter(holder.mMarkDown)
            );
        }
    }

    @Override
    public int getItemCount() {
        return mCards.size();
    }

    class CardHolder extends RecyclerView.ViewHolder {
        @BindView(R.id.card_markdown) HtmlTextView mMarkDown;
        @BindView(R.id.card_issue_progress) ProgressBar mSpinner;
        @BindView(R.id.viewholder_card) CardView mCardView;

        CardHolder(View view) {
            super(view);
            ButterKnife.bind(this, view);
        }
    }

    public class DragListener implements View.OnDragListener {
        private boolean isDropped = false;
        private DisplayMetrics metrics;
        private Drawable selectedBG; // Background to restore after drop-target highlight
        private int accent;

        DragListener() {
            metrics = new DisplayMetrics();
            accent = mParent.getContext().getResources().getColor(R.color.colorAccent);
        }

        @Override
        public boolean onDrag(View view, DragEvent event) {
            final int action = event.getAction();
            switch(action) {
                case DragEvent.ACTION_DRAG_LOCATION:
                    // Scroll between columns when the drag nears either horizontal edge
                    mParent.getActivity().getWindowManager().getDefaultDisplay().getMetrics(metrics);
                    if(event.getX() / metrics.widthPixels > 0.85f) {
                        Log.i(TAG, "onDrag: Right");
                        ((ProjectActivity) mParent.getActivity()).dragRight();
                    } else if(event.getX() / metrics.widthPixels < 0.15f) {
                        Log.i(TAG, "onDrag: Left");
                        ((ProjectActivity) mParent.getActivity()).dragLeft();
                    }
                    break;
                case DragEvent.ACTION_DROP:
                    isDropped = true;
                    int sourcePosition, targetPosition = -1;
                    final View sourceView = (View) event.getLocalState();
                    view.setVisibility(View.VISIBLE);
                    final RecyclerView target;
                    final RecyclerView source = (RecyclerView) sourceView.getParent();
                    final CardAdapter sourceAdapter = (CardAdapter) source.getAdapter();
                    // Resolve the dragged card's current position from the id in its tag
                    sourcePosition = sourceAdapter.indexOf((int) sourceView.getTag());
                    if(sourcePosition < 0) {
                        break; // Stale tag: the card is no longer in the source adapter
                    }
                    final Card card = sourceAdapter.getCards().get(sourcePosition);
                    if(view.getId() == R.id.viewholder_card) {
                        target = (RecyclerView) view.getParent();
                    } else {
                        target = (RecyclerView) view;
                    }
                    final CardAdapter targetAdapter = (CardAdapter) target.getAdapter();
                    if(view.getId() == R.id.viewholder_card) {
                        targetPosition = targetAdapter.indexOf((int) view.getTag());
                        //TODO get y positions of each view and decide on which side to add the card
                        if(source != target) {
                            // Dropped onto a card in another column
                            if(targetPosition >= 0) {
                                targetAdapter.addCard(targetPosition, card);
                            } else {
                                targetAdapter.addCard(card);
                            }
                            sourceAdapter.removeCard(card);
                        } else if(targetPosition >= 0) { // Reordering within the same column
                            sourceAdapter.moveCard(sourcePosition, targetPosition);
                        }
                    } else if(view.getId() == R.id.column_recycler && ((RecyclerView) view).getAdapter().getItemCount() == 0) {
                        // Dropped onto an empty column
                        Log.i(TAG, "onDrag: Drop on the recycler");
                        sourceAdapter.removeCard(card);
                        targetAdapter.addCard(card);
                    }
                    view.setBackground(selectedBG);
                    break;
                case DragEvent.ACTION_DRAG_ENTERED:
                    if(view.getId() == R.id.viewholder_card
                            || (view.getId() == R.id.column_recycler && ((RecyclerView) view).getAdapter().getItemCount() == 0)) {
                        // Highlight the view as a possible drop target
                        selectedBG = view.getBackground();
                        view.setBackgroundColor(accent);
                    }
                    break;
                case DragEvent.ACTION_DRAG_EXITED:
                    Log.i(TAG, "onDrag: Drag exited");
                    view.setBackground(selectedBG);
                    break;
                default:
                    break;
            }
            if(!isDropped) {
                // The drag ended without a drop: restore the hidden source view
                ((View) event.getLocalState()).setVisibility(View.VISIBLE);
            }
            return true;
        }
    }
}
| Resolved issue of tags not being updated without notifyDataSetChanged; now using card ids and indexOf.
| app/src/main/java/com/tpb/projects/project/CardAdapter.java | Resolved issue of tags not being changed without datasetchanged. Used card ids and index of. |
|
Java | apache-2.0 | 7347c2d7ae8bad67aaa55081e0ab5f9c43474da1 | 0 | jspecify/nullness-checker-for-checker-framework,jspecify/nullness-checker-for-checker-framework | // Copyright 2020 The JSpecify Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.jspecify.nullness;
import static com.sun.source.tree.Tree.Kind.NULL_LITERAL;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableSet;
import static javax.lang.model.element.ElementKind.ENUM_CONSTANT;
import static javax.lang.model.element.ElementKind.PACKAGE;
import static javax.lang.model.type.TypeKind.ARRAY;
import static javax.lang.model.type.TypeKind.DECLARED;
import static javax.lang.model.type.TypeKind.WILDCARD;
import static org.checkerframework.framework.qual.TypeUseLocation.CONSTRUCTOR_RESULT;
import static org.checkerframework.framework.qual.TypeUseLocation.EXCEPTION_PARAMETER;
import static org.checkerframework.framework.qual.TypeUseLocation.IMPLICIT_LOWER_BOUND;
import static org.checkerframework.framework.qual.TypeUseLocation.LOCAL_VARIABLE;
import static org.checkerframework.framework.qual.TypeUseLocation.OTHERWISE;
import static org.checkerframework.framework.qual.TypeUseLocation.RECEIVER;
import static org.checkerframework.framework.qual.TypeUseLocation.RESOURCE_VARIABLE;
import static org.checkerframework.framework.qual.TypeUseLocation.UNBOUNDED_WILDCARD_UPPER_BOUND;
import static org.checkerframework.javacutil.AnnotationUtils.areSame;
import static org.checkerframework.javacutil.TreeUtils.elementFromUse;
import static org.checkerframework.javacutil.TypesUtils.isPrimitive;
import static org.checkerframework.javacutil.TypesUtils.wildcardToTypeParam;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.IdentifierTree;
import com.sun.source.tree.LiteralTree;
import com.sun.source.tree.MemberSelectTree;
import com.sun.source.tree.Tree;
import com.sun.tools.javac.code.Type.WildcardType;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import org.checkerframework.common.basetype.BaseTypeChecker;
import org.checkerframework.framework.flow.CFAnalysis;
import org.checkerframework.framework.flow.CFStore;
import org.checkerframework.framework.flow.CFTransfer;
import org.checkerframework.framework.flow.CFValue;
import org.checkerframework.framework.qual.TypeUseLocation;
import org.checkerframework.framework.type.AnnotatedTypeFactory;
import org.checkerframework.framework.type.AnnotatedTypeFormatter;
import org.checkerframework.framework.type.AnnotatedTypeMirror;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedDeclaredType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedPrimitiveType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedTypeVariable;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedWildcardType;
import org.checkerframework.framework.type.DefaultAnnotatedTypeFormatter;
import org.checkerframework.framework.type.DefaultTypeHierarchy;
import org.checkerframework.framework.type.GenericAnnotatedTypeFactory;
import org.checkerframework.framework.type.NoElementQualifierHierarchy;
import org.checkerframework.framework.type.QualifierHierarchy;
import org.checkerframework.framework.type.StructuralEqualityComparer;
import org.checkerframework.framework.type.StructuralEqualityVisitHistory;
import org.checkerframework.framework.type.TypeHierarchy;
import org.checkerframework.framework.type.TypeVariableSubstitutor;
import org.checkerframework.framework.type.treeannotator.ListTreeAnnotator;
import org.checkerframework.framework.type.treeannotator.TreeAnnotator;
import org.checkerframework.framework.type.typeannotator.TypeAnnotator;
import org.checkerframework.framework.type.visitor.AnnotatedTypeScanner;
import org.checkerframework.framework.util.AnnotationFormatter;
import org.checkerframework.framework.util.DefaultAnnotationFormatter;
import org.checkerframework.framework.util.DefaultQualifierKindHierarchy;
import org.checkerframework.framework.util.QualifierKindHierarchy;
import org.checkerframework.framework.util.defaults.QualifierDefaults;
import org.checkerframework.javacutil.AnnotationBuilder;
import org.jspecify.annotations.DefaultNonNull;
import org.jspecify.annotations.Nullable;
import org.jspecify.annotations.NullnessUnspecified;
public final class NullSpecAnnotatedTypeFactory
extends GenericAnnotatedTypeFactory<CFValue, CFStore, CFTransfer, CFAnalysis> {
private final AnnotationMirror nonNull;
private final AnnotationMirror unionNull;
private final AnnotationMirror codeNotNullnessAware;
private final boolean leastConvenientWorld;
  public NullSpecAnnotatedTypeFactory(BaseTypeChecker checker) {
    // Only use flow-sensitive type refinement if implementation code should be checked
    super(checker, checker.hasOption("checkImpl"));
    /*
     * Under our proposed subtyping rules, every type has "additional nullness." There are 3
     * additional-nullness values. In this implementation, we *mostly* represent each one with an
     * AnnotationMirror.
     *
     * There is one exception: We do not have an AnnotationMirror for the additional nullness
     * NO_CHANGE. When we need to represent NO_CHANGE, we take one of two approaches, depending on
     * the base type:
     *
     * - On most types, we use an extra AnnotationMirror we've defined, nonNull.
     *
     * - On type-variable usage, we use *no* annotation.
     *
     * For further discussion of this, see isNullExclusiveUnderEveryParameterization.
     *
     * Since the proposed subtyping rules use names like "CODE_NOT_NULLNESS_AWARE," we follow those
     * names here. That way, we distinguish more clearly between "Does a type have a
     * @NullnessUnspecified annotation written on it in source code?" and "Is the additional
     * nullness of a type codeNotNullnessAware?" (The latter can happen not only from a
     * @NullnessUnspecified annotation but also from the default in effect.)
     */
    nonNull = AnnotationBuilder.fromClass(elements, NonNull.class);
    unionNull = AnnotationBuilder.fromClass(elements, Nullable.class);
    codeNotNullnessAware = AnnotationBuilder.fromClass(elements, NullnessUnspecified.class);
    if (checker.hasOption("aliasCFannos")) {
      // Optionally treat the stock Checker Framework @Nullable as an alias for ours.
      addAliasedAnnotation(org.checkerframework.checker.nullness.qual.Nullable.class, unionNull);
    }
    // "strict" option selects the "least convenient world" interpretation of unspecified
    // nullness; see isSubtype in NullSpecQualifierHierarchy for how the mode changes subtyping.
    leastConvenientWorld = checker.hasOption("strict");
    postInit();
  }
@Override
protected Set<Class<? extends Annotation>> createSupportedTypeQualifiers() {
return new LinkedHashSet<>(asList(Nullable.class, NullnessUnspecified.class, NonNull.class));
}
@Override
protected QualifierHierarchy createQualifierHierarchy() {
return new NullSpecQualifierHierarchy(getSupportedTypeQualifiers(), elements);
}
  /**
   * Hierarchy over the three nullness annotations. Its subtyping relation is mode-dependent: in
   * lenient mode, an unspecified-nullness annotation is treated as a subtype and supertype of
   * everything; in strict mode, it is not even a subtype of itself (see {@link #isSubtype}).
   */
  private final class NullSpecQualifierHierarchy extends NoElementQualifierHierarchy {
    NullSpecQualifierHierarchy(
        Collection<Class<? extends Annotation>> qualifierClasses, Elements elements) {
      super(qualifierClasses, elements);
    }
    @Override
    public boolean isSubtype(AnnotationMirror subAnno, AnnotationMirror superAnno) {
      /*
       * Since we perform all necessary checking in the isSubtype method in NullSpecTypeHierarchy, I
       * tried replacing this body with `return true` to avoid duplicating logic. However, that's a
       * problem because the result of this method is sometimes cached and used instead of a full
       * call to the isSubtype method in NullSpecTypeHierarchy.
       *
       * Specifically: DefaultTypeHierarchy.visitDeclared_Declared calls isPrimarySubtype, which
       * calls isAnnoSubtype, which directly calls NullSpecQualifierHierarchy.isSubtype (as opposed
       * to NullSpecTypeHierarchy.isSubtype). That's still fine, since we'll reject the types in
       * NullSpecTypeHierarchy.isSubtype. The problem, though, is that it also inserts a cache entry
       * for the supposed subtyping relationship, and that entry can cause future checks to
       * short-circuit. (I think I saw this in isContainedBy.)
       */
      boolean subIsUnspecified = areSame(subAnno, codeNotNullnessAware);
      boolean superIsUnspecified = areSame(superAnno, codeNotNullnessAware);
      boolean eitherIsUnspecified = subIsUnspecified || superIsUnspecified;
      boolean bothAreUnspecified = subIsUnspecified && superIsUnspecified;
      // Strict mode: unspecified is not a subtype of unspecified.
      if (leastConvenientWorld && bothAreUnspecified) {
        return false;
      }
      // Lenient mode: unspecified is compatible with anything on either side.
      if (!leastConvenientWorld && eitherIsUnspecified) {
        return true;
      }
      // Otherwise the ordering is nonNull <: unspecified <: unionNull.
      return areSame(subAnno, nonNull) || areSame(superAnno, unionNull);
    }
    @Override
    protected QualifierKindHierarchy createQualifierKindHierarchy(
        Collection<Class<? extends Annotation>> qualifierClasses) {
      return new DefaultQualifierKindHierarchy(qualifierClasses, /*bottom=*/ NonNull.class) {
        @Override
        protected Map<DefaultQualifierKind, Set<DefaultQualifierKind>> createDirectSuperMap() {
          DefaultQualifierKind nonNullKind =
              nameToQualifierKind.get(NonNull.class.getCanonicalName());
          DefaultQualifierKind unionNullKind =
              nameToQualifierKind.get(Nullable.class.getCanonicalName());
          DefaultQualifierKind codeNotNullnessAwareKind =
              nameToQualifierKind.get(NullnessUnspecified.class.getCanonicalName());
          // Linear order: nonNull <: codeNotNullnessAware <: unionNull.
          Map<DefaultQualifierKind, Set<DefaultQualifierKind>> supers = new HashMap<>();
          supers.put(nonNullKind, singleton(codeNotNullnessAwareKind));
          supers.put(codeNotNullnessAwareKind, singleton(unionNullKind));
          supers.put(unionNullKind, emptySet());
          return supers;
          /*
           * The rules above are incomplete:
           *
           * - In "lenient mode," we treat unionNull as a subtype of codeNotNullnesesAware.
           *
           * - In "strict mode," we do *not* treat codeNotNullnesesAware as a subtype of itself.
           *
           * These subtleties are handled by isSubtype above. The incomplete rules still provide us
           * with useful implementations of leastUpperBound and greatestLowerBound.
           */
        }
      };
    }
  }
@Override
protected TypeHierarchy createTypeHierarchy() {
return new NullSpecTypeHierarchy(
checker,
getQualifierHierarchy(),
checker.getBooleanOption("ignoreRawTypeArguments", true),
checker.hasOption("invariantArrays"));
}
  /**
   * Type hierarchy that layers the spec's nullness-subtyping checks on top of CF's standard
   * structural check (see {@link #isSubtype}) and adjusts how type-variable and wildcard bounds
   * are compared so they match our non-standard substitution rules.
   */
  private final class NullSpecTypeHierarchy extends DefaultTypeHierarchy {
    NullSpecTypeHierarchy(
        BaseTypeChecker checker,
        QualifierHierarchy qualifierHierarchy,
        boolean ignoreRawTypeArguments,
        boolean invariantArrays) {
      super(checker, qualifierHierarchy, ignoreRawTypeArguments, invariantArrays);
    }
    @Override
    protected StructuralEqualityComparer createEqualityComparer() {
      return new NullSpecEqualityComparer(typeargVisitHistory);
    }
    @Override
    protected boolean visitTypevarSubtype(
        AnnotatedTypeVariable subtype, AnnotatedTypeMirror supertype) {
      /*
       * The superclass "projects" type-variable usages rather than unioning them. Consequently, if
       * we delegate directly to the supermethod, it can fail when it shouldn't. Fortunately, we
       * already handle the top-level nullness subtyping in isNullnessSubtype. So all we need to do
       * here is to handle any type arguments. To do that, we still delegate to the supertype. But
       * first we mark the supertype as unionNull so that the supertype's top-level check will
       * always succeed.
       *
       * TODO(cpovirk): There are probably many more cases that we could short-circuit. We might
       * consider doing that in isSubtype rather than with overrides.
       */
      return super.visitTypevarSubtype(subtype, withUnionNull(supertype));
    }
    @Override
    protected boolean visitWildcardSubtype(
        AnnotatedWildcardType subtype, AnnotatedTypeMirror supertype) {
      // See discussion in visitTypevarSubtype above.
      return super.visitWildcardSubtype(subtype, withUnionNull(supertype));
    }
    @Override
    protected boolean visitTypevarSupertype(
        AnnotatedTypeMirror subtype, AnnotatedTypeVariable supertype) {
      /*
       * TODO(cpovirk): Why are the supertype cases so different from the subtype cases above? In
       * particular: Why is it important to replace an argument only conditionally? And why is it
       * important to replace the subtype instead of the supertype?
       */
      return super.visitTypevarSupertype(
          isNullInclusiveUnderEveryParameterization(supertype) ? withNonNull(subtype) : subtype,
          supertype);
    }
    @Override
    protected boolean visitWildcardSupertype(
        AnnotatedTypeMirror subtype, AnnotatedWildcardType supertype) {
      // See discussion in visitTypevarSupertype above.
      return super.visitWildcardSupertype(
          isNullInclusiveUnderEveryParameterization(supertype) ? withNonNull(subtype) : subtype,
          supertype);
    }
    @Override
    public Boolean visitTypevar_Typevar(
        AnnotatedTypeVariable subtype, AnnotatedTypeVariable supertype, Void p) {
      /*
       * Everything we need to check will be handled by isNullnessSubtype. That's fortunate, as the
       * supermethod does not account for our non-standard substitution rules for type variables.
       * Under those rules, `@NullnessUnspecified T` can still produce a @Nullable value after
       * substitution.
       */
      return true;
    }
    @Override
    protected boolean isSubtype(
        AnnotatedTypeMirror subtype, AnnotatedTypeMirror supertype, AnnotationMirror top) {
      // Both the structural check and the nullness check must pass.
      return super.isSubtype(subtype, supertype, top) && isNullnessSubtype(subtype, supertype);
    }
    private boolean isNullnessSubtype(AnnotatedTypeMirror subtype, AnnotatedTypeMirror supertype) {
      // Primitives can never hold null, so nullness never rejects them.
      if (isPrimitive(subtype.getUnderlyingType())) {
        return true;
      }
      if (supertype.getKind() == WILDCARD) {
        /*
         * super.isSubtype already called back into this.isSameType (and thus into
         * isNullnessSubtype) for the bound. That's fortunate, as we don't define subtyping rules
         * for wildcards (since the JLS says that they should be capture converted by this point, or
         * we should be checking their *bounds* for a containment check).
         */
        return true;
      }
      return isNullInclusiveUnderEveryParameterization(supertype)
          || isNullExclusiveUnderEveryParameterization(subtype)
          || nullnessEstablishingPathExists(subtype, supertype);
    }
  }
private boolean isNullInclusiveUnderEveryParameterization(AnnotatedTypeMirror type) {
/*
* Our draft subtyping rules specify a special case for intersection types. However, those rules
* make sense only because the rules also specify that an intersection type never has an
* additional-nullness value of its own. This is in contrast to CF, which does let an
* intersection type have an AnnotationMirror of its own.
*
* ...well, sort of. As I understand it, what CF does is more that it tries to keep the
* AnnotationMirror of the intersecton type in sync with the AnnotationMirror of each of its
* components (which should themselves all match). So the intersection type "has" an
* AnnotationMirror, but it provides no *additional* information beyond what is already carried
* by its components' AnnotationMirrors.
*
* Nevertheless, the result is that we don't need a special case here: The check below is
* redundant with the subsequent check on the intersection's components, but redundancy is
* harmless.
*/
return type.hasAnnotation(unionNull)
|| (!leastConvenientWorld && type.hasAnnotation(codeNotNullnessAware));
}
  /**
   * Returns whether {@code subtype} excludes {@code null} under every parameterization, i.e.,
   * whether it can never hold {@code null} regardless of how any type variables are instantiated.
   */
  boolean isNullExclusiveUnderEveryParameterization(AnnotatedTypeMirror subtype) {
    /*
     * In most cases, it would be sufficient to check only nullnessEstablishingPathExists. However,
     * consider a type that meets all 3 of the following criteria:
     *
     * 1. a local variable
     *
     * 2. whose type is a type variable
     *
     * 3. whose corresponding type parameter permits nullable type arguments
     *
     * For such a type, nullnessEstablishingPathExists would always return false. And that makes
     * sense... until an implementation checks it with `if (foo != null)`. At that point, we need to
     * store an additional piece of information: Yes, _the type written in code_ can permit null,
     * but we know from dataflow that _this particular value_ is _not_ null. That additional
     * information is stored by attaching nonNull to the type-variable usage. This produces a type
     * distinct from all of:
     *
     * - `T`: `T` with additional nullness NO_CHANGE
     *
     * - `@NullnessUnspecified T`: `T` with additional nullness CODE_NOT_NULLNESS_AWARE
     *
     * - `@Nullable T`: `T` with additional nullness UNION_NULL
     *
     * It is unfortunate that this forces us to represent type-variable usages differently from how
     * we represent all other types. For all other types, the way to represent a type with
     * additional nullness NO_CHANGE is to attach nonNull. But again, for type-variable usages, the
     * way to do it is to attach *no* annotation.
     *
     * TODO(cpovirk): Would CF let us get away with attaching no annotation to other types? At least
     * by default, it requires annotations on all types other than type-variable usages. But it
     * might be nice to get rid of that requirement for consistency -- and perhaps to help us avoid
     * writing a package-private @NonNull annotation to class files, as discussed elsewhere.
     * However, we would still need nonNull for the case of null checks in dataflow. And we might
     * end up violating other CF assumptions and thus causing ourselves more trouble than we solve.
     * *Plus*, we'd want to make sure that our type-variable usages don't end up getting the
     * "default default" of codeNotNullnessAware, since the "default default" is applied after the
     * element-scoped defaulting logic. We would probably need to still attach nonNull to them and
     * then modify removeNonNullFromTypeVariableUsages to remove it from them, just as it removes
     * nonNull from type-variable usages.
     */
    return subtype.hasAnnotation(nonNull)
        || nullnessEstablishingPathExists(
            subtype, t -> t.getKind() == DECLARED || t.getKind() == ARRAY);
  }
private boolean nullnessEstablishingPathExists(
AnnotatedTypeMirror subtype, AnnotatedTypeMirror supertype) {
/*
* TODO(cpovirk): As an optimization, `return false` if `supertype` is not a type variable: If
* it's not a type variable, then the only ways for isNullnessSubtype to succeed were already
* checked by isNullInclusiveUnderEveryParameterization and
* isNullExclusiveUnderEveryParameterization.
*/
return nullnessEstablishingPathExists(
subtype, t -> checker.getTypeUtils().isSameType(t, supertype.getUnderlyingType()));
}
private boolean nullnessEstablishingPathExists(
AnnotatedTypeMirror subtype, Predicate<TypeMirror> supertypeMatcher) {
if (isUnionNullOrEquivalent(subtype)) {
return false;
}
if (supertypeMatcher.test(subtype.getUnderlyingType())) {
return true;
}
for (AnnotatedTypeMirror supertype : getUpperBounds(subtype)) {
if (nullnessEstablishingPathExists(supertype, supertypeMatcher)) {
return true;
}
}
/*
* We don't need to handle the "lower-bound rule" here: The Checker Framework doesn't perform
* wildcard capture conversion. (Hmm, but it might see post-capture-conversion types in some
* cases....) It compares "? super Foo" against "Bar" by more directly comparing Foo and Bar.
*/
return false;
}
private List<? extends AnnotatedTypeMirror> getUpperBounds(AnnotatedTypeMirror type) {
switch (type.getKind()) {
case INTERSECTION:
case TYPEVAR:
return withNonNull(type).directSuperTypes();
case WILDCARD:
List<AnnotatedTypeMirror> bounds = new ArrayList<>();
bounds.addAll(withNonNull(type).directSuperTypes());
/*
* We would use `((AnnotatedWildcardType) type).getTypeVariable()`, but it is not available
* in all cases that we need.
*/
WildcardType wildcard = (WildcardType) type.getUnderlyingType(); // javac internal type
TypeParameterElement typeParameter = wildcardToTypeParam(wildcard);
if (typeParameter != null) {
bounds.add(getAnnotatedType(typeParameter));
}
return unmodifiableList(bounds);
default:
return emptyList();
}
}
private boolean isUnionNullOrEquivalent(AnnotatedTypeMirror type) {
return type.hasAnnotation(unionNull)
|| (leastConvenientWorld && type.hasAnnotation(codeNotNullnessAware));
}
  /**
   * Structural-equality comparer whose top-level annotation comparison honors the mode-dependent
   * treatment of unspecified nullness: in lenient mode it is "equal" to anything, in strict mode
   * to nothing (see {@link #areEqual}).
   */
  private final class NullSpecEqualityComparer extends StructuralEqualityComparer {
    NullSpecEqualityComparer(StructuralEqualityVisitHistory typeargVisitHistory) {
      super(typeargVisitHistory);
    }
    @Override
    protected boolean checkOrAreEqual(AnnotatedTypeMirror type1, AnnotatedTypeMirror type2) {
      // Consult and populate the visit-history cache around the real comparison.
      Boolean pastResult = visitHistory.result(type1, type2, /*hierarchy=*/ unionNull);
      if (pastResult != null) {
        return pastResult;
      }
      boolean result = areEqual(type1, type2);
      this.visitHistory.add(type1, type2, /*hierarchy=*/ unionNull, result);
      return result;
    }
    @Override
    public boolean areEqualInHierarchy(
        AnnotatedTypeMirror type1, AnnotatedTypeMirror type2, AnnotationMirror top) {
      return areEqual(type1, type2);
    }
    private boolean areEqual(AnnotatedTypeMirror type1, AnnotatedTypeMirror type2) {
      /*
       * I'd like to use the spec definition here: "type1 is a subtype of type2 and vice versa."
       * However, that produces infinite recursion in some cases.
       */
      boolean type1IsUnspecified = type1.hasAnnotation(codeNotNullnessAware);
      boolean type2IsUnspecified = type2.hasAnnotation(codeNotNullnessAware);
      boolean bothAreUnspecified = type1IsUnspecified && type2IsUnspecified;
      boolean eitherIsUnspecified = type1IsUnspecified || type2IsUnspecified;
      if (leastConvenientWorld && bothAreUnspecified) {
        return false;
      }
      if (!leastConvenientWorld && eitherIsUnspecified) {
        return true;
      }
      // Otherwise, equal iff the top-level annotations match exactly.
      AnnotationMirror a1 = type1.getAnnotationInHierarchy(unionNull);
      AnnotationMirror a2 = type2.getAnnotationInHierarchy(unionNull);
      return a1 == a2 || (a1 != null && a2 != null && areSame(a1, a2));
      /*
       * TODO(cpovirk): Do we care about the base type, or is looking at annotations enough?
       * super.visitDeclared_Declared has a TODO with a similar question. Err, presumably normal
       * Java type-checking has done that job. A more interesting question may be why we don't look
       * at type args. The answer might be simply: "That's the contract, even though it is
       * surprising, given the names of the class and its methods." (Granted, the docs of
       * super.visitDeclared_Declared also say that it checks that "The types are of the same
       * class/interfaces," so the contract isn't completely clear.)
       */
    }
  }
  @Override
  protected TypeVariableSubstitutor createTypeVariableSubstitutor() {
    // Use our subclass to implement the spec's non-standard substitution rules.
    return new NullSpecTypeVariableSubstitutor();
  }
private final class NullSpecTypeVariableSubstitutor extends TypeVariableSubstitutor {
@Override
protected AnnotatedTypeMirror substituteTypeVariable(
AnnotatedTypeMirror argument, AnnotatedTypeVariable use) {
// TODO(cpovirk): Delegate to leastUpperBound?
AnnotatedTypeMirror substitute = argument.deepCopy(/*copyAnnotations=*/ true);
if (argument.hasAnnotation(unionNull) || use.hasAnnotation(unionNull)) {
substitute.replaceAnnotation(unionNull);
} else if (argument.hasAnnotation(codeNotNullnessAware)
|| use.hasAnnotation(codeNotNullnessAware)) {
substitute.replaceAnnotation(codeNotNullnessAware);
}
return substitute;
}
}
@Override
public AnnotatedDeclaredType getSelfType(Tree tree) {
AnnotatedDeclaredType superResult = super.getSelfType(tree);
return superResult == null ? null : withNonNull(superResult);
}
  @Override
  protected QualifierDefaults createQualifierDefaults() {
    // Use our subclass so that a null-aware annotation on an element changes the defaults
    // for that element and its descendants.
    return new NullSpecQualifierDefaults(elements, this);
  }
  @Override
  protected void addCheckedStandardDefaults(QualifierDefaults defs) {
    /*
     * This method sets up the defaults for *non-null-aware* code.
     *
     * All these defaults will be overridden (whether we like it or not) for null-aware code. That
     * happens when NullSpecQualifierDefaults.annotate(...) sets a new default for OTHERWISE.
     *
     * Note that these two methods do not contain the totality of our defaulting logic. For example,
     * our TypeAnnotator has special logic for upper bounds _in the case of `super` wildcards
     * specifically_.
     */
    // Here's the big default, the "default default":
    defs.addCheckedCodeDefault(codeNotNullnessAware, OTHERWISE);
    // Some locations are intrinsically non-nullable:
    defs.addCheckedCodeDefault(nonNull, CONSTRUCTOR_RESULT);
    defs.addCheckedCodeDefault(nonNull, RECEIVER);
    // We do want *some* of the CLIMB standard defaults:
    // (LOCATIONS_REFINED_BY_DATAFLOW is declared elsewhere in this file; presumably locals and
    // similar flow-refined locations, which start at top and are narrowed by dataflow — confirm.)
    for (TypeUseLocation location : LOCATIONS_REFINED_BY_DATAFLOW) {
      defs.addCheckedCodeDefault(unionNull, location);
    }
    defs.addCheckedCodeDefault(nonNull, IMPLICIT_LOWER_BOUND);
    // But for exception parameters, we want the default to be nonNull:
    defs.addCheckedCodeDefault(nonNull, EXCEPTION_PARAMETER);
    /*
     * Note one other difference from the CLIMB defaults: We want the default for implicit upper
     * bounds to match the "default default" of codeNotNullnessAware, not to be top/unionNull. We
     * accomplish this simply by not calling the supermethod (which would otherwise call
     * addClimbStandardDefaults, which would override the "default default").
     */
  }
/**
 * Qualifier defaults with JSpecify semantics: applies null-aware ({@code @DefaultNonNull})
 * defaults per top-level class element (never on a package element itself, to avoid CF's
 * cascade-to-subpackages behavior), extends defaulting to type-variable usages, and disables
 * CF's "conservative defaults" mode.
 */
private final class NullSpecQualifierDefaults extends QualifierDefaults {
  NullSpecQualifierDefaults(Elements elements, AnnotatedTypeFactory atypeFactory) {
    super(elements, atypeFactory);
  }
  @Override
  public void annotate(Element elt, AnnotatedTypeMirror type) {
    if (elt == null) {
      super.annotate(elt, type);
      return;
    }
    /*
     * CF has some built-in support for package-level defaults. However, they cascade to
     * subpackages (see 28.5.2), contrary to our semantics (see
     * https://github.com/jspecify/jspecify/issues/8). To avoid CF semantics, we never set a
     * default on a package element itself, only on each top-level class element in the package.
     *
     * XXX: When adding support for DefaultNullnessUnspecified, be sure that DefaultNullnessUnspecified on a *class* overrides
     * DefaultNonNull on the *package* (and vice versa).
     *
     * XXX: When adding support for aliases, make sure to support them here.
     */
    if (hasNullAwareAnnotation(elt)) {
      /*
       * Setting a default here affects not only this element but also its descendants in the
       * syntax tree.
       */
      addElementDefault(elt, unionNull, UNBOUNDED_WILDCARD_UPPER_BOUND);
      addElementDefault(elt, nonNull, OTHERWISE);
      /*
       * Some defaults are common to null-aware and non-null-aware code. We reassert some of those
       * here. If we didn't, then they would be overridden by OTHERWISE above.
       *
       * (Yes, our non-null-aware setup sets defaults for more locations than just these. But for
       * the other locations, it sets the default to nonNull. And there's no need for the
       * *null-aware* setup to default any specific location to nonNull: That is its default
       * everywhere that is not specifically overridden, thanks to the same OTHERWISE rule
       * discussed above.)
       */
      for (TypeUseLocation location : LOCATIONS_REFINED_BY_DATAFLOW) {
        addElementDefault(elt, unionNull, location);
      }
    }
    super.annotate(elt, type);
    removeNonNullFromTypeVariableUsages(type);
  }
  @Override
  public void annotate(Tree tree, AnnotatedTypeMirror type) {
    super.annotate(tree, type);
    removeNonNullFromTypeVariableUsages(type);
  }
  /** Strips nonNull from every type-variable usage in {@code type}; see shouldBeAnnotated. */
  private void removeNonNullFromTypeVariableUsages(AnnotatedTypeMirror type) {
    new AnnotatedTypeScanner<Void, Void>() {
      @Override
      public Void visitTypeVariable(AnnotatedTypeVariable type, Void aVoid) {
        // For an explanation, see shouldBeAnnotated below.
        type.removeAnnotation(nonNull);
        /*
         * It probably doesn't matter whether we invoke the supermethod or not. But let's do it,
         * simply because that's what tree visitors typically do.
         */
        return super.visitTypeVariable(type, aVoid);
      }
    }.visit(type);
  }
  @Override
  protected DefaultApplierElement createDefaultApplierElement(
      AnnotatedTypeFactory atypeFactory,
      Element annotationScope,
      AnnotatedTypeMirror type,
      boolean applyToTypeVar) {
    return new DefaultApplierElement(atypeFactory, annotationScope, type, applyToTypeVar) {
      @Override
      protected boolean shouldBeAnnotated(AnnotatedTypeMirror type, boolean applyToTypeVar) {
        /*
         * CF usually doesn't apply defaults to type-variable usages. But in non-null-aware code,
         * we want our default of codeNotNullnessAware to apply even to type variables.
         *
         * But there are 2 other things to keep in mind:
         *
         * - CF *does* apply defaults to type-variable usages *if* they are local variables.
         * That's because it will refine their types with dataflow. This CF behavior works fine
         * for us: Since we want to apply defaults in strictly more cases, we're happy to accept
         * what CF already does for local variables. (We do need to be sure to apply unionNull
         * (our top type) in that case, rather than codeNotNullnessAware. We accomplish that by
         * setting specific defaults for LOCATIONS_REFINED_BY_DATAFLOW.)
         *
         * - Non-null-aware code (discussed above) is easy: We apply codeNotNullnessAware to
         * everything except local variables. But null-aware code more complex. First, set aside
         * local variables, which we handle as discussed above. After that, we need to apply
         * nonNull to most types, but we need to *not* apply it to (non-local-variable)
         * type-variable usages. (For more on this, see
         * isNullExclusiveUnderEveryParameterization.) This need is weird enough that CF doesn't
         * appear to support it directly. Our solution is to apply nonNull to everything but then
         * remove it from any type variables it appears on. We do that in
         * removeNonNullFromTypeVariableUsages above.
         */
        return super.shouldBeAnnotated(type, /*applyToTypeVar=*/ true);
      }
    };
  }
  @Override
  public boolean applyConservativeDefaults(Element annotationScope) {
    /*
     * Ignore any command-line flag to request conservative defaults. The principle of
     * "unspecified nullness" is that we configure conservatism/leniency through changes in our
     * subtyping rules, rather than changes in how we choose the default annotation / additional
     * nullness of any type.
     */
    return false;
  }
}
// Locations whose types dataflow refines (per shouldBeAnnotated above). They default to the top
// type, unionNull, so that refinement — not the written default — determines their nullness.
private static final Set<TypeUseLocation> LOCATIONS_REFINED_BY_DATAFLOW =
    unmodifiableSet(new HashSet<>(asList(LOCAL_VARIABLE, RESOURCE_VARIABLE)));
@Override
protected void addComputedTypeAnnotations(Tree tree, AnnotatedTypeMirror type, boolean iUseFlow) {
  // Disable flow-based refinement for wildcard types; other kinds are unaffected.
  // TODO(cpovirk): This helps, but why?
  super.addComputedTypeAnnotations(tree, type, iUseFlow && type.getKind() != WILDCARD);
}
@Override
protected TypeAnnotator createTypeAnnotator() {
  /*
   * We override this method for two reasons:
   *
   * 1. To write some defaults that are hard to express through the addCheckedCodeDefault and
   * addElementDefault APIs. Caution: TypeAnnotator is safe for this only when the default is
   * shared by null-aware and non-null-aware code alike.
   *
   * 2. To *skip* the supermethod's behavior: it would add the top type (unionNull) to the bound
   * of unbounded wildcards, whereas we sometimes want codeNotNullnessAware there instead.
   */
  TypeAnnotator annotator = new NullSpecTypeAnnotator(this);
  return annotator;
}
/** Applies the intrinsic nullness defaults shared by null-aware and non-null-aware code. */
private final class NullSpecTypeAnnotator extends TypeAnnotator {
  NullSpecTypeAnnotator(AnnotatedTypeFactory typeFactory) {
    super(typeFactory);
  }
  @Override
  public Void visitDeclared(AnnotatedDeclaredType type, Void p) {
    AnnotatedDeclaredType outer = type.getEnclosingType();
    if (outer != null) {
      /*
       * TODO(cpovirk): If NullSpecVisitor starts looking at source trees instead of the derived
       * AnnotatedTypeMirror objects, then change this code to fill in this value unconditionally
       * (matching visitPrimitive below).
       */
      addIfNoAnnotationPresent(outer, nonNull);
    }
    return super.visitDeclared(type, p);
  }
  @Override
  public Void visitPrimitive(AnnotatedPrimitiveType type, Void p) {
    // A primitive can never hold null.
    type.replaceAnnotation(nonNull);
    return super.visitPrimitive(type, p);
  }
  @Override
  public Void visitWildcard(AnnotatedWildcardType type, Void p) {
    // Only `? super X` wildcards get this treatment: their (implicit) upper bound defaults to
    // unionNull unless an annotation is already present.
    boolean hasSuperBound = type.getUnderlyingType().getSuperBound() != null;
    if (hasSuperBound) {
      addIfNoAnnotationPresent(type.getExtendsBound(), unionNull);
    }
    return super.visitWildcard(type, p);
  }
}
@Override
protected TreeAnnotator createTreeAnnotator() {
  // Run our annotator first, then the standard CF annotators.
  TreeAnnotator nullSpecAnnotator = new NullSpecTreeAnnotator(this);
  return new ListTreeAnnotator(asList(nullSpecAnnotator, super.createTreeAnnotator()));
}
private final class NullSpecTreeAnnotator extends TreeAnnotator {
NullSpecTreeAnnotator(AnnotatedTypeFactory typeFactory) {
super(typeFactory);
}
@Override
public Void visitLiteral(LiteralTree node, AnnotatedTypeMirror type) {
if (node.getKind().asInterface() == LiteralTree.class) {
type.addAnnotation(node.getKind() == NULL_LITERAL ? unionNull : nonNull);
}
return super.visitLiteral(node, type);
}
@Override
public Void visitIdentifier(IdentifierTree node, AnnotatedTypeMirror type) {
annotateIfEnumConstant(node, type);
return super.visitIdentifier(node, type);
}
@Override
public Void visitMemberSelect(MemberSelectTree node, AnnotatedTypeMirror type) {
annotateIfEnumConstant(node, type);
return super.visitMemberSelect(node, type);
}
private void annotateIfEnumConstant(ExpressionTree node, AnnotatedTypeMirror type) {
Element element = elementFromUse(node);
if (element != null && element.getKind() == ENUM_CONSTANT) {
/*
* Even if it was annotated before, override it. There are 2 cases:
*
* 1. The declaration had an annotation on it in source. That will still get reported as an
* error when we visit the declaration (assuming we're compiling the code with the
* declaration): Anything we do here affects the *usage* but not the declaration. And we
* know that the usage isn't really @Nullable/@NullnessUnspecified, even if the author of
* the declaration said so.
*
* 2. The declaration had no annotation on it in source, but it was in non-null-aware code.
* And consequently, defaults.visit(...), which ran before us, applied a default of
* codeNotNullnessAware. Again, that default isn't correct, so we override it here.
*/
type.replaceAnnotation(nonNull);
}
}
}
@Override
protected AnnotationFormatter createAnnotationFormatter() {
  // Never print "invisible" qualifiers in diagnostics, regardless of what callers request.
  return new DefaultAnnotationFormatter() {
    @Override
    public String formatAnnotationString(
        Collection<? extends AnnotationMirror> annos, boolean printInvisible) {
      return super.formatAnnotationString(annos, /*printInvisible=*/ false);
    }
  };
}
@Override
protected AnnotatedTypeFormatter createAnnotatedTypeFormatter() {
  // Format annotated types with invisible qualifiers and verbose generics suppressed.
  return new DefaultAnnotatedTypeFormatter(
      /*
       * We would pass the result of getAnnotationFormatter(), but the superclass calls
       * createAnnotatedTypeFormatter() before it initializes that field.
       *
       * Fortunately, it's harmless to use one AnnotationFormatter here and another equivalent
       * one in createAnnotationFormatter().
       */
      createAnnotationFormatter(),
      // TODO(cpovirk): Permit configuration of these booleans?
      /*printVerboseGenerics=*/ false,
      /*defaultPrintInvisibleAnnos=*/ false);
}
@Override
public void postProcessClassTree(ClassTree tree) {
  // Deliberately a no-op: the supermethod would write computed annotations to bytecode.
  /*
   * To avoid writing computed annotations into bytecode (or even into the in-memory javac Element
   * objects), do not call the supermethod.
   *
   * We don't want to write computed annotations to bytecode because we don't want for checkers
   * (including this one!) to depend on those annotations. All core JSpecify nullness information
   * should be derivable from the originally written annotations.
   *
   * (We especially don't want to write @NonNull to bytecode, since it is an implementation detail
   * of this current checker implementation.)
   *
   * "Computed annotations" includes not only annotations added from defaults but also any
   * @Inherited/@InheritedAnnotation declaration annotations copied from supertypes. This could
   * end up causing us trouble: JSpecify requires that annotations are *not* inherited
   * (https://github.com/jspecify/jspecify/issues/14). Thankfully, there is no immediate problem:
   * None of our annotations have @Inherited/@InheritedAnnotation. However someday we expect to
   * support annotation "aliasing"/"implies." That may let users declare an
   * @Inherited/@InheritedAnnotation alias for @DefaultNonNull -- and maybe even an
   * @Inherited/@InheritedAnnotation *declaration* alias for @Nullable
   * (https://github.com/jspecify/jspecify/issues/124). If so, we'll want to make sure that we not
   * only prevent writing annotations back to bytecode / Element objects but also prevent even
   * recognizing them in the first place. To some degree, we already accomplish this by (a)
   * looking up @DefaultNonNull through the Element API (as opposed to a CF-specific API with special
   * inheritance logic) and (b) preventing CF from writing the annotations to the Element API. But
   * we'll likely need to hook into CF at a deeper level to prevent the annotations from being
   * discovered in the first place. *And* our @DefaultNonNull logic may need to avoid calling even the
   * javac API element.getAnnotation(...), as it returns any annotation "present" (including those
   * inherited), in favor of element.getAnnotationMirrors(), which returns only those "directly
   * present."
   *
   * XXX: When we implement aliasing, watch out for this!
   *
   * Additionally, when I was letting CF write computed annotations into bytecode, I ran into an
   * type.invalid.conflicting.annos error, which I have described more in
   * https://github.com/jspecify/nullness-checker-for-checker-framework/commit/d16a0231487e239bc94145177de464b5f77c8b19
   */
}
private void addIfNoAnnotationPresent(AnnotatedTypeMirror type, AnnotationMirror annotation) {
  // unionNull merely identifies our hierarchy; any qualifier from it blocks the addition.
  boolean alreadyAnnotated = type.isAnnotatedInHierarchy(unionNull);
  if (!alreadyAnnotated) {
    type.addAnnotation(annotation);
  }
}
@SuppressWarnings("unchecked") // safety guaranteed by API docs
private <T extends AnnotatedTypeMirror> T withNonNull(T type) {
  // Copy the type, then overwrite only the primary (root) annotation; nested ones are preserved.
  T copy = (T) type.deepCopy(/*copyAnnotations=*/ true);
  /*
   * TODO(cpovirk): In the case of a type-variable usage, I feel like we should need to *remove*
   * any existing annotation but then not *add* nonNull. (This is because of the difference
   * between type-variable usages and all other types, as discussed near the end of the giant
   * comment in isNullExclusiveUnderEveryParameterization.) However, the current code passes all
   * tests. Figure out whether that makes sense or we need more tests to show why not.
   */
  copy.replaceAnnotation(nonNull);
  return copy;
}
@SuppressWarnings("unchecked") // safety guaranteed by API docs
private <T extends AnnotatedTypeMirror> T withUnionNull(T type) {
  // Copy the type, then overwrite only the primary (root) annotation; nested ones are preserved.
  T copy = (T) type.deepCopy(/*copyAnnotations=*/ true);
  copy.replaceAnnotation(unionNull);
  return copy;
}
/** Returns whether {@code elt} or its directly enclosing package carries {@code @DefaultNonNull}. */
private static boolean hasNullAwareAnnotation(Element elt) {
  if (elt.getAnnotation(DefaultNonNull.class) != null) {
    return true;
  }
  Element enclosing = elt.getEnclosingElement();
  if (enclosing == null || enclosing.getKind() != PACKAGE) {
    // A null enclosing element is possible only under `-source 8 -target 8` (i.e., pre-JPMS)?
    return false;
  }
  return enclosing.getAnnotation(DefaultNonNull.class) != null;
}
}
| src/main/java/com/google/jspecify/nullness/NullSpecAnnotatedTypeFactory.java | // Copyright 2020 The JSpecify Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.jspecify.nullness;
import static com.sun.source.tree.Tree.Kind.NULL_LITERAL;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableSet;
import static javax.lang.model.element.ElementKind.ENUM_CONSTANT;
import static javax.lang.model.element.ElementKind.PACKAGE;
import static javax.lang.model.type.TypeKind.ARRAY;
import static javax.lang.model.type.TypeKind.DECLARED;
import static javax.lang.model.type.TypeKind.WILDCARD;
import static org.checkerframework.framework.qual.TypeUseLocation.CONSTRUCTOR_RESULT;
import static org.checkerframework.framework.qual.TypeUseLocation.EXCEPTION_PARAMETER;
import static org.checkerframework.framework.qual.TypeUseLocation.IMPLICIT_LOWER_BOUND;
import static org.checkerframework.framework.qual.TypeUseLocation.LOCAL_VARIABLE;
import static org.checkerframework.framework.qual.TypeUseLocation.OTHERWISE;
import static org.checkerframework.framework.qual.TypeUseLocation.RECEIVER;
import static org.checkerframework.framework.qual.TypeUseLocation.RESOURCE_VARIABLE;
import static org.checkerframework.framework.qual.TypeUseLocation.UNBOUNDED_WILDCARD_UPPER_BOUND;
import static org.checkerframework.javacutil.AnnotationUtils.areSame;
import static org.checkerframework.javacutil.TreeUtils.elementFromUse;
import static org.checkerframework.javacutil.TypesUtils.isPrimitive;
import static org.checkerframework.javacutil.TypesUtils.wildcardToTypeParam;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.IdentifierTree;
import com.sun.source.tree.LiteralTree;
import com.sun.source.tree.MemberSelectTree;
import com.sun.source.tree.Tree;
import com.sun.tools.javac.code.Type.WildcardType;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import org.checkerframework.common.basetype.BaseTypeChecker;
import org.checkerframework.framework.flow.CFAnalysis;
import org.checkerframework.framework.flow.CFStore;
import org.checkerframework.framework.flow.CFTransfer;
import org.checkerframework.framework.flow.CFValue;
import org.checkerframework.framework.qual.TypeUseLocation;
import org.checkerframework.framework.type.AnnotatedTypeFactory;
import org.checkerframework.framework.type.AnnotatedTypeFormatter;
import org.checkerframework.framework.type.AnnotatedTypeMirror;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedDeclaredType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedPrimitiveType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedTypeVariable;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedWildcardType;
import org.checkerframework.framework.type.DefaultAnnotatedTypeFormatter;
import org.checkerframework.framework.type.DefaultTypeHierarchy;
import org.checkerframework.framework.type.GenericAnnotatedTypeFactory;
import org.checkerframework.framework.type.NoElementQualifierHierarchy;
import org.checkerframework.framework.type.QualifierHierarchy;
import org.checkerframework.framework.type.StructuralEqualityComparer;
import org.checkerframework.framework.type.StructuralEqualityVisitHistory;
import org.checkerframework.framework.type.TypeHierarchy;
import org.checkerframework.framework.type.TypeVariableSubstitutor;
import org.checkerframework.framework.type.treeannotator.ListTreeAnnotator;
import org.checkerframework.framework.type.treeannotator.TreeAnnotator;
import org.checkerframework.framework.type.typeannotator.TypeAnnotator;
import org.checkerframework.framework.type.visitor.AnnotatedTypeScanner;
import org.checkerframework.framework.util.AnnotationFormatter;
import org.checkerframework.framework.util.DefaultAnnotationFormatter;
import org.checkerframework.framework.util.DefaultQualifierKindHierarchy;
import org.checkerframework.framework.util.QualifierKindHierarchy;
import org.checkerframework.framework.util.defaults.QualifierDefaults;
import org.checkerframework.javacutil.AnnotationBuilder;
import org.jspecify.annotations.DefaultNonNull;
import org.jspecify.annotations.Nullable;
import org.jspecify.annotations.NullnessUnspecified;
public final class NullSpecAnnotatedTypeFactory
extends GenericAnnotatedTypeFactory<CFValue, CFStore, CFTransfer, CFAnalysis> {
// Qualifier representing additional nullness NO_CHANGE on most types (see constructor comment).
private final AnnotationMirror nonNull;
// Qualifier representing additional nullness UNION_NULL (@Nullable); the hierarchy's top.
private final AnnotationMirror unionNull;
// Qualifier representing additional nullness CODE_NOT_NULLNESS_AWARE (@NullnessUnspecified).
private final AnnotationMirror codeNotNullnessAware;
// True when the "strict" checker option is set (pessimistic treatment of unspecified nullness).
private final boolean leastConvenientWorld;
/**
 * Creates the factory, building the qualifier AnnotationMirrors and reading checker options
 * ({@code checkImpl}, {@code aliasCFannos}, {@code strict}).
 */
public NullSpecAnnotatedTypeFactory(BaseTypeChecker checker) {
  // Only use flow-sensitive type refinement if implementation code should be checked
  super(checker, checker.hasOption("checkImpl"));
  /*
   * Under our proposed subtyping rules, every type has "additional nullness." There are 3
   * additional-nullness values. In this implementation, we *mostly* represent each one with an
   * AnnotationMirror.
   *
   * There is one exception: We do not have an AnnotationMirror for the additional nullness
   * NO_CHANGE. When we need to represent NO_CHANGE, we take one of two approaches, depending on
   * the base type:
   *
   * - On most types, we use an extra AnnotationMirror we've defined, nonNull.
   *
   * - On type-variable usage, we use *no* annotation.
   *
   * For further discussion of this, see isNullExclusiveUnderEveryParameterization.
   *
   * Since the proposed subtyping rules use names like "CODE_NOT_NULLNESS_AWARE," we follow those
   * names here. That way, we distinguish more clearly between "Does a type have a
   * @NullnessUnspecified annotation written on it in source code?" and "Is the additional
   * nullness of a type codeNotNullnessAware?" (The latter can happen not only from a
   * @NullnessUnspecified annotation but also from the default in effect.)
   */
  nonNull = AnnotationBuilder.fromClass(elements, NonNull.class);
  unionNull = AnnotationBuilder.fromClass(elements, Nullable.class);
  codeNotNullnessAware = AnnotationBuilder.fromClass(elements, NullnessUnspecified.class);
  // Optionally treat the Checker Framework's own @Nullable as an alias of ours.
  if (checker.hasOption("aliasCFannos")) {
    addAliasedAnnotation(org.checkerframework.checker.nullness.qual.Nullable.class, unionNull);
  }
  leastConvenientWorld = checker.hasOption("strict");
  postInit();
}
@Override
protected Set<Class<? extends Annotation>> createSupportedTypeQualifiers() {
  // The three qualifiers of our hierarchy, in insertion order.
  Set<Class<? extends Annotation>> qualifiers = new LinkedHashSet<>();
  qualifiers.add(Nullable.class);
  qualifiers.add(NullnessUnspecified.class);
  qualifiers.add(NonNull.class);
  return qualifiers;
}
@Override
protected QualifierHierarchy createQualifierHierarchy() {
  // Our subtyping among qualifiers; see NullSpecQualifierHierarchy below.
  QualifierHierarchy hierarchy = new NullSpecQualifierHierarchy(getSupportedTypeQualifiers(), elements);
  return hierarchy;
}
/** Qualifier subtyping: nonNull &lt;: codeNotNullnessAware &lt;: unionNull, with mode-dependent
 * handling of unspecified nullness (lenient vs. strict). */
private final class NullSpecQualifierHierarchy extends NoElementQualifierHierarchy {
  NullSpecQualifierHierarchy(
      Collection<Class<? extends Annotation>> qualifierClasses, Elements elements) {
    super(qualifierClasses, elements);
  }
  @Override
  public boolean isSubtype(AnnotationMirror subAnno, AnnotationMirror superAnno) {
    /*
     * Since we perform all necessary checking in the isSubtype method in NullSpecTypeHierarchy, I
     * tried replacing this body with `return true` to avoid duplicating logic. However, that's a
     * problem because the result of this method is sometimes cached and used instead of a full
     * call to the isSubtype method in NullSpecTypeHierarchy.
     *
     * Specifically: DefaultTypeHierarchy.visitDeclared_Declared calls isPrimarySubtype, which
     * calls isAnnoSubtype, which directly calls NullSpecQualifierHierarchy.isSubtype (as opposed
     * to NullSpecTypeHierarchy.isSubtype). That's still fine, since we'll reject the types in
     * NullSpecTypeHierarchy.isSubtype. The problem, though, is that it also inserts a cache entry
     * for the supposed subtyping relationship, and that entry can cause future checks to
     * short-circuit. (I think I saw this in isContainedBy.)
     */
    boolean subIsUnspecified = areSame(subAnno, codeNotNullnessAware);
    boolean superIsUnspecified = areSame(superAnno, codeNotNullnessAware);
    if (leastConvenientWorld) {
      // Strict mode: unspecified-vs-unspecified is never a subtype relationship.
      if (subIsUnspecified && superIsUnspecified) {
        return false;
      }
    } else {
      // Lenient mode: any comparison involving unspecified nullness succeeds.
      if (subIsUnspecified || superIsUnspecified) {
        return true;
      }
    }
    return areSame(subAnno, nonNull) || areSame(superAnno, unionNull);
  }
  @Override
  protected QualifierKindHierarchy createQualifierKindHierarchy(
      Collection<Class<? extends Annotation>> qualifierClasses) {
    return new DefaultQualifierKindHierarchy(qualifierClasses, /*bottom=*/ NonNull.class) {
      @Override
      protected Map<DefaultQualifierKind, Set<DefaultQualifierKind>> createDirectSuperMap() {
        DefaultQualifierKind nonNullKind =
            nameToQualifierKind.get(NonNull.class.getCanonicalName());
        DefaultQualifierKind unionNullKind =
            nameToQualifierKind.get(Nullable.class.getCanonicalName());
        DefaultQualifierKind codeNotNullnessAwareKind =
            nameToQualifierKind.get(NullnessUnspecified.class.getCanonicalName());
        Map<DefaultQualifierKind, Set<DefaultQualifierKind>> directSupers = new HashMap<>();
        directSupers.put(nonNullKind, singleton(codeNotNullnessAwareKind));
        directSupers.put(codeNotNullnessAwareKind, singleton(unionNullKind));
        directSupers.put(unionNullKind, emptySet());
        /*
         * The rules above are incomplete:
         *
         * - In "lenient mode," we treat unionNull as a subtype of codeNotNullnesesAware.
         *
         * - In "strict mode," we do *not* treat codeNotNullnesesAware as a subtype of itself.
         *
         * These subtleties are handled by isSubtype above. The incomplete rules still provide us
         * with useful implementations of leastUpperBound and greatestLowerBound.
         */
        return directSupers;
      }
    };
  }
}
@Override
protected TypeHierarchy createTypeHierarchy() {
  // Read the standard CF options that configure the structural type hierarchy.
  boolean ignoreRawTypeArguments = checker.getBooleanOption("ignoreRawTypeArguments", true);
  boolean invariantArrays = checker.hasOption("invariantArrays");
  return new NullSpecTypeHierarchy(
      checker, getQualifierHierarchy(), ignoreRawTypeArguments, invariantArrays);
}
/**
 * Type hierarchy that layers JSpecify nullness subtyping (isNullnessSubtype) on top of CF's
 * default structural checks, with adjustments for type-variable and wildcard bounds.
 */
private final class NullSpecTypeHierarchy extends DefaultTypeHierarchy {
  NullSpecTypeHierarchy(
      BaseTypeChecker checker,
      QualifierHierarchy qualifierHierarchy,
      boolean ignoreRawTypeArguments,
      boolean invariantArrays) {
    super(checker, qualifierHierarchy, ignoreRawTypeArguments, invariantArrays);
  }
  @Override
  protected StructuralEqualityComparer createEqualityComparer() {
    return new NullSpecEqualityComparer(typeargVisitHistory);
  }
  @Override
  protected boolean visitTypevarSubtype(
      AnnotatedTypeVariable subtype, AnnotatedTypeMirror supertype) {
    /*
     * The superclass "projects" type-variable usages rather than unioning them. Consequently, if
     * we delegate directly to the supermethod, it can fail when it shouldn't. Fortunately, we
     * already handle the top-level nullness subtyping in isNullnessSubtype. So all we need to do
     * here is to handle any type arguments. To do that, we still delegate to the supertype. But
     * first we mark the supertype as unionNull so that the supertype's top-level check will
     * always succeed.
     *
     * TODO(cpovirk): There are probably many more cases that we could short-circuit. We might
     * consider doing that in isSubtype rather than with overrides.
     */
    return super.visitTypevarSubtype(subtype, withUnionNull(supertype));
  }
  @Override
  protected boolean visitWildcardSubtype(
      AnnotatedWildcardType subtype, AnnotatedTypeMirror supertype) {
    // See discussion in visitTypevarSubtype above.
    return super.visitWildcardSubtype(subtype, withUnionNull(supertype));
  }
  @Override
  protected boolean visitTypevarSupertype(
      AnnotatedTypeMirror subtype, AnnotatedTypeVariable supertype) {
    /*
     * TODO(cpovirk): Why are the supertype cases so different from the subtype cases above? In
     * particular: Why is it important to replace an argument only conditionally? And why is it
     * important to replace the subtype instead of the supertype?
     */
    return super.visitTypevarSupertype(
        isNullInclusiveUnderEveryParameterization(supertype) ? withNonNull(subtype) : subtype,
        supertype);
  }
  @Override
  protected boolean visitWildcardSupertype(
      AnnotatedTypeMirror subtype, AnnotatedWildcardType supertype) {
    // See discussion in visitTypevarSupertype above.
    return super.visitWildcardSupertype(
        isNullInclusiveUnderEveryParameterization(supertype) ? withNonNull(subtype) : subtype,
        supertype);
  }
  @Override
  public Boolean visitTypevar_Typevar(
      AnnotatedTypeVariable subtype, AnnotatedTypeVariable supertype, Void p) {
    /*
     * Everything we need to check will be handled by isNullnessSubtype. That's fortunate, as the
     * supermethod does not account for our non-standard substitution rules for type variables.
     * Under those rules, `@NullnessUnspecified T` can still produce a @Nullable value after
     * substitution.
     */
    return true;
  }
  @Override
  protected boolean isSubtype(
      AnnotatedTypeMirror subtype, AnnotatedTypeMirror supertype, AnnotationMirror top) {
    // Structural subtyping AND our nullness subtyping must both hold.
    return super.isSubtype(subtype, supertype, top) && isNullnessSubtype(subtype, supertype);
  }
  private boolean isNullnessSubtype(AnnotatedTypeMirror subtype, AnnotatedTypeMirror supertype) {
    // Primitives can never hold null, so the nullness check is vacuous for them.
    if (isPrimitive(subtype.getUnderlyingType())) {
      return true;
    }
    if (supertype.getKind() == WILDCARD) {
      /*
       * super.isSubtype already called back into this.isSameType (and thus into
       * isNullnessSubtype) for the bound. That's fortunate, as we don't define subtyping rules
       * for wildcards (since the JLS says that they should be capture converted by this point, or
       * we should be checking their *bounds* for a containment check).
       */
      return true;
    }
    return isNullInclusiveUnderEveryParameterization(supertype)
        || isNullExclusiveUnderEveryParameterization(subtype)
        || nullnessEstablishingPathExists(subtype, supertype);
  }
}
/** Returns whether {@code type} includes null no matter how its type variables are instantiated. */
private boolean isNullInclusiveUnderEveryParameterization(AnnotatedTypeMirror type) {
  /*
   * Our draft subtyping rules specify a special case for intersection types. However, those rules
   * make sense only because the rules also specify that an intersection type never has an
   * additional-nullness value of its own. This is in contrast to CF, which does let an
   * intersection type have an AnnotationMirror of its own.
   *
   * ...well, sort of. As I understand it, what CF does is more that it tries to keep the
   * AnnotationMirror of the intersecton type in sync with the AnnotationMirror of each of its
   * components (which should themselves all match). So the intersection type "has" an
   * AnnotationMirror, but it provides no *additional* information beyond what is already carried
   * by its components' AnnotationMirrors.
   *
   * Nevertheless, the result is that we don't need a special case here: The check below is
   * redundant with the subsequent check on the intersection's components, but redundancy is
   * harmless.
   */
  if (type.hasAnnotation(unionNull)) {
    return true;
  }
  // In lenient mode ("most convenient world"), unspecified nullness also includes null.
  return !leastConvenientWorld && type.hasAnnotation(codeNotNullnessAware);
}
/** Returns whether {@code subtype} excludes null no matter how its type variables are instantiated. */
boolean isNullExclusiveUnderEveryParameterization(AnnotatedTypeMirror subtype) {
  /*
   * In most cases, it would be sufficient to check only nullnessEstablishingPathExists. However,
   * consider a type that meets all 3 of the following criteria:
   *
   * 1. a local variable
   *
   * 2. whose type is a type variable
   *
   * 3. whose corresponding type parameter permits nullable type arguments
   *
   * For such a type, nullnessEstablishingPathExists would always return false. And that makes
   * sense... until an implementation checks it with `if (foo != null)`. At that point, we need to
   * store an additional piece of information: Yes, _the type written in code_ can permit null,
   * but we know from dataflow that _this particular value_ is _not_ null. That additional
   * information is stored by attaching nonNull to the type-variable usage. This produces a type
   * distinct from all of:
   *
   * - `T`: `T` with additional nullness NO_CHANGE
   *
   * - `@NullnessUnspecified T`: `T` with additional nullness CODE_NOT_NULLNESS_AWARE
   *
   * - `@Nullable T`: `T` with additional nullness UNION_NULL
   *
   * It is unfortunate that this forces us to represent type-variable usages differently from how
   * we represent all other types. For all other types, the way to represent a type with
   * additional nullness NO_CHANGE is to attach nonNull. But again, for type-variable usages, the
   * way to do it is to attach *no* annotation.
   *
   * TODO(cpovirk): Would CF let us get away with attaching no annotation to other types? At least
   * by default, it requires annotations on all types other than type-variable usages. But it
   * might be nice to get rid of that requirement for consistency -- and perhaps to help us avoid
   * writing a package-private @NonNull annotation to class files, as discussed elsewhere.
   * However, we would still need nonNull for the case of null checks in dataflow. And we might
   * end up violating other CF assumptions and thus causing ourselves more trouble than we solve.
   * *Plus*, we'd want to make sure that our type-variable usages don't end up getting the
   * "default default" of codeNotNullnessAware, since the "default default" is applied after the
   * element-scoped defaulting logic. We would probably need to still attach nonNull to them and
   * then modify removeNonNullFromTypeVariableUsages to remove it from them, just as it removes
   * nonNull from type-variable usages.
   */
  if (subtype.hasAnnotation(nonNull)) {
    return true;
  }
  // Otherwise, look for a path to some declared or array type (which is inherently non-null).
  return nullnessEstablishingPathExists(
      subtype, t -> t.getKind() == DECLARED || t.getKind() == ARRAY);
}
private boolean nullnessEstablishingPathExists(
AnnotatedTypeMirror subtype, AnnotatedTypeMirror supertype) {
/*
* TODO(cpovirk): As an optimization, `return false` if `supertype` is not a type variable: If
* it's not a type variable, then the only ways for isNullnessSubtype to succeed were already
* checked by isNullInclusiveUnderEveryParameterization and
* isNullExclusiveUnderEveryParameterization.
*/
return nullnessEstablishingPathExists(
subtype, t -> checker.getTypeUtils().isSameType(t, supertype.getUnderlyingType()));
}
private boolean nullnessEstablishingPathExists(
AnnotatedTypeMirror subtype, Predicate<TypeMirror> supertypeMatcher) {
if (isUnionNullOrEquivalent(subtype)) {
return false;
}
if (supertypeMatcher.test(subtype.getUnderlyingType())) {
return true;
}
for (AnnotatedTypeMirror supertype : getUpperBounds(subtype)) {
if (nullnessEstablishingPathExists(supertype, supertypeMatcher)) {
return true;
}
}
/*
* We don't need to handle the "lower-bound rule" here: The Checker Framework doesn't perform
* wildcard capture conversion. (Hmm, but it might see post-capture-conversion types in some
* cases....) It compares "? super Foo" against "Bar" by more directly comparing Foo and Bar.
*/
return false;
}
  /**
   * Returns the upper bounds to explore when walking upward from {@code type}: the direct
   * supertypes of its non-null projection and, for wildcards, additionally the bound of the
   * corresponding type parameter (when it can be recovered). Any other kind of type yields an
   * empty list.
   */
  private List<? extends AnnotatedTypeMirror> getUpperBounds(AnnotatedTypeMirror type) {
    switch (type.getKind()) {
      case INTERSECTION:
      case TYPEVAR:
        return withNonNull(type).directSuperTypes();
      case WILDCARD:
        List<AnnotatedTypeMirror> bounds = new ArrayList<>();
        bounds.addAll(withNonNull(type).directSuperTypes());
        /*
         * We would use `((AnnotatedWildcardType) type).getTypeVariable()`, but it is not available
         * in all cases that we need.
         */
        WildcardType wildcard = (WildcardType) type.getUnderlyingType(); // javac internal type
        TypeParameterElement typeParameter = wildcardToTypeParam(wildcard);
        if (typeParameter != null) {
          bounds.add(getAnnotatedType(typeParameter));
        }
        return unmodifiableList(bounds);
      default:
        return emptyList();
    }
  }
private boolean isUnionNullOrEquivalent(AnnotatedTypeMirror type) {
return type.hasAnnotation(unionNull)
|| (leastConvenientWorld && type.hasAnnotation(codeNotNullnessAware));
}
  /**
   * Equality of annotated types within our single nullness hierarchy. Annotation-level equality
   * only: the comparison inspects the hierarchy annotation on each type (plus the
   * unspecified-nullness special cases), not type arguments or base types — see the trailing
   * TODO in {@code areEqual}.
   */
  private final class NullSpecEqualityComparer extends StructuralEqualityComparer {
    NullSpecEqualityComparer(StructuralEqualityVisitHistory typeargVisitHistory) {
      super(typeargVisitHistory);
    }
    @Override
    protected boolean checkOrAreEqual(AnnotatedTypeMirror type1, AnnotatedTypeMirror type2) {
      // Consult and update the visit history so a pair of types is compared only once.
      Boolean pastResult = visitHistory.result(type1, type2, /*hierarchy=*/ unionNull);
      if (pastResult != null) {
        return pastResult;
      }
      boolean result = areEqual(type1, type2);
      this.visitHistory.add(type1, type2, /*hierarchy=*/ unionNull, result);
      return result;
    }
    @Override
    public boolean areEqualInHierarchy(
        AnnotatedTypeMirror type1, AnnotatedTypeMirror type2, AnnotationMirror top) {
      // We have a single hierarchy, so `top` is ignored.
      return areEqual(type1, type2);
    }
    private boolean areEqual(AnnotatedTypeMirror type1, AnnotatedTypeMirror type2) {
      /*
       * I'd like to use the spec definition here: "type1 is a subtype of type2 and vice versa."
       * However, that produces infinite recursion in some cases.
       */
      boolean type1IsUnspecified = type1.hasAnnotation(codeNotNullnessAware);
      boolean type2IsUnspecified = type2.hasAnnotation(codeNotNullnessAware);
      boolean bothAreUnspecified = type1IsUnspecified && type2IsUnspecified;
      boolean eitherIsUnspecified = type1IsUnspecified || type2IsUnspecified;
      // Lenient world: any unspecified side is compatible. Strict ("least convenient") world:
      // two unspecified sides are never considered equal.
      if (leastConvenientWorld && bothAreUnspecified) {
        return false;
      }
      if (!leastConvenientWorld && eitherIsUnspecified) {
        return true;
      }
      AnnotationMirror a1 = type1.getAnnotationInHierarchy(unionNull);
      AnnotationMirror a2 = type2.getAnnotationInHierarchy(unionNull);
      return a1 == a2 || (a1 != null && a2 != null && areSame(a1, a2));
      /*
       * TODO(cpovirk): Do we care about the base type, or is looking at annotations enough?
       * super.visitDeclared_Declared has a TODO with a similar question. Err, presumably normal
       * Java type-checking has done that job. A more interesting question may be why we don't look
       * at type args. The answer might be simply: "That's the contract, even though it is
       * surprising, given the names of the class and its methods." (Granted, the docs of
       * super.visitDeclared_Declared also say that it checks that "The types are of the same
       * class/interfaces," so the contract isn't completely clear.)
       */
    }
  }
  @Override
  protected TypeVariableSubstitutor createTypeVariableSubstitutor() {
    // Install our substitutor so nullness annotations are merged correctly during
    // type-variable substitution (see NullSpecTypeVariableSubstitutor below).
    return new NullSpecTypeVariableSubstitutor();
  }
  /**
   * Merges nullness when a type argument is substituted for a type-variable usage: the
   * substituted type keeps the argument's annotations, except that additional nullness from the
   * usage site is unioned in. {@code unionNull} takes precedence over
   * {@code codeNotNullnessAware} — the two checks below must stay in this order.
   */
  private final class NullSpecTypeVariableSubstitutor extends TypeVariableSubstitutor {
    @Override
    protected AnnotatedTypeMirror substituteTypeVariable(
        AnnotatedTypeMirror argument, AnnotatedTypeVariable use) {
      // TODO(cpovirk): Delegate to leastUpperBound?
      AnnotatedTypeMirror substitute = argument.deepCopy(/*copyAnnotations=*/ true);
      if (argument.hasAnnotation(unionNull) || use.hasAnnotation(unionNull)) {
        substitute.replaceAnnotation(unionNull);
      } else if (argument.hasAnnotation(codeNotNullnessAware)
          || use.hasAnnotation(codeNotNullnessAware)) {
        substitute.replaceAnnotation(codeNotNullnessAware);
      }
      return substitute;
    }
  }
@Override
public AnnotatedDeclaredType getSelfType(Tree tree) {
AnnotatedDeclaredType superResult = super.getSelfType(tree);
return superResult == null ? null : withNonNull(superResult);
}
  @Override
  protected QualifierDefaults createQualifierDefaults() {
    // Install our defaulting logic (per-element null-aware defaults, type-variable handling).
    return new NullSpecQualifierDefaults(elements, this);
  }
  /** Sets up the annotation defaults for non-null-aware code. */
  @Override
  protected void addCheckedStandardDefaults(QualifierDefaults defs) {
    /*
     * This method sets up the defaults for *non-null-aware* code.
     *
     * All these defaults will be overridden (whether we like it or not) for null-aware code. That
     * happens when NullSpecQualifierDefaults.annotate(...) sets a new default for OTHERWISE.
     *
     * Note that these two methods do not contain the totality of our defaulting logic. For example,
     * our TypeAnnotator has special logic for upper bounds _in the case of `super` wildcards
     * specifically_.
     */
    // Here's the big default, the "default default":
    defs.addCheckedCodeDefault(codeNotNullnessAware, OTHERWISE);
    // Some locations are intrinsically non-nullable:
    defs.addCheckedCodeDefault(nonNull, CONSTRUCTOR_RESULT);
    defs.addCheckedCodeDefault(nonNull, RECEIVER);
    // We do want *some* of the CLIMB standard defaults:
    for (TypeUseLocation location : LOCATIONS_REFINED_BY_DATAFLOW) {
      defs.addCheckedCodeDefault(unionNull, location);
    }
    defs.addCheckedCodeDefault(nonNull, IMPLICIT_LOWER_BOUND);
    // But for exception parameters, we want the default to be nonNull:
    defs.addCheckedCodeDefault(nonNull, EXCEPTION_PARAMETER);
    /*
     * Note one other difference from the CLIMB defaults: We want the default for implicit upper
     * bounds to match the "default default" of codeNotNullnessAware, not to be top/unionNull. We
     * accomplish this simply by not calling the supermethod (which would otherwise call
     * addClimbStandardDefaults, which would override the "default default").
     */
  }
  /**
   * Defaulting logic: installs null-aware defaults for elements annotated (directly or via their
   * package) with {@code @DefaultNonNull}, applies defaults even to type-variable usages, and
   * then strips {@code nonNull} back off type-variable usages (see
   * {@code removeNonNullFromTypeVariableUsages} and {@code shouldBeAnnotated} for why).
   */
  private final class NullSpecQualifierDefaults extends QualifierDefaults {
    NullSpecQualifierDefaults(Elements elements, AnnotatedTypeFactory atypeFactory) {
      super(elements, atypeFactory);
    }
    @Override
    public void annotate(Element elt, AnnotatedTypeMirror type) {
      if (elt == null) {
        super.annotate(elt, type);
        return;
      }
      /*
       * CF has some built-in support for package-level defaults. However, they cascade to
       * subpackages (see 28.5.2), contrary to our semantics (see
       * https://github.com/jspecify/jspecify/issues/8). To avoid CF semantics, we never set a
       * default on a package element itself, only on each top-level class element in the package.
       *
       * XXX: When adding support for DefaultNullnessUnspecified, be sure that DefaultNullnessUnspecified on a *class* overrides
       * DefaultNonNull on the *package* (and vice versa).
       *
       * XXX: When adding support for aliases, make sure to support them here.
       */
      if (hasNullAwareAnnotation(elt)) {
        /*
         * Setting a default here affects not only this element but also its descendants in the
         * syntax tree.
         */
        addElementDefault(elt, unionNull, UNBOUNDED_WILDCARD_UPPER_BOUND);
        addElementDefault(elt, nonNull, OTHERWISE);
        /*
         * Some defaults are common to null-aware and non-null-aware code. We reassert some of those
         * here. If we didn't, then they would be overridden by OTHERWISE above.
         *
         * (Yes, our non-null-aware setup sets defaults for more locations than just these. But for
         * the other locations, it sets the default to nonNull. And there's no need for the
         * *null-aware* setup to default any specific location to nonNull: That is its default
         * everywhere that is not specifically overridden, thanks to the same OTHERWISE rule
         * discussed above.)
         */
        for (TypeUseLocation location : LOCATIONS_REFINED_BY_DATAFLOW) {
          addElementDefault(elt, unionNull, location);
        }
      }
      super.annotate(elt, type);
      removeNonNullFromTypeVariableUsages(type);
    }
    @Override
    public void annotate(Tree tree, AnnotatedTypeMirror type) {
      super.annotate(tree, type);
      removeNonNullFromTypeVariableUsages(type);
    }
    // Strips nonNull from every type-variable usage inside `type`; see shouldBeAnnotated below
    // for why nonNull must not remain on type-variable usages.
    private void removeNonNullFromTypeVariableUsages(AnnotatedTypeMirror type) {
      new AnnotatedTypeScanner<Void, Void>() {
        @Override
        public Void visitTypeVariable(AnnotatedTypeVariable type, Void aVoid) {
          // For an explanation, see shouldBeAnnotated below.
          type.removeAnnotation(nonNull);
          /*
           * It probably doesn't matter whether we invoke the supermethod or not. But let's do it,
           * simply because that's what tree visitors typically do.
           */
          return super.visitTypeVariable(type, aVoid);
        }
      }.visit(type);
    }
    @Override
    protected DefaultApplierElement createDefaultApplierElement(
        AnnotatedTypeFactory atypeFactory,
        Element annotationScope,
        AnnotatedTypeMirror type,
        boolean applyToTypeVar) {
      return new DefaultApplierElement(atypeFactory, annotationScope, type, applyToTypeVar) {
        @Override
        protected boolean shouldBeAnnotated(AnnotatedTypeMirror type, boolean applyToTypeVar) {
          /*
           * CF usually doesn't apply defaults to type-variable usages. But in non-null-aware code,
           * we want our default of codeNotNullnessAware to apply even to type variables.
           *
           * But there are 2 other things to keep in mind:
           *
           * - CF *does* apply defaults to type-variable usages *if* they are local variables.
           * That's because it will refine their types with dataflow. This CF behavior works fine
           * for us: Since we want to apply defaults in strictly more cases, we're happy to accept
           * what CF already does for local variables. (We do need to be sure to apply unionNull
           * (our top type) in that case, rather than codeNotNullnessAware. We accomplish that by
           * setting specific defaults for LOCATIONS_REFINED_BY_DATAFLOW.)
           *
           * - Non-null-aware code (discussed above) is easy: We apply codeNotNullnessAware to
           * everything except local variables. But null-aware code more complex. First, set aside
           * local variables, which we handle as discussed above. After that, we need to apply
           * nonNull to most types, but we need to *not* apply it to (non-local-variable)
           * type-variable usages. (For more on this, see
           * isNullExclusiveUnderEveryParameterization.) This need is weird enough that CF doesn't
           * appear to support it directly. Our solution is to apply nonNull to everything but then
           * remove it from any type variables it appears on. We do that in
           * removeNonNullFromTypeVariableUsages above.
           */
          return super.shouldBeAnnotated(type, /*applyToTypeVar=*/ true);
        }
      };
    }
    @Override
    public boolean applyConservativeDefaults(Element annotationScope) {
      /*
       * Ignore any command-line flag to request conservative defaults. The principle of
       * "unspecified nullness" is that we configure conservatism/leniency through changes in our
       * subtyping rules, rather than changes in how we choose the default annotation / additional
       * nullness of any type.
       */
      return false;
    }
  }
  // Locations whose types dataflow later refines. They are defaulted to the top type
  // (unionNull) — rather than codeNotNullnessAware — so refinement can only narrow; see the
  // comments in addCheckedStandardDefaults and shouldBeAnnotated.
  private static final Set<TypeUseLocation> LOCATIONS_REFINED_BY_DATAFLOW =
      unmodifiableSet(new HashSet<>(asList(LOCAL_VARIABLE, RESOURCE_VARIABLE)));
  /**
   * Same as the supermethod except that flow-sensitive refinement is disabled for wildcard
   * types. NOTE(review): the rationale is unknown even per the author's TODO below — confirm
   * before changing.
   */
  @Override
  protected void addComputedTypeAnnotations(Tree tree, AnnotatedTypeMirror type, boolean iUseFlow) {
    // TODO(cpovirk): This helps, but why?
    super.addComputedTypeAnnotations(tree, type, iUseFlow && type.getKind() != WILDCARD);
  }
  /** Installs {@link NullSpecTypeAnnotator}; deliberately does not call the supermethod. */
  @Override
  protected TypeAnnotator createTypeAnnotator() {
    /*
     * Override to:
     *
     * - write some defaults that are difficult to express with the addCheckedCodeDefault and
     * addElementDefault APIs. But beware: Using TypeAnnotator for this purpose is safe only for
     * defaults that are common to null-aware and non-null-aware code!
     *
     * - *not* do what the supermethod does. Specifically, the supermethod adds the top type
     * (unionNull) to the bound of unbounded wildcards. But we want the ability to sometimes add
     * codeNotNullnessAware instead.
     */
    return new NullSpecTypeAnnotator(this);
  }
  /**
   * Writes defaults that are common to null-aware and non-null-aware code: outer/enclosing types
   * and primitives are intrinsically non-null, and the implicit upper bound of a {@code ? super}
   * wildcard defaults to unionNull.
   */
  private final class NullSpecTypeAnnotator extends TypeAnnotator {
    NullSpecTypeAnnotator(AnnotatedTypeFactory typeFactory) {
      super(typeFactory);
    }
    @Override
    public Void visitDeclared(AnnotatedDeclaredType type, Void p) {
      AnnotatedDeclaredType enclosingType = type.getEnclosingType();
      if (enclosingType != null) {
        /*
         * TODO(cpovirk): If NullSpecVisitor starts looking at source trees instead of the derived
         * AnnotatedTypeMirror objects, then change this code to fill in this value unconditionally
         * (matching visitPrimitive below).
         */
        addIfNoAnnotationPresent(enclosingType, nonNull);
      }
      return super.visitDeclared(type, p);
    }
    @Override
    public Void visitPrimitive(AnnotatedPrimitiveType type, Void p) {
      // Primitives can never hold null; always overwrite.
      type.replaceAnnotation(nonNull);
      return super.visitPrimitive(type, p);
    }
    @Override
    public Void visitWildcard(AnnotatedWildcardType type, Void p) {
      // Only `? super Foo` wildcards (those with an explicit super bound) get this treatment;
      // see the comment in createTypeAnnotator about `super` wildcards specifically.
      if (type.getUnderlyingType().getSuperBound() != null) {
        addIfNoAnnotationPresent(type.getExtendsBound(), unionNull);
      }
      return super.visitWildcard(type, p);
    }
  }
@Override
protected TreeAnnotator createTreeAnnotator() {
return new ListTreeAnnotator(
asList(new NullSpecTreeAnnotator(this), super.createTreeAnnotator()));
}
  /**
   * Tree-based defaults: literals are nonNull (except the null literal, which is unionNull), and
   * any use of an enum constant is forced to nonNull regardless of prior annotation.
   */
  private final class NullSpecTreeAnnotator extends TreeAnnotator {
    NullSpecTreeAnnotator(AnnotatedTypeFactory typeFactory) {
      super(typeFactory);
    }
    @Override
    public Void visitLiteral(LiteralTree node, AnnotatedTypeMirror type) {
      // Guard against non-literal tree kinds that also implement LiteralTree's interface check.
      if (node.getKind().asInterface() == LiteralTree.class) {
        type.addAnnotation(node.getKind() == NULL_LITERAL ? unionNull : nonNull);
      }
      return super.visitLiteral(node, type);
    }
    @Override
    public Void visitIdentifier(IdentifierTree node, AnnotatedTypeMirror type) {
      annotateIfEnumConstant(node, type);
      return super.visitIdentifier(node, type);
    }
    @Override
    public Void visitMemberSelect(MemberSelectTree node, AnnotatedTypeMirror type) {
      annotateIfEnumConstant(node, type);
      return super.visitMemberSelect(node, type);
    }
    // Forces nonNull onto any usage of an enum constant.
    private void annotateIfEnumConstant(ExpressionTree node, AnnotatedTypeMirror type) {
      Element element = elementFromUse(node);
      if (element != null && element.getKind() == ENUM_CONSTANT) {
        /*
         * Even if it was annotated before, override it. There are 2 cases:
         *
         * 1. The declaration had an annotation on it in source. That will still get reported as an
         * error when we visit the declaration (assuming we're compiling the code with the
         * declaration): Anything we do here affects the *usage* but not the declaration. And we
         * know that the usage isn't really @Nullable/@NullnessUnspecified, even if the author of
         * the declaration said so.
         *
         * 2. The declaration had no annotation on it in source, but it was in non-null-aware code.
         * And consequently, defaults.visit(...), which ran before us, applied a default of
         * codeNotNullnessAware. Again, that default isn't correct, so we override it here.
         */
        type.replaceAnnotation(nonNull);
      }
    }
  }
  /** Formatter that never prints "invisible" annotations, regardless of what callers request. */
  @Override
  protected AnnotationFormatter createAnnotationFormatter() {
    return new DefaultAnnotationFormatter() {
      @Override
      public String formatAnnotationString(
          Collection<? extends AnnotationMirror> annos, boolean printInvisible) {
        // Deliberately ignore the caller's printInvisible flag.
        return super.formatAnnotationString(annos, /*printInvisible=*/ false);
      }
    };
  }
  /** Type formatter with verbose generics and invisible annotations suppressed. */
  @Override
  protected AnnotatedTypeFormatter createAnnotatedTypeFormatter() {
    return new DefaultAnnotatedTypeFormatter(
        /*
         * We would pass the result of getAnnotationFormatter(), but the superclass calls
         * createAnnotatedTypeFormatter() before it initializes that field.
         *
         * Fortunately, it's harmless to use one AnnotationFormatter here and another equivalent
         * one in createAnnotationFormatter().
         */
        createAnnotationFormatter(),
        // TODO(cpovirk): Permit configuration of these booleans?
        /*printVerboseGenerics=*/ false,
        /*defaultPrintInvisibleAnnos=*/ false);
  }
  /** Deliberate no-op: prevents computed annotations from being written to bytecode. */
  @Override
  public void postProcessClassTree(ClassTree tree) {
    /*
     * To avoid writing computed annotations into bytecode, do not call the supermethod.
     *
     * We don't want to write computed annotations because we don't want for checkers (including
     * this one!) to depend on those annotations. All core JSpecify nullness information should be
     * derivable from the originally written annotations.
     *
     * (We especially don't want to write @NonNull to bytecode, since it is an implementation detail
     * of this current checker implementation.)
     *
     * "Computed annotations" includes not only annotations added from defaults but also inherited
     * declaration annotations. JSpecify requires that annotations are not inherited
     * (https://github.com/jspecify/jspecify/issues/14), but this is academic until we support
     * aliasing/implies *and* we extend that to declaration annotations (if we even do so:
     * https://github.com/jspecify/jspecify/issues/124).
     *
     * Additionally, when I was letting CF write computed annotations into bytecode, I ran into an
     * type.invalid.conflicting.annos error, which I have described more in
     * https://github.com/jspecify/nullness-checker-for-checker-framework/commit/d16a0231487e239bc94145177de464b5f77c8b19
     *
     * Finally, I wonder if writing annotations back to Element objects (which is technically what
     * the supermethod does) could cause problems with @DefaultNonNull: We look for @DefaultNonNull through
     * the Element API. So would CF write an "inherited" @DefaultNonNull annotation back to the Element
     * that we examine (and do so in time for us to see it)? This might not come up when the
     * superclass and subclass are in the same compilation unit (or *maybe* even part of the same
     * "compilation job?"), but I could imagine seeing it when the @DefaultNonNull annotation is in a
     * library. Then again, surely CF already treats only *some* declaration annotations as
     * inherited, since surely it doesn't treat, e.g., @AnnotatedFor that way, right?
     *
     * TODO(cpovirk): Report that error upstream if it turns out not to be our fault.
     *
     * TODO(cpovirk): Add a sample input that detects this problem, possibly as part of #141.
     */
  }
private void addIfNoAnnotationPresent(AnnotatedTypeMirror type, AnnotationMirror annotation) {
if (!type.isAnnotatedInHierarchy(unionNull)) {
type.addAnnotation(annotation);
}
}
  /**
   * Returns a deep copy of {@code type} whose root annotation (in our single hierarchy) is
   * replaced with {@code nonNull}; annotations on nested component types are preserved.
   */
  @SuppressWarnings("unchecked") // safety guaranteed by API docs
  private <T extends AnnotatedTypeMirror> T withNonNull(T type) {
    // Remove the annotation from the *root* type, but preserve other annotations.
    type = (T) type.deepCopy(/*copyAnnotations=*/ true);
    /*
     * TODO(cpovirk): In the case of a type-variable usage, I feel like we should need to *remove*
     * any existing annotation but then not *add* nonNull. (This is because of the difference
     * between type-variable usages and all other types, as discussed near the end of the giant
     * comment in isNullExclusiveUnderEveryParameterization.) However, the current code passes all
     * tests. Figure out whether that makes sense or we need more tests to show why not.
     */
    type.replaceAnnotation(nonNull);
    return type;
  }
@SuppressWarnings("unchecked") // safety guaranteed by API docs
private <T extends AnnotatedTypeMirror> T withUnionNull(T type) {
// Remove the annotation from the *root* type, but preserve other annotations.
type = (T) type.deepCopy(/*copyAnnotations=*/ true);
type.replaceAnnotation(unionNull);
return type;
}
private static boolean hasNullAwareAnnotation(Element elt) {
if (elt.getAnnotation(DefaultNonNull.class) != null) {
return true;
}
Element enclosingElement = elt.getEnclosingElement();
return enclosingElement != null // possible only under `-source 8 -target 8` (i.e., pre-JPMS)?
&& enclosingElement.getKind() == PACKAGE
&& enclosingElement.getAnnotation(DefaultNonNull.class) != null;
}
}
| Document my improved understanding of annotation inheritance.
| src/main/java/com/google/jspecify/nullness/NullSpecAnnotatedTypeFactory.java | Document my improved understanding of annotation inheritance. |
|
Java | apache-2.0 | de23d3316629fb06cf8d2c6ea2ff44685a9783b7 | 0 | unkascrack/lucene-databasedirectory | package com.github.lucene.store.database.lock;
import java.io.IOException;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.LockObtainFailedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.lucene.store.database.DatabaseDirectory;
import com.github.lucene.store.database.DatabaseDirectoryException;
import com.github.lucene.store.database.handler.DatabaseDirectoryHandler;
/**
 * A {@link LockFactory} that represents a lock as a marker file stored through the
 * {@link DatabaseDirectoryHandler}. Obtaining a lock inserts the marker; releasing it deletes
 * the marker; validity is re-checked against the store on every {@code ensureValid()} call.
 */
public class DatabasePhantomReadLockFactory extends LockFactory {
    private static final Logger LOGGER = LoggerFactory.getLogger(DatabasePhantomReadLockFactory.class);
    private static final DatabaseDirectoryHandler handler = DatabaseDirectoryHandler.INSTANCE;
    // Singleton: the constructor is private.
    public static final LockFactory INSTANCE = new DatabasePhantomReadLockFactory();
    private DatabasePhantomReadLockFactory() {
    }
    /**
     * Obtains {@code lockName} by creating its marker file.
     *
     * NOTE(review): the existsFile/saveFile sequence is check-then-act and not atomic; two
     * concurrent callers could pass the exists check. The catch below is the backstop only if
     * saveFile itself rejects a duplicate — confirm the handler's semantics.
     *
     * @throws LockObtainFailedException if the lock is already held or cannot be created
     */
    @Override
    public Lock obtainLock(final Directory dir, final String lockName) throws IOException {
        LOGGER.info("{}.obtainLock({}, {})", this, dir, lockName);
        final DatabaseDirectory directory = (DatabaseDirectory) dir;
        try {
            if (handler.existsFile(directory, lockName)) {
                throw new LockObtainFailedException("Lock instance already obtained: " + directory);
            }
            handler.saveFile(directory, lockName, null, 0);
            return new DatabasePhantomReadLock(directory, lockName);
        } catch (final DatabaseDirectoryException e) {
            throw new LockObtainFailedException("Lock instance already obtained: " + directory);
        }
    }
    @Override
    public String toString() {
        return this.getClass().getSimpleName();
    }
    /** A lock backed by a marker file; valid while the marker exists and close() was not called. */
    public static final class DatabasePhantomReadLock extends Lock {
        private final DatabaseDirectory directory;
        private final String name;
        // Set once by close(); volatile so ensureValid() sees it from any thread.
        private volatile boolean closed;
        public DatabasePhantomReadLock(final DatabaseDirectory directory, final String name) {
            this.directory = directory;
            this.name = name;
        }
        /** Fails if this lock was closed or its marker file no longer exists in the store. */
        @Override
        public void ensureValid() throws IOException {
            LOGGER.debug("{}.ensureValid()", this);
            if (closed) {
                throw new AlreadyClosedException("Lock instance already released: " + this);
            }
            if (!handler.existsFile(directory, name)) {
                throw new AlreadyClosedException("Lock instance already released: " + this);
            }
        }
        /** Releases the lock by deleting the marker file; idempotent. */
        @Override
        public void close() throws IOException {
            LOGGER.debug("{}.close()", this);
            if (!closed) {
                handler.deleteFile(directory, name);
                closed = true;
            }
        }
        @Override
        public String toString() {
            return this.getClass().getSimpleName();
        }
    }
}
| src/main/java/com/github/lucene/store/database/lock/DatabasePhantomReadLockFactory.java | package com.github.lucene.store.database.lock;
import java.io.IOException;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.LockObtainFailedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.lucene.store.database.DatabaseDirectory;
import com.github.lucene.store.database.DatabaseDirectoryException;
import com.github.lucene.store.database.handler.DatabaseDirectoryHandler;
public class DatabasePhantomReadLockFactory extends LockFactory {
private static final Logger LOGGER = LoggerFactory.getLogger(DatabasePhantomReadLockFactory.class);
private static final DatabaseDirectoryHandler handler = DatabaseDirectoryHandler.INSTANCE;
public static final LockFactory INSTANCE = new DatabasePhantomReadLockFactory();
private DatabasePhantomReadLockFactory() {
}
@Override
public Lock obtainLock(final Directory dir, final String lockName) throws IOException {
LOGGER.info("{}.obtainLock({}, {})", this, dir, lockName);
final DatabaseDirectory directory = (DatabaseDirectory) dir;
try {
handler.saveFile(directory, lockName, null, 0);
return new DatabasePhantomReadLock(directory, lockName);
} catch (final DatabaseDirectoryException e) {
throw new LockObtainFailedException("Lock instance already obtained: " + directory);
}
}
@Override
public String toString() {
return this.getClass().getSimpleName();
}
public static final class DatabasePhantomReadLock extends Lock {
private final DatabaseDirectory directory;
private final String name;
private volatile boolean closed;
public DatabasePhantomReadLock(final DatabaseDirectory directory, final String name) {
this.directory = directory;
this.name = name;
}
@Override
public void ensureValid() throws IOException {
LOGGER.debug("{}.ensureValid()", this);
if (closed) {
throw new AlreadyClosedException("Lock instance already released: " + this);
}
if (!handler.existsFile(directory, name)) {
throw new AlreadyClosedException("Lock instance already released: " + this);
}
}
@Override
public void close() throws IOException {
LOGGER.debug("{}.close()", this);
if (!closed) {
handler.deleteFile(directory, name);
closed = true;
}
}
@Override
public String toString() {
return this.getClass().getSimpleName();
}
}
}
| review lockfactory implementation | src/main/java/com/github/lucene/store/database/lock/DatabasePhantomReadLockFactory.java | review lockfactory implementation |
|
Java | apache-2.0 | 7b6b7d1308402e595d5e2f7eb2a5b49c600bed1e | 0 | GoogleCloudPlatform/data-science-on-gcp,GoogleCloudPlatform/data-science-on-gcp | 08_dataflow/chapter8/src/main/java/com/google/cloud/training/flights/CreateDatasets.java | /*
* Copyright (C) 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.training.flights;
import java.util.Map;
import org.apache.beam.runners.dataflow.DataflowRunner;
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Mean;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.joda.time.Duration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.services.bigquery.model.TableRow;
import com.google.cloud.training.flights.Flight.INPUTCOLS;
/**
* Runs on the cloud on 3 days of data, and takes about 10 minutes to process.
*
* @author vlakshmanan
*
*/
public class CreateDatasets {
private static final Logger LOG = LoggerFactory.getLogger(CreateDatasets.class);
  /** Command-line options for this pipeline, in addition to the standard Dataflow options. */
  public static interface MyOptions extends DataflowPipelineOptions {
    @Description("Should we process the full dataset or just a small sample?")
    @Default.Boolean(false)
    boolean getFullDataset();
    void setFullDataset(boolean b);
    @Description("Path of the output directory")
    @Default.String("gs://cloud-training-demos-ml/flights/chapter8/output/")
    String getOutput();
    void setOutput(String s);
    @Description("Path of trainday.csv")
    @Default.String("gs://cloud-training-demos-ml/flights/trainday.csv")
    String getTraindayCsvPath();
    void setTraindayCsvPath(String s);
  }
  /**
   * Entry point: reads flight events from BigQuery, computes the average departure delay per
   * origin airport and departure hour (on training days only), writes those averages as CSV,
   * and then writes both training and test flight datasets with delay features attached.
   */
  @SuppressWarnings("serial")
  public static void main(String[] args) {
    MyOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().as(MyOptions.class);
    // options.setStreaming(true);
    options.setRunner(DataflowRunner.class);
    options.setTempLocation("gs://cloud-training-demos-ml/flights/staging");
    Pipeline p = Pipeline.create(options);
    // read traindays.csv into memory for use as a side-input
    PCollectionView<Map<String, String>> traindays = getTrainDays(p, options.getTraindayCsvPath());
    // For the small run, restrict to the first three days of January 2015.
    String query = "SELECT EVENT_DATA FROM flights.simevents WHERE ";
    if (!options.getFullDataset()) {
      query += " STRING(FL_DATE) < '2015-01-04' AND ";
    }
    query += " (EVENT = 'wheelsoff' OR EVENT = 'arrived') ";
    LOG.info(query);
    // Parse each event row into a Flight and keep only non-cancelled, non-diverted flights.
    PCollection<Flight> allFlights = p //
        .apply("ReadLines", BigQueryIO.Read.fromQuery(query)) //
        .apply("ParseFlights", ParDo.of(new DoFn<TableRow, Flight>() {
          @ProcessElement
          public void processElement(ProcessContext c) throws Exception {
            TableRow row = c.element();
            String line = (String) row.getOrDefault("EVENT_DATA", "");
            Flight f = Flight.fromCsv(line);
            if (f != null) {
              // Use the event time as the element timestamp so windowing works downstream.
              c.outputWithTimestamp(f, f.getEventTimestamp());
            }
          }
        })) //
        .apply("GoodFlights", ParDo.of(new DoFn<Flight, Flight>() {
          @ProcessElement
          public void processElement(ProcessContext c) throws Exception {
            Flight f = c.element();
            if (f.isNotCancelled() && f.isNotDiverted()) {
              c.output(f);
            }
          }
        }));
    // Average departure delay (dep delay + taxi-out) keyed by "ORIGIN:hour", training days only.
    PCollection<KV<String, Double>> depDelays = //
        filterTrainOrTest("globalTrain", allFlights, traindays, true) //
            .apply("airport:hour->depdelay", ParDo.of(new DoFn<Flight, KV<String, Double>>() {
              @ProcessElement
              public void processElement(ProcessContext c) throws Exception {
                Flight f = c.element();
                if (f.getField(Flight.INPUTCOLS.EVENT).equals("wheelsoff")) {
                  String key = f.getField(Flight.INPUTCOLS.ORIGIN) + ":" + f.getDepartureHour();
                  double value = f.getFieldAsFloat(Flight.INPUTCOLS.DEP_DELAY)
                      + f.getFieldAsFloat(Flight.INPUTCOLS.TAXI_OUT);
                  c.output(KV.of(key, value));
                }
              }
            })) //
            .apply("avgDepDelay", Mean.perKey());
    // Persist the departure-delay averages as a single CSV file.
    depDelays.apply("DepDelayToCsv", ParDo.of(new DoFn<KV<String, Double>, String>() {
      @ProcessElement
      public void processElement(ProcessContext c) throws Exception {
        KV<String, Double> kv = c.element();
        c.output(kv.getKey() + "," + kv.getValue());
      }
    })) //
        .apply("WriteDepDelays", TextIO.Write.to(options.getOutput() + "delays").withSuffix(".csv").withoutSharding());
    PCollectionView<Map<String, Double>> avgDepDelay = depDelays.apply("depdelay->map", View.asMap());
    // note that departure delay is computed only on the training data and reused in test ...
    writeFlights(allFlights, traindays, depDelays, avgDepDelay, options, true);
    writeFlights(allFlights, traindays, depDelays, avgDepDelay, options, false);
    PipelineResult result = p.run();
    if (!options.getFullDataset()) {
      // for small datasets, block
      result.waitUntilFinish();
    }
  }
@SuppressWarnings("serial")
private static void writeFlights(PCollection<Flight> allFlights, PCollectionView<Map<String, String>> traindays, PCollection<KV<String, Double>> depDelays,
PCollectionView<Map<String, Double>> avgDepDelay, MyOptions options, boolean trainOrTest) {
String name = trainOrTest? "train_" : "test_";
PCollection<Flight> trainFlights = //
filterTrainOrTest(name + "hourly", allFlights, traindays, true) //
.apply(Window.into(SlidingWindows//
.of(Duration.standardHours(1))//
.every(Duration.standardMinutes(5))));
PCollection<KV<String, Double>> arrDelays = trainFlights
.apply(name + "airport->arrdelay", ParDo.of(new DoFn<Flight, KV<String, Double>>() {
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
Flight f = c.element();
if (f.getField(Flight.INPUTCOLS.EVENT).equals("arrived")) {
String key = f.getField(Flight.INPUTCOLS.DEST);
double value = f.getFieldAsFloat(Flight.INPUTCOLS.ARR_DELAY);
c.output(KV.of(key, value));
}
}
})) //
.apply("avgArrDelay", Mean.perKey());
PCollectionView<Map<String, Double>> avgArrDelay = arrDelays.apply(name + "arrdelay->map", View.asMap());
trainFlights = trainFlights.apply(name + "AddDelayInfo", ParDo.withSideInputs(avgDepDelay, avgArrDelay).of(new DoFn<Flight, Flight>() {
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
Flight f = c.element().newCopy();
String depKey = f.getField(Flight.INPUTCOLS.ORIGIN) + ":" + f.getDepartureHour();
Double depDelay = c.sideInput(avgDepDelay).get(depKey);
String arrKey = f.getField(Flight.INPUTCOLS.DEST);
Double arrDelay = c.sideInput(avgArrDelay).get(arrKey);
f.avgDepartureDelay = (float) ((depDelay == null) ? 0 : depDelay);
f.avgArrivalDelay = (float) ((arrDelay == null) ? 0 : arrDelay);
c.output(f);
}
}));
trainFlights.apply(name + "ToCsv", ParDo.of(new DoFn<Flight, String>() {
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
Flight f = c.element();
if (f.getField(INPUTCOLS.EVENT).equals("arrived")) {
c.output(f.toTrainingCsv());
}
}
})) //
.apply("WriteFlights", TextIO.Write.to(options.getOutput() + name + "flights").withSuffix(".csv"));
}
@SuppressWarnings("serial")
private static PCollection<Flight> filterTrainOrTest(String name, PCollection<Flight> allFlights, PCollectionView<Map<String, String>> traindays, boolean trainOnly){
return allFlights.apply(name, ParDo.withSideInputs(traindays).of(new DoFn<Flight, Flight>() {
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
Flight f = c.element();
String date = f.getField(Flight.INPUTCOLS.FL_DATE);
boolean isTrainDay = c.sideInput(traindays).containsKey(date);
if (isTrainDay == trainOnly) {
c.output(f); // training days only
}
}
}));
}
@SuppressWarnings("serial")
private static PCollectionView<Map<String, String>> getTrainDays(Pipeline p, String path) {
return p.apply("Read trainday.csv", TextIO.Read.from(path)) //
.apply("Parse trainday.csv", ParDo.of(new DoFn<String, KV<String, String>>() {
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
String line = c.element();
String[] fields = line.split(",");
if (fields.length > 1 && "True".equals(fields[1])) {
c.output(KV.of(fields[0], "")); // ignore value
}
}
})) //
.apply("toView", View.asMap());
}
}
| CreateTrainingDataset9.java is sufficient
| 08_dataflow/chapter8/src/main/java/com/google/cloud/training/flights/CreateDatasets.java | CreateTrainingDataset9.java is sufficient |
||
Java | apache-2.0 | 6eb0731755dae2cb2ed726fd429130abb345ca93 | 0 | syncron/liquibase,maberle/liquibase,Datical/liquibase,evigeant/liquibase,dprguard2000/liquibase,fossamagna/liquibase,lazaronixon/liquibase,NSIT/liquibase,hbogaards/liquibase,liquibase/liquibase,hbogaards/liquibase,Datical/liquibase,lazaronixon/liquibase,syncron/liquibase,liquibase/liquibase,C0mmi3/liquibase,ivaylo5ev/liquibase,jimmycd/liquibase,fossamagna/liquibase,Datical/liquibase,Datical/liquibase,NSIT/liquibase,evigeant/liquibase,balazs-zsoldos/liquibase,jimmycd/liquibase,hbogaards/liquibase,liquibase/liquibase,fossamagna/liquibase,dbmanul/dbmanul,mattbertolini/liquibase,ivaylo5ev/liquibase,mattbertolini/liquibase,dprguard2000/liquibase,mbreslow/liquibase,C0mmi3/liquibase,C0mmi3/liquibase,jimmycd/liquibase,dbmanul/dbmanul,mbreslow/liquibase,evigeant/liquibase,mattbertolini/liquibase,hbogaards/liquibase,NSIT/liquibase,balazs-zsoldos/liquibase,balazs-zsoldos/liquibase,evigeant/liquibase,dbmanul/dbmanul,mbreslow/liquibase,lazaronixon/liquibase,NSIT/liquibase,lazaronixon/liquibase,dprguard2000/liquibase,dprguard2000/liquibase,maberle/liquibase,balazs-zsoldos/liquibase,syncron/liquibase,mattbertolini/liquibase,mbreslow/liquibase,maberle/liquibase,C0mmi3/liquibase,dbmanul/dbmanul,syncron/liquibase,maberle/liquibase,jimmycd/liquibase | package liquibase.database.core;
import liquibase.CatalogAndSchema;
import liquibase.database.AbstractJdbcDatabase;
import liquibase.database.DatabaseConnection;
import liquibase.database.OfflineConnection;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.DatabaseException;
import liquibase.exception.ValidationErrors;
import liquibase.executor.ExecutorService;
import liquibase.logging.LogFactory;
import liquibase.statement.*;
import liquibase.statement.core.RawCallStatement;
import liquibase.statement.core.RawSqlStatement;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*;
import liquibase.util.JdbcUtils;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Encapsulates Oracle database support.
*/
/**
 * Encapsulates Oracle database support.
 *
 * <p>Oracle exposes a single namespace level which Liquibase models as catalogs
 * (see {@link #supportsSchemas()}). Recycle-bin ({@code BIN$}) objects are treated
 * as system objects unless {@link #canAccessDbaRecycleBin()} confirmed access to
 * {@code DBA_RECYCLEBIN} during {@link #validate()}.
 */
public class OracleDatabase extends AbstractJdbcDatabase {
    public static final String PRODUCT_NAME = "oracle";

    // Reserved words reported by the JDBC driver, plus extras the driver omits.
    private Set<String> reservedWords = new HashSet<String>();
    // Lazily populated cache of user-defined type names; null until first requested.
    private Set<String> userDefinedTypes = null;
    // Set by validate() once we confirm the connected user may query DBA_RECYCLEBIN.
    private boolean canAccessDbaRecycleBin = false;

    public OracleDatabase() {
        super.unquotedObjectsAreUppercased = true;
        super.setCurrentDateTimeFunction("SYSTIMESTAMP");
        // Setting list of Oracle's native functions
        dateFunctions.add(new DatabaseFunction("SYSDATE"));
        dateFunctions.add(new DatabaseFunction("SYSTIMESTAMP"));
        dateFunctions.add(new DatabaseFunction("CURRENT_TIMESTAMP"));
        super.sequenceNextValueFunction = "%s.nextval";
        super.sequenceCurrentValueFunction = "%s.currval";
    }

    @Override
    public int getPriority() {
        return PRIORITY_DEFAULT;
    }

    /**
     * Reflectively unwraps the native JDBC connection to (a) harvest the driver's
     * reserved-word list and (b) enable remarks reporting so table/column comments
     * appear in metadata. Any failure is logged and ignored — these are
     * nice-to-haves, not requirements.
     */
    @Override
    public void setConnection(DatabaseConnection conn) {
        if (!(conn instanceof OfflineConnection)) {
            try {
                Method wrappedConn = conn.getClass().getMethod("getWrappedConnection");
                wrappedConn.setAccessible(true);
                Connection sqlConn = (Connection) wrappedConn.invoke(conn);
                reservedWords.addAll(Arrays.asList(sqlConn.getMetaData().getSQLKeywords().toUpperCase().split(",\\s*")));
                reservedWords.addAll(Arrays.asList("GROUP", "USER", "SESSION", "PASSWORD", "RESOURCE", "START", "SIZE", "UID", "DESC")); //more reserved words not returned by driver

                Method method = sqlConn.getClass().getMethod("setRemarksReporting", Boolean.TYPE);
                method.setAccessible(true);
                method.invoke(sqlConn, true);
            } catch (Exception e) {
                LogFactory.getLogger().info("Error configuring OracleDatabase connection: " + e.getMessage(), e);
                //cannot set it. That is OK
            }
        }
        super.setConnection(conn);
    }

    @Override
    public String getShortName() {
        return "oracle";
    }

    @Override
    protected String getDefaultDatabaseProductName() {
        return "Oracle";
    }

    @Override
    public Integer getDefaultPort() {
        return 1521;
    }

    /** Oracle has no separate JDBC catalog; the namespace is passed as the schema. */
    @Override
    public String getJdbcCatalogName(CatalogAndSchema schema) {
        return null;
    }

    @Override
    public String getJdbcSchemaName(CatalogAndSchema schema) {
        return correctObjectName(schema.getCatalogName() == null ? schema.getSchemaName() : schema.getCatalogName(), Schema.class);
    }

    /** Truncates so "PK_" + name stays within Oracle's 30-character identifier limit. */
    @Override
    public String generatePrimaryKeyName(String tableName) {
        if (tableName.length() > 27) {
            return "PK_" + tableName.toUpperCase().substring(0, 27);
        } else {
            return "PK_" + tableName.toUpperCase();
        }
    }

    @Override
    public boolean supportsInitiallyDeferrableColumns() {
        return true;
    }

    @Override
    public boolean isReservedWord(String objectName) {
        return reservedWords.contains(objectName.toUpperCase());
    }

    @Override
    public boolean supportsSequences() {
        return true;
    }

    /**
     * Oracle's single namespace level is modeled as catalogs in Liquibase terms,
     * so schema support is reported as false.
     */
    @Override
    public boolean supportsSchemas() {
        return false;
    }

    /** Resolves the current schema; offline connections answer from their catalog. */
    @Override
    protected String getConnectionCatalogName() throws DatabaseException {
        if (getConnection() instanceof OfflineConnection) {
            return getConnection().getCatalog();
        }
        try {
            return ExecutorService.getInstance().getExecutor(this).queryForObject(new RawCallStatement("select sys_context( 'userenv', 'current_schema' ) from dual"), String.class);
        } catch (Exception e) {
            LogFactory.getLogger().info("Error getting default schema", e);
        }
        return null;
    }

    @Override
    public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException {
        return PRODUCT_NAME.equalsIgnoreCase(conn.getDatabaseProductName());
    }

    @Override
    public String getDefaultDriver(String url) {
        if (url.startsWith("jdbc:oracle")) {
            return "oracle.jdbc.OracleDriver";
        }
        return null;
    }

    @Override
    public String getDefaultCatalogName() {//NOPMD
        return super.getDefaultCatalogName() == null ? null : super.getDefaultCatalogName().toUpperCase();
    }

    /**
     * Return an Oracle date literal with the same value as a string formatted using ISO 8601.
     * <p/>
     * Convert an ISO8601 date string to one of the following results:
     * to_date('1995-05-23', 'YYYY-MM-DD')
     * to_date('1995-05-23 09:23:59', 'YYYY-MM-DD HH24:MI:SS')
     * <p/>
     * Implementation restriction:
     * Currently, only the following subsets of ISO8601 are supported:
     * YYYY-MM-DD
     * YYYY-MM-DDThh:mm:ss
     */
    @Override
    public String getDateLiteral(String isoDate) {
        String normalLiteral = super.getDateLiteral(isoDate);
        if (isDateOnly(isoDate)) {
            StringBuffer val = new StringBuffer();
            val.append("to_date(");
            val.append(normalLiteral);
            val.append(", 'YYYY-MM-DD')");
            return val.toString();
        } else if (isTimeOnly(isoDate)) {
            StringBuffer val = new StringBuffer();
            val.append("to_date(");
            val.append(normalLiteral);
            val.append(", 'HH24:MI:SS')");
            return val.toString();
        } else if (isDateTime(isoDate)) {
            // Drop the fractional seconds; to_date has no sub-second precision.
            normalLiteral = normalLiteral.substring(0, normalLiteral.lastIndexOf('.')) + "'";
            StringBuffer val = new StringBuffer(26);
            val.append("to_date(");
            val.append(normalLiteral);
            val.append(", 'YYYY-MM-DD HH24:MI:SS')");
            return val.toString();
        } else {
            return "UNSUPPORTED:" + isoDate;
        }
    }

    /**
     * Flags Oracle-internal objects (system schemas, recycle-bin BIN$ objects, AQ/
     * materialized-view/Workspace-Manager bookkeeping tables, etc.) so snapshots
     * skip them. Liquibase's own objects are never treated as system objects.
     */
    @Override
    public boolean isSystemObject(DatabaseObject example) {
        if (example == null) {
            return false;
        }
        if (this.isLiquibaseObject(example)) {
            return false;
        }
        if (example instanceof Schema) {
            if ("SYSTEM".equals(example.getName()) || "SYS".equals(example.getName()) || "CTXSYS".equals(example.getName())|| "XDB".equals(example.getName())) {
                return true;
            }
            if ("SYSTEM".equals(example.getSchema().getCatalogName()) || "SYS".equals(example.getSchema().getCatalogName()) || "CTXSYS".equals(example.getSchema().getCatalogName()) || "XDB".equals(example.getSchema().getCatalogName())) {
                return true;
            }
        } else if (isSystemObject(example.getSchema())) {
            return true;
        }
        if (example instanceof Catalog) {
            if (("SYSTEM".equals(example.getName()) || "SYS".equals(example.getName()) || "CTXSYS".equals(example.getName()) || "XDB".equals(example.getName()))) {
                return true;
            }
        } else if (example.getName() != null) {
            if (example.getName().startsWith("BIN$")) { //oracle deleted table
                // Only pass through BIN$ constraint/index objects when we can consult
                // DBA_RECYCLEBIN (see validate()): some objects don't get renamed back
                // on FLASHBACK and are already filtered in the metadata queries.
                // Without that access, treat everything in the recycle bin as system.
                if (this.canAccessDbaRecycleBin()) {
                    // NOTE(review): this checks liquibase.statement.UniqueConstraint,
                    // not liquibase.structure.core.UniqueConstraint — confirm intended.
                    if (example instanceof PrimaryKey || example instanceof Index || example instanceof liquibase.statement.UniqueConstraint) {
                        return false;
                    } else {
                        return true;
                    }
                } else {
                    return true;
                }
            } else if (example.getName().startsWith("AQ$")) { //oracle AQ tables
                return true;
            } else if (example.getName().startsWith("DR$")) { //oracle index tables
                return true;
            } else if (example.getName().startsWith("SYS_IOT_OVER")) { //oracle system table
                return true;
            } else if (example.getName().startsWith("MLOG$_")) { //Created by materialized view logs for every table that is part of a materialized view. Not available for DDL operations.
                return true;
            } else if (example.getName().startsWith("RUPD$_")) { //Created by materialized view log tables using primary keys. Not available for DDL operations.
                return true;
            } else if (example.getName().startsWith("WM$_")) { //Workspace Manager backup tables.
                return true;
            } else if (example.getName().equals("CREATE$JAVA$LOB$TABLE")) { //This table contains the name of the Java object, the date it was loaded, and has a BLOB column to store the Java object.
                return true;
            } else if (example.getName().equals("JAVA$CLASS$MD5$TABLE")) { //This is a hash table that tracks the loading of Java objects into a schema.
                return true;
            }
        }
        return super.isSystemObject(example);
    }

    @Override
    public boolean supportsTablespaces() {
        return true;
    }

    @Override
    public boolean supportsAutoIncrement() {
        return false;
    }

    @Override
    public boolean supportsRestrictForeignKeys() {
        return false;
    }

    /** BINARY_FLOAT/BINARY_DOUBLE take no precision/scale parameters. */
    @Override
    public int getDataTypeMaxParameters(String dataTypeName) {
        // equalsIgnoreCase avoids the locale-sensitive pitfall of
        // toUpperCase().equals(...) (e.g. the Turkish dotted/dotless "i").
        if ("BINARY_FLOAT".equalsIgnoreCase(dataTypeName)
                || "BINARY_DOUBLE".equalsIgnoreCase(dataTypeName)) {
            return 0;
        }
        return super.getDataTypeMaxParameters(dataTypeName);
    }

    @Override
    public boolean jdbcCallsCatalogsSchemas() {
        return true;
    }

    /**
     * Returns the names of the current user's defined types, querying USER_TYPES on
     * first use and caching the result. Query errors yield an empty set.
     */
    public Set<String> getUserDefinedTypes() {
        if (userDefinedTypes == null) {
            userDefinedTypes = new HashSet<String>();
            if (getConnection() != null && !(getConnection() instanceof OfflineConnection)) {
                try {
                    userDefinedTypes.addAll(ExecutorService.getInstance().getExecutor(this).queryForList(new RawSqlStatement("SELECT TYPE_NAME FROM USER_TYPES"), String.class));
                } catch (DatabaseException e) {
                    //ignore error
                }
            }
        }
        return userDefinedTypes;
    }

    /** CURRENT_TIMESTAMP is passed through as-is; everything else uses the default. */
    @Override
    public String generateDatabaseFunctionValue(DatabaseFunction databaseFunction) {
        if (databaseFunction != null && databaseFunction.toString().equalsIgnoreCase("current_timestamp")) {
            return databaseFunction.toString();
        }
        return super.generateDatabaseFunctionValue(databaseFunction);
    }

    /**
     * Probes DBA_RECYCLEBIN access (recording the result for isSystemObject). If the
     * user lacks "select any dictionary" (ORA-00942) a warning with the exact GRANT
     * to run is added; other failures become errors.
     */
    @Override
    public ValidationErrors validate() {
        ValidationErrors errors = super.validate();
        DatabaseConnection connection = getConnection();
        if (connection == null || connection instanceof OfflineConnection) {
            LogFactory.getInstance().getLog().info("Cannot validate offline database");
            return errors;
        }

        Statement statement = null;
        try {
            statement = ((JdbcConnection) connection).createStatement();
            // "where 0=1" returns no rows; we only care whether the query is allowed.
            statement.executeQuery("select 1 from dba_recyclebin where 0=1");
            this.canAccessDbaRecycleBin = true;
        } catch (Exception e) {
            // getMessage() may be null for some drivers; guard before startsWith.
            if (e instanceof SQLException && e.getMessage() != null && e.getMessage().startsWith("ORA-00942")) { //ORA-00942: table or view does not exist
                errors.addWarning("Liquibase needs to access the DBA_RECYCLEBIN table so we can automatically handle the case where constraints are deleted and restored. Since Oracle doesn't properly restore the original table names referenced in the constraint, we use the information from the DBA_RECYCLEBIN to automatically correct this issue.\n" +
                        "\n" +
                        "The user you used to connect to the database ("+getConnection().getConnectionUserName()+") needs to have \"select any dictionary\" permissions set before we can perform this operation. Please run the following SQL to set the appropriate permissions, and try running the command again.\n" +
                        "\n" +
                        "     grant select any dictionary to "+getConnection().getConnectionUserName()+";");
            } else {
                errors.addError(e.getMessage());
            }
        } finally {
            JdbcUtils.close(null, statement);
        }

        return errors;
    }

    /** True only after validate() successfully queried DBA_RECYCLEBIN. */
    public boolean canAccessDbaRecycleBin() {
        return canAccessDbaRecycleBin;
    }
}
| liquibase-core/src/main/java/liquibase/database/core/OracleDatabase.java | package liquibase.database.core;
import liquibase.CatalogAndSchema;
import liquibase.database.AbstractJdbcDatabase;
import liquibase.database.DatabaseConnection;
import liquibase.database.OfflineConnection;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.DatabaseException;
import liquibase.exception.ValidationErrors;
import liquibase.executor.ExecutorService;
import liquibase.logging.LogFactory;
import liquibase.statement.*;
import liquibase.statement.core.RawCallStatement;
import liquibase.statement.core.RawSqlStatement;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*;
import liquibase.util.JdbcUtils;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Encapsulates Oracle database support.
*/
/**
 * Encapsulates Oracle database support.
 *
 * <p>Oracle exposes a single namespace level which Liquibase models as catalogs
 * (see {@link #supportsSchemas()}). Recycle-bin ({@code BIN$}) objects are treated
 * as system objects unless {@link #canAccessDbaRecycleBin()} confirmed access to
 * {@code DBA_RECYCLEBIN} during {@link #validate()}.
 */
public class OracleDatabase extends AbstractJdbcDatabase {
    public static final String PRODUCT_NAME = "oracle";

    // Reserved words reported by the JDBC driver, plus extras the driver omits.
    private Set<String> reservedWords = new HashSet<String>();
    // Lazily populated cache of user-defined type names; null until first requested.
    private Set<String> userDefinedTypes = null;
    // Set by validate() once we confirm the connected user may query DBA_RECYCLEBIN.
    private boolean canAccessDbaRecycleBin = false;

    public OracleDatabase() {
        super.unquotedObjectsAreUppercased = true;
        super.setCurrentDateTimeFunction("SYSTIMESTAMP");
        // Setting list of Oracle's native functions
        dateFunctions.add(new DatabaseFunction("SYSDATE"));
        dateFunctions.add(new DatabaseFunction("SYSTIMESTAMP"));
        dateFunctions.add(new DatabaseFunction("CURRENT_TIMESTAMP"));
        super.sequenceNextValueFunction = "%s.nextval";
        super.sequenceCurrentValueFunction = "%s.currval";
    }

    @Override
    public int getPriority() {
        return PRIORITY_DEFAULT;
    }

    /**
     * Reflectively unwraps the native JDBC connection to (a) harvest the driver's
     * reserved-word list and (b) enable remarks reporting so table/column comments
     * appear in metadata. Any failure is logged and ignored — these are
     * nice-to-haves, not requirements.
     */
    @Override
    public void setConnection(DatabaseConnection conn) {
        if (!(conn instanceof OfflineConnection)) {
            try {
                Method wrappedConn = conn.getClass().getMethod("getWrappedConnection");
                wrappedConn.setAccessible(true);
                Connection sqlConn = (Connection) wrappedConn.invoke(conn);
                reservedWords.addAll(Arrays.asList(sqlConn.getMetaData().getSQLKeywords().toUpperCase().split(",\\s*")));
                reservedWords.addAll(Arrays.asList("GROUP", "USER", "SESSION", "PASSWORD", "RESOURCE", "START", "SIZE", "UID", "DESC")); //more reserved words not returned by driver

                Method method = sqlConn.getClass().getMethod("setRemarksReporting", Boolean.TYPE);
                method.setAccessible(true);
                method.invoke(sqlConn, true);
            } catch (Exception e) {
                LogFactory.getLogger().info("Error configuring OracleDatabase connection: " + e.getMessage(), e);
                //cannot set it. That is OK
            }
        }
        super.setConnection(conn);
    }

    @Override
    public String getShortName() {
        return "oracle";
    }

    @Override
    protected String getDefaultDatabaseProductName() {
        return "Oracle";
    }

    @Override
    public Integer getDefaultPort() {
        return 1521;
    }

    /** Oracle has no separate JDBC catalog; the namespace is passed as the schema. */
    @Override
    public String getJdbcCatalogName(CatalogAndSchema schema) {
        return null;
    }

    @Override
    public String getJdbcSchemaName(CatalogAndSchema schema) {
        return correctObjectName(schema.getCatalogName() == null ? schema.getSchemaName() : schema.getCatalogName(), Schema.class);
    }

    /** Truncates so "PK_" + name stays within Oracle's 30-character identifier limit. */
    @Override
    public String generatePrimaryKeyName(String tableName) {
        if (tableName.length() > 27) {
            return "PK_" + tableName.toUpperCase().substring(0, 27);
        } else {
            return "PK_" + tableName.toUpperCase();
        }
    }

    @Override
    public boolean supportsInitiallyDeferrableColumns() {
        return true;
    }

    @Override
    public boolean isReservedWord(String objectName) {
        return reservedWords.contains(objectName.toUpperCase());
    }

    @Override
    public boolean supportsSequences() {
        return true;
    }

    /**
     * Oracle's single namespace level is modeled as catalogs in Liquibase terms,
     * so schema support is reported as false.
     */
    @Override
    public boolean supportsSchemas() {
        return false;
    }

    /** Resolves the current schema; offline connections answer from their catalog. */
    @Override
    protected String getConnectionCatalogName() throws DatabaseException {
        if (getConnection() instanceof OfflineConnection) {
            return getConnection().getCatalog();
        }
        try {
            return ExecutorService.getInstance().getExecutor(this).queryForObject(new RawCallStatement("select sys_context( 'userenv', 'current_schema' ) from dual"), String.class);
        } catch (Exception e) {
            LogFactory.getLogger().info("Error getting default schema", e);
        }
        return null;
    }

    @Override
    public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException {
        return PRODUCT_NAME.equalsIgnoreCase(conn.getDatabaseProductName());
    }

    @Override
    public String getDefaultDriver(String url) {
        if (url.startsWith("jdbc:oracle")) {
            return "oracle.jdbc.OracleDriver";
        }
        return null;
    }

    @Override
    public String getDefaultCatalogName() {//NOPMD
        return super.getDefaultCatalogName() == null ? null : super.getDefaultCatalogName().toUpperCase();
    }

    /**
     * Return an Oracle date literal with the same value as a string formatted using ISO 8601.
     * <p/>
     * Convert an ISO8601 date string to one of the following results:
     * to_date('1995-05-23', 'YYYY-MM-DD')
     * to_date('1995-05-23 09:23:59', 'YYYY-MM-DD HH24:MI:SS')
     * <p/>
     * Implementation restriction:
     * Currently, only the following subsets of ISO8601 are supported:
     * YYYY-MM-DD
     * YYYY-MM-DDThh:mm:ss
     */
    @Override
    public String getDateLiteral(String isoDate) {
        String normalLiteral = super.getDateLiteral(isoDate);
        if (isDateOnly(isoDate)) {
            StringBuffer val = new StringBuffer();
            val.append("to_date(");
            val.append(normalLiteral);
            val.append(", 'YYYY-MM-DD')");
            return val.toString();
        } else if (isTimeOnly(isoDate)) {
            StringBuffer val = new StringBuffer();
            val.append("to_date(");
            val.append(normalLiteral);
            val.append(", 'HH24:MI:SS')");
            return val.toString();
        } else if (isDateTime(isoDate)) {
            // Drop the fractional seconds; to_date has no sub-second precision.
            normalLiteral = normalLiteral.substring(0, normalLiteral.lastIndexOf('.')) + "'";
            StringBuffer val = new StringBuffer(26);
            val.append("to_date(");
            val.append(normalLiteral);
            val.append(", 'YYYY-MM-DD HH24:MI:SS')");
            return val.toString();
        } else {
            return "UNSUPPORTED:" + isoDate;
        }
    }

    /**
     * Flags Oracle-internal objects (system schemas, recycle-bin BIN$ objects, AQ/
     * materialized-view/Workspace-Manager bookkeeping tables, etc.) so snapshots
     * skip them. Liquibase's own objects are never treated as system objects.
     */
    @Override
    public boolean isSystemObject(DatabaseObject example) {
        if (example == null) {
            return false;
        }
        if (this.isLiquibaseObject(example)) {
            return false;
        }
        if (example instanceof Schema) {
            if ("SYSTEM".equals(example.getName()) || "SYS".equals(example.getName()) || "CTXSYS".equals(example.getName())|| "XDB".equals(example.getName())) {
                return true;
            }
            if ("SYSTEM".equals(example.getSchema().getCatalogName()) || "SYS".equals(example.getSchema().getCatalogName()) || "CTXSYS".equals(example.getSchema().getCatalogName()) || "XDB".equals(example.getSchema().getCatalogName())) {
                return true;
            }
        } else if (isSystemObject(example.getSchema())) {
            return true;
        }
        if (example instanceof Catalog) {
            if (("SYSTEM".equals(example.getName()) || "SYS".equals(example.getName()) || "CTXSYS".equals(example.getName()) || "XDB".equals(example.getName()))) {
                return true;
            }
        } else if (example.getName() != null) {
            if (example.getName().startsWith("BIN$")) { //oracle deleted table
                // FIX (CORE-2623): only pass through BIN$ constraint/index objects
                // when we can consult DBA_RECYCLEBIN (see validate()); without that
                // access, treat everything in the recycle bin as a system object so
                // restored tables are not mis-snapshotted.
                if (this.canAccessDbaRecycleBin()) {
                    // NOTE(review): this checks liquibase.statement.UniqueConstraint,
                    // not liquibase.structure.core.UniqueConstraint — confirm intended.
                    if (example instanceof PrimaryKey || example instanceof Index || example instanceof liquibase.statement.UniqueConstraint) { //some objects don't get renamed back and so are already filtered in the metadata queries
                        return false;
                    } else {
                        return true;
                    }
                } else {
                    return true;
                }
            } else if (example.getName().startsWith("AQ$")) { //oracle AQ tables
                return true;
            } else if (example.getName().startsWith("DR$")) { //oracle index tables
                return true;
            } else if (example.getName().startsWith("SYS_IOT_OVER")) { //oracle system table
                return true;
            } else if (example.getName().startsWith("MLOG$_")) { //Created by materialized view logs for every table that is part of a materialized view. Not available for DDL operations.
                return true;
            } else if (example.getName().startsWith("RUPD$_")) { //Created by materialized view log tables using primary keys. Not available for DDL operations.
                return true;
            } else if (example.getName().startsWith("WM$_")) { //Workspace Manager backup tables.
                return true;
            } else if (example.getName().equals("CREATE$JAVA$LOB$TABLE")) { //This table contains the name of the Java object, the date it was loaded, and has a BLOB column to store the Java object.
                return true;
            } else if (example.getName().equals("JAVA$CLASS$MD5$TABLE")) { //This is a hash table that tracks the loading of Java objects into a schema.
                return true;
            }
        }
        return super.isSystemObject(example);
    }

    @Override
    public boolean supportsTablespaces() {
        return true;
    }

    @Override
    public boolean supportsAutoIncrement() {
        return false;
    }

    @Override
    public boolean supportsRestrictForeignKeys() {
        return false;
    }

    /** BINARY_FLOAT/BINARY_DOUBLE take no precision/scale parameters. */
    @Override
    public int getDataTypeMaxParameters(String dataTypeName) {
        // equalsIgnoreCase avoids the locale-sensitive pitfall of
        // toUpperCase().equals(...) (e.g. the Turkish dotted/dotless "i").
        if ("BINARY_FLOAT".equalsIgnoreCase(dataTypeName)
                || "BINARY_DOUBLE".equalsIgnoreCase(dataTypeName)) {
            return 0;
        }
        return super.getDataTypeMaxParameters(dataTypeName);
    }

    @Override
    public boolean jdbcCallsCatalogsSchemas() {
        return true;
    }

    /**
     * Returns the names of the current user's defined types, querying USER_TYPES on
     * first use and caching the result. Query errors yield an empty set.
     */
    public Set<String> getUserDefinedTypes() {
        if (userDefinedTypes == null) {
            userDefinedTypes = new HashSet<String>();
            if (getConnection() != null && !(getConnection() instanceof OfflineConnection)) {
                try {
                    userDefinedTypes.addAll(ExecutorService.getInstance().getExecutor(this).queryForList(new RawSqlStatement("SELECT TYPE_NAME FROM USER_TYPES"), String.class));
                } catch (DatabaseException e) {
                    //ignore error
                }
            }
        }
        return userDefinedTypes;
    }

    /** CURRENT_TIMESTAMP is passed through as-is; everything else uses the default. */
    @Override
    public String generateDatabaseFunctionValue(DatabaseFunction databaseFunction) {
        if (databaseFunction != null && databaseFunction.toString().equalsIgnoreCase("current_timestamp")) {
            return databaseFunction.toString();
        }
        return super.generateDatabaseFunctionValue(databaseFunction);
    }

    /**
     * Probes DBA_RECYCLEBIN access (recording the result for isSystemObject). If the
     * user lacks "select any dictionary" (ORA-00942) a warning with the exact GRANT
     * to run is added; other failures become errors.
     */
    @Override
    public ValidationErrors validate() {
        ValidationErrors errors = super.validate();
        DatabaseConnection connection = getConnection();
        if (connection == null || connection instanceof OfflineConnection) {
            LogFactory.getInstance().getLog().info("Cannot validate offline database");
            return errors;
        }

        Statement statement = null;
        try {
            statement = ((JdbcConnection) connection).createStatement();
            // "where 0=1" returns no rows; we only care whether the query is allowed.
            statement.executeQuery("select 1 from dba_recyclebin where 0=1");
            this.canAccessDbaRecycleBin = true;
        } catch (Exception e) {
            // getMessage() may be null for some drivers; guard before startsWith.
            if (e instanceof SQLException && e.getMessage() != null && e.getMessage().startsWith("ORA-00942")) { //ORA-00942: table or view does not exist
                errors.addWarning("Liquibase needs to access the DBA_RECYCLEBIN table so we can automatically handle the case where constraints are deleted and restored. Since Oracle doesn't properly restore the original table names referenced in the constraint, we use the information from the DBA_RECYCLEBIN to automatically correct this issue.\n" +
                        "\n" +
                        "The user you used to connect to the database ("+getConnection().getConnectionUserName()+") needs to have \"select any dictionary\" permissions set before we can perform this operation. Please run the following SQL to set the appropriate permissions, and try running the command again.\n" +
                        "\n" +
                        "     grant select any dictionary to "+getConnection().getConnectionUserName()+";");
            } else {
                errors.addError(e.getMessage());
            }
        } finally {
            JdbcUtils.close(null, statement);
        }

        return errors;
    }

    /** True only after validate() successfully queried DBA_RECYCLEBIN. */
    public boolean canAccessDbaRecycleBin() {
        return canAccessDbaRecycleBin;
    }
}
| CORE-2623 Oracle: primary keys on tables recovered from recyclebin are not properly snapshotted
Don't include any BIN$ objects when the connected user cannot query DBA_RECYCLEBIN.
| liquibase-core/src/main/java/liquibase/database/core/OracleDatabase.java | CORE-2623 Oracle: primary keys on tables recovered from recyclebin are not properly snapshotted |
|
Java | apache-2.0 | f4fc986a83f6bc262555f5e1e62bb547acac9ae0 | 0 | Doctoror/PainlessMusicPlayer,Doctoror/PainlessMusicPlayer,Doctoror/PainlessMusicPlayer,Doctoror/FuckOffMusicPlayer,Doctoror/FuckOffMusicPlayer | package com.doctoror.fuckoffmusicplayer.search;
import com.doctoror.commons.wear.nano.WearSearchData;
import com.doctoror.fuckoffmusicplayer.R;
import com.doctoror.fuckoffmusicplayer.eventbus.EventSearchResults;
import com.doctoror.fuckoffmusicplayer.remote.RemoteControl;
import com.doctoror.fuckoffmusicplayer.root.RootActivity;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.app.Activity;
import android.app.Fragment;
import android.content.Intent;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.widget.RecyclerView;
import android.transition.AutoTransition;
import android.transition.Scene;
import android.transition.Transition;
import android.transition.TransitionManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import java.util.List;
/**
* Created by Yaroslav Mytkalyk on 22.11.16.
*/
public final class SearchFragment extends Fragment {
private static final int REQUEST_CODE_SPEECH = 1;
private final Transition mDefaultTransition = new AutoTransition();
private WearSearchData.Results mSearchResults;
private Scene mSceneCurrent;
private Scene mSceneMessage;
private Scene mSceneResults;
private ViewGroup mSceneRoot;
private View mBtnInput;
private View mProgress;
private TextView mTextViewMessage;
private RecyclerView mListView;
private SearchResultsAdapter mAdapter;
private boolean mBtnSearchVisible = true;
private boolean mSearching;
private String mSearchQuery;
private Toast mToastSpeechNotSupported;
public SearchFragment() {
mDefaultTransition.addListener(new TransitionListenerImpl());
}
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
EventBus.getDefault().register(this);
}
@Override
public void onDestroy() {
super.onDestroy();
EventBus.getDefault().unregister(this);
}
@Nullable
@Override
public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
final Bundle savedInstanceState) {
final View view = inflater.inflate(R.layout.fragment_search, container, false);
mSceneRoot = (ViewGroup) view.findViewById(R.id.sceneRoot);
mSceneMessage = new Scene(mSceneRoot, mSceneRoot.findViewById(R.id.container));
mSceneResults = Scene.getSceneForLayout(mSceneRoot, R.layout.fragment_search_results,
getActivity());
return view;
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
if (savedInstanceState == null) {
mSceneCurrent = mSceneMessage;
TransitionManager.go(mSceneMessage, mDefaultTransition);
}
}
@Override
public void onResume() {
super.onResume();
if (mBtnInput != null) {
mBtnInput.setClickable(true);
}
}
private void findViews() {
mBtnInput = mSceneRoot.findViewById(R.id.btnInput);
mProgress = mSceneRoot.findViewById(android.R.id.progress);
mListView = (RecyclerView) mSceneRoot.findViewById(android.R.id.list);
mTextViewMessage = (TextView) mSceneRoot.findViewById(android.R.id.message);
}
private void bindViews() {
mBtnInput.setClickable(true);
mBtnInput.setOnClickListener(mOnInputClickListener);
if (mListView != null) {
mListView.clearOnScrollListeners();
mListView.addOnScrollListener(mOnScrollListener);
if (mAdapter != null) {
mListView.setAdapter(mAdapter);
}
}
bindProgress();
}
private void bindProgress() {
if (mProgress != null) {
mProgress.setVisibility(mSearching ? View.VISIBLE : View.GONE);
}
}
private void bindScene() {
if (mSearchResults == null) {
if (mSceneCurrent != mSceneMessage) {
goToScene(mSceneMessage);
} else {
mTextViewMessage.setText(R.string.Search_for_artists_albums_and_songs);
}
} else {
if (areSearchResultsEmpty()) {
if (mSceneCurrent != mSceneMessage) {
goToScene(mSceneMessage);
} else {
if (mSearchQuery == null) {
mSearchQuery = "";
}
mTextViewMessage
.setText(getString(R.string.No_media_found_for_s, mSearchQuery));
}
} else {
if (mSceneCurrent != mSceneResults) {
goToScene(mSceneResults);
} else {
if (mAdapter == null) {
mAdapter = new SearchResultsAdapter(getActivity(), mSearchResults);
mAdapter.setOnItemClickListener(mOnItemClickListener);
mListView.setAdapter(mAdapter);
} else {
mAdapter.setResults(mSearchResults);
}
}
}
}
}
private boolean areSearchResultsEmpty() {
//noinspection SimplifiableIfStatement
if (mSearchResults == null) {
return true;
}
return (mSearchResults.albums == null || mSearchResults.albums.length == 0)
&& (mSearchResults.artists == null || mSearchResults.artists.length == 0)
&& (mSearchResults.tracks == null || mSearchResults.tracks.length == 0);
}
    /**
     * Transitions to {@code scene} unless it is already current. Scroll listeners
     * are detached first because the transition swaps the view hierarchy;
     * {@link TransitionListenerImpl} re-binds everything when the transition ends.
     */
    private void goToScene(@NonNull final Scene scene) {
        if (mSceneCurrent != scene) {
            mSceneCurrent = scene;
            if (mListView != null) {
                mListView.clearOnScrollListeners();
            }
            TransitionManager.go(scene, mDefaultTransition);
        }
    }
    /**
     * EventBus callback delivering search results on the main thread; stops the
     * progress indicator and re-binds the scene with the new results.
     */
    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onEventSearchResults(@NonNull final EventSearchResults results) {
        mSearchResults = results.results;
        mSearching = false;
        bindProgress();
        bindScene();
    }
    /**
     * Launches the platform speech recognizer to capture a search query. If no
     * activity can handle the recognizer intent, shows a "not supported" toast,
     * created lazily and not re-shown while a previous one is still on screen.
     */
    private final View.OnClickListener mOnInputClickListener = v -> {
        final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        final List<ResolveInfo> resolveInfos = getActivity().getPackageManager()
                .queryIntentActivities(intent, 0);
        if (resolveInfos != null && !resolveInfos.isEmpty()) {
            mSearchQuery = null;
            // Disabled until onResume()/onActivityResult() to avoid double launches.
            mBtnInput.setClickable(false);
            startActivityForResult(intent, REQUEST_CODE_SPEECH);
        } else {
            if (mToastSpeechNotSupported == null) {
                mToastSpeechNotSupported = Toast.makeText(getActivity(),
                        R.string.Speech_recognition_not_supported, Toast.LENGTH_LONG);
            }
            // Show only when the previous toast is no longer attached to a window.
            if (mToastSpeechNotSupported.getView() == null
                    || mToastSpeechNotSupported.getView().getWindowToken() == null) {
                mToastSpeechNotSupported.show();
            }
        }
    };
@Override
public void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_CODE_SPEECH && resultCode == Activity.RESULT_OK) {
final List<String> results = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
if (results != null && !results.isEmpty()) {
mSearchQuery = results.get(0);
}
mSearchResults = null;
mSearching = true;
RemoteControl.getInstance().search(mSearchQuery);
bindProgress();
}
// } else if (requestCode == REQUEST_CODE_SPEECH) {
// // TODO remove this hardcode
// mSearchQuery = "Death";
// mSearching = true;
// RemoteControl.getInstance().search(mSearchQuery);
// bindProgress();
// }
}
    /** Scales the voice-input button back in if it is currently hidden. */
    private void showBtnSearch() {
        if (!mBtnSearchVisible) {
            mBtnSearchVisible = true;
            mBtnInput.animate().scaleX(1f).scaleY(1f)
                    .setListener(mAnimatorListenerBtnInputReveal)
                    .start();
        }
    }
    /** Scales the voice-input button out if it is currently visible. */
    private void hideBtnSearch() {
        if (mBtnSearchVisible) {
            mBtnSearchVisible = false;
            mBtnInput.animate().scaleX(0f).scaleY(0f)
                    .setListener(mAnimatorListenerBtnInputHide)
                    .start();
        }
    }
    /** Navigates to the Now Playing screen when hosted by a {@link RootActivity}. */
    private void goToNowPlaying() {
        final Activity activity = getActivity();
        if (activity instanceof RootActivity) {
            ((RootActivity) activity).goToNowPlaying();
        }
    }
    /** Makes the button VISIBLE as soon as the reveal (scale-up) animation starts. */
    private final Animator.AnimatorListener mAnimatorListenerBtnInputReveal
            = new AnimatorListenerAdapter() {

        @Override
        public void onAnimationStart(final Animator animation) {
            super.onAnimationStart(animation);
            if (mBtnInput != null) {
                mBtnInput.setVisibility(View.VISIBLE);
            }
        }
    };
    /** Sets the button GONE once the hide (scale-down) animation has finished. */
    private final Animator.AnimatorListener mAnimatorListenerBtnInputHide
            = new AnimatorListenerAdapter() {

        @Override
        public void onAnimationEnd(final Animator animation) {
            super.onAnimationEnd(animation);
            if (mBtnInput != null) {
                mBtnInput.setVisibility(View.GONE);
            }
        }
    };
    /**
     * Starts playback via {@link RemoteControl} for whichever search result was
     * tapped, then navigates to the Now Playing screen.
     */
    private final SearchResultsAdapter.OnItemClickListener mOnItemClickListener
            = new SearchResultsAdapter.OnItemClickListener() {

        @Override
        public void onAlbumClick(@NonNull final WearSearchData.Album album) {
            RemoteControl.getInstance().playAlbum(album.id);
            goToNowPlaying();
        }

        @Override
        public void onArtistClick(@NonNull final WearSearchData.Artist artist) {
            RemoteControl.getInstance().playArtist(artist.id);
            goToNowPlaying();
        }

        @Override
        public void onTrackClick(@NonNull final WearSearchData.Track[] tracks,
                final long trackId) {
            RemoteControl.getInstance().playTrack(tracks, trackId);
            goToNowPlaying();
        }
    };
private final RecyclerView.OnScrollListener mOnScrollListener
= new RecyclerView.OnScrollListener() {
private final int mThreshold = (int) (1f * Resources.getSystem()
.getDisplayMetrics().density);
private int mScrollState;
@Override
public void onScrollStateChanged(final RecyclerView recyclerView, final int newState) {
super.onScrollStateChanged(recyclerView, newState);
mScrollState = newState;
}
@Override
public void onScrolled(final RecyclerView recyclerView, final int dx, final int dy) {
super.onScrolled(recyclerView, dx, dy);
if (mBtnInput != null && mScrollState == RecyclerView.SCROLL_STATE_DRAGGING) {
int targetVisibility = -1; // -1 for unchanged
if (dy > mThreshold) {
targetVisibility = View.GONE;
} else if (-dy > mThreshold) {
targetVisibility = View.VISIBLE;
}
if (targetVisibility != -1) {
if (targetVisibility == View.VISIBLE) {
showBtnSearch();
} else {
hideBtnSearch();
}
}
}
}
};
    /**
     * Re-acquires and re-binds views after every scene transition, since scene
     * changes inflate a fresh view hierarchy under the scene root.
     */
    private final class TransitionListenerImpl implements Transition.TransitionListener {

        @Override
        public void onTransitionStart(final Transition transition) {
            // No-op.
        }

        @Override
        public void onTransitionEnd(final Transition transition) {
            findViews();
            bindViews();
            bindScene();
        }

        @Override
        public void onTransitionCancel(final Transition transition) {
            // No-op.
        }

        @Override
        public void onTransitionPause(final Transition transition) {
            // No-op.
        }

        @Override
        public void onTransitionResume(final Transition transition) {
            // No-op.
        }
    }
}
| wear/src/main/java/com/doctoror/fuckoffmusicplayer/search/SearchFragment.java | package com.doctoror.fuckoffmusicplayer.search;
import com.doctoror.commons.wear.nano.WearSearchData;
import com.doctoror.fuckoffmusicplayer.R;
import com.doctoror.fuckoffmusicplayer.eventbus.EventSearchResults;
import com.doctoror.fuckoffmusicplayer.remote.RemoteControl;
import com.doctoror.fuckoffmusicplayer.root.RootActivity;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.app.Activity;
import android.app.Fragment;
import android.content.Intent;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.widget.RecyclerView;
import android.transition.AutoTransition;
import android.transition.Scene;
import android.transition.Transition;
import android.transition.TransitionManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import java.util.List;
/**
* Created by Yaroslav Mytkalyk on 22.11.16.
*/
public final class SearchFragment extends Fragment {
private static final int REQUEST_CODE_SPEECH = 1;
private final Transition mDefaultTransition = new AutoTransition();
private WearSearchData.Results mSearchResults;
private Scene mSceneCurrent;
private Scene mSceneMessage;
private Scene mSceneResults;
private ViewGroup mSceneRoot;
private View mBtnInput;
private View mProgress;
private TextView mTextViewMessage;
private RecyclerView mListView;
private SearchResultsAdapter mAdapter;
private boolean mBtnSearchVisible = true;
private boolean mSearching;
private String mSearchQuery;
private Toast mToastSpeechNotSupported;
public SearchFragment() {
mDefaultTransition.addListener(new TransitionListenerImpl());
}
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
EventBus.getDefault().register(this);
}
@Override
public void onDestroy() {
super.onDestroy();
EventBus.getDefault().unregister(this);
}
@Nullable
@Override
public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
final Bundle savedInstanceState) {
final View view = inflater.inflate(R.layout.fragment_search, container, false);
mSceneRoot = (ViewGroup) view.findViewById(R.id.sceneRoot);
mSceneMessage = new Scene(mSceneRoot, mSceneRoot.findViewById(R.id.container));
mSceneResults = Scene.getSceneForLayout(mSceneRoot, R.layout.fragment_search_results,
getActivity());
return view;
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
if (savedInstanceState == null) {
mSceneCurrent = mSceneMessage;
TransitionManager.go(mSceneMessage, mDefaultTransition);
}
}
@Override
public void onResume() {
super.onResume();
if (mBtnInput != null) {
mBtnInput.setClickable(true);
}
}
private void findViews() {
mBtnInput = mSceneRoot.findViewById(R.id.btnInput);
mProgress = mSceneRoot.findViewById(android.R.id.progress);
mListView = (RecyclerView) mSceneRoot.findViewById(android.R.id.list);
mTextViewMessage = (TextView) mSceneRoot.findViewById(android.R.id.message);
}
private void bindViews() {
mBtnInput.setClickable(true);
mBtnInput.setOnClickListener(mOnInputClickListener);
if (mListView != null) {
mListView.clearOnScrollListeners();
mListView.addOnScrollListener(mOnScrollListener);
if (mAdapter != null) {
mListView.setAdapter(mAdapter);
}
}
bindProgress();
}
private void bindProgress() {
if (mProgress != null) {
mProgress.setVisibility(mSearching ? View.VISIBLE : View.GONE);
}
}
private void bindScene() {
if (mSearchResults == null) {
if (mSceneCurrent != mSceneMessage) {
goToScene(mSceneMessage);
} else {
mTextViewMessage.setText(R.string.Search_for_artists_albums_and_songs);
}
} else {
if (areSearchResultsEmpty()) {
if (mSceneCurrent != mSceneMessage) {
goToScene(mSceneMessage);
} else {
if (mSearchQuery == null) {
mSearchQuery = "";
}
mTextViewMessage
.setText(getString(R.string.No_media_found_for_s, mSearchQuery));
}
} else {
if (mSceneCurrent != mSceneResults) {
goToScene(mSceneResults);
} else {
if (mAdapter == null) {
mAdapter = new SearchResultsAdapter(getActivity(), mSearchResults);
mAdapter.setOnItemClickListener(mOnItemClickListener);
mListView.setAdapter(mAdapter);
} else {
mAdapter.setResults(mSearchResults);
}
}
}
}
}
private boolean areSearchResultsEmpty() {
//noinspection SimplifiableIfStatement
if (mSearchResults == null) {
return true;
}
return (mSearchResults.albums == null || mSearchResults.albums.length == 0)
&& (mSearchResults.artists == null || mSearchResults.artists.length == 0)
&& (mSearchResults.tracks == null || mSearchResults.tracks.length == 0);
}
private void goToScene(@NonNull final Scene scene) {
if (mSceneCurrent != scene) {
mSceneCurrent = scene;
if (mListView != null) {
mListView.clearOnScrollListeners();
}
TransitionManager.go(scene, mDefaultTransition);
}
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onEventSearchResults(@NonNull final EventSearchResults results) {
mSearchResults = results.results;
mSearching = false;
bindProgress();
bindScene();
}
private final View.OnClickListener mOnInputClickListener = v -> {
final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
final List<ResolveInfo> resolveInfos = getActivity().getPackageManager()
.queryIntentActivities(intent, 0);
if (resolveInfos != null && !resolveInfos.isEmpty()) {
mSearchQuery = null;
mBtnInput.setClickable(false);
startActivityForResult(intent, REQUEST_CODE_SPEECH);
} else {
if (mToastSpeechNotSupported == null) {
mToastSpeechNotSupported = Toast.makeText(getActivity(),
R.string.Speech_recognition_not_supported, Toast.LENGTH_LONG);
}
if (mToastSpeechNotSupported.getView() == null
|| mToastSpeechNotSupported.getView().getWindowToken() == null) {
mToastSpeechNotSupported.show();
}
}
};
@Override
public void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_CODE_SPEECH && resultCode == Activity.RESULT_OK) {
final List<String> results = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
if (results != null && !results.isEmpty()) {
mSearchQuery = results.get(0);
}
mSearchResults = null;
mSearching = true;
RemoteControl.getInstance().search(mSearchQuery);
bindProgress();
} else if (requestCode == REQUEST_CODE_SPEECH) {
// TODO remove this hardcode
mSearchQuery = "Death";
mSearching = true;
RemoteControl.getInstance().search(mSearchQuery);
bindProgress();
}
}
private void showBtnSearch() {
if (!mBtnSearchVisible) {
mBtnSearchVisible = true;
mBtnInput.animate().scaleX(1f).scaleY(1f)
.setListener(mAnimatorListenerBtnInputReveal)
.start();
}
}
private void hideBtnSearch() {
if (mBtnSearchVisible) {
mBtnSearchVisible = false;
mBtnInput.animate().scaleX(0f).scaleY(0f)
.setListener(mAnimatorListenerBtnInputHide)
.start();
}
}
private void goToNowPlaying() {
final Activity activity = getActivity();
if (activity instanceof RootActivity) {
((RootActivity) activity).goToNowPlaying();
}
}
private final Animator.AnimatorListener mAnimatorListenerBtnInputReveal
= new AnimatorListenerAdapter() {
@Override
public void onAnimationStart(final Animator animation) {
super.onAnimationStart(animation);
if (mBtnInput != null) {
mBtnInput.setVisibility(View.VISIBLE);
}
}
};
private final Animator.AnimatorListener mAnimatorListenerBtnInputHide
= new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(final Animator animation) {
super.onAnimationEnd(animation);
if (mBtnInput != null) {
mBtnInput.setVisibility(View.GONE);
}
}
};
private final SearchResultsAdapter.OnItemClickListener mOnItemClickListener
= new SearchResultsAdapter.OnItemClickListener() {
@Override
public void onAlbumClick(@NonNull final WearSearchData.Album album) {
RemoteControl.getInstance().playAlbum(album.id);
goToNowPlaying();
}
@Override
public void onArtistClick(@NonNull final WearSearchData.Artist artist) {
RemoteControl.getInstance().playArtist(artist.id);
goToNowPlaying();
}
@Override
public void onTrackClick(@NonNull final WearSearchData.Track[] tracks,
final long trackId) {
RemoteControl.getInstance().playTrack(tracks, trackId);
goToNowPlaying();
}
};
private final RecyclerView.OnScrollListener mOnScrollListener
= new RecyclerView.OnScrollListener() {
private final int mThreshold = (int) (1f * Resources.getSystem()
.getDisplayMetrics().density);
private int mScrollState;
@Override
public void onScrollStateChanged(final RecyclerView recyclerView, final int newState) {
super.onScrollStateChanged(recyclerView, newState);
mScrollState = newState;
}
@Override
public void onScrolled(final RecyclerView recyclerView, final int dx, final int dy) {
super.onScrolled(recyclerView, dx, dy);
if (mBtnInput != null && mScrollState == RecyclerView.SCROLL_STATE_DRAGGING) {
int targetVisibility = -1; // -1 for unchanged
if (dy > mThreshold) {
targetVisibility = View.GONE;
} else if (-dy > mThreshold) {
targetVisibility = View.VISIBLE;
}
if (targetVisibility != -1) {
if (targetVisibility == View.VISIBLE) {
showBtnSearch();
} else {
hideBtnSearch();
}
}
}
}
};
private final class TransitionListenerImpl implements Transition.TransitionListener {
@Override
public void onTransitionStart(final Transition transition) {
}
@Override
public void onTransitionEnd(final Transition transition) {
findViews();
bindViews();
bindScene();
}
@Override
public void onTransitionCancel(final Transition transition) {
}
@Override
public void onTransitionPause(final Transition transition) {
}
@Override
public void onTransitionResume(final Transition transition) {
}
}
}
| Commented out hardcoded search results.
| wear/src/main/java/com/doctoror/fuckoffmusicplayer/search/SearchFragment.java | Commented out hardcoded search results. |
|
Java | apache-2.0 | 437774db9291be468d3ffb967e08e5100856062b | 0 | zalando-stups/fullstop,zalando-stups/fullstop,zalando-stups/fullstop | /**
* Copyright (C) 2015 Zalando SE (http://tech.zalando.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zalando.stups.fullstop.plugin.count;
import java.util.HashMap;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.Maps;
/**
 * Counts processed events by marking a per-event-type {@link Meter} registered
 * in the supplied {@link MetricRegistry}. Thread-safe: all access to the meter
 * cache is synchronized (plain {@link HashMap} is not safe for concurrent
 * readers and writers, which the original unsynchronized fast path relied on).
 *
 * @author jbellmann
 */
@Component
public class CountEventsMetric {

    /** Prefix for all meter names registered by this class. */
    private static final String METER = "meter.events.";

    private final MetricRegistry metricRegistry;

    /** Cache of already created meters, keyed by meter name. Guarded by {@code this}. */
    private final Map<String, Meter> eventMeters = new HashMap<String, Meter>();

    @Autowired
    public CountEventsMetric(final MetricRegistry metricRegistry) {
        this.metricRegistry = metricRegistry;
    }

    /** @return a defensive copy of the currently known meters. */
    public Map<String, Meter> getEventMeters() {
        synchronized (this) {
            return Maps.newHashMap(eventMeters);
        }
    }

    /**
     * Returns the meter registered under {@code name}, creating and caching it on
     * first use.
     *
     * @param meters the cache to consult/populate
     * @param name   full meter name including prefix
     * @return the existing or newly created meter, never null
     */
    protected Meter getOrCreateMeter(final Map<String, Meter> meters, final String name) {
        synchronized (this) {
            Meter m = meters.get(name);
            if (m == null) {
                m = metricRegistry.meter(name);
                meters.put(name, m);
            }
            return m;
        }
    }

    /** Marks one occurrence of the given event type. */
    public void markEvent(final String event) {
        getOrCreateMeter(eventMeters, METER + event).mark();
    }
}
| fullstop-plugins/fullstop-count-events-plugin/src/main/java/org/zalando/stups/fullstop/plugin/count/CountEventsMetric.java | /**
* Copyright (C) 2015 Zalando SE (http://tech.zalando.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zalando.stups.fullstop.plugin.count;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.Maps;
/**
* @author jbellmann
*/
@Component
public class CountEventsMetric {
private static final String METER = "meter.events.";
private final MetricRegistry metricRegistry;
private final Map<String, Meter> eventMeters = new HashMap<String, Meter>();
    @Autowired
    public CountEventsMetric(final MetricRegistry metricRegistry) {
        this.metricRegistry = metricRegistry;
        // NOTE(review): this dumps every metric to stdout every 10 seconds — a
        // debugging aid that is noisy in production; consider removing it or
        // making it configurable.
        ConsoleReporter reporter = ConsoleReporter.forRegistry(this.metricRegistry).convertRatesTo(TimeUnit.SECONDS)
                .convertDurationsTo(TimeUnit.MILLISECONDS).build();
        reporter.start(10, TimeUnit.SECONDS);
    }
public Map<String, Meter> getEventMeters() {
return Maps.newHashMap(eventMeters);
}
protected Meter getOrCreateMeter(final Map<String, Meter> meters, final String name) {
Meter m = meters.get(name);
if (m != null) {
return m;
}
synchronized (this) {
m = meters.get(name);
if (m != null) {
return m;
} else {
Meter created = metricRegistry.meter(name);
meters.put(name, created);
return created;
}
}
}
public void markEvent(final String event) {
getOrCreateMeter(eventMeters, METER + event).mark();
}
}
| deactivate console-reporter | fullstop-plugins/fullstop-count-events-plugin/src/main/java/org/zalando/stups/fullstop/plugin/count/CountEventsMetric.java | deactivate console-reporter |
|
Java | apache-2.0 | 89459dbdd831d539f07ff505bff94ce11bf6b295 | 0 | ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma | /*
* The Gemma project
*
* Copyright (c) 2006 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.search;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.PostConstruct;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheException;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import net.sf.ehcache.config.CacheConfiguration;
import net.sf.ehcache.config.NonstopConfiguration;
import net.sf.ehcache.config.PersistenceConfiguration;
import net.sf.ehcache.config.PersistenceConfiguration.Strategy;
import net.sf.ehcache.config.TerracottaConfiguration;
import net.sf.ehcache.config.TimeoutBehaviorConfiguration;
import net.sf.ehcache.config.TimeoutBehaviorConfiguration.TimeoutBehaviorType;
import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.queryParser.QueryParser;
import org.compass.core.Compass;
import org.compass.core.CompassCallback;
import org.compass.core.CompassException;
import org.compass.core.CompassHighlightedText;
import org.compass.core.CompassHits;
import org.compass.core.CompassQuery;
import org.compass.core.CompassSession;
import org.compass.core.CompassTemplate;
import org.compass.core.mapping.CompassMapping;
import org.compass.core.mapping.Mapping;
import org.compass.core.mapping.ResourceMapping;
import org.compass.core.mapping.osem.ClassMapping;
import org.compass.core.mapping.osem.ComponentMapping;
import org.compass.core.spi.InternalCompassSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import ubic.basecode.ontology.model.OntologyIndividual;
import ubic.basecode.ontology.model.OntologyTerm;
import ubic.basecode.util.BatchIterator;
import ubic.gemma.annotation.reference.BibliographicReferenceService;
import ubic.gemma.association.phenotype.PhenotypeAssociationManagerService;
import ubic.gemma.expression.experiment.service.ExpressionExperimentService;
import ubic.gemma.expression.experiment.service.ExpressionExperimentSetService;
import ubic.gemma.genome.gene.service.GeneSearchService;
import ubic.gemma.genome.gene.service.GeneService;
import ubic.gemma.genome.gene.service.GeneSetService;
import ubic.gemma.model.analysis.expression.ExpressionExperimentSet;
import ubic.gemma.model.common.Auditable;
import ubic.gemma.model.common.auditAndSecurity.AuditAction;
import ubic.gemma.model.common.auditAndSecurity.AuditEvent;
import ubic.gemma.model.common.auditAndSecurity.AuditTrailService;
import ubic.gemma.model.common.auditAndSecurity.UserQuery;
import ubic.gemma.model.common.description.BibliographicReference;
import ubic.gemma.model.common.description.BibliographicReferenceValueObject;
import ubic.gemma.model.common.description.Characteristic;
import ubic.gemma.model.common.description.CharacteristicService;
import ubic.gemma.model.common.description.VocabCharacteristic;
import ubic.gemma.model.common.search.SearchSettings;
import ubic.gemma.model.common.search.SearchSettingsImpl;
import ubic.gemma.model.common.search.SearchSettingsValueObject;
import ubic.gemma.model.expression.arrayDesign.ArrayDesign;
import ubic.gemma.model.expression.arrayDesign.ArrayDesignService;
import ubic.gemma.model.expression.biomaterial.BioMaterial;
import ubic.gemma.model.expression.biomaterial.Treatment;
import ubic.gemma.model.expression.designElement.CompositeSequence;
import ubic.gemma.model.expression.designElement.CompositeSequenceService;
import ubic.gemma.model.expression.experiment.ExpressionExperiment;
import ubic.gemma.model.expression.experiment.FactorValue;
import ubic.gemma.model.genome.Gene;
import ubic.gemma.model.genome.Taxon;
import ubic.gemma.model.genome.TaxonDao;
import ubic.gemma.model.genome.biosequence.BioSequence;
import ubic.gemma.model.genome.biosequence.BioSequenceService;
import ubic.gemma.model.genome.gene.GeneProductService;
import ubic.gemma.model.genome.gene.GeneSet;
import ubic.gemma.model.genome.gene.phenotype.valueObject.CharacteristicValueObject;
import ubic.gemma.model.genome.gene.phenotype.valueObject.GeneEvidenceValueObject;
import ubic.gemma.model.genome.sequenceAnalysis.BioSequenceValueObject;
import ubic.gemma.ontology.OntologyService;
import ubic.gemma.util.EntityUtils;
import ubic.gemma.util.ReflectionUtil;
import ubic.gemma.util.Settings;
/**
* This service is used for performing searches using free text or exact matches to items in the database. <h2>
* Implementation notes</h2>
* <p>
* Internally, there are generally two kinds of searches performed, precise database searches looking for exact matches
* in the database and compass/lucene searches which look for matches in the stored index.
* <p>
* To add more dependencies to this Service edit the applicationContext-search.xml
*
* @author klc
* @author paul
* @author keshav
* @version $Id$
*/
@Component
public class SearchServiceImpl implements SearchService {
private static final String ONTOLOGY_CHILDREN_CACHE_NAME = "OntologyChildrenCache";
/**
* Penalty applied to all 'index' hits
*/
private static final double COMPASS_HIT_SCORE_PENALTY_FACTOR = 0.9;
/**
* Key for internal in-memory on-the-fly indexes
*/
// private static final String INDEX_KEY = "content";
/**
* Penalty applied to scores on hits for entities that derive from an association. For example, if a hit to an EE
* came from text associated with one of its biomaterials, the score is penalized by this amount.
*/
private static final double INDIRECT_DB_HIT_PENALTY = 0.8;
private static Log log = LogFactory.getLog( SearchServiceImpl.class.getName() );
/**
*
*/
// private static final int MAX_IN_MEMORY_INDEX_HITS = 1000;
private static final int MINIMUM_EE_QUERY_LENGTH = 3;
private static final int MINIMUM_STRING_LENGTH_FOR_FREE_TEXT_SEARCH = 2;
private static final String NCBI_GENE = "ncbi_gene";
/**
* How long after creation before an object is evicted, no matter what.
*/
private static final int ONTOLOGY_CACHE_TIME_TO_DIE = 10000;
/**
* How long an item in the cache lasts when it is not accessed.
*/
private static final int ONTOLOGY_CACHE_TIME_TO_IDLE = 3600;
/**
* How many term children can stay in memory
*/
private static final int ONTOLOGY_INFO_CACHE_SIZE = 30000;
/**
* If fewer than this number of experiments are returned from the a search of experiment characteristics, then
* search for experiments indirectly as well (ex: by finding bioMatierials tagged with the characteristicsand
* getting the experiments associated with them ). See also MAX_CHARACTERISTIC_SEARCH_RESULTS.
*/
private static final int SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS = 100;
@Autowired
private ArrayDesignService arrayDesignService;
@Autowired
private BibliographicReferenceService bibliographicReferenceService;
@Autowired
private BioSequenceService bioSequenceService;
@Autowired
private CacheManager cacheManager;
@Autowired
private CharacteristicService characteristicService;
// direct children of terms.
private Cache childTermCache;
@Autowired
@Qualifier("compassArray")
private Compass compassArray;
@Autowired
@Qualifier("compassBibliographic")
private Compass compassBibliographic;
@Autowired
@Qualifier("compassBiosequence")
private Compass compassBiosequence;
@Autowired
@Qualifier("compassExperimentSet")
private Compass compassExperimentSet;
@Autowired
@Qualifier("compassExpression")
private Compass compassExpression;
@Autowired
@Qualifier("compassGene")
private Compass compassGene;
@Autowired
@Qualifier("compassGeneSet")
private Compass compassGeneSet;
@Autowired
@Qualifier("compassProbe")
private Compass compassProbe;
@Autowired
private CompositeSequenceService compositeSequenceService;
@Autowired
private ExpressionExperimentSetService experimentSetService;
@Autowired
private ExpressionExperimentService expressionExperimentService;
@Autowired
private GeneSearchService geneSearchService;
@Autowired
private GeneProductService geneProductService;
@Autowired
private GeneService geneService;
@Autowired
private GeneSetService geneSetService;
@Autowired
private OntologyService ontologyService;
@Autowired
private PhenotypeAssociationManagerService phenotypeAssociationManagerService;
@Autowired
private TaxonDao taxonDao;
@Autowired
private AuditTrailService auditTrailService;
private static final int MAX_LUCENE_HITS = 750;
private HashMap<String, Taxon> nameToTaxonMap = new LinkedHashMap<String, Taxon>();
/*
* (non-Javadoc)
*
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
*/
    /**
     * Creates and registers the ontology-children cache on startup, then builds
     * the taxon-name lookup map. The cache is configured either for a clustered
     * (Terracotta) deployment or as a plain in-memory cache, depending on the
     * {@code gemma.cache.clustered} setting. Idempotent: returns early if the
     * cache already exists.
     */
    @PostConstruct
    void initializeSearchService() throws Exception {
        try {
            // Another context may already have registered the cache.
            if ( cacheManager.cacheExists( ONTOLOGY_CHILDREN_CACHE_NAME ) ) {
                return;
            }
            boolean terracottaEnabled = Settings.getBoolean( "gemma.cache.clustered", false );
            int diskExpiryThreadIntervalSeconds = 600;
            int maxElementsOnDisk = 10000;
            boolean terracottaCoherentReads = false;
            boolean clearOnFlush = false;

            if ( terracottaEnabled ) {
                // Clustered cache: serialized values, non-stop (NOOP on timeout).
                CacheConfiguration config = new CacheConfiguration( ONTOLOGY_CHILDREN_CACHE_NAME,
                        ONTOLOGY_INFO_CACHE_SIZE );
                config.setStatistics( false );
                config.setMemoryStoreEvictionPolicy( MemoryStoreEvictionPolicy.LRU.toString() );
                config.addPersistence( new PersistenceConfiguration().strategy( Strategy.NONE ) );
                config.setEternal( true );
                config.setTimeToIdleSeconds( ONTOLOGY_CACHE_TIME_TO_IDLE );
                config.setMaxElementsOnDisk( maxElementsOnDisk );
                config.addTerracotta( new TerracottaConfiguration() );
                config.getTerracottaConfiguration().setCoherentReads( terracottaCoherentReads );
                config.clearOnFlush( clearOnFlush );
                config.setTimeToLiveSeconds( ONTOLOGY_CACHE_TIME_TO_DIE );
                config.getTerracottaConfiguration().setClustered( true );
                config.getTerracottaConfiguration().setValueMode( "SERIALIZATION" );
                NonstopConfiguration nonstopConfiguration = new NonstopConfiguration();
                TimeoutBehaviorConfiguration tobc = new TimeoutBehaviorConfiguration();
                tobc.setType( TimeoutBehaviorType.NOOP.getTypeName() );
                nonstopConfiguration.addTimeoutBehavior( tobc );
                config.getTerracottaConfiguration().addNonstop( nonstopConfiguration );
                childTermCache = new Cache( config );
                // childTermCache = new Cache( "OntologyChildrenCache", ONTOLOGY_INFO_CACHE_SIZE,
                // MemoryStoreEvictionPolicy.LFU, false, null, false, ONTOLOGY_CACHE_TIME_TO_DIE,
                // ONTOLOGY_CACHE_TIME_TO_IDLE, false, diskExpiryThreadIntervalSeconds, null, null,
                // maxElementsOnDisk, 10, clearOnFlush, terracottaEnabled, "SERIALIZATION",
                // terracottaCoherentReads );
            } else {
                // Single-node cache: LFU eviction, no disk persistence config beyond defaults.
                childTermCache = new Cache( ONTOLOGY_CHILDREN_CACHE_NAME, ONTOLOGY_INFO_CACHE_SIZE,
                        MemoryStoreEvictionPolicy.LFU, false, null, false, ONTOLOGY_CACHE_TIME_TO_DIE,
                        ONTOLOGY_CACHE_TIME_TO_IDLE, false, diskExpiryThreadIntervalSeconds, null );
            }
            cacheManager.addCache( childTermCache );
            // Re-read from the manager so we hold the managed (decorated) instance.
            childTermCache = cacheManager.getCache( ONTOLOGY_CHILDREN_CACHE_NAME );
        } catch ( CacheException e ) {
            throw new RuntimeException( e );
        }

        initializeNameToTaxonMap();
    }
private void initializeNameToTaxonMap() {
Collection<Taxon> taxonCollection = ( Collection<Taxon> ) taxonDao.loadAll();
for ( Taxon taxon : taxonCollection ) {
if ( taxon.getScientificName() != null )
nameToTaxonMap.put( taxon.getScientificName().trim().toLowerCase(), taxon );
if ( taxon.getCommonName() != null )
nameToTaxonMap.put( taxon.getCommonName().trim().toLowerCase(), taxon );
if ( taxon.getAbbreviation() != null )
nameToTaxonMap.put( taxon.getAbbreviation().trim().toLowerCase(), taxon );
}
// loop through again breaking up multi-word taxon database names and handling some special cases(e.g. salmon,
// rainbow are common to multiple taxa)
// doing this is a separate loop so that these names take lower precedence when matching than the full terms in
// the generated keySet
// some of the special cases the section below may be unnecessary, or more may need to be added
for ( Taxon taxon : taxonCollection ) {
String[] terms;
if ( taxon.getScientificName() != null ) {
terms = taxon.getScientificName().split( "\\s+" );
if ( terms.length > 1 ) {
for ( String s : terms ) {
if ( !s.equalsIgnoreCase( "Oncorhynchus" ) ) {
nameToTaxonMap.put( s.toLowerCase(), taxon );
}
}
}
}
if ( StringUtils.isNotBlank( taxon.getCommonName() ) ) {
if ( taxon.getCommonName().equalsIgnoreCase( "salmonid" ) ) {
nameToTaxonMap.put( "salmon", taxon );
}
terms = taxon.getCommonName().split( "\\s+" );
if ( terms.length > 1 ) {
for ( String s : terms ) {
if ( !s.equalsIgnoreCase( "salmon" ) && !s.equalsIgnoreCase( "pink" )
&& !s.equalsIgnoreCase( "rainbow" ) ) {
nameToTaxonMap.put( s.toLowerCase(), taxon );
}
}
}
}
}
}
@Override
public Map<Class<?>, List<SearchResult>> ajaxSearch( SearchSettingsValueObject settingsValueObject ) {
SearchSettings settings = SearchSettingsValueObject.toEntity( settingsValueObject );
return this.search( settings );
}
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#search(ubic.gemma.search.SearchSettings)
*/
@Override
public Map<Class<?>, List<SearchResult>> search( SearchSettings settings ) {
Map<Class<?>, List<SearchResult>> searchResults = new HashMap<Class<?>, List<SearchResult>>();
try {
searchResults = this.search( settings, true, false );
} catch ( org.compass.core.engine.SearchEngineQueryParseException qpe ) {
log.error( "Query parse Error: " + settings + "; message=" + qpe.getMessage(), qpe );
} catch ( Exception e ) {
log.error( "Search error on settings: " + settings + "; message=" + e.getMessage(), e );
}
return searchResults;
}
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#search(ubic.gemma.search.SearchSettings)
*/
@Override
public Map<Class<?>, List<SearchResult>> speedSearch( SearchSettings settings ) {
Map<Class<?>, List<SearchResult>> searchResults = new HashMap<>();
try {
searchResults = this.search( settings, true, true );
} catch ( org.compass.core.engine.SearchEngineQueryParseException qpe ) {
log.error( "Query parse Error: " + settings + "; message=" + qpe.getMessage(), qpe );
} catch ( Exception e ) {
log.error( "Search error on settings: " + settings + "; message=" + e.getMessage(), e );
}
return searchResults;
}
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#search(ubic.gemma.search.SearchSettings)
*/
@Override
public List<?> search( SearchSettings settings, Class<?> resultClass ) {
Map<Class<?>, List<SearchResult>> searchResults = this.search( settings );
List<Object> resultObjects = new ArrayList<Object>();
List<SearchResult> searchResultObjects = searchResults.get( resultClass );
if ( searchResultObjects == null ) return resultObjects;
for ( SearchResult sr : searchResultObjects ) {
resultObjects.add( sr.getResultObject() );
}
return resultObjects;
}
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#search(ubic.gemma.search.SearchSettings, boolean)
*/
@Override
public Map<Class<?>, List<SearchResult>> search( SearchSettings settings, boolean fillObjects,
boolean webSpeedSearch ) {
if ( StringUtils.isBlank( settings.getTermUri() ) && !settings.getQuery().startsWith( "http://" ) ) {
return generalSearch( settings, fillObjects, webSpeedSearch );
}
// we only attempt an ontology search if the uri looks remotely like a url.
return ontologyUriSearch( settings );
}
    /**
     * Handle a search where the query (or the explicit termUri) is an ontology URI; this includes NCBI gene uris.
     * Entities 'tagged' with the term -- or with any of its child terms, when the ontology is loaded -- are returned.
     *
     * @param settings search settings; settings.getTermUri() is used if set, otherwise settings.getQuery()
     * @return results keyed by result class, if the settings.termUri is populated. This includes gene uris. Empty if
     *         the query does not look like a URI.
     */
    private Map<Class<?>, List<SearchResult>> ontologyUriSearch( SearchSettings settings ) {
        Map<Class<?>, List<SearchResult>> results = new HashMap<Class<?>, List<SearchResult>>();
        // 1st check to see if the query is a URI (from an ontology).
        // Do this by seeing if we can find it in the loaded ontologies.
        // Escape with general utilities because might not be doing a lucene backed search. (just a hibernate one).
        String termUri = settings.getTermUri();
        if ( StringUtils.isBlank( termUri ) ) {
            termUri = settings.getQuery();
        }
        // NOTE(review): only "http://" is recognized; an https uri falls through with no results -- confirm intended.
        if ( !termUri.startsWith( "http://" ) ) {
            return results;
        }
        OntologyTerm matchingTerm = null;
        String uriString = null;
        uriString = StringEscapeUtils.escapeJava( StringUtils.strip( termUri ) );
        if ( StringUtils.containsIgnoreCase( uriString, NCBI_GENE ) ) {
            // Perhaps is a valid gene URL. Want to search for the gene in gemma.
            // 1st get objects tagged with the given gene identifier
            Collection<Class<?>> classesToFilterOn = new HashSet<Class<?>>();
            classesToFilterOn.add( ExpressionExperiment.class );
            Collection<Characteristic> foundCharacteristics = characteristicService.findByUri( classesToFilterOn,
                    uriString );
            Map<Characteristic, Object> parentMap = characteristicService.getParents( classesToFilterOn,
                    foundCharacteristics );
            Collection<SearchResult> characteristicOwnerResults = filterCharacteristicOwnersByClass( classesToFilterOn,
                    parentMap );
            if ( !characteristicOwnerResults.isEmpty() ) {
                results.put( ExpressionExperiment.class, new ArrayList<SearchResult>() );
                results.get( ExpressionExperiment.class ).addAll( characteristicOwnerResults );
            }
            if ( settings.getSearchGenes() ) {
                // Get the gene itself, identified by the trailing path segment of the uri.
                String ncbiAccessionFromUri = StringUtils.substringAfterLast( uriString, "/" );
                Gene g = null;
                try {
                    g = geneService.findByNCBIId( Integer.parseInt( ncbiAccessionFromUri ) );
                } catch ( NumberFormatException e ) {
                    // ok - not a numeric NCBI accession after all; skip the gene lookup.
                }
                if ( g != null ) {
                    results.put( Gene.class, new ArrayList<SearchResult>() );
                    results.get( Gene.class ).add( new SearchResult( g ) );
                }
            }
            return results;
        }
        /*
         * Not searching for a gene.
         */
        Collection<SearchResult> matchingResults;
        Collection<Class<?>> classesToSearch = new HashSet<Class<?>>();
        if ( settings.getSearchExperiments() ) {
            classesToSearch.add( ExpressionExperiment.class ); // not sure ...
            classesToSearch.add( BioMaterial.class );
            classesToSearch.add( FactorValue.class );
        }
        // this doesn't seem to be implemented yet, LiteratureEvidence and GenericEvidence aren't handled in the
        // fillValueObjects method downstream
        /*
         * if ( settings.getSearchPhenotypes() ) { classesToSearch.add( PhenotypeAssociation.class ); }
         */
        matchingTerm = this.ontologyService.getTerm( uriString );
        if ( matchingTerm == null || matchingTerm.getUri() == null ) {
            /*
             * Maybe the ontology isn't loaded. Look anyway, matching the raw uri against stored characteristics.
             */
            Map<Characteristic, Object> parentMap = characteristicService.getParents( classesToSearch,
                    characteristicService.findByUri( classesToSearch, uriString ) );
            matchingResults = filterCharacteristicOwnersByClass( classesToSearch, parentMap );
        } else {
            log.info( "Found ontology term: " + matchingTerm );
            // Was a URI from a loaded ontology so get the children too.
            Collection<OntologyTerm> terms2Search4 = matchingTerm.getChildren( true );
            terms2Search4.add( matchingTerm );
            matchingResults = this.databaseCharacteristicExactUriSearchForOwners( classesToSearch, terms2Search4 );
        }
        // Group the hits by their result class.
        for ( SearchResult searchR : matchingResults ) {
            if ( results.containsKey( searchR.getResultClass() ) ) {
                results.get( searchR.getResultClass() ).add( searchR );
            } else {
                List<SearchResult> rs = new ArrayList<SearchResult>();
                rs.add( searchR );
                results.put( searchR.getResultClass(), rs );
            }
        }
        return results;
    }
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#searchExpressionExperiments(java.lang.String, java.lang.Long)
*/
@Override
public Collection<Long> searchExpressionExperiments( String query, Long taxonId ) {
Taxon taxon = taxonDao.load( taxonId );
Collection<Long> eeIds = new HashSet<Long>();
if ( StringUtils.isNotBlank( query ) ) {
if ( query.length() < MINIMUM_EE_QUERY_LENGTH ) return eeIds;
// Initial list
List<SearchResult> results = this.search( SearchSettingsImpl.expressionExperimentSearch( query ), false,
false ).get( ExpressionExperiment.class );
for ( SearchResult result : results ) {
eeIds.add( result.getId() );
}
// Filter by taxon
if ( taxon != null ) {
Collection<Long> eeIdsToKeep = new HashSet<Long>();
Collection<ExpressionExperiment> ees = expressionExperimentService.findByTaxon( taxon );
for ( ExpressionExperiment ee : ees ) {
if ( eeIds.contains( ee.getId() ) ) eeIdsToKeep.add( ee.getId() );
}
eeIds.retainAll( eeIdsToKeep );
}
} else {
Collection<ExpressionExperiment> ees = ( taxon != null ) ? expressionExperimentService.findByTaxon( taxon )
: expressionExperimentService.loadAll();
for ( ExpressionExperiment ee : ees ) {
eeIds.add( ee.getId() );
}
}
return eeIds;
}
/**
* Add results.
*
* @param rawResults To add to
* @param newResults To be added
*/
private void accreteResults( List<SearchResult> rawResults, Collection<SearchResult> newResults ) {
for ( SearchResult sr : newResults ) {
if ( !rawResults.contains( sr ) ) {
/*
* We do this because we don't want to clobber results, when the same object comes up more than once in
* different searches. FIXME - perhaps check if the score of the existing one is lower?
*/
rawResults.add( sr );
}
}
}
/**
* Returns children one step down.
*
* @param term starting point
*/
private Collection<OntologyTerm> getDirectChildTerms( OntologyTerm term ) {
String uri = term.getUri();
/*
* getChildren can be very slow for 'high-level' classes like "neoplasm", so we use a cache.
*/
Collection<OntologyTerm> children = null;
if ( StringUtils.isBlank( uri ) ) {
// shouldn't happen, but just in case
if ( log.isDebugEnabled() ) log.debug( "Blank uri for " + term );
}
Element cachedChildren = this.childTermCache.get( uri );
// log.debug("Getting children of " + term);
if ( cachedChildren == null ) {
try {
children = term.getChildren( true );
childTermCache.put( new Element( uri, children ) );
} catch ( com.hp.hpl.jena.ontology.ConversionException ce ) {
log.warn( "getting children for term: " + term
+ " caused com.hp.hpl.jena.ontology.ConversionException. " + ce.getMessage() );
}
} else {
children = ( Collection<OntologyTerm> ) cachedChildren.getObjectValue();
}
return children;
}
    /**
     * A general search for array designs.
     * <p>
     * This search does both a database search and a compass search. It also contains an underlying
     * {@link CompositeSequence} search, returning the {@link ArrayDesign} collection for the given composite sequence
     * search string (the returned collection of array designs does not contain duplicates).
     *
     * @param settings search settings; the query is matched against short name, name, alternate names and manufacturer
     * @param probeResults Collection of results from a previous CompositeSequence search. Can be null; otherwise used
     *        to avoid a second search for probes. The array designs for the probes are added to the final results.
     * @return array design search results
     */
    private Collection<SearchResult> arrayDesignSearch( SearchSettings settings, Collection<SearchResult> probeResults ) {
        StopWatch watch = startTiming();
        String searchString = settings.getQuery();
        Collection<SearchResult> results = new HashSet<SearchResult>();
        // Exact short-name match takes precedence over name matches; both score 1.0.
        ArrayDesign shortNameResult = arrayDesignService.findByShortName( searchString );
        if ( shortNameResult != null ) {
            results.add( new SearchResult( shortNameResult, 1.0 ) );
        } else {
            Collection<ArrayDesign> nameResult = arrayDesignService.findByName( searchString );
            if ( nameResult != null ) for ( ArrayDesign ad : nameResult ) {
                results.add( new SearchResult( ad, 1.0 ) );
            }
        }
        // Alternate-name and manufacturer matches score slightly lower (0.9).
        Collection<ArrayDesign> altNameResults = arrayDesignService.findByAlternateName( searchString );
        for ( ArrayDesign arrayDesign : altNameResults ) {
            results.add( new SearchResult( arrayDesign, 0.9 ) );
        }
        Collection<ArrayDesign> manufacturerResults = arrayDesignService.findByManufacturer( searchString );
        for ( ArrayDesign arrayDesign : manufacturerResults ) {
            results.add( new SearchResult( arrayDesign, 0.9 ) );
        }
        results.addAll( compassArrayDesignSearch( settings ) );
        results.addAll( databaseArrayDesignSearch( settings ) );
        Collection<SearchResult> probes = null;
        if ( probeResults == null ) {
            probes = compassCompositeSequenceSearch( settings );
        } else {
            probes = probeResults;
        }
        // NOTE(review): this adds the probe SearchResult itself (a CompositeSequence hit), not the probe's array
        // design, despite the javadoc above -- confirm whether downstream code resolves it to the array design.
        for ( SearchResult r : probes ) {
            CompositeSequence cs = ( CompositeSequence ) r.getResultObject();
            if ( cs.getArrayDesign() == null ) // This might happen as compass
                // might not have indexed the AD
                // for the CS
                continue;
            results.add( r );
        }
        watch.stop();
        if ( watch.getTime() > 1000 )
            log.info( "Array Design search for '" + settings + "' took " + watch.getTime() + " ms" );
        return results;
    }
/**
* *
*
* @param searchString
* @param previousGeneSearchResults Can be null, otherwise used to avoid a second search for genes. The biosequences
* for the genes are added to the final results.
* @return
*/
private Collection<SearchResult> bioSequenceSearch( SearchSettings settings,
Collection<SearchResult> previousGeneSearchResults ) {
StopWatch watch = startTiming();
Collection<SearchResult> searchResults = new HashSet<SearchResult>();
searchResults.addAll( compassBioSequenceSearch( settings, previousGeneSearchResults ) );
searchResults.addAll( databaseBioSequenceSearch( settings ) );
watch.stop();
if ( watch.getTime() > 1000 )
log.info( "Biosequence search for '" + settings + "' took " + watch.getTime() + " ms "
+ searchResults.size() + " results." );
return searchResults;
}
    /**
     * Find expression experiments by characteristics attached to the experiment itself or to its factor values,
     * biomaterials or treatments; indirect hits are mapped back to their owning experiment with a score penalty
     * (INDIRECT_DB_HIT_PENALTY). Entity types are polled in order of relevance and polling stops once
     * SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS hits have accumulated. "OR" clauses in the query are split
     * here; "AND" is handled further down in characteristicSearchWithChildren.
     *
     * @param settings search settings (only the query string is used here)
     * @return expression-experiment search results
     */
    private Collection<SearchResult> characteristicExpressionExperimentSearch( final SearchSettings settings ) {
        Collection<SearchResult> results = new HashSet<SearchResult>();
        Collection<Class<?>> classToSearch = new ArrayList<Class<?>>( 1 ); // this is a collection because of the API
        // for characteristicService; could add
        // findByUri(Class<?>...)
        // order matters.
        Queue<Class<?>> orderedClassesToSearch = new LinkedList<Class<?>>();
        orderedClassesToSearch.add( ExpressionExperiment.class );
        orderedClassesToSearch.add( FactorValue.class );
        orderedClassesToSearch.add( BioMaterial.class );
        orderedClassesToSearch.add( Treatment.class );
        Collection<SearchResult> characterSearchResults = new HashSet<SearchResult>();
        while ( characterSearchResults.size() < SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                && !orderedClassesToSearch.isEmpty() ) {
            classToSearch.clear();
            classToSearch.add( orderedClassesToSearch.poll() );
            // We handle the OR clauses here.
            String[] subclauses = settings.getQuery().split( " OR " );
            for ( String subclause : subclauses ) {
                /*
                 * Note that the AND is applied only within one entity type. The fix would be to apply AND at this
                 * level.
                 */
                Collection<SearchResult> classResults = characteristicSearchWithChildren( classToSearch, subclause );
                if ( !classResults.isEmpty() ) {
                    String msg = "Found " + classResults.size() + " " + classToSearch.iterator().next().getSimpleName()
                            + " results from characteristic search.";
                    if ( characterSearchResults.size() >= SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS ) {
                        msg += " Total found > " + SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                                + ", will not search for more entities.";
                    }
                    log.info( msg );
                }
                characterSearchResults.addAll( classResults );
            }
        }
        StopWatch watch = new StopWatch();
        watch.start();
        // filter and get parents: bucket the raw hits by entity type so indirect ones can be batch-resolved.
        int numEEs = 0;
        Collection<BioMaterial> biomaterials = new HashSet<BioMaterial>();
        Collection<FactorValue> factorValues = new HashSet<FactorValue>();
        Collection<Treatment> treatments = new HashSet<Treatment>();
        // FIXME use this. We lose track of which object went with which EE (except for direct hits)
        // Map<Object, String> highlightedText = new HashMap<>();
        for ( SearchResult sr : characterSearchResults ) {
            Class<?> resultClass = sr.getResultClass();
            // highlightedText.put( sr.getResultObject(), sr.getHighlightedText() );
            if ( ExpressionExperiment.class.isAssignableFrom( resultClass ) ) {
                // Direct hit on the experiment itself.
                sr.setHighlightedText( sr.getHighlightedText() + " (characteristic)" );
                results.add( sr );
                numEEs++;
            } else if ( BioMaterial.class.isAssignableFrom( resultClass ) ) {
                biomaterials.add( ( BioMaterial ) sr.getResultObject() );
            } else if ( FactorValue.class.isAssignableFrom( resultClass ) ) {
                factorValues.add( ( FactorValue ) sr.getResultObject() );
            } else if ( Treatment.class.isAssignableFrom( resultClass ) ) {
                treatments.add( ( Treatment ) sr.getResultObject() );
            }
        }
        /*
         * Much faster to batch it...but we loose track of which search result came from which, so we put generic
         * highlighted text.
         */
        if ( biomaterials.size() > 0 ) {
            Collection<ExpressionExperiment> ees = expressionExperimentService.findByBioMaterials( biomaterials );
            for ( ExpressionExperiment ee : ees ) {
                results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "BioMaterial characteristic" ) );
            }
        }
        if ( factorValues.size() > 0 ) {
            Collection<ExpressionExperiment> ees = expressionExperimentService.findByFactorValues( factorValues );
            for ( ExpressionExperiment ee : ees ) {
                if ( log.isDebugEnabled() ) log.debug( ee );
                results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "Factor characteristic" ) );
            }
        }
        if ( treatments.size() > 0 ) {
            // Treatment hits are currently not mapped back to experiments.
            log.info( "Not processing treatments, but hits were found" );
        }
        if ( log.isDebugEnabled() ) {
            log.debug( "ExpressionExperiment search: " + settings + " -> " + results.size() + " characteristic hits" );
        }
        if ( watch.getTime() > 1000 ) {
            log.info( "Retrieving " + results.size() + " experiments from " + characterSearchResults.size()
                    + " retrieved characteristics took " + watch.getTime() + " ms" );
            log.info( "Breakdown: " + numEEs + " via direct association with EE; " + biomaterials.size()
                    + " via association with Biomaterial; " + factorValues.size() + " via experimental design" );
        }
        return results;
    }
    /**
     * Search for the query in ontologies, including items that are associated with children of matching query terms.
     * That is, 'brain' should return entities tagged as 'hippocampus'. This method will return results only up to
     * MAX_CHARACTERISTIC_SEARCH_RESULTS. It can handle AND in searches, so Parkinson's AND neuron finds items tagged
     * with both of those terms. The use of OR is handled by the caller.
     *
     * @param classes Classes of characteristic-bound entities. For example, to get matching characteristics of
     *        ExpressionExperiments, pass ExpressionExperiments.class in this collection parameter.
     * @param query the (single OR-clause) query; may contain " AND "-separated subterms that are intersected
     * @return SearchResults of CharcteristicObjects. Typically to be useful one needs to retrieve the 'parents'
     *         (entities which have been 'tagged' with the term) of those Characteristics
     */
    private Collection<SearchResult> characteristicSearchWithChildren( Collection<Class<?>> classes, String query ) {
        StopWatch timer = startTiming();
        /*
         * The tricky part here is if the user has entered a boolean query. If they put in
         *
         * Parkinson's disease AND neuron
         *
         * Then we want to eventually return entities that are associated with both. We don't expect to find single
         * characteristics that match both.
         *
         * But if they put in
         *
         * Parkinson's disease
         *
         * We don't want to do two queries.
         */
        List<String> subparts = Arrays.asList( query.split( " AND " ) );
        // we would have to first deal with the separate queries, and then apply the logic.
        Collection<SearchResult> allResults = new HashSet<SearchResult>();
        log.info( "Starting characteristic search: " + query + " for type=" + StringUtils.join( classes, "," ) );
        for ( String rawTerm : subparts ) {
            String trimmed = StringUtils.strip( rawTerm );
            if ( StringUtils.isBlank( trimmed ) ) {
                continue;
            }
            Collection<SearchResult> subqueryResults = characteristicSearchTerm( classes, trimmed );
            if ( allResults.isEmpty() ) {
                // First subterm: seed the result set.
                allResults.addAll( subqueryResults );
            } else {
                // this is our Intersection operation (AND semantics).
                allResults.retainAll( subqueryResults );
                // aggregate the highlighted text from this subquery onto the surviving results.
                Map<SearchResult, String> highlights = new HashMap<>();
                for ( SearchResult sqr : subqueryResults ) {
                    highlights.put( sqr, sqr.getHighlightedText() );
                }
                for ( SearchResult ar : allResults ) {
                    String k = highlights.get( ar );
                    if ( StringUtils.isNotBlank( k ) ) {
                        String highlightedText = ar.getHighlightedText();
                        if ( StringUtils.isBlank( highlightedText ) ) {
                            ar.setHighlightedText( k );
                        } else {
                            ar.setHighlightedText( highlightedText + "," + k );
                        }
                    }
                }
            }
            if ( timer.getTime() > 1000 ) {
                log.info( "Characteristic search for '" + rawTerm + "': " + allResults.size()
                        + " hits retained so far; " + timer.getTime() + "ms" );
                timer.reset();
                timer.start();
            }
        }
        return allResults;
    }
    /**
     * The maximum number of characteristics to search while walking down an ontology graph.
     * NOTE(review): never reassigned in the code visible here; should probably be declared final -- confirm against
     * the rest of the file.
     */
    private static int MAX_CHARACTERISTIC_SEARCH_RESULTS = 500;
    /**
     * Perform a characteristic search on a single query term - it does not have to be one word, it could be
     * "parkinson's disease". Matches, in order: ontology individuals; free-text characteristic values; ontology
     * classes including their children. The matched characteristics are then resolved to their owning entities.
     *
     * @param classes classes of entities the characteristics must be attached to
     * @param query a single term (AND/OR handling is done by callers)
     * @return the owner entities of the matched characteristics, as search results
     */
    private Collection<SearchResult> characteristicSearchTerm( Collection<Class<?>> classes, String query ) {
        if ( log.isDebugEnabled() ) log.debug( "Starting search for " + query );
        StopWatch watch = startTiming();
        Collection<Characteristic> cs = new HashSet<Characteristic>();
        // 1. Characteristics whose uri matches an ontology individual found for the query, in batches of 10 uris.
        Collection<OntologyIndividual> individuals = ontologyService.findIndividuals( query );
        for ( Collection<OntologyIndividual> individualbatch : BatchIterator.batches( individuals, 10 ) ) {
            Collection<String> uris = new HashSet<String>();
            for ( OntologyIndividual individual : individualbatch ) {
                uris.add( individual.getUri() );
            }
            Collection<SearchResult> dbhits = dbHitsToSearchResult( characteristicService.findByUri( classes, uris ) );
            for ( SearchResult crs : dbhits ) {
                cs.add( ( Characteristic ) crs.getResultObject() );
            }
            if ( cs.size() >= MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
                break;
            }
        }
        if ( individuals.size() > 0 && watch.getTime() > 1000 ) {
            log.info( "Found " + individuals.size() + " individuals matching '" + query + "' in " + watch.getTime()
                    + "ms" );
        }
        /*
         * Add characteristics that have values matching the query; this pulls in items not associated with ontology
         * terms (free text). We do this here so we can apply the query logic to the matches.
         */
        if ( cs.size() < MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
            String dbQueryString = query.replaceAll( "\\*", "" ); // note I changed the order of search operations so
            // this
            // might not be wanted.
            Collection<Characteristic> valueMatches = characteristicService.findByValue( classes, dbQueryString );
            if ( valueMatches != null && !valueMatches.isEmpty() ) {
                cs.addAll( valueMatches );
                if ( watch.getTime() > 1000 ) {
                    log.info( "Found " + valueMatches.size() + " characteristics matching value '" + query + "' in "
                            + watch.getTime() + "ms" );
                }
                watch.reset();
                watch.start();
            }
        }
        if ( cs.size() < MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
            /*
             * Identify initial set of matches to the query.
             */
            Collection<OntologyTerm> matchingTerms = ontologyService.findTerms( query );
            if ( watch.getTime() > 1000 ) {
                log.info( "Found " + matchingTerms.size() + " ontology classes matching '" + query + "' in "
                        + watch.getTime() + "ms" );
            }
            /*
             * Search for child terms.
             */
            if ( !matchingTerms.isEmpty() ) {
                for ( OntologyTerm term : matchingTerms ) {
                    /*
                     * In this loop, each term is a match directly to our query, and we do a depth-first fetch of the
                     * children.
                     */
                    String uri = term.getUri();
                    if ( StringUtils.isBlank( uri ) ) continue;
                    int sizeBefore = cs.size();
                    getCharactersticsAnnotatedToChildren( classes, term, cs );
                    if ( log.isDebugEnabled() && cs.size() > sizeBefore ) {
                        log.debug( ( cs.size() - sizeBefore ) + " characteristics matching children term of " + term );
                    }
                    if ( cs.size() >= MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
                        break;
                    }
                }
                if ( watch.getTime() > 1000 ) {
                    log.info( "Found " + cs.size() + " characteristics for '" + query + "' including child terms in "
                            + watch.getTime() + "ms" );
                }
                watch.reset();
                watch.start();
            }
        }
        /*
         * Retrieve the owner objects
         */
        watch.reset();
        watch.start();
        Collection<SearchResult> matchingEntities = getAnnotatedEntities( classes, cs );
        if ( watch.getTime() > 1000 ) {
            log.info( "Retrieved " + matchingEntities.size() + " entities via characteristics for '" + query + "' in "
                    + watch.getTime() + "ms" );
        }
        if ( log.isDebugEnabled() ) log.debug( "End search for " + query );
        return matchingEntities;
    }
/**
* Recursively
*
* @param classes
* @param term
* @param results
*/
private void getCharactersticsAnnotatedToChildren( Collection<Class<?>> classes, OntologyTerm term,
Collection<Characteristic> results ) {
Collection<OntologyTerm> children = getDirectChildTerms( term );
/*
* Find occurrences of these terms in our system. This is fast, so long as there aren't too many.
*/
if ( !children.isEmpty() ) {
Collection<String> uris = new ArrayList<String>();
for ( OntologyTerm ontologyTerm : children ) {
if ( ontologyTerm.getUri() == null ) continue;
uris.add( ontologyTerm.getUri() );
}
if ( !uris.isEmpty() ) {
Collection<SearchResult> dbhits = dbHitsToSearchResult( characteristicService.findByUri( classes, uris ) );
for ( SearchResult crs : dbhits ) {
results.add( ( Characteristic ) crs.getResultObject() );
}
}
}
if ( results.size() >= MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
return;
}
for ( OntologyTerm child : children ) {
getCharactersticsAnnotatedToChildren( classes, child, results );
}
}
    /**
     * A Compass (Lucene index) search on array designs.
     *
     * @param settings search settings
     * @return {@link Collection} of array design search results
     */
    private Collection<SearchResult> compassArrayDesignSearch( SearchSettings settings ) {
        return compassSearch( compassArray, settings );
    }
    /**
     * A Compass (Lucene index) search on bibliographic references.
     *
     * @param settings search settings
     * @return bibliographic-reference search results
     */
    private Collection<SearchResult> compassBibliographicReferenceSearch( SearchSettings settings ) {
        return compassSearch( compassBibliographic, settings );
    }
    /**
     * A compass backed search that finds biosequences that match the search string. Searches the gene and probe
     * indexes for matches then converts those results to biosequences.
     *
     * @param settings search settings
     * @param previousGeneSearchResults Can be null, otherwise used to avoid a second search for genes. The biosequences
     *        for the genes are added to the final results.
     * @return biosequence search results
     */
    private Collection<SearchResult> compassBioSequenceSearch( SearchSettings settings,
            Collection<SearchResult> previousGeneSearchResults ) {
        Collection<SearchResult> results = compassSearch( compassBiosequence, settings );
        Collection<SearchResult> geneResults = null;
        if ( previousGeneSearchResults == null ) {
            log.info( "Biosequence Search: running gene search with " + settings.getQuery() );
            geneResults = compassGeneSearch( settings );
        } else {
            log.info( "Biosequence Search: using previous results" );
            geneResults = previousGeneSearchResults;
        }
        // Keep each gene's SearchResult so its score/highlight can be reused for the gene's sequences.
        Map<Gene, SearchResult> genes = new HashMap<Gene, SearchResult>();
        for ( SearchResult sr : geneResults ) {
            Object resultObject = sr.getResultObject();
            if ( Gene.class.isAssignableFrom( resultObject.getClass() ) ) {
                genes.put( ( Gene ) resultObject, sr );
            } else {
                // see bug 1774 -- may not be happening any more.
                log.warn( "Expected a Gene, got a " + resultObject.getClass() + " on query=" + settings.getQuery() );
            }
        }
        Map<Gene, Collection<BioSequence>> seqsFromDb = bioSequenceService.findByGenes( genes.keySet() );
        for ( Gene gene : seqsFromDb.keySet() ) {
            List<BioSequence> bs = new ArrayList<BioSequence>( seqsFromDb.get( gene ) );
            // bioSequenceService.thaw( bs );
            results.addAll( dbHitsToSearchResult( bs, genes.get( gene ), null ) );
        }
        return results;
    }
    /**
     * A Compass (Lucene index) search on composite sequences (probes).
     *
     * @param settings search settings
     * @return probe search results
     */
    private Collection<SearchResult> compassCompositeSequenceSearch( final SearchSettings settings ) {
        return compassSearch( compassProbe, settings );
    }
    /**
     * A compass search on expressionExperiments.
     *
     * @param settings search settings
     * @return {@link Collection} of expression-experiment search results
     */
    private Collection<SearchResult> compassExpressionSearch( SearchSettings settings ) {
        return compassSearch( compassExpression, settings );
    }
    /**
     * A Compass (Lucene index) search on genes.
     *
     * @param settings search settings
     * @return gene search results
     */
    private Collection<SearchResult> compassGeneSearch( final SearchSettings settings ) {
        return compassSearch( compassGene, settings );
    }
    /**
     * Generic method for searching Lucene indices for entities (excluding ontology terms, which use the
     * OntologySearch).
     *
     * @param bean the Compass instance for the index to be searched
     * @param settings search settings; when getUseIndices() is false no search is performed and an empty set returned
     * @return search results (never null)
     */
    private Collection<SearchResult> compassSearch( Compass bean, final SearchSettings settings ) {
        if ( !settings.getUseIndices() ) return new HashSet<SearchResult>();
        CompassTemplate template = new CompassTemplate( bean );
        // Run the actual query inside a Compass session via the template callback.
        Collection<SearchResult> searchResults = template.execute( new CompassCallback<Collection<SearchResult>>() {
            @Override
            public Collection<SearchResult> doInCompass( CompassSession session ) throws CompassException {
                return performSearch( settings, session );
            }
        } );
        if ( log.isDebugEnabled() ) {
            log.debug( "Compass search via " + bean.getSettings().getSetting( "compass.name" ) + " : " + settings
                    + " -> " + searchResults.size() + " hits" );
        }
        return searchResults;
    }
    /**
     * Search by name of the composite sequence as well as gene. Only composite-sequence results survive the final
     * filter; other result types (e.g. biosequences) are dropped.
     *
     * @param settings search settings
     * @return composite-sequence search results only
     */
    private Collection<SearchResult> compositeSequenceSearch( SearchSettings settings ) {
        StopWatch watch = startTiming();
        /*
         * FIXME: this at least partly ignores any array design that was set as a restriction, especially in a gene
         * search.
         */
        Collection<SearchResult> allResults = new HashSet<>();
        // Skip compass searching of composite sequences because it only bloats the results.
        // allResults.addAll( compassCompositeSequenceSearch( settings ) );
        allResults.addAll( databaseCompositeSequenceSearch( settings ) );
        // allResults.addAll( compositeSequenceByGeneSearch( settings, geneSearchResults ) );
        /*
         * This last step is needed because the compassSearch for compositeSequences returns bioSequences too.
         */
        Collection<SearchResult> finalResults = new HashSet<>();
        for ( SearchResult sr : allResults ) {
            if ( CompositeSequence.class.isAssignableFrom( sr.getResultClass() ) ) {
                finalResults.add( sr );
            }
        }
        watch.stop();
        if ( watch.getTime() > 1000 )
            log.info( "Composite sequence search for '" + settings + "' took " + watch.getTime() + " ms, "
                    + finalResults.size() + " results." );
        return finalResults;
    }
/**
* @param searchResults
* @return
*/
private List<SearchResult> convertEntitySearchResutsToValueObjectsSearchResults(
Collection<SearchResult> searchResults ) {
List<SearchResult> convertedSearchResults = new ArrayList<SearchResult>();
for ( SearchResult searchResult : searchResults ) {
// this is a special case ... for some reason.
if ( BioSequence.class.isAssignableFrom( searchResult.getResultClass() ) ) {
SearchResult convertedSearchResult = new SearchResult(
BioSequenceValueObject.fromEntity( bioSequenceService.thaw( ( BioSequence ) searchResult
.getResultObject() ) ), searchResult.getScore(), searchResult.getHighlightedText() );
convertedSearchResults.add( convertedSearchResult );
} else {
convertedSearchResults.add( searchResult );
}
}
return convertedSearchResults;
}
/**
* Searches the DB for array designs which have composite sequences whose names match the given search string.
* Because of the underlying database search, this is acl aware. That is, returned array designs are filtered based
* on access control list (ACL) permissions.
*
* @param searchString
* @return
* @throws Exception
*/
private Collection<SearchResult> databaseArrayDesignSearch( SearchSettings settings ) {
if ( !settings.getUseDatabase() ) return new HashSet<SearchResult>();
StopWatch watch = startTiming();
Collection<ArrayDesign> adSet = new HashSet<ArrayDesign>();
// search by exact composite sequence name
Collection<CompositeSequence> matchedCs = compositeSequenceService.findByName( settings.getQuery() );
for ( CompositeSequence sequence : matchedCs ) {
adSet.add( sequence.getArrayDesign() );
}
watch.stop();
if ( watch.getTime() > 1000 )
log.info( "Array Design Compositesequence DB search for " + settings + " took " + watch.getTime() + " ms"
+ " found " + adSet.size() + " Ads" );
return dbHitsToSearchResult( adSet );
}
/**
* A database serach for biosequences. Biosequence names are already indexed by compass...
*
* @param searchString
* @return
*/
private Collection<SearchResult> databaseBioSequenceSearch( SearchSettings settings ) {
if ( !settings.getUseDatabase() ) return new HashSet<SearchResult>();
StopWatch watch = startTiming();
String searchString = settings.getQuery();
// replace * with % for inexact symbol search
String inexactString = searchString;
Pattern pattern = Pattern.compile( "\\*" );
Matcher match = pattern.matcher( inexactString );
inexactString = match.replaceAll( "%" );
Collection<BioSequence> bs = bioSequenceService.findByName( inexactString );
// bioSequenceService.thaw( bs );
Collection<SearchResult> bioSequenceList = new HashSet<SearchResult>( dbHitsToSearchResult( bs ) );
watch.stop();
if ( watch.getTime() > 1000 )
log.info( "BioSequence DB search for " + searchString + " took " + watch.getTime() + " ms and found"
+ bioSequenceList.size() + " BioSequences" );
return bioSequenceList;
}
/**
* Takes a list of ontology terms, and classes of objects of interest to be returned. Looks through the
* characteristic table for an exact match with the given ontology terms. Only tries to match the uri's.
*
* @param data.clazz Class of objects to restrict the search to (typically ExpressionExperimentImpl.class, for
* example).
* @param terms A list of ontololgy terms to search for
* @return Collection of search results for the objects owning the found characteristics, where the owner is of
* class clazz
*/
private Collection<SearchResult> databaseCharacteristicExactUriSearchForOwners( Collection<Class<?>> classes,
Collection<OntologyTerm> terms ) {
// Collection<Characteristic> characteristicValueMatches = new ArrayList<Characteristic>();
Collection<Characteristic> characteristicURIMatches = new ArrayList<Characteristic>();
for ( OntologyTerm term : terms ) {
// characteristicValueMatches.addAll( characteristicService.findByValue( term.getUri() ));
characteristicURIMatches.addAll( characteristicService.findByUri( classes, term.getUri() ) );
}
Map<Characteristic, Object> parentMap = characteristicService.getParents( classes, characteristicURIMatches );
// parentMap.putAll( characteristicService.getParents(characteristicValueMatches ) );
return filterCharacteristicOwnersByClass( classes, parentMap );
}
    /**
     * Search the DB for composite sequences and the genes that are matched to them.
     * <p>
     * The query is treated both as a probe name (optionally restricted to the platform in the settings) and as a
     * gene query; probes found for matching genes and genes found for matching probes are all returned.
     *
     * @param settings search settings; query string plus optional platform constraint
     * @return search results containing both the matched genes and the matched composite sequences
     */
    private Collection<SearchResult> databaseCompositeSequenceSearch( final SearchSettings settings ) {
        if ( !settings.getUseDatabase() ) return new HashSet<>();
        StopWatch watch = startTiming();
        Set<Gene> geneSet = new HashSet<>();
        String searchString = settings.getQuery();
        ArrayDesign ad = settings.getPlatformConstraint();
        // search by exact composite sequence name
        Collection<CompositeSequence> matchedCs = new HashSet<>();
        if ( ad != null ) {
            CompositeSequence cs = compositeSequenceService.findByName( ad, searchString );
            if ( cs != null ) matchedCs.add( cs );
        } else {
            matchedCs = compositeSequenceService.findByName( searchString );
        }
        /*
         * In case the query _is_ a gene
         */
        Collection<SearchResult> rawGeneResults = this.databaseGeneSearch( settings );
        for ( SearchResult searchResult : rawGeneResults ) {
            Object j = searchResult.getResultObject();
            if ( Gene.class.isAssignableFrom( j.getClass() ) ) {
                geneSet.add( ( Gene ) j );
            }
        }
        // Collect the probes for each gene hit, respecting the platform constraint when present.
        for ( Gene g : geneSet ) {
            if ( settings.getPlatformConstraint() != null ) {
                matchedCs.addAll( compositeSequenceService.findByGene( g, settings.getPlatformConstraint() ) );
            } else {
                matchedCs.addAll( compositeSequenceService.findByGene( g ) );
            }
        }
        // search by associated genes.
        for ( CompositeSequence sequence : matchedCs ) {
            geneSet.addAll( compositeSequenceService.getGenes( sequence ) );
        }
        watch.stop();
        if ( watch.getTime() > 1000 )
            log.info( "Gene composite sequence DB search " + searchString + " took " + watch.getTime() + " ms, "
                    + geneSet.size() + " items." );
        Collection<SearchResult> results = dbHitsToSearchResult( geneSet );
        results.addAll( dbHitsToSearchResult( matchedCs ) );
        return results;
    }
/**
* Does search on exact string by: id, name and short name. This only returns results if these fields match exactly,
* but it's fast.
*
* @param query
* @return {@link Collection}
*/
private Collection<SearchResult> databaseExpressionExperimentSearch( final SearchSettings settings ) {
if ( !settings.getUseDatabase() ) return new HashSet<SearchResult>();
StopWatch watch = startTiming();
Map<ExpressionExperiment, String> results = new HashMap<ExpressionExperiment, String>();
String query = StringEscapeUtils.unescapeJava( settings.getQuery() );
Collection<ExpressionExperiment> ees = expressionExperimentService.findByName( query );
if ( !ees.isEmpty() ) {
for ( ExpressionExperiment ee : ees ) {
results.put( ee, ee.getName() );
}
} else {
ExpressionExperiment ee = expressionExperimentService.findByShortName( query );
if ( ee != null ) {
results.put( ee, ee.getShortName() );
} else {
ees = expressionExperimentService.findByAccession( query );
for ( ExpressionExperiment e : ees ) {
results.put( e, e.getId().toString() );
}
if ( results.isEmpty() ) {
try {
// maybe user put in a primary key value.
ee = expressionExperimentService.load( new Long( query ) );
if ( ee != null ) results.put( ee, ee.getId().toString() );
} catch ( NumberFormatException e ) {
// no-op - it's not an ID.
}
}
}
}
watch.stop();
if ( watch.getTime() > 1000 )
log.info( "DB Expression Experiment search for " + settings + " took " + watch.getTime() + " ms and found "
+ results.size() + " EEs" );
Collection<SearchResult> r = dbHitsToSearchResult( results );
return r;
}
/**
* Search the DB for genes that exactly match the given search string searches geneProducts, gene and bioSequence
* tables
*
* @param searchString
* @return
* @throws Exception
*/
private Collection<SearchResult> databaseGeneSearch( SearchSettings settings ) {
if ( !settings.getUseDatabase() ) return new HashSet<>();
StopWatch watch = startTiming();
String searchString = StringEscapeUtils.unescapeJava( settings.getQuery() );
if ( StringUtils.isBlank( searchString ) ) return new HashSet<>();
Collection<SearchResult> results = new HashSet<>();
/*
* First search by accession. If we find it, stop.
*/
Gene result = null;
try {
result = geneService.findByNCBIId( Integer.parseInt( searchString ) );
} catch ( NumberFormatException e ) {
//
}
if ( result != null ) {
results.add( this.dbHitToSearchResult( null, result ) );
} else {
result = geneService.findByAccession( searchString, null );
if ( result != null ) {
results.add( this.dbHitToSearchResult( null, result ) );
}
}
if ( results.size() > 0 ) {
filterByTaxon( settings, results, true );
watch.stop();
if ( watch.getTime() > 1000 )
log.info( "Gene DB search for " + searchString + " took " + watch.getTime() + " ms and found "
+ results.size() + " genes" );
return results;
}
// replace * at end with % for inexact symbol search
String inexactString = searchString;
Pattern pattern = Pattern.compile( "\\*$" );
Matcher match = pattern.matcher( inexactString );
inexactString = match.replaceAll( "%" );
// note that at this point, the inexactString might not have a wildcard - only if the user asked for it.
String exactString = inexactString.replaceAll( "%", "" );
// if the query is shortish, always do a wild card search. This gives better behavior in 'live
// search' situations. If we do wildcards on very short queries we get too many results.
Collection<Gene> geneSet = new HashSet<Gene>();
if ( searchString.length() <= 2 ) {
// case 0: we got no result syet, or user entered a very short string. We search only for exact matches.
geneSet.addAll( geneService.findByOfficialSymbolInexact( exactString ) );
} else if ( inexactString.endsWith( "%" ) ) {
// case 1: user explicitly asked for wildcard. We allow this on strings of length 3 or more.
geneSet.addAll( geneService.findByOfficialSymbolInexact( inexactString ) );
} else if ( searchString.length() > 3 ) {
// case 2: user did not ask for a wildcard, but we add it anyway, if the string is 4 or 5 characters.
if ( !inexactString.endsWith( "%" ) ) {
inexactString = inexactString + "%";
}
geneSet.addAll( geneService.findByOfficialSymbolInexact( inexactString ) );
} else {
// case 3: string is long enough, and user did not ask for wildcard.
geneSet.addAll( geneService.findByOfficialSymbol( exactString ) );
}
/*
* If we found a match using official symbol or name, don't bother with this
*/
if ( geneSet.isEmpty() ) {
geneSet.addAll( geneService.findByAlias( exactString ) );
geneSet.addAll( geneProductService.getGenesByName( exactString ) );
geneSet.addAll( geneProductService.getGenesByNcbiId( exactString ) );
geneSet.addAll( bioSequenceService.getGenesByAccession( exactString ) );
geneSet.addAll( bioSequenceService.getGenesByName( exactString ) );
geneSet.addAll( geneService.findByEnsemblId( exactString ) );
}
watch.stop();
if ( watch.getTime() > 1000 )
log.info( "Gene DB search for " + searchString + " took " + watch.getTime() + " ms and found "
+ geneSet.size() + " genes" );
results = dbHitsToSearchResult( geneSet );
filterByTaxon( settings, results, true );
return results;
}
/**
* Convert hits from database searches into SearchResults.
*
* @param entities
* @return
*/
private Collection<SearchResult> dbHitsToSearchResult( Collection<? extends Object> entities ) {
return this.dbHitsToSearchResult( entities, null, null );
}
/**
* Convert hits from database searches into SearchResults.
*
* @param entities
* @return
*/
private Collection<SearchResult> dbHitsToSearchResult( Collection<? extends Object> entities, String matchText ) {
return this.dbHitsToSearchResult( entities, null, matchText );
}
/**
* Convert hits from database searches into SearchResults.
*
* @param entities
* @param compassHitDerivedFrom SearchResult that these entities were derived from. For example, if you
* compass-searched for genes, and then used the genes to get sequences from the database, the gene is
* compassHitsDerivedFrom. If null, we treat this as a direct hit.
* @param matchText TODO
* @return
*/
private List<SearchResult> dbHitsToSearchResult( Collection<? extends Object> entities,
SearchResult compassHitDerivedFrom, String matchText ) {
StopWatch timer = startTiming();
List<SearchResult> results = new ArrayList<SearchResult>();
for ( Object e : entities ) {
if ( e == null ) {
log.warn( "Null search result object" );
continue;
}
SearchResult esr = dbHitToSearchResult( compassHitDerivedFrom, e, matchText );
results.add( esr );
}
if ( timer.getTime() > 1000 ) {
log.info( "Unpack " + results.size() + " search resultsS: " + timer.getTime() + "ms" );
}
return results;
}
/**
* Convert hits from database searches into SearchResults.
*
* @param entities
* @return
*/
private Collection<SearchResult> dbHitsToSearchResult( Map<? extends Object, String> entities ) {
return this.dbHitsToSearchResult( entities, null );
}
/**
* Convert hits from database searches into SearchResults.
*
* @param entities
* @param compassHitDerivedFrom SearchResult that these entities were derived from. For example, if you
* compass-searched for genes, and then used the genes to get sequences from the database, the gene is
* compassHitsDerivedFrom. If null, we treat this as a direct hit.
* @return
*/
private List<SearchResult> dbHitsToSearchResult( Map<? extends Object, String> entities,
SearchResult compassHitDerivedFrom ) {
List<SearchResult> results = new ArrayList<SearchResult>();
for ( Object e : entities.keySet() ) {
SearchResult esr = dbHitToSearchResult( compassHitDerivedFrom, e, entities.get( e ) );
results.add( esr );
}
return results;
}
/**
* @param compassHitDerivedFrom
* @param e
* @return
*/
private SearchResult dbHitToSearchResult( SearchResult compassHitDerivedFrom, Object e ) {
return this.dbHitToSearchResult( compassHitDerivedFrom, e, null );
}
/**
* @param compassHitDerivedFrom
* @param e
* @param text that mached the query (for highlighting)
* @return
*/
private SearchResult dbHitToSearchResult( SearchResult compassHitDerivedFrom, Object e, String text ) {
SearchResult esr = null;
if ( compassHitDerivedFrom != null && text == null ) {
esr = new SearchResult( e, compassHitDerivedFrom.getScore() * INDIRECT_DB_HIT_PENALTY );
esr.setHighlightedText( compassHitDerivedFrom.getHighlightedText() );
} else {
// log.info( e + " " + text );
esr = new SearchResult( e, 1.0, text );
}
return esr;
}
/**
* @param parentMap
*/
private void debugParentFetch( Map<Characteristic, Object> parentMap ) {
/*
* This is purely debugging.
*/
if ( parentMap.size() > 0 ) {
if ( log.isDebugEnabled() )
log.debug( "Found " + parentMap.size() + " owners for " + parentMap.keySet().size()
+ " characteristics:" );
// int maxPrint = 10; int i = 0;
// for ( Map.Entry<Characteristic, Object> entry : parentMap.entrySet()) {
// if(i < maxPrint){
// Object obj = entry.getValue();
// Characteristic charac = entry.getKey();
// if ( obj instanceof Auditable ) {
// if ( log.isDebugEnabled() ) {
// log.debug("Key: Characteristic Name: " + charac.getName() +" Characteristic Desc: " +
// charac.getDescription() +" Characteristic Category: " + charac.getCategory() );
// log.debug("Val: Owner Class: " + obj.getClass()
// +" Owner Name: " + ( ( Auditable ) obj ).getName() +" Owner Desc: " + ( ( Auditable ) obj
// ).getDescription() );
// }
// } else {
// if ( log.isDebugEnabled() ) {
// log.debug( " Owner : " + obj.toString() + " Owner Class: " + obj.getClass() );
// }
// }
// i++;
// }
// }
}
}
/**
* Find phenotypes.
*
* @param settings
* @return
*/
private Collection<SearchResult> phenotypeSearch( SearchSettings settings ) {
Collection<SearchResult> results = this.dbHitsToSearchResult( this.phenotypeAssociationManagerService
.searchInDatabaseForPhenotype( settings.getQuery() ) );
return results;
}
/**
* @param settings
* @return
*/
private Collection<SearchResult> experimentSetSearch( SearchSettings settings ) {
Collection<SearchResult> results = this.dbHitsToSearchResult( this.experimentSetService.findByName( settings
.getQuery() ) );
results.addAll( compassSearch( compassExperimentSet, settings ) );
return results;
}
    /**
     * A general search for expression experiments. This search does both an database search and a compass search,
     * then falls back to characteristic, platform and publication searches when few or no hits are found.
     * <p>
     * A problem with this is that we cap the number of results that can be returned. This could be a limitation for
     * applications like building data set groups. Thus MAX_CHARACTERISTIC_SEARCH_RESULTS should not be too low.
     *
     * @param settings search settings holding the query and which search modes are enabled
     * @return search results wrapping the matching expression experiments
     */
    private Collection<SearchResult> expressionExperimentSearch( final SearchSettings settings ) {
        StopWatch watch = startTiming();
        log.info( "Starting search for " + settings );
        Collection<SearchResult> results = new HashSet<SearchResult>();
        // Stage 1: exact matches from the database (id, name, short name, accession).
        if ( settings.getUseDatabase() ) {
            results.addAll( databaseExpressionExperimentSearch( settings ) );
            if ( watch.getTime() > 1000 )
                log.info( "Expression Experiment database search for '" + settings + "' took " + watch.getTime()
                        + " ms, " + results.size() + " hits." );
            watch.reset();
            watch.start();
        }
        // Stage 2: full-text index search, skipped once we already have plenty of hits.
        if ( settings.getUseIndices() && results.size() < MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
            results.addAll( compassExpressionSearch( settings ) );
            if ( watch.getTime() > 1000 )
                log.info( "Expression Experiment index search for '" + settings + "' took " + watch.getTime() + " ms, "
                        + results.size() + " hits." );
            watch.reset();
            watch.start();
        }
        // Stage 3: characteristic/ontology search, still capped.
        if ( results.size() < MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
            /*
             * Try a more thorough search. This is slower; calls to ontologySearchAnnotatedObject take a long time
             */
            if ( settings.getUseCharacteristics() ) {
                results.addAll( characteristicExpressionExperimentSearch( settings ) );
            }
            if ( watch.getTime() > 1000 )
                log.info( "Expression Experiment ontology search for '" + settings + "' took " + watch.getTime()
                        + " ms, " + results.size() + " hits." );
            watch.reset();
            watch.start();
        }
        /*
         * Find data sets that match the platform -- TODO make this do something intelligent with GPL570 + brain.
         */
        if ( results.size() == 0 ) {
            Collection<SearchResult> matchingPlatforms = arrayDesignSearch( settings, null );
            for ( SearchResult adRes : matchingPlatforms ) {
                if ( adRes.getResultObject() instanceof ArrayDesign ) {
                    ArrayDesign ad = ( ArrayDesign ) adRes.getResultObject();
                    Collection<ExpressionExperiment> expressionExperiments = this.arrayDesignService
                            .getExpressionExperiments( ad );
                    if ( expressionExperiments.size() > 0 )
                        results.addAll( dbHitsToSearchResult( expressionExperiments ) );
                }
            }
            if ( watch.getTime() > 1000 )
                log.info( "Expression Experiment platform search for '" + settings + "' took " + watch.getTime()
                        + " ms, " + results.size() + " hits." );
            watch.reset();
            watch.start();
        }
        // Last resort: find experiments via publications that match the query.
        if ( results.size() == 0 ) {
            /*
             * Search for bib refs
             */
            List<BibliographicReferenceValueObject> bibrefs = bibliographicReferenceService
                    .search( settings.getQuery() );
            if ( !bibrefs.isEmpty() ) {
                Collection<BibliographicReference> refs = new HashSet<BibliographicReference>();
                Collection<SearchResult> r = this.compassBibliographicReferenceSearch( settings );
                for ( SearchResult searchResult : r ) {
                    refs.add( ( BibliographicReference ) searchResult.getResultObject() );
                }
                Map<BibliographicReference, Collection<ExpressionExperiment>> relatedExperiments = this.bibliographicReferenceService
                        .getRelatedExperiments( refs );
                for ( Entry<BibliographicReference, Collection<ExpressionExperiment>> e : relatedExperiments.entrySet() ) {
                    results.addAll( dbHitsToSearchResult( e.getValue() ) );
                }
                if ( watch.getTime() > 1000 )
                    log.info( "Expression Experiment publication search for '" + settings + "' took " + watch.getTime()
                            + " ms, " + results.size() + " hits." );
                watch.reset();
                watch.start();
            }
        }
        watch.stop();
        if ( watch.getTime() > 1000 )
            log.info( "Expression Experiment search for '" + settings + "' took " + watch.getTime() + " ms, "
                    + results.size() + " hits." );
        return results;
    }
/**
* @param settings
* @param results
* @param excludeWithoutTaxon if true: If the SearchResults have no "getTaxon" method then the results will get
* filtered out Results with no taxon associated will also get removed.
*/
private void filterByTaxon( SearchSettings settings, Collection<SearchResult> results, boolean excludeWithoutTaxon ) {
if ( settings.getTaxon() == null ) {
return;
}
Collection<SearchResult> toRemove = new HashSet<SearchResult>();
Taxon t = settings.getTaxon();
if ( results == null ) return;
for ( SearchResult sr : results ) {
Object o = sr.getResultObject();
try {
Taxon currentTaxon = null;
if ( o instanceof ExpressionExperiment ) {
ExpressionExperiment ee = ( ExpressionExperiment ) o;
currentTaxon = expressionExperimentService.getTaxon( ee );
} else if ( o instanceof ExpressionExperimentSet ) {
ExpressionExperimentSet ees = ( ExpressionExperimentSet ) o;
currentTaxon = ees.getTaxon();
} else if ( o instanceof Gene ) {
Gene gene = ( Gene ) o;
currentTaxon = gene.getTaxon();
} else if ( o instanceof GeneSet ) {
GeneSet geneSet = ( GeneSet ) o;
currentTaxon = geneSetService.getTaxon( geneSet ); // FIXME SLOW? I may have fixed this.
} else if ( o instanceof CharacteristicValueObject ) {
CharacteristicValueObject charVO = ( CharacteristicValueObject ) o;
currentTaxon = taxonDao.findByCommonName( charVO.getTaxon() );
} else {
Method m = o.getClass().getMethod( "getTaxon", new Class[] {} );
currentTaxon = ( Taxon ) m.invoke( o, new Object[] {} );
}
if ( currentTaxon == null || !currentTaxon.getId().equals( t.getId() ) ) {
if ( currentTaxon == null ) {
// Sanity check for bad data in db (could happen if EE has no samples). Can happen that
// searchResults have a vaild getTaxon method
// but the method returns null (shouldn't make it this far)
log.debug( "Object has getTaxon method but it returns null. Obj is: " + o );
}
toRemove.add( sr );
}
} catch ( SecurityException e ) {
throw new RuntimeException( e );
} catch ( NoSuchMethodException e ) {
/*
* In case of a programming error where the results don't have a taxon at all, we assume we should
* filter them out but issue a warning.
*/
if ( excludeWithoutTaxon ) {
toRemove.add( sr );
log.warn( "No getTaxon method for: " + o.getClass() + ". Filtering from results. Error was: " + e );
}
} catch ( IllegalArgumentException e ) {
throw new RuntimeException( e );
} catch ( IllegalAccessException e ) {
throw new RuntimeException( e );
} catch ( InvocationTargetException e ) {
throw new RuntimeException( e );
}
}
results.removeAll( toRemove );
}
    /**
     * Keep only the characteristic owners that are of one of the requested classes, wrapping them as search
     * results. BioMaterial and FactorValue owners are mapped back to their owning ExpressionExperiments, with a
     * score penalty since the match is indirect.
     *
     * @param classes classes of entities the caller is interested in
     * @param characteristic2entity map of matched characteristic to its owning entity
     * @return search results for the owning entities (or the experiments owning them)
     */
    private Collection<SearchResult> filterCharacteristicOwnersByClass( Collection<Class<?>> classes,
            Map<Characteristic, Object> characteristic2entity ) {
        Collection<BioMaterial> biomaterials = new HashSet<BioMaterial>();
        Collection<FactorValue> factorValues = new HashSet<FactorValue>();
        Collection<SearchResult> results = new HashSet<SearchResult>();
        for ( Characteristic c : characteristic2entity.keySet() ) {
            Object o = characteristic2entity.get( c );
            for ( Class<?> clazz : classes ) {
                if ( clazz.isAssignableFrom( o.getClass() ) ) {
                    String matchedText = c.getValue();
                    // BioMaterials and FactorValues are collected and converted to experiments below.
                    if ( o instanceof BioMaterial ) {
                        biomaterials.add( ( BioMaterial ) o );
                    } else if ( o instanceof FactorValue ) {
                        factorValues.add( ( FactorValue ) o );
                    } else {
                        // Link the highlight back to the ontology term when the match was by URI.
                        if ( c instanceof VocabCharacteristic && ( ( VocabCharacteristic ) c ).getValueUri() != null ) {
                            matchedText = "Ontology term: <a href=\"/Gemma/searcher.html?query="
                                    + ( ( VocabCharacteristic ) c ).getValueUri() + "\">" + matchedText + "</a>";
                        }
                        results.add( new SearchResult( o, 1.0, matchedText ) );
                    }
                }
            }
        }
        if ( factorValues.size() > 0 ) {
            Collection<ExpressionExperiment> ees = expressionExperimentService.findByFactorValues( factorValues );
            for ( ExpressionExperiment ee : ees ) {
                if ( log.isDebugEnabled() ) log.debug( ee );
                results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "Factor characteristic" ) );
            }
        }
        if ( biomaterials.size() > 0 ) {
            Collection<ExpressionExperiment> ees = expressionExperimentService.findByBioMaterials( biomaterials );
            for ( ExpressionExperiment ee : ees ) {
                results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "BioMaterial characteristic" ) );
            }
        }
        return results;
    }
    /**
     * Combines compass style search, the db style search, and the compositeSequence search and returns 1 combined list
     * with no duplicates.
     *
     * @param settings search settings holding the query and optional taxon
     * @param returnOnDbHit if true and if there is a match for a gene from the database, return immediately - much
     *        faster
     * @return combined, de-duplicated gene search results
     */
    private Collection<SearchResult> geneSearch( final SearchSettings settings, boolean returnOnDbHit ) {
        StopWatch watch = startTiming();
        String searchString = settings.getQuery();
        Collection<SearchResult> geneDbList = databaseGeneSearch( settings );
        if ( returnOnDbHit && geneDbList.size() > 0 ) {
            return geneDbList;
        }
        Set<SearchResult> combinedGeneList = new HashSet<SearchResult>();
        combinedGeneList.addAll( geneDbList );
        Collection<SearchResult> geneCompassList = compassGeneSearch( settings );
        combinedGeneList.addAll( geneCompassList );
        // Fall back to probe-based search, keeping only the gene hits it yields.
        if ( combinedGeneList.isEmpty() ) {
            Collection<SearchResult> geneCsList = databaseCompositeSequenceSearch( settings );
            for ( SearchResult res : geneCsList ) {
                if ( res.getResultClass().isAssignableFrom( Gene.class ) ) combinedGeneList.add( res );
            }
        }
        /*
         * Possibly search for genes linked via a phenotype, but only if we don't have anything here.
         *
         *
         * FIXME possibly always do if results are small.
         */
        if ( combinedGeneList.isEmpty() ) {
            Collection<CharacteristicValueObject> phenotypeTermHits = this.phenotypeAssociationManagerService
                    .searchInDatabaseForPhenotype( settings.getQuery() );
            // FIXME do it all at once, not one at a time like this
            for ( CharacteristicValueObject phenotype : phenotypeTermHits ) {
                Set<String> phenotypeUris = new HashSet<String>();
                phenotypeUris.add( phenotype.getValueUri() );
                // DATABASE HIT!
                Collection<GeneEvidenceValueObject> phenotypeGenes = phenotypeAssociationManagerService
                        .findCandidateGenes( phenotypeUris, settings.getTaxon() );
                if ( !phenotypeGenes.isEmpty() ) {
                    log.info( phenotypeGenes.size() + " genes associated with " + phenotype + " (via query='"
                            + settings.getQuery() + "')" );
                    for ( GeneEvidenceValueObject gvo : phenotypeGenes ) {
                        // Build a lightweight Gene stub carrying just the id and taxon.
                        Gene g = Gene.Factory.newInstance();
                        g.setId( gvo.getId() );
                        g.setTaxon( settings.getTaxon() );
                        SearchResult sr = new SearchResult( g );
                        sr.setHighlightedText( phenotype.getValue() + " (" + phenotype.getValueUri() + ")" );
                        if ( gvo.getScore() != null ) {
                            /*
                             * TODO If we get evidence quality, use that in the score.
                             */
                        }
                        sr.setScore( 1.0 ); // maybe lower, if we do this search when combinedGeneList is nonempty.
                        combinedGeneList.add( sr );
                    }
                    if ( combinedGeneList.size() > 100 /* some limit */) {
                        break;
                    }
                }
            }
        }
        // filterByTaxon( settings, combinedGeneList); // compass doesn't return filled gene objects, just ids, so do
        // this after objects have been filled
        if ( watch.getTime() > 1000 )
            log.info( "Gene search for " + searchString + " took " + watch.getTime() + " ms; "
                    + combinedGeneList.size() + " results." );
        return combinedGeneList;
    }
/**
* @param settings
* @return
*/
private Collection<SearchResult> geneSetSearch( SearchSettings settings ) {
Collection<SearchResult> hits;
if ( settings.getTaxon() != null ) {
hits = this
.dbHitsToSearchResult( this.geneSetService.findByName( settings.getQuery(), settings.getTaxon() ) );
} else {
hits = this.dbHitsToSearchResult( this.geneSetService.findByName( settings.getQuery() ) );
}
hits.addAll( compassSearch( compassGeneSet, settings ) );
return hits;
}
/**
* Given classes to search and characteristics,
*
* @param classes Which classes of entities to look for
* @param cs
* @return
*/
private Collection<SearchResult> getAnnotatedEntities( Collection<Class<?>> classes, Collection<Characteristic> cs ) {
Map<Characteristic, Object> characterstic2entity = characteristicService.getParents( classes, cs );
Collection<SearchResult> matchedEntities = filterCharacteristicOwnersByClass( classes, characterstic2entity );
if ( log.isDebugEnabled() ) {
debugParentFetch( characterstic2entity );
}
return matchedEntities;
}
/**
* @param searchResults
* @return List of ids for the entities held by the search results.
*/
private List<Long> getIds( List<SearchResult> searchResults ) {
List<Long> list = new ArrayList<>();
for ( SearchResult r : searchResults ) {
list.add( r.getId() );
}
assert list.size() == searchResults.size();
return list;
}
/**
* @param hits
* @return
*/
private Collection<SearchResult> getSearchResults( CompassHits hits ) {
StopWatch timer = new StopWatch();
timer.start();
Collection<SearchResult> results = new HashSet<SearchResult>();
/*
* Note that hits come in decreasing score order.
*/
for ( int i = 0, len = Math.min( MAX_LUCENE_HITS, hits.getLength() ); i < len; i++ ) {
SearchResult r = new SearchResult( hits.data( i ) );
/*
* Always give compass hits a lower score so they can be differentiated from exact database hits.
*/
r.setScore( new Double( hits.score( i ) * COMPASS_HIT_SCORE_PENALTY_FACTOR ) );
getHighlightedText( hits, i, r );
if ( log.isDebugEnabled() ) log.debug( i + " " + hits.score( i ) + " " + r );
results.add( r );
}
if ( timer.getTime() > 100 ) {
log.info( results.size() + " hits retrieved (out of " + Math.min( MAX_LUCENE_HITS, hits.getLength() )
+ " raw hits tested) in " + timer.getTime() + "ms" );
}
if ( timer.getTime() > 5000 ) {
log.info( "****Extremely long Lucene Search processing! " + results.size() + " hits retrieved (out of "
+ Math.min( MAX_LUCENE_HITS, hits.getLength() ) + " raw hits tested) in " + timer.getTime() + "ms" );
}
return results;
}
/**
* @param hits
* @param i
* @param r
*/
private void getHighlightedText( CompassHits hits, int i, SearchResult r ) {
CompassHighlightedText highlightedText = hits.highlightedText( i );
if ( highlightedText != null && highlightedText.getHighlightedText() != null ) {
r.setHighlightedText( highlightedText.getHighlightedText() );
} else {
if ( log.isDebugEnabled() ) log.debug( "No highlighted text for " + r );
}
}
    /**
     * Sort the raw results by score, cap them at the configured maximum, group them by result class, and
     * (optionally) load the actual entities from the persistent store. Entities the security interceptor filters
     * out are dropped at the fill stage. BioSequence hits are converted to value objects before returning.
     *
     * @param settings search settings (taxon filter and maximum result count)
     * @param rawResults all results; sorted in place by this method
     * @param fillObjects whether to load the entities; if false, result objects are nulled out and only ids remain
     * @return results keyed by result class
     */
    private Map<Class<?>, List<SearchResult>> getSortedLimitedResults( SearchSettings settings,
            List<SearchResult> rawResults, boolean fillObjects ) {
        Map<Class<?>, List<SearchResult>> results = new HashMap<Class<?>, List<SearchResult>>();
        Collections.sort( rawResults );
        // Pre-register every class we know how to return; the assert below relies on this being complete.
        results.put( ArrayDesign.class, new ArrayList<SearchResult>() );
        results.put( BioSequence.class, new ArrayList<SearchResult>() );
        results.put( BibliographicReference.class, new ArrayList<SearchResult>() );
        results.put( CompositeSequence.class, new ArrayList<SearchResult>() );
        results.put( ExpressionExperiment.class, new ArrayList<SearchResult>() );
        results.put( Gene.class, new ArrayList<SearchResult>() );
        results.put( GeneSet.class, new ArrayList<SearchResult>() );
        results.put( ExpressionExperimentSet.class, new ArrayList<SearchResult>() );
        results.put( Characteristic.class, new ArrayList<SearchResult>() );
        results.put( CharacteristicValueObject.class, new ArrayList<SearchResult>() );
        /*
         * Get the top N results, overall (NOT within each class - experimental.)
         */
        for ( int i = 0, limit = Math.min( rawResults.size(), settings.getMaxResults() ); i < limit; i++ ) {
            SearchResult sr = rawResults.get( i );
            /*
             * FIXME This is unpleasant and should be removed when BioSequences are correctly detached.
             */
            Class<? extends Object> resultClass = EntityUtils.getImplementationForProxy( sr.getResultObject() )
                    .getClass();
            resultClass = ReflectionUtil.getBaseForImpl( resultClass );
            // Class<? extends Object> resultClass = sr.getResultClass();
            assert results.containsKey( resultClass ) : "Unknown class " + resultClass;
            results.get( resultClass ).add( sr );
        }
        if ( fillObjects ) {
            /*
             * Now retrieve the entities and put them in the SearchResult. Entities that are filtered out by the
             * SecurityInterceptor will be removed at this stage.
             */
            for ( Class<? extends Object> clazz : results.keySet() ) {
                List<SearchResult> r = results.get( clazz );
                if ( r.isEmpty() ) continue;
                // De-duplicate by id, keeping the highest-scoring result for each entity.
                Map<Long, SearchResult> rMap = new HashMap<Long, SearchResult>();
                for ( SearchResult searchResult : r ) {
                    if ( !rMap.containsKey( searchResult.getId() )
                            || ( rMap.get( searchResult.getId() ).getScore() < searchResult.getScore() ) ) {
                        rMap.put( searchResult.getId(), searchResult );
                    }
                }
                Collection<? extends Object> entities = retrieveResultEntities( clazz, r );
                List<SearchResult> filteredResults = new ArrayList<SearchResult>();
                for ( Object entity : entities ) {
                    Long id = EntityUtils.getId( entity );
                    SearchResult keeper = rMap.get( id );
                    keeper.setResultObject( entity );
                    filteredResults.add( keeper );
                }
                filterByTaxon( settings, filteredResults, false );
                results.put( clazz, filteredResults );
            }
        } else {
            for ( SearchResult sr : rawResults ) {
                sr.setResultObject( null );
            }
        }
        // BioSequence hits are returned as value objects, not entities.
        List<SearchResult> convertedResults = convertEntitySearchResutsToValueObjectsSearchResults( results
                .get( BioSequence.class ) );
        results.put( BioSequenceValueObject.class, convertedResults );
        results.remove( BioSequence.class );
        return results;
    }
/**
* Retrieve entities from the persistent store.
*
* @param entityClass
* @param results
* @return
*/
private Collection<? extends Object> retrieveResultEntities( Class<?> entityClass, List<SearchResult> results ) {
List<Long> ids = getIds( results );
if ( ExpressionExperiment.class.isAssignableFrom( entityClass ) ) {
return expressionExperimentService.loadMultiple( ids );
} else if ( ArrayDesign.class.isAssignableFrom( entityClass ) ) {
return arrayDesignService.loadMultiple( ids );
} else if ( CompositeSequence.class.isAssignableFrom( entityClass ) ) {
return compositeSequenceService.loadMultiple( ids );
} else if ( BibliographicReference.class.isAssignableFrom( entityClass ) ) {
return bibliographicReferenceService.loadMultiple( ids );
} else if ( Gene.class.isAssignableFrom( entityClass ) ) {
return geneService.loadMultiple( ids );
} else if ( BioSequence.class.isAssignableFrom( entityClass ) ) {
Collection<BioSequence> bs = bioSequenceService.loadMultiple( ids );
return bs;
} else if ( GeneSet.class.isAssignableFrom( entityClass ) ) {
return geneSetService.load( ids );
} else if ( ExpressionExperimentSet.class.isAssignableFrom( entityClass ) ) {
return experimentSetService.load( ids );
} else if ( Characteristic.class.isAssignableFrom( entityClass ) ) {
Collection<Characteristic> chars = new ArrayList<Characteristic>();
for ( Long id : ids ) {
chars.add( characteristicService.load( id ) );
}
return chars;
} else if ( CharacteristicValueObject.class.isAssignableFrom( entityClass ) ) {
// TEMP HACK this whole method should not be needed in many cases
Collection<CharacteristicValueObject> chars = new ArrayList<CharacteristicValueObject>();
for ( SearchResult result : results ) {
if ( result.getResultClass().isAssignableFrom( CharacteristicValueObject.class ) ) {
chars.add( ( CharacteristicValueObject ) result.getResultObject() );
}
}
return chars;
} else if ( ExpressionExperimentSet.class.isAssignableFrom( entityClass ) ) {
return experimentSetService.load( ids );
} else {
throw new UnsupportedOperationException( "Don't know how to retrieve objects for class=" + entityClass );
}
}
private StopWatch startTiming() {
StopWatch watch = new StopWatch();
watch.start();
return watch;
}
    /**
     * Dispatch a free-text query to each enabled sub-search (genes, experiments, probes, platforms, sequences,
     * GO groups, bibrefs, gene sets, experiment sets, phenotypes) and merge, sort and limit the hits.
     * <p>
     * Makes no attempt at resolving the search query as a URI. Will tokenize the search query if there are control
     * characters in the String. URI's will get parsed into multiple query terms and lead to bad results.
     *
     * @param settings Will try to resolve general terms like brain --> to appropriate OntologyTerms and search for
     *        objects tagged with those terms (if isUseCharacte = true). NOTE: this method mutates the settings
     *        (taxon may be inferred from the query; the query itself is cleaned of taxon keywords and 1-char terms).
     * @param fillObjects If false, the entities will not be filled in inside the searchsettings; instead, they will be
     *        nulled (for security purposes). You can then use the id and Class stored in the SearchSettings to load the
     *        entities at your leisure. If true, the entities are loaded in the usual secure fashion. Setting this to
     *        false can be an optimization if all you need is the id. Note: filtering by taxon will not be done unless
     *        objects are filled
     * @param webSpeedSearch if true, this call is probably coming from a web app combo box and results will be limited
     *        to improve speed
     * @return map of result class to the sorted, limited results for that class
     */
    protected Map<Class<?>, List<SearchResult>> generalSearch( SearchSettings settings, boolean fillObjects,
            boolean webSpeedSearch ) {
        String enhancedQuery = StringUtils.strip( settings.getQuery() );
        // Escape Lucene query-syntax special characters before any further processing.
        String searchString = QueryParser.escape( enhancedQuery );
        // If no taxon was supplied, try to infer one from taxon names embedded in the query itself.
        if ( settings.getTaxon() == null ) {
            // split the query around whitespace characters, limit the splitting to 4 terms (may be excessive)
            String[] searchTerms = searchString.split( "\\s+", 4 );
            for ( int i = 0; i < searchTerms.length; i++ ) {
                searchTerms[i] = searchTerms[i].toLowerCase();
            }
            List<String> searchTermsList = Arrays.asList( searchTerms );
            // this Set is ordered by insertion order(LinkedHashMap) - see initializeNameToTaxonMap
            Set<String> keywords = nameToTaxonMap.keySet();
            // only strip out taxon terms if there is more than one search term in query and if the entire search string
            // is not itself a keyword
            if ( searchTerms.length > 1 && !keywords.contains( searchString.toLowerCase() ) ) {
                for ( String keyword : keywords ) {
                    int termIndex = searchString.toLowerCase().indexOf( keyword );
                    // make sure that the keyword occurs in the searchString
                    if ( termIndex != -1 ) {
                        // make sure that either the keyword is multi-term or that it occurs as a single term(not as
                        // part of another word)
                        if ( keyword.contains( " " ) || searchTermsList.contains( keyword ) ) {
                            // remove the matched keyword from the query and record the inferred taxon
                            searchString = searchString.replaceFirst( "(?i)" + keyword, "" ).trim();
                            settings.setTaxon( nameToTaxonMap.get( keyword ) );
                            // break on first term found in keywords since they should be(more or less) ordered by
                            // precedence
                            break;
                        }
                    }
                }
            }
        }
        List<SearchResult> rawResults = new ArrayList<>();
        // do gene first first before we munge the query too much.
        Collection<SearchResult> genes = null;
        if ( settings.getSearchGenes() ) {
            genes = geneSearch( settings, webSpeedSearch );
            accreteResults( rawResults, genes );
        }
        String[] searchTerms = searchString.split( "\\s+" );
        // some strings of size 1 cause lucene to barf and they were slipping through in multi-term queries, get rid of
        // them
        if ( searchTerms.length > 0 ) {
            searchString = "";
            for ( String sTerm : searchTerms ) {
                if ( sTerm.length() > 1 ) {
                    searchString = searchString + " " + sTerm;
                }
            }
            searchString = searchString.trim();
        }
        // the cleaned query is written back so downstream sub-searches see it
        settings.setQuery( searchString );
        // If nothing to search return nothing.
        if ( StringUtils.isBlank( searchString ) ) {
            return new HashMap<Class<?>, List<SearchResult>>();
        }
        if ( settings.getSearchExperiments() ) {
            Collection<SearchResult> foundEEs = expressionExperimentSearch( settings );
            rawResults.addAll( foundEEs );
        }
        // SearchSettings persistent entity does not contain a usePhenotypes property that these logic requires
        /*
         * if ( settings.getUsePhenotypes() && settings.getSearchGenes() ) {
         *
         * Collection<SearchResult> phenotypeGenes = dbHitsToSearchResult(
         * geneSearchService.getPhenotypeAssociatedGenes( searchString, settings.getTaxon() ),
         * "From phenotype association" ); accreteResults( rawResults, phenotypeGenes ); }
         */
        Collection<SearchResult> compositeSequences = null;
        if ( settings.getSearchProbes() ) {
            compositeSequences = compositeSequenceSearch( settings );
            accreteResults( rawResults, compositeSequences );
        }
        if ( settings.getSearchPlatforms() ) {
            // probe hits (if any) are passed along to help locate platforms
            Collection<SearchResult> foundADs = arrayDesignSearch( settings, compositeSequences );
            accreteResults( rawResults, foundADs );
        }
        if ( settings.getSearchBioSequences() ) {
            // gene hits (if any) are passed along to help locate sequences
            Collection<SearchResult> bioSequences = bioSequenceSearch( settings, genes );
            accreteResults( rawResults, bioSequences );
        }
        if ( settings.getUseGo() ) {
            Collection<SearchResult> ontologyGenes = dbHitsToSearchResult(
                    geneSearchService.getGOGroupGenes( searchString, settings.getTaxon() ), "From GO group" );
            accreteResults( rawResults, ontologyGenes );
        }
        if ( settings.getSearchBibrefs() ) {
            Collection<SearchResult> bibliographicReferences = compassBibliographicReferenceSearch( settings );
            accreteResults( rawResults, bibliographicReferences );
        }
        if ( settings.getSearchGeneSets() ) {
            Collection<SearchResult> geneSets = geneSetSearch( settings );
            accreteResults( rawResults, geneSets );
        }
        if ( settings.getSearchExperimentSets() ) {
            Collection<SearchResult> experimentSets = experimentSetSearch( settings );
            accreteResults( rawResults, experimentSets );
        }
        if ( settings.getSearchPhenotypes() ) {
            Collection<SearchResult> phenotypes = phenotypeSearch( settings );
            accreteResults( rawResults, phenotypes );
        }
        Map<Class<?>, List<SearchResult>> sortedLimitedResults = getSortedLimitedResults( settings, rawResults,
                fillObjects );
        log.info( "search for: " + settings.getQuery() + " " + rawResults.size()
                + " raw results (final tally may be filtered)" );
        return sortedLimitedResults;
    }
    /**
     * Execute the (already prepared) query against a Compass index. Runs inside Compass transaction.
     *
     * @param settings the settings supplying the query string
     * @param session the Compass session for the index being searched
     * @return hits converted to SearchResults; empty if the query is blank, too short, or a bare "*"
     */
    Collection<SearchResult> performSearch( SearchSettings settings, CompassSession session ) {
        StopWatch watch = startTiming();
        String enhancedQuery = settings.getQuery().trim();
        // Bail early on queries that are blank, too short to be useful, or a bare wildcard.
        if ( StringUtils.isBlank( enhancedQuery )
                || enhancedQuery.length() < MINIMUM_STRING_LENGTH_FOR_FREE_TEXT_SEARCH || enhancedQuery.equals( "*" ) )
            return new ArrayList<SearchResult>();
        CompassQuery compassQuery = session.queryBuilder().queryString( enhancedQuery ).toQuery();
        log.debug( "Parsed query: " + compassQuery );
        CompassHits hits = compassQuery.hits();
        // highlighting - fragments must be cached now, while the Compass transaction is still open.
        if ( ( ( SearchSettingsImpl ) settings ).getDoHighlighting() ) {
            if ( session instanceof InternalCompassSession ) { // always ...
                CompassMapping mapping = ( ( InternalCompassSession ) session ).getMapping();
                ResourceMapping[] rootMappings = mapping.getRootMappings();
                // should only be one rootMapping.
                process( rootMappings, hits );
            }
        }
        watch.stop();
        // Log slow searches; extremely slow ones additionally get a prominent message.
        if ( watch.getTime() > 100 ) {
            log.info( "Getting " + hits.getLength() + " lucene hits for " + enhancedQuery + " took " + watch.getTime()
                    + " ms" );
        }
        if ( watch.getTime() > 5000 ) {
            log.info( "*****Extremely long Lucene Index Search! " + hits.getLength() + " lucene hits for "
                    + enhancedQuery + " took " + watch.getTime() + " ms" );
        }
        return getSearchResults( hits );
    }
/**
* Recursively cache the highlighted text. This must be done during the search transaction.
*
* @param givenMappings on first call, the root mapping(s)
* @param hits
*/
private void process( ResourceMapping[] givenMappings, CompassHits hits ) {
for ( ResourceMapping resourceMapping : givenMappings ) {
Iterator<Mapping> mappings = resourceMapping.mappingsIt(); // one for each property.
for ( ; mappings.hasNext(); ) {
Mapping m = mappings.next();
if ( m instanceof ComponentMapping ) {
ClassMapping[] refClassMappings = ( ( ComponentMapping ) m ).getRefClassMappings();
process( refClassMappings, hits );
} else {
String name = m.getName();
// log.info( name );
for ( int i = 0; i < hits.getLength(); i++ ) {
try {
// we might want to bail as soon as we find something?
hits.highlighter( i ).fragment( name );
if ( log.isDebugEnabled() ) log.debug( "Cached " + name );
} catch ( Exception e ) {
break; // skip this property entirely...
}
}
}
}
}
}
@Override
public Map<Class<?>, List<SearchResult>> searchForNewlyCreatedUserQueryResults( UserQuery query ) {
Map<Class<?>, List<SearchResult>> searchResults;
Map<Class<?>, List<SearchResult>> finalResults = new HashMap<Class<?>, List<SearchResult>>();
SearchSettings settings = query.getSearchSettings();
if ( StringUtils.isBlank( settings.getTermUri() ) && !settings.getQuery().startsWith( "http://" ) ) {
// fill objects=true, speedySearch=false
searchResults = generalSearch( settings, true, false );
} else {
// we only attempt an ontology search if the uri looks remotely like a url.
searchResults = ontologyUriSearch( settings );
}
if ( searchResults == null ) {
return finalResults;
}
for ( Class<?> clazz : searchResults.keySet() ) {
List<SearchResult> results = searchResults.get( clazz );
List<SearchResult> updatedResults = new ArrayList<SearchResult>();
if ( results.size() == 0 ) continue;
log.info( "Search for newly createdQuery with settings: " + settings + "; result: " + results.size() + " "
+ clazz.getSimpleName() + "s" );
for ( SearchResult sr : results ) {
// Are SearchResults always auditable? maybe put in some error handling in case they are not or
// enforce searchSettings object to be of a certain form
Auditable auditableResult = ( Auditable ) sr.getResultObject();
// this list is ordered by date (not descending)
List<AuditEvent> eventList = auditTrailService.getEvents( auditableResult );
if ( eventList == null || eventList.isEmpty() ) continue;
for ( AuditEvent ae : eventList ) {
// assuming there is only one create event
if ( ae.getAction() == AuditAction.CREATE && ae.getDate().after( query.getLastUsed() ) ) {
updatedResults.add( sr );
break;
}
}
}
if ( !updatedResults.isEmpty() ) {
finalResults.put( clazz, updatedResults );
}
}
return finalResults;
}
}
/*
* The Gemma project
*
* Copyright (c) 2006 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.search;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.PostConstruct;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheException;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import net.sf.ehcache.config.CacheConfiguration;
import net.sf.ehcache.config.NonstopConfiguration;
import net.sf.ehcache.config.PersistenceConfiguration;
import net.sf.ehcache.config.PersistenceConfiguration.Strategy;
import net.sf.ehcache.config.TerracottaConfiguration;
import net.sf.ehcache.config.TimeoutBehaviorConfiguration;
import net.sf.ehcache.config.TimeoutBehaviorConfiguration.TimeoutBehaviorType;
import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.queryParser.QueryParser;
import org.compass.core.Compass;
import org.compass.core.CompassCallback;
import org.compass.core.CompassException;
import org.compass.core.CompassHighlightedText;
import org.compass.core.CompassHits;
import org.compass.core.CompassQuery;
import org.compass.core.CompassSession;
import org.compass.core.CompassTemplate;
import org.compass.core.mapping.CompassMapping;
import org.compass.core.mapping.Mapping;
import org.compass.core.mapping.ResourceMapping;
import org.compass.core.mapping.osem.ClassMapping;
import org.compass.core.mapping.osem.ComponentMapping;
import org.compass.core.spi.InternalCompassSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import ubic.basecode.ontology.model.OntologyIndividual;
import ubic.basecode.ontology.model.OntologyTerm;
import ubic.basecode.util.BatchIterator;
import ubic.gemma.annotation.reference.BibliographicReferenceService;
import ubic.gemma.association.phenotype.PhenotypeAssociationManagerService;
import ubic.gemma.expression.experiment.service.ExpressionExperimentService;
import ubic.gemma.expression.experiment.service.ExpressionExperimentSetService;
import ubic.gemma.genome.gene.service.GeneSearchService;
import ubic.gemma.genome.gene.service.GeneService;
import ubic.gemma.genome.gene.service.GeneSetService;
import ubic.gemma.model.analysis.expression.ExpressionExperimentSet;
import ubic.gemma.model.common.Auditable;
import ubic.gemma.model.common.auditAndSecurity.AuditAction;
import ubic.gemma.model.common.auditAndSecurity.AuditEvent;
import ubic.gemma.model.common.auditAndSecurity.AuditTrailService;
import ubic.gemma.model.common.auditAndSecurity.UserQuery;
import ubic.gemma.model.common.description.BibliographicReference;
import ubic.gemma.model.common.description.BibliographicReferenceValueObject;
import ubic.gemma.model.common.description.Characteristic;
import ubic.gemma.model.common.description.CharacteristicService;
import ubic.gemma.model.common.description.VocabCharacteristic;
import ubic.gemma.model.common.search.SearchSettings;
import ubic.gemma.model.common.search.SearchSettingsImpl;
import ubic.gemma.model.common.search.SearchSettingsValueObject;
import ubic.gemma.model.expression.arrayDesign.ArrayDesign;
import ubic.gemma.model.expression.arrayDesign.ArrayDesignService;
import ubic.gemma.model.expression.biomaterial.BioMaterial;
import ubic.gemma.model.expression.biomaterial.Treatment;
import ubic.gemma.model.expression.designElement.CompositeSequence;
import ubic.gemma.model.expression.designElement.CompositeSequenceService;
import ubic.gemma.model.expression.experiment.ExpressionExperiment;
import ubic.gemma.model.expression.experiment.FactorValue;
import ubic.gemma.model.genome.Gene;
import ubic.gemma.model.genome.Taxon;
import ubic.gemma.model.genome.TaxonDao;
import ubic.gemma.model.genome.biosequence.BioSequence;
import ubic.gemma.model.genome.biosequence.BioSequenceService;
import ubic.gemma.model.genome.gene.GeneProductService;
import ubic.gemma.model.genome.gene.GeneSet;
import ubic.gemma.model.genome.gene.phenotype.valueObject.CharacteristicValueObject;
import ubic.gemma.model.genome.gene.phenotype.valueObject.GeneEvidenceValueObject;
import ubic.gemma.model.genome.sequenceAnalysis.BioSequenceValueObject;
import ubic.gemma.ontology.OntologyService;
import ubic.gemma.util.EntityUtils;
import ubic.gemma.util.ReflectionUtil;
import ubic.gemma.util.Settings;
/**
* This service is used for performing searches using free text or exact matches to items in the database. <h2>
* Implementation notes</h2>
* <p>
* Internally, there are generally two kinds of searches performed, precise database searches looking for exact matches
* in the database and compass/lucene searches which look for matches in the stored index.
* <p>
* To add more dependencies to this Service edit the applicationContext-search.xml
*
* @author klc
* @author paul
* @author keshav
* @version $Id$
*/
@Component
public class SearchServiceImpl implements SearchService {
private static final String ONTOLOGY_CHILDREN_CACHE_NAME = "OntologyChildrenCache";
/**
* Penalty applied to all 'index' hits
*/
private static final double COMPASS_HIT_SCORE_PENALTY_FACTOR = 0.9;
/**
* Key for internal in-memory on-the-fly indexes
*/
// private static final String INDEX_KEY = "content";
/**
* Penalty applied to scores on hits for entities that derive from an association. For example, if a hit to an EE
* came from text associated with one of its biomaterials, the score is penalized by this amount.
*/
private static final double INDIRECT_DB_HIT_PENALTY = 0.8;
private static Log log = LogFactory.getLog( SearchServiceImpl.class.getName() );
/**
*
*/
// private static final int MAX_IN_MEMORY_INDEX_HITS = 1000;
private static final int MINIMUM_EE_QUERY_LENGTH = 3;
private static final int MINIMUM_STRING_LENGTH_FOR_FREE_TEXT_SEARCH = 2;
private static final String NCBI_GENE = "ncbi_gene";
/**
* How long after creation before an object is evicted, no matter what.
*/
private static final int ONTOLOGY_CACHE_TIME_TO_DIE = 10000;
/**
* How long an item in the cache lasts when it is not accessed.
*/
private static final int ONTOLOGY_CACHE_TIME_TO_IDLE = 3600;
/**
* How many term children can stay in memory
*/
private static final int ONTOLOGY_INFO_CACHE_SIZE = 30000;
/**
* If fewer than this number of experiments are returned from the a search of experiment characteristics, then
* search for experiments indirectly as well (ex: by finding bioMatierials tagged with the characteristicsand
* getting the experiments associated with them ). See also MAX_CHARACTERISTIC_SEARCH_RESULTS.
*/
private static final int SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS = 100;
@Autowired
private ArrayDesignService arrayDesignService;
@Autowired
private BibliographicReferenceService bibliographicReferenceService;
@Autowired
private BioSequenceService bioSequenceService;
@Autowired
private CacheManager cacheManager;
@Autowired
private CharacteristicService characteristicService;
// direct children of terms.
private Cache childTermCache;
@Autowired
@Qualifier("compassArray")
private Compass compassArray;
@Autowired
@Qualifier("compassBibliographic")
private Compass compassBibliographic;
@Autowired
@Qualifier("compassBiosequence")
private Compass compassBiosequence;
@Autowired
@Qualifier("compassExperimentSet")
private Compass compassExperimentSet;
@Autowired
@Qualifier("compassExpression")
private Compass compassExpression;
@Autowired
@Qualifier("compassGene")
private Compass compassGene;
@Autowired
@Qualifier("compassGeneSet")
private Compass compassGeneSet;
@Autowired
@Qualifier("compassProbe")
private Compass compassProbe;
@Autowired
private CompositeSequenceService compositeSequenceService;
@Autowired
private ExpressionExperimentSetService experimentSetService;
@Autowired
private ExpressionExperimentService expressionExperimentService;
@Autowired
private GeneSearchService geneSearchService;
@Autowired
private GeneProductService geneProductService;
@Autowired
private GeneService geneService;
@Autowired
private GeneSetService geneSetService;
@Autowired
private OntologyService ontologyService;
@Autowired
private PhenotypeAssociationManagerService phenotypeAssociationManagerService;
@Autowired
private TaxonDao taxonDao;
@Autowired
private AuditTrailService auditTrailService;
private static final int MAX_LUCENE_HITS = 750;
private HashMap<String, Taxon> nameToTaxonMap = new LinkedHashMap<String, Taxon>();
    /**
     * Set up the ontology-children cache (clustered via Terracotta when "gemma.cache.clustered" is set)
     * and build the name-to-taxon map. Runs once after dependency injection.
     *
     * @throws Exception (as RuntimeException) if the cache cannot be created
     */
    @PostConstruct
    void initializeSearchService() throws Exception {
        try {
            if ( cacheManager.cacheExists( ONTOLOGY_CHILDREN_CACHE_NAME ) ) {
                // Cache already registered (e.g. on a context refresh); nothing to do.
                return;
            }
            boolean terracottaEnabled = Settings.getBoolean( "gemma.cache.clustered", false );
            int diskExpiryThreadIntervalSeconds = 600;
            int maxElementsOnDisk = 10000;
            boolean terracottaCoherentReads = false;
            boolean clearOnFlush = false;
            if ( terracottaEnabled ) {
                // Clustered configuration: eternal in-memory entries with idle/TTL limits, no disk persistence,
                // and a NOOP timeout behavior so a Terracotta outage degrades gracefully instead of blocking.
                CacheConfiguration config = new CacheConfiguration( ONTOLOGY_CHILDREN_CACHE_NAME,
                        ONTOLOGY_INFO_CACHE_SIZE );
                config.setStatistics( false );
                config.setMemoryStoreEvictionPolicy( MemoryStoreEvictionPolicy.LRU.toString() );
                config.addPersistence( new PersistenceConfiguration().strategy( Strategy.NONE ) );
                config.setEternal( true );
                config.setTimeToIdleSeconds( ONTOLOGY_CACHE_TIME_TO_IDLE );
                config.setMaxElementsOnDisk( maxElementsOnDisk );
                config.addTerracotta( new TerracottaConfiguration() );
                config.getTerracottaConfiguration().setCoherentReads( terracottaCoherentReads );
                config.clearOnFlush( clearOnFlush );
                config.setTimeToLiveSeconds( ONTOLOGY_CACHE_TIME_TO_DIE );
                config.getTerracottaConfiguration().setClustered( true );
                config.getTerracottaConfiguration().setValueMode( "SERIALIZATION" );
                NonstopConfiguration nonstopConfiguration = new NonstopConfiguration();
                TimeoutBehaviorConfiguration tobc = new TimeoutBehaviorConfiguration();
                tobc.setType( TimeoutBehaviorType.NOOP.getTypeName() );
                nonstopConfiguration.addTimeoutBehavior( tobc );
                config.getTerracottaConfiguration().addNonstop( nonstopConfiguration );
                childTermCache = new Cache( config );
                // childTermCache = new Cache( "OntologyChildrenCache", ONTOLOGY_INFO_CACHE_SIZE,
                // MemoryStoreEvictionPolicy.LFU, false, null, false, ONTOLOGY_CACHE_TIME_TO_DIE,
                // ONTOLOGY_CACHE_TIME_TO_IDLE, false, diskExpiryThreadIntervalSeconds, null, null,
                // maxElementsOnDisk, 10, clearOnFlush, terracottaEnabled, "SERIALIZATION",
                // terracottaCoherentReads );
            } else {
                // Plain in-memory cache (LFU eviction).
                childTermCache = new Cache( ONTOLOGY_CHILDREN_CACHE_NAME, ONTOLOGY_INFO_CACHE_SIZE,
                        MemoryStoreEvictionPolicy.LFU, false, null, false, ONTOLOGY_CACHE_TIME_TO_DIE,
                        ONTOLOGY_CACHE_TIME_TO_IDLE, false, diskExpiryThreadIntervalSeconds, null );
            }
            cacheManager.addCache( childTermCache );
            // re-fetch so we hold the manager's registered instance
            childTermCache = cacheManager.getCache( ONTOLOGY_CHILDREN_CACHE_NAME );
        } catch ( CacheException e ) {
            throw new RuntimeException( e );
        }
        initializeNameToTaxonMap();
    }
    /**
     * Build the map from taxon names/abbreviations to Taxon, used by generalSearch to infer a taxon from
     * query keywords. The map is a LinkedHashMap, so insertion order = matching precedence: full scientific
     * names, common names and abbreviations are inserted first; single words extracted from multi-word names
     * are inserted in a second pass so they take lower precedence.
     */
    private void initializeNameToTaxonMap() {
        // NOTE(review): unchecked cast - assumes TaxonDao.loadAll() returns only taxa; confirm against the DAO.
        Collection<Taxon> taxonCollection = ( Collection<Taxon> ) taxonDao.loadAll();
        for ( Taxon taxon : taxonCollection ) {
            if ( taxon.getScientificName() != null )
                nameToTaxonMap.put( taxon.getScientificName().trim().toLowerCase(), taxon );
            if ( taxon.getCommonName() != null )
                nameToTaxonMap.put( taxon.getCommonName().trim().toLowerCase(), taxon );
            if ( taxon.getAbbreviation() != null )
                nameToTaxonMap.put( taxon.getAbbreviation().trim().toLowerCase(), taxon );
        }
        // loop through again breaking up multi-word taxon database names and handling some special cases(e.g. salmon,
        // rainbow are common to multiple taxa)
        // doing this is a separate loop so that these names take lower precedence when matching than the full terms in
        // the generated keySet
        // some of the special cases the section below may be unnecessary, or more may need to be added
        for ( Taxon taxon : taxonCollection ) {
            String[] terms;
            if ( taxon.getScientificName() != null ) {
                terms = taxon.getScientificName().split( "\\s+" );
                if ( terms.length > 1 ) {
                    for ( String s : terms ) {
                        // "Oncorhynchus" is shared by several species, so mapping it would be ambiguous
                        if ( !s.equalsIgnoreCase( "Oncorhynchus" ) ) {
                            nameToTaxonMap.put( s.toLowerCase(), taxon );
                        }
                    }
                }
            }
            if ( StringUtils.isNotBlank( taxon.getCommonName() ) ) {
                if ( taxon.getCommonName().equalsIgnoreCase( "salmonid" ) ) {
                    nameToTaxonMap.put( "salmon", taxon );
                }
                terms = taxon.getCommonName().split( "\\s+" );
                if ( terms.length > 1 ) {
                    for ( String s : terms ) {
                        // skip words common to several taxa (e.g. "pink salmon", "rainbow trout")
                        if ( !s.equalsIgnoreCase( "salmon" ) && !s.equalsIgnoreCase( "pink" )
                                && !s.equalsIgnoreCase( "rainbow" ) ) {
                            nameToTaxonMap.put( s.toLowerCase(), taxon );
                        }
                    }
                }
            }
        }
    }
@Override
public Map<Class<?>, List<SearchResult>> ajaxSearch( SearchSettingsValueObject settingsValueObject ) {
SearchSettings settings = SearchSettingsValueObject.toEntity( settingsValueObject );
return this.search( settings );
}
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#search(ubic.gemma.search.SearchSettings)
*/
@Override
public Map<Class<?>, List<SearchResult>> search( SearchSettings settings ) {
Map<Class<?>, List<SearchResult>> searchResults = new HashMap<Class<?>, List<SearchResult>>();
try {
searchResults = this.search( settings, true, false );
} catch ( org.compass.core.engine.SearchEngineQueryParseException qpe ) {
log.error( "Query parse Error: " + settings + "; message=" + qpe.getMessage(), qpe );
} catch ( Exception e ) {
log.error( "Search error on settings: " + settings + "; message=" + e.getMessage(), e );
}
return searchResults;
}
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#search(ubic.gemma.search.SearchSettings)
*/
@Override
public Map<Class<?>, List<SearchResult>> speedSearch( SearchSettings settings ) {
Map<Class<?>, List<SearchResult>> searchResults = new HashMap<>();
try {
searchResults = this.search( settings, true, true );
} catch ( org.compass.core.engine.SearchEngineQueryParseException qpe ) {
log.error( "Query parse Error: " + settings + "; message=" + qpe.getMessage(), qpe );
} catch ( Exception e ) {
log.error( "Search error on settings: " + settings + "; message=" + e.getMessage(), e );
}
return searchResults;
}
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#search(ubic.gemma.search.SearchSettings)
*/
@Override
public List<?> search( SearchSettings settings, Class<?> resultClass ) {
Map<Class<?>, List<SearchResult>> searchResults = this.search( settings );
List<Object> resultObjects = new ArrayList<Object>();
List<SearchResult> searchResultObjects = searchResults.get( resultClass );
if ( searchResultObjects == null ) return resultObjects;
for ( SearchResult sr : searchResultObjects ) {
resultObjects.add( sr.getResultObject() );
}
return resultObjects;
}
/*
* (non-Javadoc)
*
* @see ubic.gemma.search.SearchService#search(ubic.gemma.search.SearchSettings, boolean)
*/
@Override
public Map<Class<?>, List<SearchResult>> search( SearchSettings settings, boolean fillObjects,
boolean webSpeedSearch ) {
if ( StringUtils.isBlank( settings.getTermUri() ) && !settings.getQuery().startsWith( "http://" ) ) {
return generalSearch( settings, fillObjects, webSpeedSearch );
}
// we only attempt an ontology search if the uri looks remotely like a url.
return ontologyUriSearch( settings );
}
/**
* @param settings
* @return results, if the settings.termUri is populated. This includes gene uris.
*/
private Map<Class<?>, List<SearchResult>> ontologyUriSearch( SearchSettings settings ) {
Map<Class<?>, List<SearchResult>> results = new HashMap<Class<?>, List<SearchResult>>();
// 1st check to see if the query is a URI (from an ontology).
// Do this by seeing if we can find it in the loaded ontologies.
// Escape with general utilities because might not be doing a lucene backed search. (just a hibernate one).
String termUri = settings.getTermUri();
if ( StringUtils.isBlank( termUri ) ) {
termUri = settings.getQuery();
}
if ( !termUri.startsWith( "http://" ) ) {
return results;
}
OntologyTerm matchingTerm = null;
String uriString = null;
uriString = StringEscapeUtils.escapeJava( StringUtils.strip( termUri ) );
if ( StringUtils.containsIgnoreCase( uriString, NCBI_GENE ) ) {
// Perhaps is a valid gene URL. Want to search for the gene in gemma.
// 1st get objects tagged with the given gene identifier
Collection<Class<?>> classesToFilterOn = new HashSet<Class<?>>();
classesToFilterOn.add( ExpressionExperiment.class );
Collection<Characteristic> foundCharacteristics = characteristicService.findByUri( classesToFilterOn,
uriString );
Map<Characteristic, Object> parentMap = characteristicService.getParents( classesToFilterOn,
foundCharacteristics );
Collection<SearchResult> characteristicOwnerResults = filterCharacteristicOwnersByClass( classesToFilterOn,
parentMap );
if ( !characteristicOwnerResults.isEmpty() ) {
results.put( ExpressionExperiment.class, new ArrayList<SearchResult>() );
results.get( ExpressionExperiment.class ).addAll( characteristicOwnerResults );
}
if ( settings.getSearchGenes() ) {
// Get the gene
String ncbiAccessionFromUri = StringUtils.substringAfterLast( uriString, "/" );
Gene g = null;
try {
g = geneService.findByNCBIId( Integer.parseInt( ncbiAccessionFromUri ) );
} catch ( NumberFormatException e ) {
// ok
}
if ( g != null ) {
results.put( Gene.class, new ArrayList<SearchResult>() );
results.get( Gene.class ).add( new SearchResult( g ) );
}
}
return results;
}
/*
* Not searching for a gene.
*/
Collection<SearchResult> matchingResults;
Collection<Class<?>> classesToSearch = new HashSet<Class<?>>();
if ( settings.getSearchExperiments() ) {
classesToSearch.add( ExpressionExperiment.class ); // not sure ...
classesToSearch.add( BioMaterial.class );
classesToSearch.add( FactorValue.class );
}
// this doesn't seem to be implemented yet, LiteratureEvidence and GenericEvidence aren't handled in the
// fillValueObjects method downstream
/*
* if ( settings.getSearchPhenotypes() ) { classesToSearch.add( PhenotypeAssociation.class ); }
*/
matchingTerm = this.ontologyService.getTerm( uriString );
if ( matchingTerm == null || matchingTerm.getUri() == null ) {
/*
* Maybe the ontology isn't loaded. Look anyway.
*/
Map<Characteristic, Object> parentMap = characteristicService.getParents( classesToSearch,
characteristicService.findByUri( classesToSearch, uriString ) );
matchingResults = filterCharacteristicOwnersByClass( classesToSearch, parentMap );
} else {
log.info( "Found ontology term: " + matchingTerm );
// Was a URI from a loaded ontology soo get the children.
Collection<OntologyTerm> terms2Search4 = matchingTerm.getChildren( true );
terms2Search4.add( matchingTerm );
matchingResults = this.databaseCharacteristicExactUriSearchForOwners( classesToSearch, terms2Search4 );
}
for ( SearchResult searchR : matchingResults ) {
if ( results.containsKey( searchR.getResultClass() ) ) {
results.get( searchR.getResultClass() ).add( searchR );
} else {
List<SearchResult> rs = new ArrayList<SearchResult>();
rs.add( searchR );
results.put( searchR.getResultClass(), rs );
}
}
return results;
}
/*
 * (non-Javadoc)
 *
 * @see ubic.gemma.search.SearchService#searchExpressionExperiments(java.lang.String, java.lang.Long)
 */
@Override
public Collection<Long> searchExpressionExperiments( String query, Long taxonId ) {
    Taxon taxon = taxonDao.load( taxonId );
    Collection<Long> eeIds = new HashSet<Long>();
    if ( StringUtils.isNotBlank( query ) ) {
        // Very short queries return nothing rather than flooding the results.
        if ( query.length() < MINIMUM_EE_QUERY_LENGTH ) return eeIds;
        // Initial list. The result map has no entry for classes with zero hits, so the lookup can
        // yield null; guard before iterating.
        List<SearchResult> results = this.search( SearchSettingsImpl.expressionExperimentSearch( query ), false,
                false ).get( ExpressionExperiment.class );
        if ( results != null ) {
            for ( SearchResult result : results ) {
                eeIds.add( result.getId() );
            }
        }
        // Filter by taxon
        if ( taxon != null ) {
            Collection<Long> eeIdsToKeep = new HashSet<Long>();
            Collection<ExpressionExperiment> ees = expressionExperimentService.findByTaxon( taxon );
            for ( ExpressionExperiment ee : ees ) {
                if ( eeIds.contains( ee.getId() ) ) eeIdsToKeep.add( ee.getId() );
            }
            eeIds.retainAll( eeIdsToKeep );
        }
    } else {
        // Blank query: return all experiments, optionally restricted to the taxon.
        Collection<ExpressionExperiment> ees = ( taxon != null ) ? expressionExperimentService.findByTaxon( taxon )
                : expressionExperimentService.loadAll();
        for ( ExpressionExperiment ee : ees ) {
            eeIds.add( ee.getId() );
        }
    }
    return eeIds;
}
/**
 * Merge newly found hits into an accumulating result list, skipping anything already present.
 *
 * @param rawResults list being accumulated into (modified in place)
 * @param newResults hits to merge in
 */
private void accreteResults( List<SearchResult> rawResults, Collection<SearchResult> newResults ) {
    for ( SearchResult candidate : newResults ) {
        /*
         * We do this because we don't want to clobber results, when the same object comes up more than once in
         * different searches. FIXME - perhaps check if the score of the existing one is lower?
         */
        if ( rawResults.contains( candidate ) ) {
            continue;
        }
        rawResults.add( candidate );
    }
}
/**
 * Returns children one step down in the ontology graph.
 * <p>
 * getChildren can be very slow for 'high-level' classes like "neoplasm", so results are cached by term URI.
 * Never returns null: callers iterate the result directly.
 *
 * @param term starting point
 * @return the direct child terms, or an empty collection if none could be determined
 */
private Collection<OntologyTerm> getDirectChildTerms( OntologyTerm term ) {
    String uri = term.getUri();
    if ( StringUtils.isBlank( uri ) ) {
        // Shouldn't happen, but don't poison the cache with a blank key; fetch directly instead.
        if ( log.isDebugEnabled() ) log.debug( "Blank uri for " + term );
        try {
            Collection<OntologyTerm> direct = term.getChildren( true );
            return direct == null ? new HashSet<OntologyTerm>() : direct;
        } catch ( com.hp.hpl.jena.ontology.ConversionException ce ) {
            log.warn( "getting children for term: " + term
                    + " caused com.hp.hpl.jena.ontology.ConversionException. " + ce.getMessage() );
            return new HashSet<OntologyTerm>();
        }
    }
    Collection<OntologyTerm> children = null;
    Element cachedChildren = this.childTermCache.get( uri );
    // log.debug("Getting children of " + term);
    if ( cachedChildren == null ) {
        try {
            children = term.getChildren( true );
            childTermCache.put( new Element( uri, children ) );
        } catch ( com.hp.hpl.jena.ontology.ConversionException ce ) {
            log.warn( "getting children for term: " + term
                    + " caused com.hp.hpl.jena.ontology.ConversionException. " + ce.getMessage() );
        }
    } else {
        children = ( Collection<OntologyTerm> ) cachedChildren.getObjectValue();
    }
    // children is null if the conversion failed above (or the cache held null); return empty rather
    // than null so that recursive callers don't NPE.
    return children == null ? new HashSet<OntologyTerm>() : children;
}
/**
 * A general search for array designs.
 * <p>
 * This search does both an database search and a compass search. This is also contains an underlying
 * {@link CompositeSequence} search, returning the {@link ArrayDesign} collection for the given composite sequence
 * search string (the returned collection of array designs does not contain duplicates).
 *
 * @param settings search settings; the query string is used for all the lookups below
 * @param probeResults Collection of results from a previous CompositeSequence search. Can be null; otherwise used
 *        to avoid a second search for probes. The array designs for the probes are added to the final results.
 * @return de-duplicated array-design search results (plus probe results, see NOTE below)
 */
private Collection<SearchResult> arrayDesignSearch( SearchSettings settings, Collection<SearchResult> probeResults ) {
    StopWatch watch = startTiming();
    String searchString = settings.getQuery();
    Collection<SearchResult> results = new HashSet<SearchResult>();
    // An exact short-name match scores 1.0 and suppresses the by-name lookup.
    ArrayDesign shortNameResult = arrayDesignService.findByShortName( searchString );
    if ( shortNameResult != null ) {
        results.add( new SearchResult( shortNameResult, 1.0 ) );
    } else {
        Collection<ArrayDesign> nameResult = arrayDesignService.findByName( searchString );
        if ( nameResult != null ) for ( ArrayDesign ad : nameResult ) {
            results.add( new SearchResult( ad, 1.0 ) );
        }
    }
    // Alternate-name and manufacturer matches are slightly down-weighted (0.9).
    Collection<ArrayDesign> altNameResults = arrayDesignService.findByAlternateName( searchString );
    for ( ArrayDesign arrayDesign : altNameResults ) {
        results.add( new SearchResult( arrayDesign, 0.9 ) );
    }
    Collection<ArrayDesign> manufacturerResults = arrayDesignService.findByManufacturer( searchString );
    for ( ArrayDesign arrayDesign : manufacturerResults ) {
        results.add( new SearchResult( arrayDesign, 0.9 ) );
    }
    // Index- and database-backed searches.
    results.addAll( compassArrayDesignSearch( settings ) );
    results.addAll( databaseArrayDesignSearch( settings ) );
    // Reuse previously computed probe hits if supplied, else run the probe index search now.
    Collection<SearchResult> probes = null;
    if ( probeResults == null ) {
        probes = compassCompositeSequenceSearch( settings );
    } else {
        probes = probeResults;
    }
    for ( SearchResult r : probes ) {
        CompositeSequence cs = ( CompositeSequence ) r.getResultObject();
        if ( cs.getArrayDesign() == null ) // This might happen as compass
            // might not have indexed the AD
            // for the CS
            continue;
        // NOTE(review): this adds the probe SearchResult itself, not its ArrayDesign, even though the
        // javadoc says the array designs are added -- confirm downstream consumers expect this.
        results.add( r );
    }
    watch.stop();
    if ( watch.getTime() > 1000 )
        log.info( "Array Design search for '" + settings + "' took " + watch.getTime() + " ms" );
    return results;
}
/**
 * Combined index (compass) and database search for biosequences.
 *
 * @param settings search settings
 * @param previousGeneSearchResults Can be null, otherwise used to avoid a second search for genes. The biosequences
 *        for the genes are added to the final results.
 * @return union of compass and database biosequence hits
 */
private Collection<SearchResult> bioSequenceSearch( SearchSettings settings,
        Collection<SearchResult> previousGeneSearchResults ) {
    StopWatch timer = startTiming();
    Collection<SearchResult> hits = new HashSet<SearchResult>();
    hits.addAll( compassBioSequenceSearch( settings, previousGeneSearchResults ) );
    hits.addAll( databaseBioSequenceSearch( settings ) );
    timer.stop();
    if ( timer.getTime() > 1000 ) {
        log.info( "Biosequence search for '" + settings + "' took " + timer.getTime() + " ms "
                + hits.size() + " results." );
    }
    return hits;
}
/**
 * Find experiments via characteristics (annotations): direct experiment tags plus tags on their factor values,
 * biomaterials and treatments, with indirect hits mapped back to the owning experiment.
 *
 * @param settings search settings; the query may contain " OR " subclauses, which are unioned
 * @return experiment search results (direct characteristic hits plus penalized indirect hits)
 */
private Collection<SearchResult> characteristicExpressionExperimentSearch( final SearchSettings settings ) {
    Collection<SearchResult> results = new HashSet<SearchResult>();
    Collection<Class<?>> classToSearch = new ArrayList<Class<?>>( 1 ); // this is a collection because of the API
    // for characteristicService; could add
    // findByUri(Class<?>...)
    // order matters.
    Queue<Class<?>> orderedClassesToSearch = new LinkedList<Class<?>>();
    orderedClassesToSearch.add( ExpressionExperiment.class );
    orderedClassesToSearch.add( FactorValue.class );
    orderedClassesToSearch.add( BioMaterial.class );
    orderedClassesToSearch.add( Treatment.class );
    Collection<SearchResult> characterSearchResults = new HashSet<SearchResult>();
    // Search one entity type at a time (most-direct first) and stop as soon as we have enough hits.
    while ( characterSearchResults.size() < SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
            && !orderedClassesToSearch.isEmpty() ) {
        classToSearch.clear();
        classToSearch.add( orderedClassesToSearch.poll() );
        // We handle the OR clauses here.
        String[] subclauses = settings.getQuery().split( " OR " );
        for ( String subclause : subclauses ) {
            /*
             * Note that the AND is applied only within one entity type. The fix would be to apply AND at this
             * level.
             */
            Collection<SearchResult> classResults = characteristicSearchWithChildren( classToSearch, subclause );
            if ( !classResults.isEmpty() ) {
                String msg = "Found " + classResults.size() + " " + classToSearch.iterator().next().getSimpleName()
                        + " results from characteristic search.";
                if ( characterSearchResults.size() >= SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS ) {
                    msg += " Total found > " + SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                            + ", will not search for more entities.";
                }
                log.info( msg );
            }
            characterSearchResults.addAll( classResults );
        }
    }
    StopWatch watch = new StopWatch();
    watch.start();
    // filter and get parents...
    int numEEs = 0;
    Collection<BioMaterial> biomaterials = new HashSet<BioMaterial>();
    Collection<FactorValue> factorValues = new HashSet<FactorValue>();
    Collection<Treatment> treatments = new HashSet<Treatment>();
    // FIXME use this. We lose track of which object went with which EE (except for direct hits)
    // Map<Object, String> highlightedText = new HashMap<>();
    // Partition the hits: experiments go straight into the results; sub-objects are collected for
    // batched owner lookups below.
    for ( SearchResult sr : characterSearchResults ) {
        Class<?> resultClass = sr.getResultClass();
        // highlightedText.put( sr.getResultObject(), sr.getHighlightedText() );
        if ( ExpressionExperiment.class.isAssignableFrom( resultClass ) ) {
            sr.setHighlightedText( sr.getHighlightedText() + " (characteristic)" );
            results.add( sr );
            numEEs++;
        } else if ( BioMaterial.class.isAssignableFrom( resultClass ) ) {
            biomaterials.add( ( BioMaterial ) sr.getResultObject() );
        } else if ( FactorValue.class.isAssignableFrom( resultClass ) ) {
            factorValues.add( ( FactorValue ) sr.getResultObject() );
        } else if ( Treatment.class.isAssignableFrom( resultClass ) ) {
            treatments.add( ( Treatment ) sr.getResultObject() );
        }
    }
    /*
     * Much faster to batch it...but we loose track of which search result came from which, so we put generic
     * highlighted text.
     */
    if ( biomaterials.size() > 0 ) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByBioMaterials( biomaterials );
        for ( ExpressionExperiment ee : ees ) {
            // Indirect hits are down-weighted by INDIRECT_DB_HIT_PENALTY.
            results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "BioMaterial characteristic" ) );
        }
    }
    if ( factorValues.size() > 0 ) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByFactorValues( factorValues );
        for ( ExpressionExperiment ee : ees ) {
            if ( log.isDebugEnabled() ) log.debug( ee );
            results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "Factor characteristic" ) );
        }
    }
    if ( treatments.size() > 0 ) {
        // Treatment hits are detected but not mapped back to experiments (not implemented).
        log.info( "Not processing treatments, but hits were found" );
    }
    if ( log.isDebugEnabled() ) {
        log.debug( "ExpressionExperiment search: " + settings + " -> " + results.size() + " characteristic hits" );
    }
    if ( watch.getTime() > 1000 ) {
        log.info( "Retrieving " + results.size() + " experiments from " + characterSearchResults.size()
                + " retrieved characteristics took " + watch.getTime() + " ms" );
        log.info( "Breakdown: " + numEEs + " via direct association with EE; " + biomaterials.size()
                + " via association with Biomaterial; " + factorValues.size() + " via experimental design" );
    }
    return results;
}
/**
 * Search for the query in ontologies, including items that are associated with children of matching query terms.
 * That is, 'brain' should return entities tagged as 'hippocampus'. This method will return results only up to
 * MAX_CHARACTERISTIC_SEARCH_RESULTS. It can handle AND in searches, so Parkinson's AND neuron finds items tagged
 * with both of those terms. The use of OR is handled by the caller.
 *
 * @param classes Classes of characteristic-bound entities. For example, to get matching characteristics of
 *        ExpressionExperiments, pass ExpressionExperiments.class in this collection parameter.
 * @param query the (sub)query, possibly containing " AND " conjunctions
 * @return SearchResults of CharcteristicObjects. Typically to be useful one needs to retrieve the 'parents'
 *         (entities which have been 'tagged' with the term) of those Characteristics
 */
private Collection<SearchResult> characteristicSearchWithChildren( Collection<Class<?>> classes, String query ) {
    StopWatch timer = startTiming();
    /*
     * The tricky part here is if the user has entered a boolean query. If they put in
     *
     * Parkinson's disease AND neuron
     *
     * Then we want to eventually return entities that are associated with both. We don't expect to find single
     * characteristics that match both.
     *
     * But if they put in
     *
     * Parkinson's disease
     *
     * We don't want to do two queries.
     */
    List<String> subparts = Arrays.asList( query.split( " AND " ) );
    // we would have to first deal with the separate queries, and then apply the logic.
    Collection<SearchResult> allResults = new HashSet<SearchResult>();
    log.info( "Starting characteristic search: " + query + " for type=" + StringUtils.join( classes, "," ) );
    for ( String rawTerm : subparts ) {
        String trimmed = StringUtils.strip( rawTerm );
        if ( StringUtils.isBlank( trimmed ) ) {
            continue;
        }
        Collection<SearchResult> subqueryResults = characteristicSearchTerm( classes, trimmed );
        if ( allResults.isEmpty() ) {
            // First conjunct: seed the result set.
            allResults.addAll( subqueryResults );
        } else {
            // this is our Intersection operation: AND keeps only entities hit by every conjunct so far.
            allResults.retainAll( subqueryResults );
            // aggregate the highlighted text: append this conjunct's highlight to each surviving result.
            Map<SearchResult, String> highlights = new HashMap<>();
            for ( SearchResult sqr : subqueryResults ) {
                highlights.put( sqr, sqr.getHighlightedText() );
            }
            for ( SearchResult ar : allResults ) {
                String k = highlights.get( ar );
                if ( StringUtils.isNotBlank( k ) ) {
                    String highlightedText = ar.getHighlightedText();
                    if ( StringUtils.isBlank( highlightedText ) ) {
                        ar.setHighlightedText( k );
                    } else {
                        ar.setHighlightedText( highlightedText + "," + k );
                    }
                }
            }
        }
        if ( timer.getTime() > 1000 ) {
            log.info( "Characteristic search for '" + rawTerm + "': " + allResults.size()
                    + " hits retained so far; " + timer.getTime() + "ms" );
            timer.reset();
            timer.start();
        }
    }
    return allResults;
}
/**
 * The maximum number of characteristics to search while walking down a ontology graph.
 */
private static final int MAX_CHARACTERISTIC_SEARCH_RESULTS = 500;
/**
 * Perform a search on a query - it does not have to be one word, it could be "parkinson's disease".
 * <p>
 * Searches in stages, capped at MAX_CHARACTERISTIC_SEARCH_RESULTS matched characteristics: (1) ontology
 * individuals matching the query; (2) free-text characteristic values; (3) ontology classes and, recursively,
 * their children. Finally the owners ('parents') of the matched characteristics are retrieved.
 *
 * @param classes classes of characteristic-bound entities to restrict the search to
 * @param query the search term
 * @return search results for the entities that own the matched characteristics
 */
private Collection<SearchResult> characteristicSearchTerm( Collection<Class<?>> classes, String query ) {
    if ( log.isDebugEnabled() ) log.debug( "Starting search for " + query );
    StopWatch watch = startTiming();
    Collection<Characteristic> cs = new HashSet<Characteristic>();
    // Stage 1: ontology individuals matching the query, looked up in the DB in batches of 10 URIs.
    Collection<OntologyIndividual> individuals = ontologyService.findIndividuals( query );
    for ( Collection<OntologyIndividual> individualbatch : BatchIterator.batches( individuals, 10 ) ) {
        Collection<String> uris = new HashSet<String>();
        for ( OntologyIndividual individual : individualbatch ) {
            uris.add( individual.getUri() );
        }
        Collection<SearchResult> dbhits = dbHitsToSearchResult( characteristicService.findByUri( classes, uris ) );
        for ( SearchResult crs : dbhits ) {
            cs.add( ( Characteristic ) crs.getResultObject() );
        }
        if ( cs.size() >= MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
            break;
        }
    }
    if ( individuals.size() > 0 && watch.getTime() > 1000 ) {
        log.info( "Found " + individuals.size() + " individuals matching '" + query + "' in " + watch.getTime()
                + "ms" );
    }
    /*
     * Add characteristics that have values matching the query; this pulls in items not associated with ontology
     * terms (free text). We do this here so we can apply the query logic to the matches.
     */
    if ( cs.size() < MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
        String dbQueryString = query.replaceAll( "\\*", "" ); // note I changed the order of search operations so
        // this
        // might not be wanted.
        Collection<Characteristic> valueMatches = characteristicService.findByValue( classes, dbQueryString );
        if ( valueMatches != null && !valueMatches.isEmpty() ) {
            cs.addAll( valueMatches );
            if ( watch.getTime() > 1000 ) {
                log.info( "Found " + valueMatches.size() + " characteristics matching value '" + query + "' in "
                        + watch.getTime() + "ms" );
            }
            watch.reset();
            watch.start();
        }
    }
    if ( cs.size() < MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
        /*
         * Identify initial set of matches to the query.
         */
        Collection<OntologyTerm> matchingTerms = ontologyService.findTerms( query );
        if ( watch.getTime() > 1000 ) {
            log.info( "Found " + matchingTerms.size() + " ontology classes matching '" + query + "' in "
                    + watch.getTime() + "ms" );
        }
        /*
         * Search for child terms.
         */
        if ( !matchingTerms.isEmpty() ) {
            for ( OntologyTerm term : matchingTerms ) {
                /*
                 * In this loop, each term is a match directly to our query, and we do a depth-first fetch of the
                 * children.
                 */
                String uri = term.getUri();
                if ( StringUtils.isBlank( uri ) ) continue;
                int sizeBefore = cs.size();
                getCharactersticsAnnotatedToChildren( classes, term, cs );
                if ( log.isDebugEnabled() && cs.size() > sizeBefore ) {
                    log.debug( ( cs.size() - sizeBefore ) + " characteristics matching children term of " + term );
                }
                if ( cs.size() >= MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
                    break;
                }
            }
            if ( watch.getTime() > 1000 ) {
                log.info( "Found " + cs.size() + " characteristics for '" + query + "' including child terms in "
                        + watch.getTime() + "ms" );
            }
            watch.reset();
            watch.start();
        }
    }
    /*
     * Retrieve the owner objects
     */
    watch.reset();
    watch.start();
    Collection<SearchResult> matchingEntities = getAnnotatedEntities( classes, cs );
    if ( watch.getTime() > 1000 ) {
        log.info( "Retrieved " + matchingEntities.size() + " entities via characteristics for '" + query + "' in "
                + watch.getTime() + "ms" );
    }
    if ( log.isDebugEnabled() ) log.debug( "End search for " + query );
    return matchingEntities;
}
/**
 * Recursively collect characteristics annotated to the children of the given term, walking down the ontology
 * graph until MAX_CHARACTERISTIC_SEARCH_RESULTS characteristics have been accumulated.
 *
 * @param classes classes of characteristic-bound entities to restrict the lookup to
 * @param term starting term (its children, grandchildren, ... are searched; the term itself is not)
 * @param results accumulator; matched characteristics are added in place
 */
private void getCharactersticsAnnotatedToChildren( Collection<Class<?>> classes, OntologyTerm term,
        Collection<Characteristic> results ) {
    Collection<OntologyTerm> children = getDirectChildTerms( term );
    // Guard against a null/empty child set (e.g. blank-uri terms or ontology conversion failures)
    // so the recursion terminates cleanly instead of throwing an NPE.
    if ( children == null || children.isEmpty() ) {
        return;
    }
    /*
     * Find occurrences of these terms in our system. This is fast, so long as there aren't too many.
     */
    Collection<String> uris = new ArrayList<String>();
    for ( OntologyTerm ontologyTerm : children ) {
        if ( ontologyTerm.getUri() == null ) continue;
        uris.add( ontologyTerm.getUri() );
    }
    if ( !uris.isEmpty() ) {
        Collection<SearchResult> dbhits = dbHitsToSearchResult( characteristicService.findByUri( classes, uris ) );
        for ( SearchResult crs : dbhits ) {
            results.add( ( Characteristic ) crs.getResultObject() );
        }
    }
    if ( results.size() >= MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
        return;
    }
    for ( OntologyTerm child : children ) {
        getCharactersticsAnnotatedToChildren( classes, child, results );
    }
}
/**
 * A Compass search on array designs.
 *
 * @param settings search settings carrying the query
 * @return array-design hits from the compass index
 */
private Collection<SearchResult> compassArrayDesignSearch( SearchSettings settings ) {
    // Delegate to the shared compass search using the array-design index bean.
    return this.compassSearch( compassArray, settings );
}
/**
 * A Compass search on bibliographic references.
 *
 * @param settings search settings carrying the query
 * @return bibliographic-reference hits from the compass index
 */
private Collection<SearchResult> compassBibliographicReferenceSearch( SearchSettings settings ) {
    // Delegate to the shared compass search using the bibliographic-reference index bean.
    return this.compassSearch( compassBibliographic, settings );
}
/**
 * A compass backed search that finds biosequences that match the search string. Searches the gene and probe indexes
 * for matches then converts those results to biosequences
 *
 * @param settings search settings carrying the query
 * @param previousGeneSearchResults Can be null, otherwise used to avoid a second search for genes. The biosequences
 *        for the genes are added to the final results.
 * @return biosequence hits, including sequences resolved from gene hits
 */
private Collection<SearchResult> compassBioSequenceSearch( SearchSettings settings,
        Collection<SearchResult> previousGeneSearchResults ) {
    Collection<SearchResult> results = compassSearch( compassBiosequence, settings );
    // Reuse gene hits if the caller already computed them; otherwise run the gene index search.
    Collection<SearchResult> geneResults;
    if ( previousGeneSearchResults != null ) {
        log.info( "Biosequence Search: using previous results" );
        geneResults = previousGeneSearchResults;
    } else {
        log.info( "Biosequence Search: running gene search with " + settings.getQuery() );
        geneResults = compassGeneSearch( settings );
    }
    // Keep the originating SearchResult for each gene so provenance carries over to the sequences.
    Map<Gene, SearchResult> genes = new HashMap<Gene, SearchResult>();
    for ( SearchResult sr : geneResults ) {
        Object resultObject = sr.getResultObject();
        if ( !( resultObject instanceof Gene ) ) {
            // see bug 1774 -- may not be happening any more.
            log.warn( "Expected a Gene, got a " + resultObject.getClass() + " on query=" + settings.getQuery() );
            continue;
        }
        genes.put( ( Gene ) resultObject, sr );
    }
    Map<Gene, Collection<BioSequence>> seqsFromDb = bioSequenceService.findByGenes( genes.keySet() );
    for ( Map.Entry<Gene, Collection<BioSequence>> entry : seqsFromDb.entrySet() ) {
        List<BioSequence> bs = new ArrayList<BioSequence>( entry.getValue() );
        // bioSequenceService.thaw( bs );
        results.addAll( dbHitsToSearchResult( bs, genes.get( entry.getKey() ), null ) );
    }
    return results;
}
/**
 * A Compass search on composite sequences (probes).
 *
 * @param settings search settings carrying the query
 * @return probe hits from the compass index
 */
private Collection<SearchResult> compassCompositeSequenceSearch( final SearchSettings settings ) {
    // Delegate to the shared compass search using the probe index bean.
    return this.compassSearch( compassProbe, settings );
}
/**
 * A compass search on expressionExperiments.
 *
 * @param settings search settings carrying the query
 * @return expression-experiment hits from the compass index
 */
private Collection<SearchResult> compassExpressionSearch( SearchSettings settings ) {
    // Delegate to the shared compass search using the expression-experiment index bean.
    return this.compassSearch( compassExpression, settings );
}
/**
 * A Compass search on genes.
 *
 * @param settings search settings carrying the query
 * @return gene hits from the compass index
 */
private Collection<SearchResult> compassGeneSearch( final SearchSettings settings ) {
    // Delegate to the shared compass search using the gene index bean.
    return this.compassSearch( compassGene, settings );
}
/**
 * Generic method for searching Lucene indices for entities (excluding ontology terms, which use the OntologySearch)
 *
 * @param bean the Compass instance whose index should be queried
 * @param settings search settings; if index use is disabled, returns an empty set without querying
 * @return hits from the index, possibly empty
 */
private Collection<SearchResult> compassSearch( Compass bean, final SearchSettings settings ) {
    if ( !settings.getUseIndices() ) {
        return new HashSet<SearchResult>();
    }
    // Run the query inside a compass session managed by the template.
    CompassTemplate compassTemplate = new CompassTemplate( bean );
    Collection<SearchResult> hits = compassTemplate.execute( new CompassCallback<Collection<SearchResult>>() {
        @Override
        public Collection<SearchResult> doInCompass( CompassSession session ) throws CompassException {
            return performSearch( settings, session );
        }
    } );
    if ( log.isDebugEnabled() ) {
        log.debug( "Compass search via " + bean.getSettings().getSetting( "compass.name" ) + " : " + settings
                + " -> " + hits.size() + " hits" );
    }
    return hits;
}
/**
 * Search by name of the composite sequence as well as gene.
 *
 * @param settings search settings
 * @return composite-sequence results only; hits of other types from the underlying searches are dropped
 */
private Collection<SearchResult> compositeSequenceSearch( SearchSettings settings ) {
    StopWatch watch = startTiming();
    /*
     * FIXME: this at least partly ignores any array design that was set as a restriction, especially in a gene
     * search.
     */
    // Compass searching of composite sequences is skipped because it only bloats the results;
    // the database search (which also covers associated genes) is used instead.
    Collection<SearchResult> allResults = new HashSet<>( databaseCompositeSequenceSearch( settings ) );
    /*
     * Keep only composite sequences: the underlying search also returns genes/bioSequences.
     */
    Collection<SearchResult> finalResults = new HashSet<>();
    for ( SearchResult candidate : allResults ) {
        if ( CompositeSequence.class.isAssignableFrom( candidate.getResultClass() ) ) {
            finalResults.add( candidate );
        }
    }
    watch.stop();
    if ( watch.getTime() > 1000 ) {
        log.info( "Composite sequence search for '" + settings + "' took " + watch.getTime() + " ms, "
                + finalResults.size() + " results." );
    }
    return finalResults;
}
/**
 * Convert entity search results for presentation, replacing BioSequence entities (which must be thawed) with
 * their value objects; all other results pass through unchanged.
 *
 * @param searchResults raw entity results
 * @return results with biosequences converted to value objects
 */
private List<SearchResult> convertEntitySearchResutsToValueObjectsSearchResults(
        Collection<SearchResult> searchResults ) {
    List<SearchResult> converted = new ArrayList<SearchResult>();
    for ( SearchResult sr : searchResults ) {
        // this is a special case ... for some reason.
        if ( !BioSequence.class.isAssignableFrom( sr.getResultClass() ) ) {
            converted.add( sr );
            continue;
        }
        BioSequence thawed = bioSequenceService.thaw( ( BioSequence ) sr.getResultObject() );
        converted.add( new SearchResult( BioSequenceValueObject.fromEntity( thawed ), sr.getScore(), sr
                .getHighlightedText() ) );
    }
    return converted;
}
/**
 * Searches the DB for array designs which have composite sequences whose names match the given search string.
 * Because of the underlying database search, this is acl aware. That is, returned array designs are filtered based
 * on access control list (ACL) permissions.
 *
 * @param settings search settings; returns empty immediately if database use is disabled
 * @return array-design results (deduplicated via a set)
 */
private Collection<SearchResult> databaseArrayDesignSearch( SearchSettings settings ) {
    if ( !settings.getUseDatabase() ) {
        return new HashSet<SearchResult>();
    }
    StopWatch watch = startTiming();
    // Platforms are found indirectly: locate probes whose name matches exactly, then take their platforms.
    Collection<ArrayDesign> adSet = new HashSet<ArrayDesign>();
    for ( CompositeSequence sequence : compositeSequenceService.findByName( settings.getQuery() ) ) {
        adSet.add( sequence.getArrayDesign() );
    }
    watch.stop();
    if ( watch.getTime() > 1000 ) {
        log.info( "Array Design Compositesequence DB search for " + settings + " took " + watch.getTime() + " ms"
                + " found " + adSet.size() + " Ads" );
    }
    return dbHitsToSearchResult( adSet );
}
/**
 * A database serach for biosequences. Biosequence names are already indexed by compass...
 *
 * @param settings search settings; returns empty immediately if database use is disabled. Any '*' wildcards in
 *        the query are translated to SQL '%' for the inexact name lookup.
 * @return biosequence results
 */
private Collection<SearchResult> databaseBioSequenceSearch( SearchSettings settings ) {
    if ( !settings.getUseDatabase() ) return new HashSet<SearchResult>();
    StopWatch watch = startTiming();
    String searchString = settings.getQuery();
    // replace * with % for inexact symbol search; a literal character swap needs no regex machinery.
    String inexactString = searchString.replace( '*', '%' );
    Collection<BioSequence> bs = bioSequenceService.findByName( inexactString );
    // bioSequenceService.thaw( bs );
    Collection<SearchResult> bioSequenceList = new HashSet<SearchResult>( dbHitsToSearchResult( bs ) );
    watch.stop();
    if ( watch.getTime() > 1000 )
        log.info( "BioSequence DB search for " + searchString + " took " + watch.getTime() + " ms and found "
                + bioSequenceList.size() + " BioSequences" );
    return bioSequenceList;
}
/**
 * Takes a list of ontology terms, and classes of objects of interest to be returned. Looks through the
 * characteristic table for an exact match with the given ontology terms. Only tries to match the uri's.
 *
 * @param classes Class of objects to restrict the search to (typically ExpressionExperimentImpl.class, for
 *        example).
 * @param terms A list of ontololgy terms to search for
 * @return Collection of search results for the objects owning the found characteristics, where the owner is of
 *         class clazz
 */
private Collection<SearchResult> databaseCharacteristicExactUriSearchForOwners( Collection<Class<?>> classes,
        Collection<OntologyTerm> terms ) {
    // Only the term URIs are matched; plain characteristic values are not searched here.
    Collection<Characteristic> uriMatches = new ArrayList<Characteristic>();
    for ( OntologyTerm term : terms ) {
        uriMatches.addAll( characteristicService.findByUri( classes, term.getUri() ) );
    }
    // Map each matched characteristic to its owning entity, then filter owners by the requested classes.
    Map<Characteristic, Object> parentMap = characteristicService.getParents( classes, uriMatches );
    return filterCharacteristicOwnersByClass( classes, parentMap );
}
/**
 * Search the DB for composite sequences and the genes that are matched to them.
 * <p>
 * Matches probes by exact name (optionally restricted to the platform constraint), treats the query as a
 * possible gene and pulls in that gene's probes, and finally adds the genes associated with all matched probes.
 *
 * @param settings search settings; returns empty immediately if database use is disabled
 * @return results containing both the matched genes and the matched composite sequences
 */
private Collection<SearchResult> databaseCompositeSequenceSearch( final SearchSettings settings ) {
    if ( !settings.getUseDatabase() ) return new HashSet<>();
    StopWatch watch = startTiming();
    Set<Gene> geneSet = new HashSet<>();
    String searchString = settings.getQuery();
    ArrayDesign ad = settings.getPlatformConstraint();
    // search by exact composite sequence name; scope to the platform constraint when one is given
    Collection<CompositeSequence> matchedCs = new HashSet<>();
    if ( ad != null ) {
        CompositeSequence cs = compositeSequenceService.findByName( ad, searchString );
        if ( cs != null ) matchedCs.add( cs );
    } else {
        matchedCs = compositeSequenceService.findByName( searchString );
    }
    /*
     * In case the query _is_ a gene
     */
    Collection<SearchResult> rawGeneResults = this.databaseGeneSearch( settings );
    for ( SearchResult searchResult : rawGeneResults ) {
        Object j = searchResult.getResultObject();
        if ( Gene.class.isAssignableFrom( j.getClass() ) ) {
            geneSet.add( ( Gene ) j );
        }
    }
    // Pull in the probes for any gene the query matched (again honoring the platform constraint).
    for ( Gene g : geneSet ) {
        if ( settings.getPlatformConstraint() != null ) {
            matchedCs.addAll( compositeSequenceService.findByGene( g, settings.getPlatformConstraint() ) );
        } else {
            matchedCs.addAll( compositeSequenceService.findByGene( g ) );
        }
    }
    // search by associated genes.
    for ( CompositeSequence sequence : matchedCs ) {
        geneSet.addAll( compositeSequenceService.getGenes( sequence ) );
    }
    watch.stop();
    if ( watch.getTime() > 1000 )
        log.info( "Gene composite sequence DB search " + searchString + " took " + watch.getTime() + " ms, "
                + geneSet.size() + " items." );
    // The returned collection mixes gene hits and probe hits; the caller filters by class as needed.
    Collection<SearchResult> results = dbHitsToSearchResult( geneSet );
    results.addAll( dbHitsToSearchResult( matchedCs ) );
    return results;
}
/**
 * Does search on exact string by: id, name and short name. This only returns results if these fields match exactly,
 * but it's fast.
 *
 * @param settings search settings; returns empty immediately if database use is disabled. The query is
 *        java-unescaped before use.
 * @return experiment results keyed by the matched text
 */
private Collection<SearchResult> databaseExpressionExperimentSearch( final SearchSettings settings ) {
    if ( !settings.getUseDatabase() ) return new HashSet<SearchResult>();
    StopWatch watch = startTiming();
    Map<ExpressionExperiment, String> results = new HashMap<ExpressionExperiment, String>();
    String query = StringEscapeUtils.unescapeJava( settings.getQuery() );
    // Try, in order: full name, short name, accession, and finally primary key.
    Collection<ExpressionExperiment> ees = expressionExperimentService.findByName( query );
    if ( !ees.isEmpty() ) {
        for ( ExpressionExperiment ee : ees ) {
            results.put( ee, ee.getName() );
        }
    } else {
        ExpressionExperiment ee = expressionExperimentService.findByShortName( query );
        if ( ee != null ) {
            results.put( ee, ee.getShortName() );
        } else {
            ees = expressionExperimentService.findByAccession( query );
            for ( ExpressionExperiment e : ees ) {
                results.put( e, e.getId().toString() );
            }
            if ( results.isEmpty() ) {
                try {
                    // maybe user put in a primary key value. parseLong avoids the deprecated
                    // Long(String) boxing constructor.
                    ee = expressionExperimentService.load( Long.parseLong( query ) );
                    if ( ee != null ) results.put( ee, ee.getId().toString() );
                } catch ( NumberFormatException e ) {
                    // no-op - it's not an ID.
                }
            }
        }
    }
    watch.stop();
    if ( watch.getTime() > 1000 )
        log.info( "DB Expression Experiment search for " + settings + " took " + watch.getTime() + " ms and found "
                + results.size() + " EEs" );
    return dbHitsToSearchResult( results );
}
/**
 * Search the DB for genes that exactly match the given search string searches geneProducts, gene and bioSequence
 * tables
 * <p>
 * Lookup order: NCBI id, then accession, then official symbol (with wildcard handling described inline), then
 * aliases / gene products / sequences / Ensembl id as a fallback. Results are filtered by the settings' taxon.
 *
 * @param settings search settings; returns empty immediately if database use is disabled or the query is blank
 * @return gene results, taxon-filtered
 */
private Collection<SearchResult> databaseGeneSearch( SearchSettings settings ) {
    if ( !settings.getUseDatabase() ) return new HashSet<>();
    StopWatch watch = startTiming();
    String searchString = StringEscapeUtils.unescapeJava( settings.getQuery() );
    if ( StringUtils.isBlank( searchString ) ) return new HashSet<>();
    Collection<SearchResult> results = new HashSet<>();
    /*
     * First search by accession. If we find it, stop.
     */
    Gene result = null;
    try {
        // Try the query as an NCBI id first.
        result = geneService.findByNCBIId( Integer.parseInt( searchString ) );
    } catch ( NumberFormatException e ) {
        // not numeric - fall through to the accession lookup.
    }
    if ( result != null ) {
        results.add( this.dbHitToSearchResult( null, result ) );
    } else {
        result = geneService.findByAccession( searchString, null );
        if ( result != null ) {
            results.add( this.dbHitToSearchResult( null, result ) );
        }
    }
    if ( results.size() > 0 ) {
        // Accession match found: filter by taxon and return early.
        filterByTaxon( settings, results, true );
        watch.stop();
        if ( watch.getTime() > 1000 )
            log.info( "Gene DB search for " + searchString + " took " + watch.getTime() + " ms and found "
                    + results.size() + " genes" );
        return results;
    }
    // replace * at end with % for inexact symbol search
    String inexactString = searchString;
    Pattern pattern = Pattern.compile( "\\*$" );
    Matcher match = pattern.matcher( inexactString );
    inexactString = match.replaceAll( "%" );
    // note that at this point, the inexactString might not have a wildcard - only if the user asked for it.
    String exactString = inexactString.replaceAll( "%", "" );
    // if the query is shortish, always do a wild card search. This gives better behavior in 'live
    // search' situations. If we do wildcards on very short queries we get too many results.
    Collection<Gene> geneSet = new HashSet<Gene>();
    if ( searchString.length() <= 2 ) {
        // case 0: user entered a very short string. We search only for exact matches.
        geneSet.addAll( geneService.findByOfficialSymbolInexact( exactString ) );
    } else if ( searchString.length() > 2 && inexactString.endsWith( "%" ) ) {
        // case 1: user asked for wildcard. We allow this on strings of length 3 or more.
        geneSet.addAll( geneService.findByOfficialSymbolInexact( inexactString ) );
    } else if ( searchString.length() > 3 && searchString.length() < 6 ) {
        // case 2: user did not ask for a wildcard, but we add it anyway, if the string is 4 or 5 characters.
        if ( !inexactString.endsWith( "%" ) ) {
            inexactString = inexactString + "%";
        }
        geneSet.addAll( geneService.findByOfficialSymbolInexact( inexactString ) );
    } else {
        // case 3: string is long enough, and user did not ask for wildcard.
        geneSet.addAll( geneService.findByOfficialSymbol( exactString ) );
    }
    /*
     * If we found a match using official symbol or name, don't bother with this
     */
    if ( geneSet.isEmpty() ) {
        // Fallbacks: aliases, gene products, sequence accessions/names, and Ensembl ids.
        geneSet.addAll( geneService.findByAlias( exactString ) );
        geneSet.addAll( geneProductService.getGenesByName( exactString ) );
        geneSet.addAll( geneProductService.getGenesByNcbiId( exactString ) );
        geneSet.addAll( bioSequenceService.getGenesByAccession( exactString ) );
        geneSet.addAll( bioSequenceService.getGenesByName( exactString ) );
        geneSet.addAll( geneService.findByEnsemblId( exactString ) );
    }
    watch.stop();
    if ( watch.getTime() > 1000 )
        log.info( "Gene DB search for " + searchString + " took " + watch.getTime() + " ms and found "
                + geneSet.size() + " genes" );
    results = dbHitsToSearchResult( geneSet );
    filterByTaxon( settings, results, true );
    return results;
}
/**
 * Convert hits from database searches into SearchResults, treating them as direct hits with no match text.
 *
 * @param entities entities found in the database
 * @return one SearchResult per non-null entity
 */
private Collection<SearchResult> dbHitsToSearchResult( Collection<? extends Object> entities ) {
    // Delegate to the full form: no compass ancestor, no highlight text.
    return dbHitsToSearchResult( entities, null, null );
}
/**
 * Convert hits from database searches into SearchResults, recording the text that matched the query.
 *
 * @param entities entities found in the database
 * @param matchText text that matched the query (used for highlighting)
 * @return one SearchResult per non-null entity
 */
private Collection<SearchResult> dbHitsToSearchResult( Collection<? extends Object> entities, String matchText ) {
    // Delegate to the full form: direct hits (no compass ancestor), with highlight text.
    return dbHitsToSearchResult( entities, null, matchText );
}
/**
 * Convert hits from database searches into SearchResults.
 *
 * @param entities entities found in the database; null elements are skipped with a warning
 * @param compassHitDerivedFrom SearchResult that these entities were derived from. For example, if you
 *        compass-searched for genes, and then used the genes to get sequences from the database, the gene is
 *        compassHitsDerivedFrom. If null, we treat this as a direct hit.
 * @param matchText text that matched the query, for highlighting; may be null
 * @return one SearchResult per non-null entity, in iteration order
 */
private List<SearchResult> dbHitsToSearchResult( Collection<? extends Object> entities,
        SearchResult compassHitDerivedFrom, String matchText ) {
    StopWatch timer = startTiming();
    // Presize: we add at most one result per entity.
    List<SearchResult> results = new ArrayList<SearchResult>( entities.size() );
    for ( Object e : entities ) {
        if ( e == null ) {
            log.warn( "Null search result object" );
            continue;
        }
        SearchResult esr = dbHitToSearchResult( compassHitDerivedFrom, e, matchText );
        results.add( esr );
    }
    if ( timer.getTime() > 1000 ) {
        log.info( "Unpack " + results.size() + " search results: " + timer.getTime() + "ms" );
    }
    return results;
}
/**
 * Convert hits from database searches into SearchResults, treating them as direct hits.
 *
 * @param entities map of entity to the text that matched the query
 * @return one SearchResult per map entry
 */
private Collection<SearchResult> dbHitsToSearchResult( Map<? extends Object, String> entities ) {
    // Delegate to the full form with no compass ancestor.
    return dbHitsToSearchResult( entities, null );
}
/**
 * Convert hits from database searches into SearchResults.
 *
 * @param entities map of entity to the text that matched the query (used for highlighting)
 * @param compassHitDerivedFrom SearchResult that these entities were derived from. For example, if you
 *        compass-searched for genes, and then used the genes to get sequences from the database, the gene is
 *        compassHitsDerivedFrom. If null, we treat this as a direct hit.
 * @return one SearchResult per map entry
 */
private List<SearchResult> dbHitsToSearchResult( Map<? extends Object, String> entities,
        SearchResult compassHitDerivedFrom ) {
    List<SearchResult> results = new ArrayList<SearchResult>( entities.size() );
    // Iterate entries rather than keySet() + get(): one pass, no repeated lookups.
    for ( Map.Entry<? extends Object, String> entry : entities.entrySet() ) {
        SearchResult esr = dbHitToSearchResult( compassHitDerivedFrom, entry.getKey(), entry.getValue() );
        results.add( esr );
    }
    return results;
}
/**
 * Convert a single database hit into a SearchResult, with no highlight text.
 *
 * @param compassHitDerivedFrom the compass result this entity was derived from, or null for a direct hit
 * @param e the entity found in the database
 * @return the wrapped SearchResult
 */
private SearchResult dbHitToSearchResult( SearchResult compassHitDerivedFrom, Object e ) {
    return dbHitToSearchResult( compassHitDerivedFrom, e, null );
}
/**
 * Convert a single database hit into a SearchResult.
 *
 * @param compassHitDerivedFrom the compass result this entity was derived from; if non-null and no match text is
 *        given, the new result inherits its highlight and gets a penalized score
 * @param e the entity found in the database
 * @param text text that matched the query (for highlighting); may be null
 * @return the wrapped SearchResult
 */
private SearchResult dbHitToSearchResult( SearchResult compassHitDerivedFrom, Object e, String text ) {
    if ( compassHitDerivedFrom != null && text == null ) {
        // Indirect hit: score is the ancestor's score, discounted; reuse its highlighted text.
        SearchResult derived = new SearchResult( e, compassHitDerivedFrom.getScore() * INDIRECT_DB_HIT_PENALTY );
        derived.setHighlightedText( compassHitDerivedFrom.getHighlightedText() );
        return derived;
    }
    // Direct hit: full score, with whatever match text we were given (possibly null).
    return new SearchResult( e, 1.0, text );
}
/**
 * Purely a debugging aid: log a summary of the characteristic-to-owner map. Does nothing unless debug logging
 * is enabled. (A long block of commented-out per-entry dump code that used to live here was removed.)
 *
 * @param parentMap map from characteristics to the entities that own them
 */
private void debugParentFetch( Map<Characteristic, Object> parentMap ) {
    if ( parentMap.size() > 0 && log.isDebugEnabled() ) {
        // Note: size() and keySet().size() are necessarily equal for a Map; message kept for continuity.
        log.debug( "Found " + parentMap.size() + " owners for " + parentMap.keySet().size()
                + " characteristics:" );
    }
}
/**
 * Find phenotypes matching the query via the phenotype association manager.
 *
 * @param settings search settings; only the query string is used
 * @return database hits wrapped as SearchResults
 */
private Collection<SearchResult> phenotypeSearch( SearchSettings settings ) {
    return dbHitsToSearchResult( phenotypeAssociationManagerService.searchInDatabaseForPhenotype( settings
            .getQuery() ) );
}
/**
 * Find experiment sets by name (database) and by free-text index (compass), merged into one collection.
 *
 * @param settings search settings; the query string is used for both searches
 * @return combined results
 */
private Collection<SearchResult> experimentSetSearch( SearchSettings settings ) {
    // Database hits by name first...
    Collection<SearchResult> hits = dbHitsToSearchResult( experimentSetService.findByName( settings.getQuery() ) );
    // ...then merge in index hits.
    hits.addAll( compassSearch( compassExperimentSet, settings ) );
    return hits;
}
/**
 * A general search for expression experiments. This search does both a database search and a compass (index)
 * search, then falls back progressively to characteristic (ontology), platform, and publication searches when
 * few or no hits are found.
 * <p>
 * A problem with this is that we cap the number of results that can be returned. This could be a limitation for
 * applications like building data set groups. Thus MAX_CHARACTERISTIC_SEARCH_RESULTS should not be too low.
 *
 * @param settings search settings; the query string and the enabled search modes are read from it
 * @return {@link Collection} of SearchResults wrapping ExpressionExperiments
 */
private Collection<SearchResult> expressionExperimentSearch( final SearchSettings settings ) {
    StopWatch watch = startTiming();
    log.info( "Starting search for " + settings );
    Collection<SearchResult> results = new HashSet<SearchResult>();
    // Stage 1: direct database lookups.
    if ( settings.getUseDatabase() ) {
        results.addAll( databaseExpressionExperimentSearch( settings ) );
        if ( watch.getTime() > 1000 )
            log.info( "Expression Experiment database search for '" + settings + "' took " + watch.getTime()
                    + " ms, " + results.size() + " hits." );
        watch.reset();
        watch.start();
    }
    // Stage 2: free-text index search; skipped once we already have plenty of hits.
    if ( settings.getUseIndices() && results.size() < MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
        results.addAll( compassExpressionSearch( settings ) );
        if ( watch.getTime() > 1000 )
            log.info( "Expression Experiment index search for '" + settings + "' took " + watch.getTime() + " ms, "
                    + results.size() + " hits." );
        watch.reset();
        watch.start();
    }
    if ( results.size() < MAX_CHARACTERISTIC_SEARCH_RESULTS ) {
        /*
         * Stage 3: try a more thorough search. This is slower; calls to ontologySearchAnnotatedObject take a long
         * time.
         */
        if ( settings.getUseCharacteristics() ) {
            results.addAll( characteristicExpressionExperimentSearch( settings ) );
        }
        if ( watch.getTime() > 1000 )
            log.info( "Expression Experiment ontology search for '" + settings + "' took " + watch.getTime()
                    + " ms, " + results.size() + " hits." );
        watch.reset();
        watch.start();
    }
    /*
     * Stage 4 (only when nothing matched): find data sets that match the platform -- TODO make this do something
     * intelligent with GPL570 + brain.
     */
    if ( results.size() == 0 ) {
        Collection<SearchResult> matchingPlatforms = arrayDesignSearch( settings, null );
        for ( SearchResult adRes : matchingPlatforms ) {
            if ( adRes.getResultObject() instanceof ArrayDesign ) {
                ArrayDesign ad = ( ArrayDesign ) adRes.getResultObject();
                Collection<ExpressionExperiment> expressionExperiments = this.arrayDesignService
                        .getExpressionExperiments( ad );
                if ( expressionExperiments.size() > 0 )
                    results.addAll( dbHitsToSearchResult( expressionExperiments ) );
            }
        }
        if ( watch.getTime() > 1000 )
            log.info( "Expression Experiment platform search for '" + settings + "' took " + watch.getTime()
                    + " ms, " + results.size() + " hits." );
        watch.reset();
        watch.start();
    }
    // Stage 5 (last resort): search bibliographic references and pull in their associated experiments.
    if ( results.size() == 0 ) {
        List<BibliographicReferenceValueObject> bibrefs = bibliographicReferenceService
                .search( settings.getQuery() );
        if ( !bibrefs.isEmpty() ) {
            Collection<BibliographicReference> refs = new HashSet<BibliographicReference>();
            Collection<SearchResult> r = this.compassBibliographicReferenceSearch( settings );
            for ( SearchResult searchResult : r ) {
                refs.add( ( BibliographicReference ) searchResult.getResultObject() );
            }
            Map<BibliographicReference, Collection<ExpressionExperiment>> relatedExperiments = this.bibliographicReferenceService
                    .getRelatedExperiments( refs );
            for ( Entry<BibliographicReference, Collection<ExpressionExperiment>> e : relatedExperiments.entrySet() ) {
                results.addAll( dbHitsToSearchResult( e.getValue() ) );
            }
            if ( watch.getTime() > 1000 )
                log.info( "Expression Experiment publication search for '" + settings + "' took " + watch.getTime()
                        + " ms, " + results.size() + " hits." );
            watch.reset();
            watch.start();
        }
    }
    watch.stop();
    if ( watch.getTime() > 1000 )
        log.info( "Expression Experiment search for '" + settings + "' took " + watch.getTime() + " ms, "
                + results.size() + " hits." );
    return results;
}
/**
 * Remove, in place, any results that do not belong to the taxon requested in the settings. No-op when the
 * settings specify no taxon.
 *
 * @param settings supplies the taxon to filter by; if null taxon, nothing is filtered
 * @param results results to filter in place; may be null (then nothing happens)
 * @param excludeWithoutTaxon if true: if a SearchResult has no "getTaxon" method then it is filtered out.
 *        Results with no taxon associated will also get removed.
 */
private void filterByTaxon( SearchSettings settings, Collection<SearchResult> results, boolean excludeWithoutTaxon ) {
    if ( settings.getTaxon() == null ) {
        return;
    }
    Collection<SearchResult> toRemove = new HashSet<SearchResult>();
    Taxon t = settings.getTaxon();
    if ( results == null ) return;
    for ( SearchResult sr : results ) {
        Object o = sr.getResultObject();
        try {
            Taxon currentTaxon = null;
            // Known result types are handled explicitly; anything else falls back on reflection below.
            if ( o instanceof ExpressionExperiment ) {
                ExpressionExperiment ee = ( ExpressionExperiment ) o;
                currentTaxon = expressionExperimentService.getTaxon( ee );
            } else if ( o instanceof ExpressionExperimentSet ) {
                ExpressionExperimentSet ees = ( ExpressionExperimentSet ) o;
                currentTaxon = ees.getTaxon();
            } else if ( o instanceof Gene ) {
                Gene gene = ( Gene ) o;
                currentTaxon = gene.getTaxon();
            } else if ( o instanceof GeneSet ) {
                GeneSet geneSet = ( GeneSet ) o;
                currentTaxon = geneSetService.getTaxon( geneSet ); // FIXME SLOW? I may have fixed this.
            } else if ( o instanceof CharacteristicValueObject ) {
                CharacteristicValueObject charVO = ( CharacteristicValueObject ) o;
                currentTaxon = taxonDao.findByCommonName( charVO.getTaxon() );
            } else {
                // Reflection fallback for result types without explicit handling.
                Method m = o.getClass().getMethod( "getTaxon", new Class[] {} );
                currentTaxon = ( Taxon ) m.invoke( o, new Object[] {} );
            }
            if ( currentTaxon == null || !currentTaxon.getId().equals( t.getId() ) ) {
                if ( currentTaxon == null ) {
                    // Sanity check for bad data in db (could happen if EE has no samples). Can happen that
                    // searchResults have a valid getTaxon method but the method returns null (shouldn't make it
                    // this far).
                    log.debug( "Object has getTaxon method but it returns null. Obj is: " + o );
                }
                toRemove.add( sr );
            }
        } catch ( NoSuchMethodException e ) {
            /*
             * In case of a programming error where the results don't have a taxon at all, we assume we should
             * filter them out but issue a warning.
             */
            if ( excludeWithoutTaxon ) {
                toRemove.add( sr );
                log.warn( "No getTaxon method for: " + o.getClass() + ". Filtering from results. Error was: " + e );
            }
        } catch ( SecurityException | IllegalArgumentException | IllegalAccessException
                | InvocationTargetException e ) {
            // All other reflection failures are programming errors: multi-catch replaces four identical blocks.
            throw new RuntimeException( e );
        }
    }
    results.removeAll( toRemove );
}
/**
 * Given the owners of matched characteristics, keep only those of the requested classes and wrap them as
 * SearchResults. BioMaterial and FactorValue owners are not returned directly: they are resolved to their
 * owning ExpressionExperiments, which are returned with a penalized (indirect) score.
 *
 * @param classes the entity classes the caller is interested in
 * @param characteristic2entity map from each matched characteristic to the entity that owns it
 * @return SearchResults for owners of the requested classes (plus experiments resolved from biomaterial/factor
 *         value owners)
 */
private Collection<SearchResult> filterCharacteristicOwnersByClass( Collection<Class<?>> classes,
        Map<Characteristic, Object> characteristic2entity ) {
    Collection<BioMaterial> biomaterials = new HashSet<BioMaterial>();
    Collection<FactorValue> factorValues = new HashSet<FactorValue>();
    Collection<SearchResult> results = new HashSet<SearchResult>();
    for ( Characteristic c : characteristic2entity.keySet() ) {
        Object o = characteristic2entity.get( c );
        for ( Class<?> clazz : classes ) {
            if ( clazz.isAssignableFrom( o.getClass() ) ) {
                String matchedText = c.getValue();
                // Biomaterials and factor values are collected and resolved to experiments below.
                if ( o instanceof BioMaterial ) {
                    biomaterials.add( ( BioMaterial ) o );
                } else if ( o instanceof FactorValue ) {
                    factorValues.add( ( FactorValue ) o );
                } else {
                    // For ontology-term matches, link the highlight text back to a search on the term URI.
                    if ( c instanceof VocabCharacteristic && ( ( VocabCharacteristic ) c ).getValueUri() != null ) {
                        matchedText = "Ontology term: <a href=\"/Gemma/searcher.html?query="
                                + ( ( VocabCharacteristic ) c ).getValueUri() + "\">" + matchedText + "</a>";
                    }
                    results.add( new SearchResult( o, 1.0, matchedText ) );
                }
            }
        }
    }
    // Resolve collected factor values to their experiments; these are indirect hits, so the score is penalized.
    if ( factorValues.size() > 0 ) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByFactorValues( factorValues );
        for ( ExpressionExperiment ee : ees ) {
            if ( log.isDebugEnabled() ) log.debug( ee );
            results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "Factor characteristic" ) );
        }
    }
    // Likewise resolve collected biomaterials to their experiments.
    if ( biomaterials.size() > 0 ) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByBioMaterials( biomaterials );
        for ( ExpressionExperiment ee : ees ) {
            results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "BioMaterial characteristic" ) );
        }
    }
    return results;
}
/**
 * Combines compass style search, the db style search, and the compositeSequence search and returns 1 combined
 * list with no duplicates.
 *
 * @param settings search settings; the query string and taxon are used
 * @param returnOnDbHit if true and if there is a match for a gene from the database, return immediately - much
 *        faster
 * @return de-duplicated gene results from all the sub-searches
 */
private Collection<SearchResult> geneSearch( final SearchSettings settings, boolean returnOnDbHit ) {
    StopWatch watch = startTiming();
    String searchString = settings.getQuery();
    Collection<SearchResult> geneDbList = databaseGeneSearch( settings );
    // Fast path: a database hit is authoritative, so optionally return without consulting the index.
    if ( returnOnDbHit && geneDbList.size() > 0 ) {
        return geneDbList;
    }
    Set<SearchResult> combinedGeneList = new HashSet<SearchResult>();
    combinedGeneList.addAll( geneDbList );
    Collection<SearchResult> geneCompassList = compassGeneSearch( settings );
    combinedGeneList.addAll( geneCompassList );
    // Still nothing: try resolving genes through probe (composite sequence) matches.
    if ( combinedGeneList.isEmpty() ) {
        Collection<SearchResult> geneCsList = databaseCompositeSequenceSearch( settings );
        for ( SearchResult res : geneCsList ) {
            if ( res.getResultClass().isAssignableFrom( Gene.class ) ) combinedGeneList.add( res );
        }
    }
    /*
     * Possibly search for genes linked via a phenotype, but only if we don't have anything here.
     *
     * FIXME possibly always do if results are small.
     */
    if ( combinedGeneList.isEmpty() ) {
        Collection<CharacteristicValueObject> phenotypeTermHits = this.phenotypeAssociationManagerService
                .searchInDatabaseForPhenotype( settings.getQuery() );
        // FIXME do it all at once, not one at a time like this
        for ( CharacteristicValueObject phenotype : phenotypeTermHits ) {
            Set<String> phenotypeUris = new HashSet<String>();
            phenotypeUris.add( phenotype.getValueUri() );
            // DATABASE HIT!
            Collection<GeneEvidenceValueObject> phenotypeGenes = phenotypeAssociationManagerService
                    .findCandidateGenes( phenotypeUris, settings.getTaxon() );
            if ( !phenotypeGenes.isEmpty() ) {
                log.info( phenotypeGenes.size() + " genes associated with " + phenotype + " (via query='"
                        + settings.getQuery() + "')" );
                for ( GeneEvidenceValueObject gvo : phenotypeGenes ) {
                    // Build a lightweight Gene carrying just id + taxon; not a fully loaded entity.
                    Gene g = Gene.Factory.newInstance();
                    g.setId( gvo.getId() );
                    g.setTaxon( settings.getTaxon() );
                    SearchResult sr = new SearchResult( g );
                    sr.setHighlightedText( phenotype.getValue() + " (" + phenotype.getValueUri() + ")" );
                    if ( gvo.getScore() != null ) {
                        /*
                         * TODO If we get evidence quality, use that in the score.
                         */
                    }
                    sr.setScore( 1.0 ); // maybe lower, if we do this search when combinedGeneList is nonempty.
                    combinedGeneList.add( sr );
                }
                if ( combinedGeneList.size() > 100 /* some limit */) {
                    break;
                }
            }
        }
    }
    // filterByTaxon( settings, combinedGeneList); // compass doesn't return filled gene objects, just ids, so do
    // this after objects have been filled
    if ( watch.getTime() > 1000 )
        log.info( "Gene search for " + searchString + " took " + watch.getTime() + " ms; "
                + combinedGeneList.size() + " results." );
    return combinedGeneList;
}
/**
 * Find gene sets by name (restricted to the settings' taxon when one is given), then merge in index hits.
 *
 * @param settings search settings; query string and optional taxon are used
 * @return combined database and compass results
 */
private Collection<SearchResult> geneSetSearch( SearchSettings settings ) {
    Taxon taxon = settings.getTaxon();
    // Name lookup is taxon-scoped only when the caller supplied a taxon.
    Collection<SearchResult> hits = taxon == null
            ? dbHitsToSearchResult( geneSetService.findByName( settings.getQuery() ) )
            : dbHitsToSearchResult( geneSetService.findByName( settings.getQuery(), taxon ) );
    hits.addAll( compassSearch( compassGeneSet, settings ) );
    return hits;
}
/**
 * Given classes to search and characteristics, find the entities of those classes that own the characteristics.
 *
 * @param classes which classes of entities to look for
 * @param cs the characteristics to resolve to owners
 * @return matched entities wrapped as SearchResults
 */
private Collection<SearchResult> getAnnotatedEntities( Collection<Class<?>> classes, Collection<Characteristic> cs ) {
    Map<Characteristic, Object> characteristicToEntity = characteristicService.getParents( classes, cs );
    if ( log.isDebugEnabled() ) {
        debugParentFetch( characteristicToEntity );
    }
    return filterCharacteristicOwnersByClass( classes, characteristicToEntity );
}
/**
 * Extract the entity ids held by the given search results.
 *
 * @param searchResults results to read ids from
 * @return ids in the same order as the input; always the same size as the input
 */
private List<Long> getIds( List<SearchResult> searchResults ) {
    List<Long> ids = new ArrayList<>( searchResults.size() );
    for ( SearchResult sr : searchResults ) {
        ids.add( sr.getId() );
    }
    assert ids.size() == searchResults.size();
    return ids;
}
/**
 * Unpack up to MAX_LUCENE_HITS compass hits into SearchResults, applying the compass score penalty and caching
 * the highlighted text for each hit.
 *
 * @param hits compass hits, in decreasing score order
 * @return unpacked results (a set, so duplicates collapse)
 */
private Collection<SearchResult> getSearchResults( CompassHits hits ) {
    StopWatch timer = new StopWatch();
    timer.start();
    Collection<SearchResult> results = new HashSet<SearchResult>();
    // Compute the cap once; it is reused in the loop and both log messages below.
    int numHits = Math.min( MAX_LUCENE_HITS, hits.getLength() );
    /*
     * Note that hits come in decreasing score order.
     */
    for ( int i = 0; i < numHits; i++ ) {
        SearchResult r = new SearchResult( hits.data( i ) );
        /*
         * Always give compass hits a lower score so they can be differentiated from exact database hits.
         * Double.valueOf replaces the deprecated new Double(...) boxing constructor.
         */
        r.setScore( Double.valueOf( hits.score( i ) * COMPASS_HIT_SCORE_PENALTY_FACTOR ) );
        getHighlightedText( hits, i, r );
        if ( log.isDebugEnabled() ) log.debug( i + " " + hits.score( i ) + " " + r );
        results.add( r );
    }
    if ( timer.getTime() > 100 ) {
        log.info( results.size() + " hits retrieved (out of " + numHits
                + " raw hits tested) in " + timer.getTime() + "ms" );
    }
    if ( timer.getTime() > 5000 ) {
        log.info( "****Extremely long Lucene Search processing! " + results.size() + " hits retrieved (out of "
                + numHits + " raw hits tested) in " + timer.getTime() + "ms" );
    }
    return results;
}
/**
 * Copy the highlighted text (if any) from the i-th compass hit onto the given SearchResult.
 *
 * @param hits compass hits
 * @param i index of the hit to read
 * @param r result to decorate; untouched when no highlight is available
 */
private void getHighlightedText( CompassHits hits, int i, SearchResult r ) {
    CompassHighlightedText ht = hits.highlightedText( i );
    // Guard clause: nothing to copy when compass produced no highlight for this hit.
    if ( ht == null || ht.getHighlightedText() == null ) {
        if ( log.isDebugEnabled() ) log.debug( "No highlighted text for " + r );
        return;
    }
    r.setHighlightedText( ht.getHighlightedText() );
}
/**
 * Sort the raw results by score, cap them at the settings' maximum, bucket them by result class, and (optionally)
 * load the actual entities from the persistent store.
 *
 * @param settings supplies the result cap and the taxon used for post-load filtering
 * @param rawResults all results accumulated so far; sorted in place
 * @param fillObjects if true, entities are loaded securely and attached to the results (and taxon filtering is
 *        applied); if false, result objects are nulled out and only ids/classes remain
 * @return results bucketed by class (BioSequence results are converted to BioSequenceValueObject)
 */
private Map<Class<?>, List<SearchResult>> getSortedLimitedResults( SearchSettings settings,
        List<SearchResult> rawResults, boolean fillObjects ) {
    Map<Class<?>, List<SearchResult>> results = new HashMap<Class<?>, List<SearchResult>>();
    Collections.sort( rawResults );
    // Pre-register every supported result class; the assert below relies on this being exhaustive.
    results.put( ArrayDesign.class, new ArrayList<SearchResult>() );
    results.put( BioSequence.class, new ArrayList<SearchResult>() );
    results.put( BibliographicReference.class, new ArrayList<SearchResult>() );
    results.put( CompositeSequence.class, new ArrayList<SearchResult>() );
    results.put( ExpressionExperiment.class, new ArrayList<SearchResult>() );
    results.put( Gene.class, new ArrayList<SearchResult>() );
    results.put( GeneSet.class, new ArrayList<SearchResult>() );
    results.put( ExpressionExperimentSet.class, new ArrayList<SearchResult>() );
    results.put( Characteristic.class, new ArrayList<SearchResult>() );
    results.put( CharacteristicValueObject.class, new ArrayList<SearchResult>() );
    /*
     * Get the top N results, overall (NOT within each class - experimental.)
     */
    for ( int i = 0, limit = Math.min( rawResults.size(), settings.getMaxResults() ); i < limit; i++ ) {
        SearchResult sr = rawResults.get( i );
        /*
         * FIXME This is unpleasant and should be removed when BioSequences are correctly detached.
         * Unwrap any persistence proxy so bucketing is done on the real entity class.
         */
        Class<? extends Object> resultClass = EntityUtils.getImplementationForProxy( sr.getResultObject() )
                .getClass();
        resultClass = ReflectionUtil.getBaseForImpl( resultClass );
        // Class<? extends Object> resultClass = sr.getResultClass();
        assert results.containsKey( resultClass ) : "Unknown class " + resultClass;
        results.get( resultClass ).add( sr );
    }
    if ( fillObjects ) {
        /*
         * Now retrieve the entities and put them in the SearchResult. Entities that are filtered out by the
         * SecurityInterceptor will be removed at this stage.
         */
        for ( Class<? extends Object> clazz : results.keySet() ) {
            List<SearchResult> r = results.get( clazz );
            if ( r.isEmpty() ) continue;
            // De-duplicate by id, keeping the highest-scoring result for each id.
            Map<Long, SearchResult> rMap = new HashMap<Long, SearchResult>();
            for ( SearchResult searchResult : r ) {
                if ( !rMap.containsKey( searchResult.getId() )
                        || ( rMap.get( searchResult.getId() ).getScore() < searchResult.getScore() ) ) {
                    rMap.put( searchResult.getId(), searchResult );
                }
            }
            // Entities missing here were filtered by security (or no longer exist) and are silently dropped.
            Collection<? extends Object> entities = retrieveResultEntities( clazz, r );
            List<SearchResult> filteredResults = new ArrayList<SearchResult>();
            for ( Object entity : entities ) {
                Long id = EntityUtils.getId( entity );
                SearchResult keeper = rMap.get( id );
                keeper.setResultObject( entity );
                filteredResults.add( keeper );
            }
            filterByTaxon( settings, filteredResults, false );
            results.put( clazz, filteredResults );
        }
    } else {
        // Caller only wants ids/classes; drop the (possibly insecure) entity references.
        for ( SearchResult sr : rawResults ) {
            sr.setResultObject( null );
        }
    }
    // BioSequences are exposed as value objects, not entities.
    List<SearchResult> convertedResults = convertEntitySearchResutsToValueObjectsSearchResults( results
            .get( BioSequence.class ) );
    results.put( BioSequenceValueObject.class, convertedResults );
    results.remove( BioSequence.class );
    return results;
}
/**
 * Retrieve entities from the persistent store for the given result class.
 *
 * @param entityClass the class of entity the results refer to; must be one of the supported types
 * @param results results holding the ids to load
 * @return the loaded entities (entities the security layer withholds are simply absent)
 * @throws UnsupportedOperationException for an unrecognized entity class
 */
private Collection<? extends Object> retrieveResultEntities( Class<?> entityClass, List<SearchResult> results ) {
    List<Long> ids = getIds( results );
    if ( ExpressionExperiment.class.isAssignableFrom( entityClass ) ) {
        return expressionExperimentService.loadMultiple( ids );
    } else if ( ArrayDesign.class.isAssignableFrom( entityClass ) ) {
        return arrayDesignService.loadMultiple( ids );
    } else if ( CompositeSequence.class.isAssignableFrom( entityClass ) ) {
        return compositeSequenceService.loadMultiple( ids );
    } else if ( BibliographicReference.class.isAssignableFrom( entityClass ) ) {
        return bibliographicReferenceService.loadMultiple( ids );
    } else if ( Gene.class.isAssignableFrom( entityClass ) ) {
        return geneService.loadMultiple( ids );
    } else if ( BioSequence.class.isAssignableFrom( entityClass ) ) {
        return bioSequenceService.loadMultiple( ids );
    } else if ( GeneSet.class.isAssignableFrom( entityClass ) ) {
        return geneSetService.load( ids );
    } else if ( ExpressionExperimentSet.class.isAssignableFrom( entityClass ) ) {
        // Note: a second, unreachable duplicate of this branch used to sit after the
        // CharacteristicValueObject case; it has been removed.
        return experimentSetService.load( ids );
    } else if ( Characteristic.class.isAssignableFrom( entityClass ) ) {
        // No bulk-load API for characteristics; load one at a time.
        Collection<Characteristic> chars = new ArrayList<Characteristic>();
        for ( Long id : ids ) {
            chars.add( characteristicService.load( id ) );
        }
        return chars;
    } else if ( CharacteristicValueObject.class.isAssignableFrom( entityClass ) ) {
        // TEMP HACK this whole method should not be needed in many cases: value objects are already "loaded",
        // so just pull them back out of the results.
        Collection<CharacteristicValueObject> chars = new ArrayList<CharacteristicValueObject>();
        for ( SearchResult result : results ) {
            if ( result.getResultClass().isAssignableFrom( CharacteristicValueObject.class ) ) {
                chars.add( ( CharacteristicValueObject ) result.getResultObject() );
            }
        }
        return chars;
    } else {
        throw new UnsupportedOperationException( "Don't know how to retrieve objects for class=" + entityClass );
    }
}
/**
 * Create and start a stopwatch in one step.
 *
 * @return a running StopWatch
 */
private StopWatch startTiming() {
    final StopWatch timer = new StopWatch();
    timer.start();
    return timer;
}
/**
 * Makes no attempt at resolving the search query as a URI. Will tokenize the search query if there are control
 * characters in the String. URIs will get parsed into multiple query terms and lead to bad results.
 *
 * @param settings Will try to resolve general terms like brain --> to appropriate OntologyTerms and search for
 *        objects tagged with those terms (if isUseCharacte = true). NOTE: this method mutates the settings (the
 *        query string is cleaned up and a taxon may be inferred from taxon keywords found in the query).
 * @param fillObjects If false, the entities will not be filled in inside the searchsettings; instead, they will
 *        be nulled (for security purposes). You can then use the id and Class stored in the SearchSettings to
 *        load the entities at your leisure. If true, the entities are loaded in the usual secure fashion.
 *        Setting this to false can be an optimization if all you need is the id. Note: filtering by taxon will
 *        not be done unless objects are filled.
 * @param webSpeedSearch if true, this call is probably coming from a web app combo box and results will be
 *        limited to improve speed
 * @return results bucketed by result class, sorted and capped per the settings
 */
protected Map<Class<?>, List<SearchResult>> generalSearch( SearchSettings settings, boolean fillObjects,
        boolean webSpeedSearch ) {
    String enhancedQuery = StringUtils.strip( settings.getQuery() );
    String searchString = QueryParser.escape( enhancedQuery );
    // Phase 1: if no taxon was given, try to infer one from taxon keywords embedded in the query.
    if ( settings.getTaxon() == null ) {
        // split the query around whitespace characters, limit the splitting to 4 terms (may be excessive)
        String[] searchTerms = searchString.split( "\\s+", 4 );
        for ( int i = 0; i < searchTerms.length; i++ ) {
            searchTerms[i] = searchTerms[i].toLowerCase();
        }
        List<String> searchTermsList = Arrays.asList( searchTerms );
        // this Set is ordered by insertion order (LinkedHashMap)
        Set<String> keywords = nameToTaxonMap.keySet();
        // only strip out taxon terms if there is more than one search term in query and if the entire search
        // string is not itself a keyword
        if ( searchTerms.length > 1 && !keywords.contains( searchString.toLowerCase() ) ) {
            for ( String keyword : keywords ) {
                int termIndex = searchString.toLowerCase().indexOf( keyword );
                // make sure that the keyword occurs in the searchString
                if ( termIndex != -1 ) {
                    // make sure that either the keyword is multi-term or that it occurs as a single term (not as
                    // part of another word)
                    if ( keyword.contains( " " ) || searchTermsList.contains( keyword ) ) {
                        searchString = searchString.replaceFirst( "(?i)" + keyword, "" ).trim();
                        settings.setTaxon( nameToTaxonMap.get( keyword ) );
                        // break on first term found in keywords since they should be (more or less) ordered by
                        // precedence
                        break;
                    }
                }
            }
        }
    }
    // Phase 2: drop single-character terms.
    String[] searchTerms = searchString.split( "\\s+" );
    // some strings of size 1 cause lucene to barf and they were slipping through in multi-term queries, get rid
    // of them
    if ( searchTerms.length > 0 ) {
        searchString = "";
        for ( String sTerm : searchTerms ) {
            if ( sTerm.length() > 1 ) {
                searchString = searchString + " " + sTerm;
            }
        }
        searchString = searchString.trim();
    }
    settings.setQuery( searchString );
    // If nothing to search return nothing.
    if ( StringUtils.isBlank( searchString ) ) {
        return new HashMap<Class<?>, List<SearchResult>>();
    }
    // Phase 3: run each enabled sub-search and accrete the results.
    List<SearchResult> rawResults = new ArrayList<SearchResult>();
    if ( settings.getSearchExperiments() ) {
        Collection<SearchResult> foundEEs = expressionExperimentSearch( settings );
        rawResults.addAll( foundEEs );
    }
    Collection<SearchResult> genes = null;
    if ( settings.getSearchGenes() ) {
        genes = geneSearch( settings, webSpeedSearch );
        accreteResults( rawResults, genes );
    }
    // SearchSettings persistent entity does not contain a usePhenotypes property that these logic requires
    /*
     * if ( settings.getUsePhenotypes() && settings.getSearchGenes() ) {
     *
     * Collection<SearchResult> phenotypeGenes = dbHitsToSearchResult(
     * geneSearchService.getPhenotypeAssociatedGenes( searchString, settings.getTaxon() ),
     * "From phenotype association" ); accreteResults( rawResults, phenotypeGenes ); }
     */
    Collection<SearchResult> compositeSequences = null;
    if ( settings.getSearchProbes() ) {
        compositeSequences = compositeSequenceSearch( settings );
        accreteResults( rawResults, compositeSequences );
    }
    if ( settings.getSearchPlatforms() ) {
        // Probe results (if any) are reused to avoid redundant work inside the platform search.
        Collection<SearchResult> foundADs = arrayDesignSearch( settings, compositeSequences );
        accreteResults( rawResults, foundADs );
    }
    if ( settings.getSearchBioSequences() ) {
        // Gene results (if any) are likewise reused by the biosequence search.
        Collection<SearchResult> bioSequences = bioSequenceSearch( settings, genes );
        accreteResults( rawResults, bioSequences );
    }
    if ( settings.getUseGo() ) {
        Collection<SearchResult> ontologyGenes = dbHitsToSearchResult(
                geneSearchService.getGOGroupGenes( searchString, settings.getTaxon() ), "From GO group" );
        accreteResults( rawResults, ontologyGenes );
    }
    if ( settings.getSearchBibrefs() ) {
        Collection<SearchResult> bibliographicReferences = compassBibliographicReferenceSearch( settings );
        accreteResults( rawResults, bibliographicReferences );
    }
    if ( settings.getSearchGeneSets() ) {
        Collection<SearchResult> geneSets = geneSetSearch( settings );
        accreteResults( rawResults, geneSets );
    }
    if ( settings.getSearchExperimentSets() ) {
        Collection<SearchResult> experimentSets = experimentSetSearch( settings );
        accreteResults( rawResults, experimentSets );
    }
    if ( settings.getSearchPhenotypes() ) {
        Collection<SearchResult> phenotypes = phenotypeSearch( settings );
        accreteResults( rawResults, phenotypes );
    }
    // Phase 4: sort, cap, bucket by class, and (optionally) load the entities.
    Map<Class<?>, List<SearchResult>> sortedLimitedResults = getSortedLimitedResults( settings, rawResults,
            fillObjects );
    log.info( "search for: " + settings.getQuery() + " " + rawResults.size()
            + " raw results (final tally may be filtered)" );
    return sortedLimitedResults;
}
/**
 * Runs inside Compass transaction. Performs the free-text query and unpacks the hits; too-short or wildcard-only
 * queries return an empty list without touching the index.
 *
 * @param settings search settings; the query string is used, and getDoHighlighting controls highlight caching
 * @param session the compass session the transaction is running in
 * @return unpacked search results (empty when the query is blank, too short, or just "*")
 */
Collection<SearchResult> performSearch( SearchSettings settings, CompassSession session ) {
    StopWatch watch = startTiming();
    String enhancedQuery = settings.getQuery().trim();
    // Reject queries lucene cannot usefully handle: blank, too short, or a bare wildcard.
    if ( StringUtils.isBlank( enhancedQuery )
            || enhancedQuery.length() < MINIMUM_STRING_LENGTH_FOR_FREE_TEXT_SEARCH || enhancedQuery.equals( "*" ) )
        return new ArrayList<SearchResult>();
    CompassQuery compassQuery = session.queryBuilder().queryString( enhancedQuery ).toQuery();
    log.debug( "Parsed query: " + compassQuery );
    CompassHits hits = compassQuery.hits();
    // highlighting: must be cached now, inside the transaction (see process()).
    if ( ( ( SearchSettingsImpl ) settings ).getDoHighlighting() ) {
        if ( session instanceof InternalCompassSession ) { // always ...
            CompassMapping mapping = ( ( InternalCompassSession ) session ).getMapping();
            ResourceMapping[] rootMappings = mapping.getRootMappings();
            // should only be one rootMapping.
            process( rootMappings, hits );
        }
    }
    watch.stop();
    if ( watch.getTime() > 100 ) {
        log.info( "Getting " + hits.getLength() + " lucene hits for " + enhancedQuery + " took " + watch.getTime()
                + " ms" );
    }
    if ( watch.getTime() > 5000 ) {
        log.info( "*****Extremely long Lucene Index Search! " + hits.getLength() + " lucene hits for "
                + enhancedQuery + " took " + watch.getTime() + " ms" );
    }
    return getSearchResults( hits );
}
/**
 * Recursively caches the highlighted text for every mapped property of every hit.
 * This must be done during the search transaction.
 *
 * @param givenMappings on the first call, the root mapping(s); on recursion, component ref mappings
 * @param hits          the hits whose highlighted fragments should be cached
 */
private void process( ResourceMapping[] givenMappings, CompassHits hits ) {
    for ( ResourceMapping resourceMapping : givenMappings ) {
        // one mapping per property.
        Iterator<Mapping> propertyMappings = resourceMapping.mappingsIt();
        while ( propertyMappings.hasNext() ) {
            Mapping mapping = propertyMappings.next();

            if ( mapping instanceof ComponentMapping ) {
                // Descend into the component's own class mappings.
                process( ( ( ComponentMapping ) mapping ).getRefClassMappings(), hits );
                continue;
            }

            String propertyName = mapping.getName();
            for ( int hitIndex = 0; hitIndex < hits.getLength(); hitIndex++ ) {
                try {
                    // we might want to bail as soon as we find something?
                    hits.highlighter( hitIndex ).fragment( propertyName );
                    if ( log.isDebugEnabled() ) log.debug( "Cached " + propertyName );
                } catch ( Exception e ) {
                    break; // skip this property entirely...
                }
            }
        }
    }
}
/**
 * Re-runs the search stored in the given user query and keeps only those results whose
 * CREATE audit event is newer than the query's last-used date.
 *
 * @param query the saved user query; its settings drive either a general search or an
 *              ontology-URI search (when a term URI or an http:// query is present)
 * @return a map from result class to the newly created results; empty (never null) when
 *         nothing new was found or the underlying search returned null
 */
@Override
public Map<Class<?>, List<SearchResult>> searchForNewlyCreatedUserQueryResults( UserQuery query ) {
    Map<Class<?>, List<SearchResult>> finalResults = new HashMap<Class<?>, List<SearchResult>>();

    SearchSettings settings = query.getSearchSettings();
    Map<Class<?>, List<SearchResult>> searchResults;
    if ( StringUtils.isBlank( settings.getTermUri() ) && !settings.getQuery().startsWith( "http://" ) ) {
        // fill objects=true, speedySearch=false
        searchResults = generalSearch( settings, true, false );
    } else {
        // we only attempt an ontology search if the uri looks remotely like a url.
        searchResults = ontologyUriSearch( settings );
    }

    if ( searchResults == null ) {
        return finalResults;
    }

    // Iterate the entries directly instead of keySet() followed by get() lookups.
    for ( Map.Entry<Class<?>, List<SearchResult>> entry : searchResults.entrySet() ) {
        Class<?> clazz = entry.getKey();
        List<SearchResult> results = entry.getValue();
        if ( results.isEmpty() ) continue;

        log.info( "Search for newly createdQuery with settings: " + settings + "; result: " + results.size() + " "
                + clazz.getSimpleName() + "s" );

        List<SearchResult> updatedResults = new ArrayList<SearchResult>();
        for ( SearchResult sr : results ) {
            // Are SearchResults always auditable? maybe put in some error handling in case they are not or
            // enforce searchSettings object to be of a certain form
            Auditable auditableResult = ( Auditable ) sr.getResultObject();

            // this list is ordered by date (not descending)
            List<AuditEvent> eventList = auditTrailService.getEvents( auditableResult );
            if ( eventList == null || eventList.isEmpty() ) continue;

            for ( AuditEvent ae : eventList ) {
                // assuming there is only one create event
                if ( ae.getAction() == AuditAction.CREATE && ae.getDate().after( query.getLastUsed() ) ) {
                    updatedResults.add( sr );
                    break;
                }
            }
        }

        if ( !updatedResults.isEmpty() ) {
            finalResults.put( clazz, updatedResults );
        }
    }

    return finalResults;
}
}
| some tweaks to gene search
| gemma-core/src/main/java/ubic/gemma/search/SearchServiceImpl.java | some tweaks to gene search |
|
Java | apache-2.0 | 6ad974308a3e391bf7eca4f4022c5b9ccff5b7d6 | 0 | sagarsane/incubator-groovy,jwagenleitner/groovy,EPadronU/incubator-groovy,genqiang/incubator-groovy,adjohnson916/groovy-core,ChanJLee/incubator-groovy,adjohnson916/groovy-core,paulk-asert/groovy,rlovtangen/groovy-core,yukangguo/incubator-groovy,pledbrook/incubator-groovy,paulk-asert/groovy,jwagenleitner/groovy,bsideup/incubator-groovy,ChanJLee/incubator-groovy,christoph-frick/groovy-core,sagarsane/groovy-core,taoguan/incubator-groovy,guangying945/incubator-groovy,PascalSchumacher/incubator-groovy,i55ac/incubator-groovy,ebourg/incubator-groovy,alien11689/groovy-core,dpolivaev/groovy,groovy/groovy-core,nobeans/incubator-groovy,aim-for-better/incubator-groovy,PascalSchumacher/incubator-groovy,shils/groovy,eginez/incubator-groovy,apache/groovy,paulk-asert/incubator-groovy,graemerocher/incubator-groovy,bsideup/incubator-groovy,adjohnson916/groovy-core,adjohnson916/groovy-core,paplorinc/incubator-groovy,genqiang/incubator-groovy,pickypg/incubator-groovy,ebourg/groovy-core,kenzanmedia/incubator-groovy,PascalSchumacher/incubator-groovy,paulk-asert/incubator-groovy,russel/groovy,rlovtangen/groovy-core,aaronzirbes/incubator-groovy,antoaravinth/incubator-groovy,gillius/incubator-groovy,traneHead/groovy-core,bsideup/groovy-core,antoaravinth/incubator-groovy,dpolivaev/groovy,rabbitcount/incubator-groovy,ebourg/groovy-core,russel/incubator-groovy,fpavageau/groovy,genqiang/incubator-groovy,pledbrook/incubator-groovy,rabbitcount/incubator-groovy,mariogarcia/groovy-core,russel/groovy,graemerocher/incubator-groovy,yukangguo/incubator-groovy,rlovtangen/groovy-core,adjohnson916/incubator-groovy,dpolivaev/groovy,aaronzirbes/incubator-groovy,alien11689/incubator-groovy,nobeans/incubator-groovy,yukangguo/incubator-groovy,shils/incubator-groovy,kidaa/incubator-groovy,traneHead/groovy-core,samanalysis/incubator-groovy,kenzanmedia/incubator-groovy,jwagenleitner/groovy,apache/incubator-groovy,sagarsane/groovy-core,EPadronU/inc
ubator-groovy,aim-for-better/incubator-groovy,armsargis/groovy,pickypg/incubator-groovy,fpavageau/groovy,armsargis/groovy,alien11689/incubator-groovy,dpolivaev/groovy,upadhyayap/incubator-groovy,apache/groovy,kenzanmedia/incubator-groovy,ebourg/groovy-core,groovy/groovy-core,gillius/incubator-groovy,ebourg/incubator-groovy,antoaravinth/incubator-groovy,rlovtangen/groovy-core,taoguan/incubator-groovy,shils/incubator-groovy,aaronzirbes/incubator-groovy,nkhuyu/incubator-groovy,samanalysis/incubator-groovy,antoaravinth/incubator-groovy,jwagenleitner/incubator-groovy,eginez/incubator-groovy,apache/incubator-groovy,groovy/groovy-core,apache/groovy,jwagenleitner/incubator-groovy,avafanasiev/groovy,taoguan/incubator-groovy,russel/incubator-groovy,ChanJLee/incubator-groovy,EPadronU/incubator-groovy,traneHead/groovy-core,tkruse/incubator-groovy,adjohnson916/groovy-core,groovy/groovy-core,apache/groovy,jwagenleitner/incubator-groovy,bsideup/incubator-groovy,guangying945/incubator-groovy,eginez/incubator-groovy,mariogarcia/groovy-core,sagarsane/groovy-core,christoph-frick/groovy-core,adjohnson916/incubator-groovy,avafanasiev/groovy,paplorinc/incubator-groovy,adjohnson916/incubator-groovy,apache/incubator-groovy,traneHead/groovy-core,i55ac/incubator-groovy,christoph-frick/groovy-core,shils/groovy,mariogarcia/groovy-core,mariogarcia/groovy-core,ebourg/groovy-core,pledbrook/incubator-groovy,apache/incubator-groovy,rabbitcount/incubator-groovy,groovy/groovy-core,avafanasiev/groovy,ebourg/groovy-core,paulk-asert/incubator-groovy,i55ac/incubator-groovy,paulk-asert/incubator-groovy,paulk-asert/groovy,gillius/incubator-groovy,paulk-asert/incubator-groovy,samanalysis/incubator-groovy,christoph-frick/groovy-core,jwagenleitner/groovy,russel/groovy,christoph-frick/groovy-core,guangying945/incubator-groovy,nkhuyu/incubator-groovy,samanalysis/incubator-groovy,bsideup/groovy-core,ebourg/incubator-groovy,russel/groovy,kenzanmedia/incubator-groovy,aim-for-better/incubator-groovy,paplorinc/incub
ator-groovy,nobeans/incubator-groovy,aaronzirbes/incubator-groovy,alien11689/incubator-groovy,nkhuyu/incubator-groovy,pickypg/incubator-groovy,eginez/incubator-groovy,gillius/incubator-groovy,armsargis/groovy,shils/groovy,paulk-asert/groovy,nobeans/incubator-groovy,alien11689/incubator-groovy,i55ac/incubator-groovy,sagarsane/groovy-core,bsideup/groovy-core,graemerocher/incubator-groovy,upadhyayap/incubator-groovy,upadhyayap/incubator-groovy,rabbitcount/incubator-groovy,ebourg/incubator-groovy,paplorinc/incubator-groovy,yukangguo/incubator-groovy,sagarsane/incubator-groovy,upadhyayap/incubator-groovy,alien11689/groovy-core,mariogarcia/groovy-core,graemerocher/incubator-groovy,bsideup/groovy-core,genqiang/incubator-groovy,kidaa/incubator-groovy,alien11689/groovy-core,russel/incubator-groovy,fpavageau/groovy,nkhuyu/incubator-groovy,guangying945/incubator-groovy,tkruse/incubator-groovy,bsideup/incubator-groovy,shils/incubator-groovy,EPadronU/incubator-groovy,ChanJLee/incubator-groovy,pickypg/incubator-groovy,sagarsane/incubator-groovy,rlovtangen/groovy-core,aim-for-better/incubator-groovy,tkruse/incubator-groovy,PascalSchumacher/incubator-groovy,shils/incubator-groovy,sagarsane/incubator-groovy,alien11689/groovy-core,PascalSchumacher/incubator-groovy,taoguan/incubator-groovy,adjohnson916/incubator-groovy,russel/incubator-groovy,alien11689/groovy-core,avafanasiev/groovy,kidaa/incubator-groovy,kidaa/incubator-groovy,shils/groovy,sagarsane/groovy-core,fpavageau/groovy,pledbrook/incubator-groovy,tkruse/incubator-groovy,jwagenleitner/incubator-groovy,armsargis/groovy | /*
* Copyright 2003-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.runtime.callsite;
import java.util.Map;
import java.util.Map.Entry;
import org.codehaus.groovy.runtime.ExceptionUtils;
import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;
import groovy.lang.Closure;
/**
* Helper class for internal use only. This allows to call a {@link Closure} and
* convert the result to a boolean. It will do this by caching the possible "doCall"
* as well as the "asBoolean" in CallSiteArray fashion. "asBoolean" will not be
* called if the result is null or a Boolean. In case of null we return false and
* in case of a Boolean we simply unbox. This logic is designed after the one present
* in {@link DefaultTypeTransformation#castToBoolean(Object)}. The purpose of
* this class is to avoid the slow "asBoolean" call in that method.
* @author <a href="mailto:[email protected]">Jochen "blackdrag" Theodorou</a>
*
*/
public class BooleanClosureWrapper {
    // Two cached call sites: index 0 for the closure's "call"/doCall, index 1 for "asBoolean".
    private final CallSiteArray csa = new CallSiteArray(BooleanClosureWrapper.class, new String[]{"call", "asBoolean"});
    private final Closure wrapped;
    private final int numberOfArguments;

    public BooleanClosureWrapper(Closure wrapped) {
        this.wrapped = wrapped;
        numberOfArguments = wrapped.getMaximumNumberOfParameters();
    }

    /**
     * normal closure call
     */
    public boolean call(Object... args) {
        try {
            // cached invocation of the closure's doCall:
            Object result = csa.array[0].call(wrapped, args);
            // null maps to false, a Boolean result is unboxed directly ...
            if (result == null) return false;
            if (result instanceof Boolean) {
                return (Boolean) result;
            }
            // ... anything else goes through a cached asBoolean call
            result = csa.array[1].call(result, CallSiteArray.NOPARAM);
            return (Boolean) result;
        } catch (Throwable t) {
            // ExceptionUtils is a bytecode generated helper class
            // to allow throwing checked exceptions
            ExceptionUtils.sneakyThrow(t);
            return false;
        }
    }

    /**
     * Bridge for a call based on a map entry. If the call is done on a {@link Closure}
     * taking one argument, then we give in the {@link Entry}, otherwise we will
     * give in the key and value.
     */
    public <K, V> boolean callForMap(Map.Entry<K, V> entry) {
        if (numberOfArguments == 2) {
            return call(entry.getKey(), entry.getValue());
        }
        return call(entry);
    }
}
| src/main/org/codehaus/groovy/runtime/callsite/BooleanClosureWrapper.java | /*
* Copyright 2003-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.runtime.callsite;
import java.util.Map;
import java.util.Map.Entry;
import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;
import groovy.lang.Closure;
/**
* Helper class for internal use only. This allows to call a {@link Closure} and
* convert the result to a boolean. It will do this by caching the possible "doCall"
* as well as the "asBoolean" in CallSiteArray fashion. "asBoolean" will not be
* called if the result is null or a Boolean. In case of null we return false and
* in case of a Boolean we simply unbox. This logic is designed after the one present
* in {@link DefaultTypeTransformation#castToBoolean(Object)}. The purpose of
* this class is to avoid the slow "asBoolean" call in that method.
* @author <a href="mailto:[email protected]">Jochen "blackdrag" Theodorou</a>
*
*/
public class BooleanClosureWrapper {
    // Two cached call sites: index 0 for the closure's "call"/doCall, index 1 for "asBoolean".
    private final CallSiteArray csa = new CallSiteArray(BooleanClosureWrapper.class, new String[]{"call", "asBoolean"});
    private final Closure wrapped;
    private final int numberOfArguments;

    public BooleanClosureWrapper(Closure wrapped) {
        this.wrapped = wrapped;
        numberOfArguments = wrapped.getMaximumNumberOfParameters();
    }

    /**
     * normal closure call
     */
    public boolean call(Object... args) {
        try {
            // make cached call to doCall:
            Object ret = csa.array[0].call(wrapped, args);
            // handle conversion to boolean
            if (ret == null) return false;
            if (ret instanceof Boolean) {
                return ((Boolean) ret).booleanValue();
            }
            // it was not null and not boolean, so call asBoolean
            ret = csa.array[1].call(ret, CallSiteArray.NOPARAM);
            return ((Boolean) ret).booleanValue();
        } catch (Throwable t) {
            // Rethrow unchanged. sun.misc.Unsafe.getUnsafe() is unusable here:
            // it throws SecurityException unless the caller was loaded by the
            // bootstrap class loader, so we use the generics trick below instead.
            BooleanClosureWrapper.<RuntimeException>sneakyThrow(t);
            return false;
        }
    }

    // Erasure makes T become Throwable at runtime, so the cast is a no-op and the
    // compiler is tricked into allowing checked throwables without a throws clause.
    @SuppressWarnings("unchecked")
    private static <T extends Throwable> void sneakyThrow(Throwable t) throws T {
        throw (T) t;
    }

    /**
     * Bridge for a call based on a map entry. If the call is done on a {@link Closure}
     * taking one argument, then we give in the {@link Entry}, otherwise we will
     * give in the key and value.
     */
    public <K,V> boolean callForMap(Map.Entry<K, V> entry) {
        if (numberOfArguments==2) {
            return call(entry.getKey(), entry.getValue());
        } else {
            return call(entry);
        }
    }
}
| add sneaky exception thrower usage
| src/main/org/codehaus/groovy/runtime/callsite/BooleanClosureWrapper.java | add sneaky exception thrower usage |
|
Java | apache-2.0 | a7a9765e4c4b9ac58983aeed8cafa5df8e7f6376 | 0 | daneshk/carbon-governance,daneshk/carbon-governance,prasa7/carbon-governance,laki88/carbon-governance,prasa7/carbon-governance,laki88/carbon-governance,daneshk/carbon-governance,jranabahu/carbon-governance,cnapagoda/carbon-governance,denuwanthi/carbon-governance,cnapagoda/carbon-governance,denuwanthi/carbon-governance,jranabahu/carbon-governance,thushara35/carbon-governance,cnapagoda/carbon-governance,Rajith90/carbon-governance,jranabahu/carbon-governance,jranabahu/carbon-governance,Rajith90/carbon-governance,thushara35/carbon-governance,thushara35/carbon-governance,laki88/carbon-governance,prasa7/carbon-governance,cnapagoda/carbon-governance,denuwanthi/carbon-governance,Rajith90/carbon-governance,Rajith90/carbon-governance,wso2/carbon-governance,wso2/carbon-governance,thushara35/carbon-governance,denuwanthi/carbon-governance,wso2/carbon-governance,prasa7/carbon-governance,laki88/carbon-governance,wso2/carbon-governance,daneshk/carbon-governance | /*
* Copyright (c) 2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.governance.api.util;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.apache.axiom.om.util.AXIOMUtil;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jaxen.JaxenException;
import org.wso2.carbon.base.CarbonContextHolderBase;
import org.wso2.carbon.base.UnloadTenantTask;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.governance.api.cache.*;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifactImpl;
import org.wso2.carbon.governance.api.common.util.ApproveItemBean;
import org.wso2.carbon.governance.api.common.util.CheckListItemBean;
import org.wso2.carbon.governance.api.endpoints.dataobjects.Endpoint;
import org.wso2.carbon.governance.api.endpoints.dataobjects.EndpointImpl;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifactImpl;
import org.wso2.carbon.governance.api.policies.dataobjects.Policy;
import org.wso2.carbon.governance.api.policies.dataobjects.PolicyImpl;
import org.wso2.carbon.governance.api.schema.dataobjects.Schema;
import org.wso2.carbon.governance.api.schema.dataobjects.SchemaImpl;
import org.wso2.carbon.governance.api.wsdls.dataobjects.Wsdl;
import org.wso2.carbon.governance.api.wsdls.dataobjects.WsdlImpl;
import org.wso2.carbon.registry.common.AttributeSearchService;
import org.wso2.carbon.registry.common.ResourceData;
import org.wso2.carbon.registry.common.TermData;
import org.wso2.carbon.registry.core.Association;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.config.RegistryContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext;
import org.wso2.carbon.registry.core.pagination.PaginationContext;
import org.wso2.carbon.registry.core.secure.AuthorizationFailedException;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.registry.core.utils.MediaTypesUtils;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.registry.core.utils.UUIDGenerator;
import org.wso2.carbon.registry.extensions.utils.CommonUtil;
import org.wso2.carbon.registry.indexing.IndexingConstants;
import org.wso2.carbon.registry.indexing.service.TermsSearchService;
import org.wso2.carbon.utils.component.xml.config.ManagementPermission;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utilities used by various Governance API related functionality.
*/
public class GovernanceUtils {
private static final Log log = LogFactory.getLog(GovernanceUtils.class);
private static final String OVERVIEW = "overview";
private static final String UNDERSCORE = "_";
// Registry service injected via setRegistryService(); used to obtain registry instances.
private static RegistryService registryService;
//private static final String SEPARATOR = ":";
// Per-tenant cache of parsed artifact (RXT) configurations, keyed by tenant id.
private final static Map<Integer, List<GovernanceArtifactConfiguration>>
artifactConfigurations = new HashMap<Integer, List<GovernanceArtifactConfiguration>>();
// Per-tenant map of lifecycle aspect names to their availability flags.
private static Map<Integer, Map<String, Boolean>> lifecycleAspects =
new HashMap<Integer, Map<String, Boolean>>();
// Guards lazy population of the lifecycleAspects map.
private static final Object ASPECT_MAP_LOCK = new Object();
private static AttributeSearchService attributeSearchService;
// Holds the tenant's governance system registry for the duration of the current thread.
private static final ThreadLocal<Registry> tenantGovernanceSystemRegistry = new ThreadLocal<Registry>();
// Cache listeners for the RXT configuration cache; registered once (see rxtCacheInitiated).
private final static RXTConfigCacheEntryCreatedListener<String, Boolean> entryCreatedListener = new RXTConfigCacheEntryCreatedListener<String, Boolean>();
private final static RXTConfigCacheEntryRemovedListener<String, Boolean> entryRemovedListener = new RXTConfigCacheEntryRemovedListener<String, Boolean>();
private final static RXTConfigCacheEntryUpdatedListener<String, Boolean> entryUpdatedListener = new RXTConfigCacheEntryUpdatedListener<String, Boolean>();
// Tracks whether the RXT cache listeners above have been attached.
private static boolean rxtCacheInitiated = false;
private static TermsSearchService termsSearchService;
/**
 * Sets the registry service this utility class uses to obtain registry instances.
 *
 * @param registryService the registry service implementation to use.
 */
public static void setRegistryService(RegistryService registryService) {
    GovernanceUtils.registryService = registryService;
}
/**
 * Registers the given artifact configurations for a tenant and installs an unload task
 * that drops the tenant's cached configurations and lifecycle aspects on tenant unload.
 *
 * @param tenantId       the tenant the configurations belong to.
 * @param configurations the artifact configurations to cache.
 */
private static void registerArtifactConfigurations(int tenantId,
                                                   List<GovernanceArtifactConfiguration> configurations) {
    artifactConfigurations.put(tenantId, configurations);
    // NOTE(review): a new unload task is registered on every call; presumably this is
    // invoked rarely enough per tenant that tasks do not accumulate — verify.
    CarbonContextHolderBase.registerUnloadTenantTask(new UnloadTenantTask() {
        public void register(int tenantId, Object registration) {
            // Nothing to register in here.
        }

        public void cleanup(int tenantId) {
            // Map#remove is a no-op for absent keys, so no containment check is needed.
            artifactConfigurations.remove(tenantId);
            lifecycleAspects.remove(tenantId);
        }
    });
}
/**
 * Loads the artifact configuration stored at the given registry path and adds it to the
 * tenant's cached configuration list.
 *
 * @param registry registry instance used to read the configuration resource.
 * @param tenantId tenant whose configuration cache should be updated.
 * @param path     registry path of the configuration resource.
 * @throws RegistryException if reading the resource fails.
 */
public static void registerArtifactConfigurationByPath(Registry registry, int tenantId, String path) throws RegistryException {
    Resource resource = registry.get(path);
    Object content = resource.getContent();
    // The resource content may be stored either as a String or as raw bytes.
    String elementString = (content instanceof String)
            ? (String) content
            : RegistryUtils.decodeBytes((byte[]) content);

    GovernanceArtifactConfiguration governanceArtifactConfiguration =
            getGovernanceArtifactConfiguration(elementString);

    List<GovernanceArtifactConfiguration> configurations = artifactConfigurations.get(tenantId);
    if (configurations == null) {
        configurations = new LinkedList<GovernanceArtifactConfiguration>();
    }
    configurations.add(governanceArtifactConfiguration);
    artifactConfigurations.put(tenantId, configurations);
}
/**
 * Query to search for governance artifacts of a given media type.
 *
 * @param mediaType the media type of the artifacts to be searched for.
 * @param registry  the registry instance to run the query on.
 * @return the artifact paths, sorted case-insensitively by resource name and then by full path.
 * @throws RegistryException if the operation failed.
 */
public static String[] findGovernanceArtifacts(String mediaType, Registry registry)
        throws RegistryException {
    String[] paths = MediaTypesUtils.getResultPaths(registry, mediaType);
    if (paths == null) {
        return new String[0];
    }
    Comparator<String> byNameThenPath = new Comparator<String>() {
        public int compare(String left, String right) {
            int byName = RegistryUtils.getResourceName(left)
                    .compareToIgnoreCase(RegistryUtils.getResourceName(right));
            // Resources with equal names fall back to comparing the full paths.
            return byName != 0 ? byName : left.compareToIgnoreCase(right);
        }
    };
    Arrays.sort(paths, byNameThenPath);
    return paths;
}
/**
 * Looks up the governance artifact configuration registered for the given media type.
 *
 * @param mediaType the media type of the artifact configuration.
 * @param registry  the registry instance to run the query on.
 * @return the matching artifact configuration, or null when none is registered.
 * @throws RegistryException if the operation failed.
 */
public static GovernanceArtifactConfiguration findGovernanceArtifactConfigurationByMediaType(
        String mediaType, Registry registry)
        throws RegistryException {
    int tenantId = ((UserRegistry) registry).getTenantId();
    List<GovernanceArtifactConfiguration> configurations = artifactConfigurations.get(tenantId);
    if (configurations == null) {
        // Cache miss: load the configurations from the registry.
        configurations = findGovernanceArtifactConfigurations(registry);
    }
    for (GovernanceArtifactConfiguration candidate : configurations) {
        if (mediaType.equals(candidate.getMediaType())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Looks up the governance artifact configuration registered under the given short-name key.
 *
 * @param key      the key (short name) of the artifact configuration.
 * @param registry the registry instance to run the query on.
 * @return the matching artifact configuration, or null when none is registered.
 * @throws RegistryException if the operation failed.
 */
public static GovernanceArtifactConfiguration findGovernanceArtifactConfiguration(
        String key, Registry registry)
        throws RegistryException {
    List<GovernanceArtifactConfiguration> configurations =
            artifactConfigurations.get(((UserRegistry) registry).getTenantId());
    if (configurations == null) {
        // Cache miss: load the configurations from the registry.
        configurations = findGovernanceArtifactConfigurations(registry);
    }
    for (GovernanceArtifactConfiguration candidate : configurations) {
        if (key.equals(candidate.getKey())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Resolves the lifecycle name associated with a context of an artifact type.
 *
 * @param key         short name of the artifact type.
 * @param contextName context name of which lifecycle is needed.
 * @param registry    registry instance.
 * @return the lifecycle name associated with the context, or null when the artifact
 *         type is unknown.
 * @throws RegistryException if the operation failed.
 */
public static String getLifeCycleOfContext(String key, String contextName, Registry registry) throws RegistryException {
    GovernanceArtifactConfiguration configuration = findGovernanceArtifactConfiguration(key, registry);
    return configuration == null ? null : configuration.getLifeCycleOfContext(contextName);
}
/**
 * Obtains the paths of all resources having the given media type.
 *
 * @param registry  the registry instance to run the query on.
 * @param mediaType the media type.
 * @return an array of resource paths.
 * @throws GovernanceException if the operation failed.
 */
public static String[] getResultPaths(Registry registry, String mediaType) throws GovernanceException {
    try {
        return MediaTypesUtils.getResultPaths(registry, mediaType);
    } catch (RegistryException e) {
        // Wrap the low-level registry failure in the API-level exception type.
        String message = "Error in getting the result for media type: " + mediaType + ".";
        log.error(message, e);
        throw new GovernanceException(message, e);
    }
}
/**
 * Filters symbolic links out of a set of search-result paths. A path is kept when its
 * resource either carries no registry-link property or has a real-path property, and the
 * path is not under the system mount path. Unreadable paths are silently dropped.
 *
 * @param paths              the candidate paths; may be null.
 * @param governanceRegistry the registry used to inspect each path's properties.
 * @return the filtered paths; never null.
 */
@SuppressWarnings("unused")
private static String[] removeSymbolicLinks(String[] paths, Registry governanceRegistry) {
    if (paths == null) {
        return new String[0];
    }
    List<String> fixedPaths = new LinkedList<String>();
    for (String path : paths) {
        try {
            // Fetch the resource once instead of twice per path.
            Resource resource = governanceRegistry.get(path);
            boolean notALink = resource.getProperty(RegistryConstants.REGISTRY_LINK) == null
                    || resource.getProperty(RegistryConstants.REGISTRY_REAL_PATH) != null;
            if (notALink && !path.contains(RegistryConstants.SYSTEM_MOUNT_PATH)) {
                fixedPaths.add(path);
            }
        } catch (RegistryException ignored) {
            // Best-effort filtering: unreadable paths are skipped, as before.
        }
    }
    return fixedPaths.toArray(new String[fixedPaths.size()]);
}
/**
 * Method to load the Governance Artifacts to be used by the API operations.
 * Registers the given configurations against the registry's tenant.
 *
 * @param registry       the registry instance used to search for artifacts.
 * @param configurations the artifact configurations to load.
 * @throws RegistryException if the operation failed.
 */
public static void loadGovernanceArtifacts(UserRegistry registry,
List<GovernanceArtifactConfiguration> configurations)
throws RegistryException {
registerArtifactConfigurations(registry.getTenantId(), configurations);
}
/**
 * Loads the governance artifacts for the registry's tenant, discovering the artifact
 * configurations from the registry. Does nothing when the tenant is already loaded.
 *
 * @param registry the registry instance used to search for artifacts.
 * @throws RegistryException if the operation failed.
 */
public static void loadGovernanceArtifacts(UserRegistry registry) throws RegistryException {
    if (artifactConfigurations.containsKey(registry.getTenantId())) {
        return; // already loaded for this tenant
    }
    loadGovernanceArtifacts(registry,
            Collections.unmodifiableList(findGovernanceArtifactConfigurations(registry)));
}
    /**
     * Parses a governance artifact (RXT) configuration from its XML serialization into a
     * {@link GovernanceArtifactConfiguration} bean.
     * <p>
     * Malformed XML or a malformed {@code iconSet} number is deliberately swallowed and
     * results in a {@code null} return, so that one broken definition does not abort
     * configuration loading for the whole tenant.
     *
     * @param elementString the XML content of the artifact configuration.
     * @return the populated configuration, or {@code null} if parsing failed.
     */
    public static GovernanceArtifactConfiguration getGovernanceArtifactConfiguration(String elementString) {
        GovernanceArtifactConfiguration configuration = null;
        try {
            OMElement configElement = AXIOMUtil.stringToOM(elementString);
            if (configElement != null) {
                configuration = new GovernanceArtifactConfiguration();
                // <nameAttribute> identifies which artifact attribute holds the name.
                OMElement artifactNameAttributeElement = configElement.getFirstChildWithName(
                        new QName("nameAttribute"));
                if (artifactNameAttributeElement != null) {
                    configuration.setArtifactNameAttribute(
                            artifactNameAttributeElement.getText());
                }
                // Namespace handling: an explicit <namespaceAttribute> wins; otherwise
                // hasNamespace="false" disables namespaces; otherwise namespaces are on.
                OMElement artifactNamespaceAttributeElement =
                        configElement.getFirstChildWithName(
                                new QName("namespaceAttribute"));
                if (artifactNamespaceAttributeElement != null) {
                    configuration.setArtifactNamespaceAttribute(
                            artifactNamespaceAttributeElement.getText());
                } else if (Boolean.toString(false).equals(
                        configElement.getAttributeValue(new QName("hasNamespace")))) {
                    configuration.setArtifactNamespaceAttribute(null);
                } else {
                    configuration.setHasNamespace(true);
                }
                OMElement artifactElementRootElement = configElement.getFirstChildWithName(
                        new QName("elementRoot"));
                if (artifactElementRootElement != null) {
                    configuration.setArtifactElementRoot(
                            artifactElementRootElement.getText());
                }
                OMElement artifactElementNamespaceElement = configElement.getFirstChildWithName(
                        new QName("elementNamespace"));
                if (artifactElementNamespaceElement != null) {
                    configuration.setArtifactElementNamespace(
                            artifactElementNamespaceElement.getText());
                }
                // Basic identity attributes of the artifact type.
                configuration.setKey(configElement.getAttributeValue(new QName("shortName")));
                configuration.setSingularLabel(
                        configElement.getAttributeValue(new QName("singularLabel")));
                configuration.setPluralLabel(
                        configElement.getAttributeValue(new QName("pluralLabel")));
                configuration.setMediaType(
                        configElement.getAttributeValue(new QName("type")));
                configuration.setExtension(
                        configElement.getAttributeValue(new QName("fileExtension")));
                String iconSetString = configElement.getAttributeValue(new QName("iconSet"));
                if (iconSetString != null) {
                    // May throw NumberFormatException, which is swallowed below.
                    configuration.setIconSet(Integer.parseInt(iconSetString));
                }
                // Storage path defaults to "/@{name}" when no <storagePath> is given.
                OMElement pathExpressionElement = configElement.getFirstChildWithName(
                        new QName("storagePath"));
                if (pathExpressionElement != null) {
                    configuration.setPathExpression(pathExpressionElement.getText());
                } else {
                    configuration.setPathExpression("/@{name}");
                }
                OMElement lifecycleElement = configElement.getFirstChildWithName(
                        new QName("lifecycle"));
                if (lifecycleElement != null) {
                    configuration.setLifecycle(lifecycleElement.getText());
                }
                OMElement groupingAttributeElement = configElement.getFirstChildWithName(
                        new QName("groupingAttribute"));
                if (groupingAttributeElement != null) {
                    configuration.setGroupingAttribute(groupingAttributeElement.getText());
                }
                // <lifecycleContexts>: each <lifecycleContext lcName="..."> lists a
                // comma-separated set of contexts mapped to that lifecycle.
                OMElement lifecycleContextsElement = configElement.getFirstChildWithName(new QName("lifecycleContexts"));
                if(lifecycleContextsElement != null) {
                    Iterator lifecycleContextsIterator = lifecycleContextsElement.getChildrenWithName(new QName("lifecycleContext"));
                    while(lifecycleContextsIterator.hasNext()) {
                        OMElement lifecycleContextElement = (OMElement) lifecycleContextsIterator.next();
                        String lcName = lifecycleContextElement.getAttributeValue(new QName("lcName"));
                        String contextsStr = lifecycleContextElement.getText();
                        String[] contexts = contextsStr.split(",");
                        for(String context : contexts) {
                            configuration.addLifeCycleToContext(context, lcName);
                        }
                    }
                }
                // <content>: the artifact's content definition, optionally external via href.
                OMElement contentDefinition = configElement.getFirstChildWithName(
                        new QName("content"));
                if (contentDefinition != null) {
                    String href = contentDefinition.getAttributeValue(new QName("href"));
                    if (href != null) {
                        configuration.setContentURL(href);
                    }
                    configuration.setContentDefinition(contentDefinition);
                }
                // <relationships>: explicit <association> entries plus <dependency>
                // entries, which become associations of type "depends".
                OMElement associationDefinitions = configElement.getFirstChildWithName(
                        new QName("relationships"));
                if (associationDefinitions != null) {
                    List<Association> associations =
                            new LinkedList<Association>();
                    Iterator associationElements =
                            associationDefinitions.getChildrenWithName(
                                    new QName("association"));
                    while (associationElements.hasNext()) {
                        OMElement associationElement = (OMElement) associationElements.next();
                        String type = associationElement.getAttributeValue(new QName("type"));
                        String source =
                                associationElement.getAttributeValue(new QName("source"));
                        String target =
                                associationElement.getAttributeValue(new QName("target"));
                        associations.add(new Association(source, target, type));
                    }
                    associationElements =
                            associationDefinitions.getChildrenWithName(
                                    new QName("dependency"));
                    while (associationElements.hasNext()) {
                        OMElement associationElement = (OMElement) associationElements.next();
                        String source =
                                associationElement.getAttributeValue(new QName("source"));
                        String target =
                                associationElement.getAttributeValue(new QName("target"));
                        associations.add(new Association(source, target, "depends"));
                    }
                    configuration.setRelationshipDefinitions(associations.toArray(
                            new Association[associations.size()]));
                }
                // <ui>: UI configuration, with an optional nested <list> section.
                OMElement uiConfigurations = configElement.getFirstChildWithName(
                        new QName("ui"));
                if (uiConfigurations != null) {
                    configuration.setUIConfigurations(uiConfigurations);
                    OMElement uiListConfigurations = uiConfigurations.getFirstChildWithName(
                            new QName("list"));
                    if (uiListConfigurations != null) {
                        configuration.setUIListConfigurations(uiListConfigurations);
                    }
                }
                // <permissions>: explicit management permissions; when absent, a default
                // set (manage/add/list + configure UI) is synthesized from the key.
                OMElement uiPermissions = configElement.getFirstChildWithName(
                        new QName("permissions"));
                if (uiPermissions != null) {
                    Iterator permissionElements =
                            uiPermissions.getChildrenWithName(
                                    new QName("permission"));
                    List<ManagementPermission> managementPermissions =
                            new LinkedList<ManagementPermission>();
                    while (permissionElements.hasNext()) {
                        OMElement permissionElement = (OMElement) permissionElements.next();
                        OMElement nameElement =
                                permissionElement.getFirstChildWithName(
                                        new QName("name"));
                        String name = (nameElement != null) ? nameElement.getText() : null;
                        OMElement idElement =
                                permissionElement.getFirstChildWithName(
                                        new QName("id"));
                        String id = (idElement != null) ? idElement.getText() : null;
                        if (name != null && id != null) {
                            managementPermissions.add(new ManagementPermission(name, id));
                        }
                    }
                    configuration.setUIPermissions(managementPermissions.toArray(
                            new ManagementPermission[managementPermissions.size()]));
                } else {
                    // if no permission definitions were present, define the default ones.
                    List<ManagementPermission> managementPermissions =
                            new LinkedList<ManagementPermission>();
                    String idPrefix = "/permission/admin/manage/resources/govern/" +
                            configuration.getKey();
                    managementPermissions.add(
                            new ManagementPermission(configuration.getPluralLabel(), idPrefix));
                    managementPermissions.add(
                            new ManagementPermission("Add", idPrefix + "/add"));
                    managementPermissions.add(
                            new ManagementPermission("List", idPrefix + "/list"));
                    managementPermissions.add(
                            new ManagementPermission(configuration.getPluralLabel(),
                                    "/permission/admin/configure/governance/" +
                                            configuration.getKey() + "-ui"
                            )
                    );
                    configuration.setUIPermissions(managementPermissions.toArray(
                            new ManagementPermission[managementPermissions.size()]));
                }
            }
        } catch (XMLStreamException ignored) {
            // Malformed XML: intentionally return null instead of propagating.
        } catch (NumberFormatException ignored) {
            // Malformed iconSet number: intentionally return null instead of propagating.
        }
        return configuration;
    }
/**
* Method to locate Governance Artifact configurations.
*
* @param registry the registry instance to run query on.
* @return an array of resource paths.
* @throws GovernanceException if the operation failed.
*/
public static List<GovernanceArtifactConfiguration> findGovernanceArtifactConfigurations(
Registry registry) throws RegistryException {
String[] artifactConfigurations = findGovernanceArtifacts(
GovernanceConstants.GOVERNANCE_ARTIFACT_CONFIGURATION_MEDIA_TYPE, registry);
List<GovernanceArtifactConfiguration> configurations =
new LinkedList<GovernanceArtifactConfiguration>();
for (String artifactConfiguration : artifactConfigurations) {
Resource resource = registry.get(artifactConfiguration);
Object content = resource.getContent();
String elementString;
if (content instanceof String) {
elementString = (String) content;
} else {
elementString = RegistryUtils.decodeBytes((byte[]) content);
}
configurations.add(getGovernanceArtifactConfiguration(elementString));
}
return configurations;
}
/**
* Method to return the GovernanceArtifactConfiguration for a given media type
*
* @param registry the registry instance to run query on.
* @param mediaType mediatype of the needed artifact configuration
* @return GovernanceArtifactConfiguration
* @throws RegistryException exception thorown if something goes wrong
*/
public static GovernanceArtifactConfiguration getArtifactConfigurationByMediaType(Registry registry, String mediaType) throws RegistryException {
List<GovernanceArtifactConfiguration> configurations = findGovernanceArtifactConfigurations(registry);
for(GovernanceArtifactConfiguration configuration : configurations) {
if(configuration.getMediaType().equals(mediaType)) {
return configuration;
}
}
return null;
}
    // Binds the given tenant's governance system registry to the current thread so
    // that subsequent calls to getGovernanceSystemRegistry use it. No-op when the
    // registry service is unavailable.
    @SuppressWarnings("unused")
    public static void setTenantGovernanceSystemRegistry(final int tenantId) throws RegistryException {
        if (registryService != null) {
            tenantGovernanceSystemRegistry.set(
                    registryService.getGovernanceSystemRegistry(tenantId));
        }
    }
    // Clears the thread-local set by setTenantGovernanceSystemRegistry, preventing a
    // stale registry from leaking across pooled threads.
    @SuppressWarnings("unused")
    public static void unsetTenantGovernanceSystemRegistry() throws RegistryException {
        tenantGovernanceSystemRegistry.remove();
    }
/**
* Returns the system governance registry.
*
* @param registry the user registry.
* @return the system registry.
* @throws RegistryException throws if an error occurs
*/
public static Registry getGovernanceSystemRegistry(Registry registry) throws RegistryException {
if (tenantGovernanceSystemRegistry.get() != null) {
return tenantGovernanceSystemRegistry.get();
}
if (registryService == null) {
return null;
}
UserRegistry userRegistry;
if (!(registry instanceof UserRegistry)) {
return null;
}
userRegistry = (UserRegistry) registry;
return registryService.getGovernanceSystemRegistry(userRegistry.getTenantId());
}
    /**
     * Obtains the governance user registry from the given root registry instance. This is useful
     * when creating a governance user registry out of a remote client registry instance.
     * The returned registry is rooted at the governance collection and bound to the
     * super tenant.
     *
     * @param registry the remote client registry instance.
     * @param username the name of the user to connect as.
     * @return the system registry.
     * @throws RegistryException throws if an error occurs
     */
    @SuppressWarnings("unused")
    public static Registry getGovernanceUserRegistry(Registry registry, String username)
            throws RegistryException {
        // Ensure a base RegistryContext exists before constructing the UserRegistry.
        if (RegistryContext.getBaseInstance() == null) {
            RegistryContext.getBaseInstance(null, false);
        }
        return new UserRegistry(username, MultitenantConstants.SUPER_TENANT_ID, registry, null,
                RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
    }
    /**
     * Obtains the governance user registry from the given root registry instance. This is useful
     * when creating a tenant aware governance user registry out of a remote client registry
     * instance. The returned registry is rooted at the governance collection.
     *
     * @param registry registry the remote client registry instance.
     * @param username username the name of the user to connect as.
     * @param tenantId tenant id
     * @return the system registry
     * @throws RegistryException throws if an error occurs
     */
    public static Registry getGovernanceUserRegistry(Registry registry, String username, int tenantId)
            throws RegistryException {
        // Ensure a base RegistryContext exists before constructing the UserRegistry.
        if (RegistryContext.getBaseInstance() == null) {
            RegistryContext.getBaseInstance(null, false);
        }
        return new UserRegistry(username, tenantId, registry, null,
                RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
    }
public static String parameterizeString(RequestContext requestContext, String parameterString) {
String parameterizedString = parameterString;
Pattern pattern = Pattern.compile("\\{@(\\w)*\\}");
Matcher matcher = pattern.matcher(parameterString);
GovernanceArtifact governanceArtifact;
Registry registry = requestContext.getRegistry();
String resourcePath = requestContext.getResourcePath().getPath();
Set<String> matchSet = new HashSet<String>();
while (matcher.find()) {
matchSet.add(matcher.group());
}
for (String current : matchSet) {
String name = current.substring(2, current.length() - 1);
//To replace special values such as {@resourcePath}
if (name.equals("resourcePath")) {
parameterizedString = parameterizedString.replaceAll("\\" + current.replace("}", "\\}"), resourcePath);
}
try {
governanceArtifact = GovernanceUtils.retrieveGovernanceArtifactByPath(requestContext.getSystemRegistry(), resourcePath);
if (governanceArtifact != null && governanceArtifact.getAttribute(name) != null) {
parameterizedString = parameterizedString.replaceAll("\\" + current.replace("}", "\\}"), governanceArtifact.getAttribute(name));
} else if (registry.get(resourcePath).getProperty(name) != null) {
parameterizedString = parameterizedString.replaceAll("\\" + current.replace("}", "\\}"), registry.get(resourcePath).getProperty(name));
} else {
log.error("Unable to locate the given value in properties or attributes");
}
} catch (RegistryException e) {
log.error(e.getMessage(), e);
}
}
return parameterizedString;
}
/**
* Method to remove a governance artifact from the registry.
*
* @param registry the registry instance.
* @param artifactId the identifier of the artifact.
* @throws GovernanceException if the operation failed.
*/
public static void removeArtifact(Registry registry, String artifactId)
throws GovernanceException {
try {
String path = getArtifactPath(registry, artifactId);
if (registry.resourceExists(path)) {
registry.delete(path);
}
ArtifactCache artifactCache =
ArtifactCacheManager.getCacheManager().getTenantArtifactCache(((UserRegistry) registry).getTenantId());
if (artifactCache != null && path != null) {
artifactCache.invalidateArtifact(path);
}
} catch (RegistryException e) {
String msg = "Error in deleting the the artifact id:" + artifactId + ".";
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
    /**
     * Method to obtain the artifact path of a governance artifact on the registry.
     * Consults the tenant-scoped UUID cache first and falls back to a direct database
     * query, caching the result on a hit.
     *
     * @param registry   the registry instance.
     * @param artifactId the identifier of the artifact.
     * @return the artifact path, or {@code null} if no single resource has this UUID.
     * @throws GovernanceException if the operation failed.
     */
    public static String getArtifactPath(Registry registry, String artifactId)
            throws GovernanceException {
        Cache<String, String> cache;
        UserRegistry userRegistry = (UserRegistry) registry;
        //This is temp fix to identify remote calls. Will move cache initialization logic into registry core
        // with next major carbon(ex:4.5.0) release.
        if (userRegistry.getUserRealm() == null) {
            // Remote call: bypass the UUID cache entirely.
            return getDirectArtifactPath(registry, artifactId);
        }
        try{
            // Establish the tenant context required to access the tenant's UUID cache.
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(userRegistry.getTenantId());
            String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(true);
            if (tenantDomain == null) {
                // Derive the domain from the user name when the context cannot resolve it.
                tenantDomain = MultitenantUtils.getTenantDomain(((UserRegistry) registry).getUserName());
            }
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain);
            cache = RegistryUtils.getUUIDCache(RegistryConstants.UUID_CACHE_ID);
            if(cache.containsKey(artifactId)){
                return cache.get(artifactId);
            }
            // Cache miss: resolve via a direct SQL query and populate the cache.
            try {
                String sql = "SELECT REG_PATH_ID, REG_NAME FROM REG_RESOURCE WHERE REG_UUID = ?";
                String[] result;
                Map<String, String> parameter = new HashMap<String, String>();
                parameter.put("1", artifactId);
                parameter.put("query", sql);
                result = registry.executeQuery(null, parameter).getChildren();
                // Exactly one row identifies the artifact; anything else yields null.
                if (result != null && result.length == 1) {
                    cache.put(artifactId, result[0]);
                    return result[0];
                }
                return null;
            } catch (RegistryException e) {
                String msg =
                        "Error in getting the path from the registry. Execute query failed with message : " +
                                e.getMessage();
                log.error(msg, e);
                throw new GovernanceException(msg, e);
            }
        } finally {
            // Always unwind the tenant flow started above.
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
/**
* Method to obtain the artifact path of a governance artifact on the registry.
* without going through the UUID cache
*
* @param registry the registry instance.
* @param artifactId the identifier of the artifact.
* @return the artifact path.
* @throws GovernanceException if the operation failed.
* TODO: This method is added since UUID cache cannot be properly implemented without proper
* TODO: changes in the registry core. getArtifactPath needs to be moved into the registry core
* TODO: and UUID caching should be handled by the cacheBackedRegistry and cachingHandler
*/
public static String getDirectArtifactPath(Registry registry, String artifactId)
throws GovernanceException {
try {
String sql = "SELECT REG_PATH_ID, REG_NAME FROM REG_RESOURCE WHERE REG_UUID = ?";
String[] result;
Map<String, String> parameter = new HashMap<String, String>();
parameter.put("1", artifactId);
parameter.put("query", sql);
result = registry.executeQuery(null, parameter).getChildren();
if (result != null && result.length == 1) {
return result[0];
}
return null;
} catch (RegistryException e) {
String msg = "Error in getting the path from the registry. Execute query failed with message : "
+ e.getMessage();
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
/**
* Retrieve all the governance artifact paths which associated with the given lifecycle
*
* @param registry registry instance
* @param lcName lifecycle name
* @param mediaType mediatype of the artifacts
* @return String array of all the artifact paths
* @throws GovernanceException if the operation failed.
*/
public static String[] getAllArtifactPathsByLifecycle(Registry registry, String lcName, String mediaType) throws GovernanceException {
String sql = "SELECT R.REG_PATH_ID, R.REG_NAME FROM REG_RESOURCE R, REG_PROPERTY PP, " +
"REG_RESOURCE_PROPERTY RP WHERE R.REG_VERSION=RP.REG_VERSION AND RP.REG_PROPERTY_ID=PP.REG_ID " +
"AND PP.REG_NAME = ? AND PP.REG_VALUE = ? AND R.REG_MEDIA_TYPE = ?";
Map<String, String> parameter = new HashMap<String, String>();
parameter.put("1", "registry.LC.name");
parameter.put("2", lcName);
parameter.put("3", mediaType);
parameter.put("query", sql);
try {
return (String[]) registry.executeQuery(null, parameter).getContent();
} catch (RegistryException e) {
String msg = "Error occured while executing custom query";
throw new GovernanceException(msg, e);
}
}
/**
* Retrieve all the governance artifact paths which associated with the given lifecycle in the given lifecycle state
*
* @param registry registry instance
* @param lcName lifecycle name
* @param lcState lifecycle state
* @param mediaType mediatype of the artifacts
* @return String array of all the artifact paths
* @throws GovernanceException if the operation failed.
*/
public static String[] getAllArtifactPathsByLifecycleState(
Registry registry, String lcName, String lcState, String mediaType) throws GovernanceException {
String sql = "SELECT R.REG_PATH_ID, R.REG_NAME FROM REG_RESOURCE R, REG_PROPERTY PP, " +
"REG_RESOURCE_PROPERTY RP WHERE R.REG_VERSION=RP.REG_VERSION AND RP.REG_PROPERTY_ID=PP.REG_ID " +
"AND PP.REG_NAME = ? AND PP.REG_VALUE = ? AND R.REG_MEDIA_TYPE = ?";
Map<String, String> parameter = new HashMap<String, String>();
parameter.put("1", "registry.lifecycle." + lcName + ".state");
parameter.put("2", lcState);
parameter.put("3", mediaType);
parameter.put("query", sql);
try {
return (String[]) registry.executeQuery(null, parameter).getContent();
} catch (RegistryException e) {
String msg = "Error occured while executing custom query";
throw new GovernanceException(msg, e);
}
}
/**
* Method to obtain the value of a governance attribute.
*
* @param element the payload element.
* @param name the attribute name.
* @param namespace the namespace of the payload element.
* @return the value of the attribute by the given name if it exists or an empty string.
*/
public static String getAttributeValue(OMElement element, String name, String namespace) {
String[] parts = name.split("_");
OMElement attributeElement = element;
for (String part : parts) {
attributeElement = attributeElement.getFirstChildWithName(new QName(namespace, part));
if (attributeElement == null) {
return "";
}
}
return attributeElement.getText();
}
    /**
     * @param registry the registry instance.
     * @return list of governance artifact identifiers.
     * @throws GovernanceException if the operation failed.
     * @deprecated Method to obtain all indexed governance artifact identifiers on the provided registry
     * instance. This operation is no longer supported and always throws
     * {@link UnsupportedOperationException}.
     */
    public static String[] getAllArtifactIds(Registry registry)
            throws GovernanceException {
        // Intentionally unsupported; kept only for binary compatibility.
        throw new UnsupportedOperationException();
    }
    /**
     * @param registry the registry instance.
     * @return list of governance artifacts
     * @throws GovernanceException if the operation failed.
     * @deprecated Method to obtain all indexed governance artifacts on the provided registry instance.
     * This operation is no longer supported and always throws
     * {@link UnsupportedOperationException}.
     */
    @SuppressWarnings("unused")
    public static GovernanceArtifact[] getAllArtifacts(Registry registry)
            throws GovernanceException {
        // Intentionally unsupported; kept only for binary compatibility.
        throw new UnsupportedOperationException();
    }
/**
* Method to obtain a governance artifact on the registry.
*
* @param registry the registry instance.
* @param artifactId the identifier of the artifact.
* @return the artifact.
* @throws GovernanceException if the operation failed.
*/
public static GovernanceArtifact retrieveGovernanceArtifactById(Registry registry,
String artifactId)
throws GovernanceException {
String artifactPath = getArtifactPath(registry, artifactId);
if (artifactPath == null) {
String msg = "Governance artifact is not found for id: " + artifactId + ".";
if (log.isDebugEnabled()) {
log.debug(msg);
}
return null;
}
return retrieveGovernanceArtifactByPath(registry, artifactPath);
}
    /**
     * Method to obtain a governance artifact on the registry by the artifact path.
     * Checks the tenant artifact cache first; on a miss, loads the resource, reads its
     * lifecycle name/state, and instantiates a concrete artifact wrapper chosen by
     * media type (WSDL, schema, policy, endpoint, or a configured generic type).
     * Returns {@code null} when the path does not exist, the user lacks access, or no
     * artifact type matches the resource's media type.
     *
     * @param registry     the registry instance.
     * @param artifactPath the path of the artifact.
     * @return the artifact, or {@code null} as described above.
     * @throws GovernanceException if the operation failed.
     */
    public static GovernanceArtifact retrieveGovernanceArtifactByPath(Registry registry,
                                                                      String artifactPath)
            throws GovernanceException {
        UserRegistry userRegistry = (UserRegistry) registry;
        // NOTE(review): currentUser is computed but never used below — confirm before removing.
        String currentUser = userRegistry.getUserName();
        ArtifactCache artifactCache =
                ArtifactCacheManager.getCacheManager().getTenantArtifactCache(userRegistry.getTenantId());
        // Fast path: serve a previously-built artifact from the tenant cache.
        if (artifactPath != null && artifactCache != null) {
            GovernanceArtifact governanceArtifact = artifactCache.getArtifact(artifactPath);
            if (governanceArtifact != null) {
                return governanceArtifact;
            }
        }
        String artifactLC;
        String artifactLCState = null;
        try {
            Resource artifactResource;
            if (registry.resourceExists(artifactPath)) {
                try {
                    artifactResource = registry.get(artifactPath);
                } catch (AuthorizationFailedException e) {
                    // if the the user does not have access to the specified path, we are returning null.
                    if (log.isDebugEnabled()) {
                        String msg = "User does not have access to path " + artifactPath + ".";
                        log.debug(msg);
                    }
                    return null;
                }
                // Lifecycle name and, when present, the state under that lifecycle.
                artifactLC = artifactResource.getProperty("registry.LC.name");
                if (artifactLC != null) {
                    artifactLCState = artifactResource.getProperty("registry.lifecycle." + artifactLC + ".state");
                }
            } else {
                // if the artifact path doesn't exist we are returning null.
                if (log.isDebugEnabled()) {
                    String msg = "The artifact path doesn't exists at " + artifactPath + ".";
                    log.debug(msg);
                }
                return null;
            }
            String artifactId =
                    artifactResource.getUUID();
            String mediaType = artifactResource.getMediaType();
            List<String> uniqueAttributes = getUniqueAttributesNames(registry, mediaType);
            // Dispatch on media type: each well-known type gets its dedicated wrapper,
            // which is populated with lifecycle info and cached before returning.
            if (GovernanceConstants.WSDL_MEDIA_TYPE
                    .equals(mediaType)) {
                Wsdl wsdl = new WsdlImpl(artifactId, registry);
                ((WsdlImpl) wsdl).setLcName(artifactLC);
                ((WsdlImpl) wsdl).setLcState(artifactLCState);
                ((WsdlImpl) wsdl).setArtifactPath(artifactPath);
                ((WsdlImpl) wsdl).setUniqueAttributes(uniqueAttributes);
                if (artifactCache != null) {
                    artifactCache.addArtifact(artifactPath, wsdl);
                }
                return wsdl;
            } else if (GovernanceConstants.SCHEMA_MEDIA_TYPE
                    .equals(mediaType)) {
                Schema schema = new SchemaImpl(artifactId, registry);
                ((SchemaImpl) schema).setLcName(artifactLC);
                ((SchemaImpl) schema).setLcState(artifactLCState);
                ((SchemaImpl) schema).setArtifactPath(artifactPath);
                ((SchemaImpl) schema).setUniqueAttributes(uniqueAttributes);
                if (artifactCache != null) {
                    artifactCache.addArtifact(artifactPath, schema);
                }
                return schema;
            } else if (GovernanceConstants.POLICY_XML_MEDIA_TYPE
                    .equals(mediaType)) {
                Policy policy = new PolicyImpl(artifactId, registry);
                ((PolicyImpl) policy).setLcName(artifactLC);
                ((PolicyImpl) policy).setLcState(artifactLCState);
                ((PolicyImpl) policy).setArtifactPath(artifactPath);
                ((PolicyImpl) policy).setUniqueAttributes(uniqueAttributes);
                if (artifactCache != null) {
                    artifactCache.addArtifact(artifactPath, policy);
                }
                return policy;
            } else if (GovernanceConstants.ENDPOINT_MEDIA_TYPE
                    .equals(mediaType)) {
                Endpoint endpoint = new EndpointImpl(artifactId, registry);
                ((EndpointImpl) endpoint).setLcName(artifactLC);
                ((EndpointImpl) endpoint).setLcState(artifactLCState);
                ((EndpointImpl) endpoint).setArtifactPath(artifactPath);
                ((EndpointImpl) endpoint).setUniqueAttributes(uniqueAttributes);
                if (artifactCache != null) {
                    artifactCache.addArtifact(artifactPath, endpoint);
                }
                return endpoint;
            } else if (mediaType != null && mediaType.matches("application/[a-zA-Z0-9.+-]+")) {
                // Configured (RXT-defined) generic artifact types, matched against the
                // tenant's registered configurations.
                if (registry instanceof UserRegistry) {
                    List<GovernanceArtifactConfiguration> configurations =
                            artifactConfigurations.get(((UserRegistry) registry).getTenantId());
                    if (configurations != null) {
                        for (GovernanceArtifactConfiguration configuration :
                                configurations) {
                            if (mediaType.equals(configuration.getMediaType())) {
                                GenericArtifactImpl artifact;
                                // XML-payload generic artifacts ("application/vnd.*+xml")
                                // are built eagerly from the resource content.
                                if (mediaType.matches("application/vnd\\.[a-zA-Z0-9.-]+\\+xml")) {
                                    byte[] contentBytes = (byte[]) artifactResource.getContent();
                                    if (contentBytes == null || contentBytes.length == 0) {
                                        throw new GovernanceException(
                                                "Unable to read payload of governance artifact " +
                                                        "at path: " + artifactPath
                                        );
                                    }
                                    OMElement contentElement = buildOMElement(contentBytes);
                                    artifact = new GenericArtifactImpl(
                                            artifactId, contentElement,
                                            configuration.getArtifactNameAttribute(),
                                            configuration.getArtifactNamespaceAttribute(),
                                            configuration.getArtifactElementNamespace(),
                                            configuration.getMediaType());
                                    artifact.associateRegistry(registry);
                                    artifact.setArtifactPath(artifactPath);
                                } else {
                                    artifact = new GenericArtifactImpl(artifactId, registry);
                                }
                                artifact.setLcState(artifactLCState);
                                artifact.setLcName(artifactLC);
                                artifact.setUniqueAttributes(uniqueAttributes);
                                if (artifactCache != null) {
                                    artifactCache.addArtifact(artifactPath, artifact);
                                }
                                return artifact;
                            }
                        }
                    }
                }
            }
            /*else if (GovernanceConstants.PEOPLE_MEDIA_TYPE.
                    equals(artifactResource.getMediaType())) {
                // it is a peopleArtifact
                byte[] contentBytes = (byte[]) artifactResource.getContent();
                OMElement contentElement = null;
                if (contentBytes != null) {
                    contentElement = buildOMElement(contentBytes);
                }
                String peopleGroup = CommonUtil.getPeopleGroup(contentElement);
                PeopleArtifact peopleArtifact = null;
                switch (PeopleGroup.valueOf(peopleGroup.toUpperCase())) {
                    case ORGANIZATION:
                        peopleArtifact = new Organization(artifactId, contentElement);
                        break;
                    case DEPARTMENT:
                        peopleArtifact = new Department(artifactId, contentElement);
                        break;
                    case PROJECT_GROUP:
                        peopleArtifact = new ProjectGroup(artifactId, contentElement);
                        break;
                    case PERSON:
                        peopleArtifact = new Person(artifactId, contentElement);
                        break;
                    default:
                        assert false;
                }
                peopleArtifact.associateRegistry(registry);
                return peopleArtifact;
            }*/
        } catch (RegistryException e) {
            String msg = "Error in retrieving governance artifact by path. path: " + artifactPath + ".";
            //log.error(msg, e);
            throw new GovernanceException(msg, e);
        }
        // No wrapper matched the resource's media type.
        return null;
    }
    /**
     * Extracts all CheckListItemBeans from the resource
     * and update the lifecycle name and state of the artifact.
     * Check-list items are read from properties named
     * {@code registry.custom_lifecycle.checklist.<...>.item} belonging to the given
     * lifecycle, and returned ordered by their {@code order:} parameter.
     *
     * @param artifactResource resource related to the artifact
     * @param artifact         artifact which related to the resource
     * @param artifactLC       aspect name of which check list item bean is needed
     * @return CheckListItemBean array extracted from the resource, or {@code null}
     *         when the resource defines no check-list items for this lifecycle.
     * @throws GovernanceException GovernanceException if the operation failed.
     */
    public static CheckListItemBean[] getAllCheckListItemBeans(Resource artifactResource, GovernanceArtifact artifact,
                                                               String artifactLC) throws GovernanceException {
        String defaultLC = artifactResource.getProperty("registry.LC.name");
        String artifactLCState = artifactResource.getProperty("registry.lifecycle." + artifactLC + ".state");
        // Only refresh the artifact's state when the requested lifecycle is its default one.
        if (artifactLC.equals(defaultLC)) {
            ((GovernanceArtifactImpl) artifact).setLcState(artifactLCState);
        }
        ArrayList<CheckListItemBean> checkListItemList = new ArrayList<CheckListItemBean>();
        Properties lifecycleProps = artifactResource.getProperties();
        Set propertyKeys = lifecycleProps.keySet();
        for (Object propertyObj : propertyKeys) {
            String propertyKey = (String) propertyObj;
            String checkListPrefix = "registry.custom_lifecycle.checklist.";
            String checkListSuffix = ".item";
            // Match only check-list properties scoped to the requested lifecycle.
            if (propertyKey.startsWith(checkListPrefix) && propertyKey.endsWith(checkListSuffix) && propertyKey
                    .contains(GovernanceConstants.DOT + artifactLC + GovernanceConstants.DOT)) {
                List<String> propValues = (List<String>) lifecycleProps.get(propertyKey);
                CheckListItemBean checkListItem = new CheckListItemBean();
                // Each value is a "key:value" parameter; substring offsets skip the key prefix.
                if (propValues != null && propValues.size() > 2) {
                    for (String param : propValues) {
                        if ((param.startsWith("status:"))) {
                            checkListItem.setStatus(param.substring(7));
                        } else if ((param.startsWith("name:"))) {
                            checkListItem.setName(param.substring(5));
                        } else if ((param.startsWith("value:"))) {
                            checkListItem.setValue(Boolean.parseBoolean(param.substring(6)));
                        } else if ((param.startsWith("order:"))) {
                            checkListItem.setOrder(Integer.parseInt(param.substring(6)));
                        }
                    }
                }
                checkListItemList.add(checkListItem);
            }
        }
        CheckListItemBean[] checkListItemBeans = new CheckListItemBean[checkListItemList.size()];
        if (checkListItemBeans.length > 0) {
            // NOTE(review): items are placed at index getOrder(); this assumes orders are a
            // dense, unique 0..n-1 range — an out-of-range or duplicate order would throw
            // or leave null slots. Confirm the lifecycle config guarantees this.
            for (CheckListItemBean checkListItemBean : checkListItemList) {
                checkListItemBeans[checkListItemBean.getOrder()] = checkListItemBean;
            }
            return checkListItemBeans;
        }
        return null;
    }
    /**
     * Extracts all ApproveItemBeans (lifecycle voting items) from the resource.
     * Voting items are stored as multi-valued properties whose keys match
     * {@code registry.custom_lifecycle.votes.*.vote}; each value list holds
     * colon-delimited {@code key:value} entries (status, name, votes, current,
     * order, users).
     *
     * @param currentUser current registry user; used to decide whether this user
     *                    has already voted on each item
     * @param artifactResource resource related to the artifact
     * @param artifact artifact related to the resource; its lifecycle state is
     *                 updated as a side effect
     * @return ApproveItemBean array extracted from the resource, ordered by each
     *         item's {@code order} value, or {@code null} if there are no items
     * @throws GovernanceException if no lifecycle is associated with the artifact.
     */
    public static ApproveItemBean[] getAllApproveItemBeans(
            String currentUser, Resource artifactResource, GovernanceArtifact artifact) throws GovernanceException {
        String artifactLC = artifactResource.getProperty("registry.LC.name");
        if (artifactLC == null) {
            throw new GovernanceException("No lifecycle associated with the artifact path " +
                    artifactResource.getPath());
        }
        // Side effect: propagate the current lifecycle state onto the artifact object.
        String artifactLCState = artifactResource.getProperty("registry.lifecycle." + artifactLC + ".state");
        ((GovernanceArtifactImpl) artifact).setLcState(artifactLCState);
        ArrayList<ApproveItemBean> approveItemList = new ArrayList<ApproveItemBean>();
        Properties lifecycleProps = artifactResource.getProperties();
        Set propertyKeys = lifecycleProps.keySet();
        for (Object propertyObj : propertyKeys) {
            String propertyKey = (String) propertyObj;
            String votingPrefix = "registry.custom_lifecycle.votes.";
            String votingSuffix = ".vote";
            if (propertyKey.startsWith(votingPrefix) && propertyKey.endsWith(votingSuffix)) {
                List<String> propValues = (List<String>) lifecycleProps.get(propertyKey);
                ApproveItemBean approveItemBean = new ApproveItemBean();
                if (propValues != null && propValues.size() > 2) {
                    // Each entry is "<key>:<value>"; the substring offsets below skip
                    // the fixed-length key prefix including the colon.
                    for (String param : propValues) {
                        if ((param.startsWith("status:"))) {
                            approveItemBean.setStatus(param.substring(7));
                        } else if ((param.startsWith("name:"))) {
                            approveItemBean.setName(param.substring(5));
                        } else if ((param.startsWith("votes:"))) {
                            approveItemBean.setRequiredVotes(Integer.parseInt(param.substring(6)));
                        } else if ((param.startsWith("current:"))) {
                            approveItemBean.setVotes(Integer.parseInt(param.substring(8)));
                        } else if ((param.startsWith("order:"))) {
                            approveItemBean.setOrder(Integer.parseInt(param.substring(6)));
                        } else if ((param.startsWith("users:"))) {
                            String users = param.substring(6);
                            if (!users.equals("")) {
                                List<String> votedUsers = Arrays.asList(users.split(","));
                                approveItemBean.setVoters(votedUsers);
                                // The current user's vote becomes the item's checked state.
                                approveItemBean.setValue(votedUsers.contains(currentUser));
                            }
                        }
                    }
                }
                approveItemList.add(approveItemBean);
            }
        }
        ApproveItemBean[] approveItemBeans = new ApproveItemBean[approveItemList.size()];
        if (approveItemBeans.length > 0) {
            // NOTE(review): assumes the stored "order" values form a permutation of
            // 0..size-1; out-of-range orders would throw AIOOBE and duplicates leave
            // null slots — TODO confirm the lifecycle config guarantees this.
            for (ApproveItemBean approveItemBean : approveItemList) {
                approveItemBeans[approveItemBean.getOrder()] = approveItemBean;
            }
            return approveItemBeans;
        }
        return null;
    }
/*public static String retrieveGovernanceArtifactPath(Registry registry,
String artifactId) throws GovernanceException {
try {
Resource govIndexResource = registry.get(GovernanceConstants.GOVERNANCE_ARTIFACT_INDEX_PATH);
return govIndexResource.getProperty(artifactId);
} catch (RegistryException e) {
String msg = "Error in adding an entry for the governance artifact. uuid: " + artifactId + ".";
log.error(msg);
throw new GovernanceException(msg, e);
}
}*/
/**
* Method to register a governance artifact.
*
* @param registry the registry instance.
* @param artifactId the identifier of the artifact.
* @param artifactPath the path of the artifact.
* @throws GovernanceException if the operation failed.
*/
/*
public static void addGovernanceArtifactEntry(Registry registry,
String artifactId,
String artifactPath) throws GovernanceException {
try {
Registry systemGovernanceRegistry = getGovernanceSystemRegistry(registry);
if (systemGovernanceRegistry == null) {
systemGovernanceRegistry = registry;
}
Resource govIndexResource;
if (systemGovernanceRegistry.resourceExists(
GovernanceConstants.GOVERNANCE_ARTIFACT_INDEX_PATH)) {
govIndexResource = systemGovernanceRegistry.get(
GovernanceConstants.GOVERNANCE_ARTIFACT_INDEX_PATH);
} else {
govIndexResource = systemGovernanceRegistry.newResource();
}
govIndexResource.setProperty(artifactId, artifactPath);
govIndexResource.setVersionableChange(false);
systemGovernanceRegistry.put(GovernanceConstants.GOVERNANCE_ARTIFACT_INDEX_PATH,
govIndexResource);
} catch (RegistryException e) {
String msg =
"Error in adding an entry for the governance artifact. path: " + artifactPath +
", uuid: " + artifactId + ".";
log.error(msg);
throw new GovernanceException(msg, e);
}
}
*/
/**
* Method to build an AXIOM element from a byte stream.
*
* @param content the stream of bytes.
* @return the AXIOM element.
* @throws GovernanceException if the operation failed.
*/
public static OMElement buildOMElement(byte[] content) throws RegistryException {
XMLStreamReader parser;
try {
XMLInputFactory factory = XMLInputFactory.newInstance();
factory.setProperty(XMLInputFactory.IS_COALESCING, new Boolean(true));
parser = factory.createXMLStreamReader(new StringReader(
RegistryUtils.decodeBytes(content)));
} catch (XMLStreamException e) {
String msg = "Error in initializing the parser to build the OMElement.";
log.error(msg, e);
throw new GovernanceException(msg, e);
}
//create the builder
StAXOMBuilder builder = new StAXOMBuilder(parser);
//get the root element (in this case the envelope)
return builder.getDocumentElement();
}
/**
* Method to serialize an XML element into a string.
*
* @param element the XML element.
* @return the corresponding String representation
* @throws GovernanceException if the operation failed.
*/
public static String serializeOMElement(OMElement element) throws GovernanceException {
try {
return element.toStringWithConsume();
} catch (XMLStreamException e) {
String msg = "Error in serializing the OMElement.";
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
/**
* Method to convert the expression specified for storing the path with corresponding values
* where the artifact is stored.
*
* @param pathExpression the expression specified for storing the path
* @param artifact the governance artifact
* @param storagePath the storage path of the artifact
* @return the path with corresponding values where the artifact is stored
* @throws GovernanceException if the operation failed.
*/
public static String getPathFromPathExpression(String pathExpression,
GovernanceArtifact artifact,
String storagePath) throws GovernanceException {
return getPathFromPathExpression(
pathExpression.replace("@{storagePath}", storagePath).replace("@{uuid}",
artifact.getId()), artifact
);
}
    /**
     * Method to convert the expression specified for storing the path with corresponding values
     * where the artifact is stored. Placeholders of the form {@code @{attributeKey}}
     * are replaced with the artifact's attribute values.
     *
     * @param pathExpression the expression specified for storing the path
     * @param artifact the governance artifact
     * @return the path with corresponding values where the artifact is stored
     * @throws GovernanceException if a referenced attribute has no value on the artifact.
     */
    public static String getPathFromPathExpression(String pathExpression,
                                                   GovernanceArtifact artifact)
            throws GovernanceException {
        // First substitute the built-in @{name} / @{namespace} placeholders.
        String output = replaceNameAndNamespace(pathExpression, artifact);
        // Splitting on '@' makes every element after the first start with a
        // candidate "{key}..." fragment.
        String[] elements = output.split("@");
        for (int i = 1; i < elements.length; i++) {
            if (elements[i].indexOf("}") > 0 && elements[i].indexOf("{") == 0) {
                // Extract the attribute key between '{' and the first '}'.
                String key = elements[i].split("}")[0].substring(1);
                String artifactAttribute = artifact.getAttribute(key);
                if (artifactAttribute != null) {
                    output = output.replace("@{" + key + "}", artifactAttribute);
                } else {
                    // Every placeholder in the expression is mandatory.
                    String msg = "Value for required attribute " + key + " found empty.";
                    log.error(msg);
                    throw new GovernanceException(msg);
                }
            }
        }
        return output;
    }
/**
* Method to compare the old and new artifact paths
*
* @param pathExpression the expression specified for storing the path
* @param newArtifact updated artifact
* @param oldArtifact existing artifact
* @return whether the paths are same for old artifact and new artifact
* @throws GovernanceException if the operation failed.
*/
@SuppressWarnings("unused")
public static boolean hasSamePath(String pathExpression,
GovernanceArtifact newArtifact, GovernanceArtifact oldArtifact)
throws GovernanceException {
String output = replaceNameAndNamespace(pathExpression, newArtifact);
String[] elements = output.split("@");
for (int i = 1; i < elements.length; i++) {
if (elements[i].indexOf("}") > 0 && elements[i].indexOf("{") == 0) {
String key = elements[i].split("}")[0].substring(1);
String oldArtifactAttribute = oldArtifact.getAttribute(key);
String newArtifactAttribute = newArtifact.getAttribute(key);
if (newArtifactAttribute != null) {
if (newArtifactAttribute.equals(oldArtifactAttribute)) {
continue;
} else {
return false;
}
} else {
String msg = "Value for required attribute " + key + " found empty.";
log.error(msg);
throw new GovernanceException(msg);
}
}
}
return true;
}
    /**
     * Method to convert the expression specified for storing the path with corresponding values
     * where the artifact is stored. This method will return multiple paths: plain
     * {@code @{key}} placeholders are substituted in place, while {@code @{key:key}} /
     * {@code @{key:value}} pairs are expanded once per matching attribute entry.
     *
     * @param pathExpression the expression specified for storing the path
     * @param artifact the governance artifact
     * @return the paths with corresponding values where the artifact is stored
     * @throws GovernanceException if the operation failed.
     */
    public static String[] getPathsFromPathExpression(String pathExpression,
                                                      GovernanceArtifact artifact)
            throws GovernanceException {
        String expression = replaceNameAndNamespace(pathExpression, artifact);
        String[] elements = expression.split("@");
        for (int i = 1; i < elements.length; i++) {
            // Substitute only plain "{key}" placeholders here; "{key:...}" forms
            // (fragments containing ':') are expanded later into multiple paths.
            if (!(elements[i].indexOf(":") > 0) &&
                    elements[i].indexOf("}") > 0 && elements[i].indexOf("{") == 0) {
                String key = elements[i].split("}")[0].substring(1);
                String artifactAttribute = artifact.getAttribute(key);
                if (artifactAttribute != null) {
                    expression = expression.replace("@{" + key + "}", artifactAttribute);
                }
            }
        }
        // Expand any remaining multi-valued "{key:key}/{key:value}" placeholders.
        List<String> output = fixExpressionForMultiplePaths(artifact, expression);
        return output.toArray(new String[output.size()]);
    }
    /**
     * Recursively expands the first remaining {@code @{key:...}} placeholder pair
     * in the expression. For each "name:value" entry of the multi-valued attribute
     * {@code key}, both {@code @{key:key}} and {@code @{key:value}} are replaced and
     * the result is expanded again, yielding one output path per attribute entry.
     *
     * @param artifact   the artifact supplying the multi-valued attributes
     * @param expression the (partially substituted) path expression
     * @return the fully substituted paths; a singleton when no '@' placeholder remains
     * @throws GovernanceException if the operation failed.
     */
    private static List<String> fixExpressionForMultiplePaths(GovernanceArtifact artifact,
                                                              String expression)
            throws GovernanceException {
        // Base case: nothing left to substitute.
        if (!expression.contains("@")) {
            return Collections.singletonList(expression);
        }
        List<String> output = new LinkedList<String>();
        String[] elements = expression.split("@");
        for (int i = 1; i < elements.length; i++) {
            if (elements[i].indexOf("}") > 0 && elements[i].indexOf("{") == 0) {
                // Attribute key is the part before ':' inside "{key:...}".
                String key = elements[i].split("}")[0].substring(1).split(":")[0];
                String[] artifactAttributes = artifact.getAttributes(key);
                if (artifactAttributes != null) {
                    for (String artifactAttribute : artifactAttributes) {
                        String[] parts = artifactAttribute.split(":");
                        // Entries without a ':' separator are silently skipped.
                        if (parts.length > 1) {
                            output.addAll(fixExpressionForMultiplePaths(artifact,
                                    expression.replace("@{" + key + ":key}", parts[0])
                                            .replace("@{" + key + ":value}", parts[1])));
                        }
                    }
                }
                // Only the first placeholder is handled per level; the recursion
                // takes care of any that follow.
                break;
            }
        }
        return output;
    }
private static String replaceNameAndNamespace(String pathExpression,
GovernanceArtifact artifact) {
String output = pathExpression;
QName qName = artifact.getQName();
if (qName != null) {
output = output.replace("@{name}", qName.getLocalPart());
String replacement =
CommonUtil.derivePathFragmentFromNamespace(qName.getNamespaceURI());
if (replacement.startsWith("/")) {
replacement = replacement.substring(1);
}
if (replacement.endsWith("/")) {
replacement = replacement.substring(0, replacement.length() - 1);
}
output = output.replace("@{namespace}", replacement);
}
return output;
}
    /**
     * Method to obtain all available lifecycle aspects for the current tenant.
     * Whether an aspect is a *lifecycle* aspect is detected lazily: the aspect is
     * attached to a temporary resource and the generated properties are inspected.
     * Results are cached per tenant in {@code lifecycleAspects}.
     *
     * @return list of available lifecycle aspects.
     * @throws RegistryException if the operation failed.
     */
    public static String[] getAvailableAspects() throws RegistryException {
        int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
        Registry systemRegistry = registryService.getConfigSystemRegistry(tenantId);
        String[] aspectsToAdd = systemRegistry.getAvailableAspects();
        if (aspectsToAdd == null) {
            return new String[0];
        }
        List<String> lifecycleAspectsToAdd = new LinkedList<String>();
        boolean isTransactionStarted = false;
        // Throw-away resource used purely to probe what properties an aspect creates.
        String tempResourcePath = "/governance/lcm/" + UUIDGenerator.generateUUID();
        for (String aspectToAdd : aspectsToAdd) {
            if (systemRegistry.getRegistryContext().isReadOnly()) {
                // Read-only registry: cannot probe, so assume every aspect qualifies.
                lifecycleAspectsToAdd.add(aspectToAdd);
                continue;
            }
            Map<String, Boolean> aspectsMap;
            // Double-checked locking around the per-tenant cache map.
            // NOTE(review): relies on lifecycleAspects being a thread-safe map
            // (not visible here) — TODO confirm its declaration.
            if (!lifecycleAspects.containsKey(tenantId)) {
                synchronized (ASPECT_MAP_LOCK) {
                    if (!lifecycleAspects.containsKey(tenantId)) {
                        aspectsMap = new HashMap<String, Boolean>();
                        lifecycleAspects.put(tenantId, aspectsMap);
                    } else {
                        aspectsMap = lifecycleAspects.get(tenantId);
                    }
                }
            } else {
                aspectsMap = lifecycleAspects.get(tenantId);
            }
            Boolean isLifecycleAspect = aspectsMap.get(aspectToAdd);
            if (isLifecycleAspect == null) {
                // Cache miss: probe by attaching the aspect to the temp resource.
                if (!isTransactionStarted) {
                    systemRegistry.beginTransaction();
                    isTransactionStarted = true;
                }
                systemRegistry.put(tempResourcePath, systemRegistry.newResource());
                systemRegistry.associateAspect(tempResourcePath, aspectToAdd);
                Resource r = systemRegistry.get(tempResourcePath);
                Properties props = r.getProperties();
                Set keys = props.keySet();
                for (Object key : keys) {
                    String propKey = (String) key;
                    // Lifecycle aspects announce themselves via these property prefixes.
                    if (propKey.startsWith("registry.lifecycle.")
                            || propKey.startsWith("registry.custom_lifecycle.checklist.")) {
                        isLifecycleAspect = Boolean.TRUE;
                        break;
                    }
                }
                if (isLifecycleAspect == null) {
                    isLifecycleAspect = Boolean.FALSE;
                }
                aspectsMap.put(aspectToAdd, isLifecycleAspect);
            }
            if (isLifecycleAspect) {
                lifecycleAspectsToAdd.add(aspectToAdd);
            }
        }
        if (isTransactionStarted) {
            // Discard the probe resource; rollback ensures no permanent change.
            systemRegistry.delete(tempResourcePath);
            systemRegistry.rollbackTransaction();
        }
        return lifecycleAspectsToAdd.toArray(new String[lifecycleAspectsToAdd.size()]);
    }
/**
* Method to obtain a path from a qualified name.
*
* @param qName the qualified name.
* @return the corresponding path.
*/
@SuppressWarnings("unused")
public static String derivePathFromQName(QName qName) {
String serviceName = qName.getLocalPart();
String serviceNamespace = qName.getNamespaceURI();
return (serviceNamespace == null ?
"" : CommonUtil.derivePathFragmentFromNamespace(serviceNamespace)) + serviceName;
}
/**
* Obtain a name that can represent a URL.
*
* @param url the URL.
* @return the name.
*/
public static String getNameFromUrl(String url) {
int slashIndex = url.lastIndexOf('/');
if (slashIndex == -1) {
return url;
}
if (slashIndex == url.length() - 1) {
return url.substring(0, url.length() - 1);
}
return url.substring(slashIndex + 1);
}
    /**
     * Evaluates an XPath expression against an AXIOM tree with the common
     * WSDL/XSD/SOAP namespace prefixes pre-registered.
     *
     * @param expression the XPath expression to evaluate
     * @param root       the element the expression is evaluated against
     * @return the matching elements
     * @throws JaxenException if the expression is invalid or evaluation fails
     */
    @SuppressWarnings("unchecked")
    public static List<OMElement> evaluateXPathToElements(String expression,
                                                          OMElement root) throws JaxenException {
        // Flat array of (prefix, namespace-URI) pairs.
        String[] wsdlPrefixes = {
                "wsdl", "http://schemas.xmlsoap.org/wsdl/",
                "wsdl2", "http://www.w3.org/ns/wsdl",
                "xsd", "http://www.w3.org/2001/XMLSchema",
                "soap", "http://schemas.xmlsoap.org/wsdl/soap/",
                "soap12", "http://schemas.xmlsoap.org/wsdl/soap12/",
                "http", "http://schemas.xmlsoap.org/wsdl/http/",
        };
        AXIOMXPath xpathExpression = new AXIOMXPath(expression);
        // Note the extra j++ in the call: each iteration consumes TWO array
        // slots (prefix then URI), so the loop advances in steps of two.
        for (int j = 0; j < wsdlPrefixes.length; j++) {
            xpathExpression.addNamespace(wsdlPrefixes[j++], wsdlPrefixes[j]);
        }
        return (List<OMElement>) xpathExpression.selectNodes(root);
    }
/**
* Method to associate an aspect with a given resource on the registry.
*
* @param path the path of the resource.
* @param aspect the aspect to add.
* @param registry the registry instance on which the resource is available.
* @throws RegistryException if the operation failed.
*/
public static void associateAspect(String path, String aspect, Registry registry)
throws RegistryException {
try {
registry.associateAspect(path, aspect);
Resource resource = registry.get(path);
if(resource.getAspects().size() == 1) {
// Since this is the first life-cycle we make it default
resource.setProperty("registry.LC.name", aspect);
registry.put(path, resource);
}
} catch (RegistryException e) {
String msg = "Failed to associate aspect with the resource " +
path + ". " + e.getMessage();
log.error(msg, e);
throw new RegistryException(msg, e);
}
}
/**
* Method to remove an aspect from a given resource on the registry.
*
* @param path the path of the resource.
* @param aspect the aspect to be removed.
* @param registry the registry instance on which the resource is available.
* @throws RegistryException if the operation failed.
*/
public static void removeAspect(String path, String aspect, Registry registry)
throws RegistryException {
try {
/* set all the variables to the resource */
Resource resource = registry.get(path);
Properties props = resource.getProperties();
//List<Property> propList = new ArrayList<Property>();
Iterator iKeys = props.keySet().iterator();
ArrayList<String> propertiesToRemove = new ArrayList<String>();
while (iKeys.hasNext()) {
String propKey = (String) iKeys.next();
if ((propKey.startsWith("registry.custom_lifecycle.votes.")
|| propKey.startsWith("registry.custom_lifecycle.user.")
|| propKey.startsWith("registry.custom_lifecycle.checklist.")
|| propKey.startsWith("registry.LC.name")
|| propKey.startsWith("registry.lifecycle.")
|| propKey.startsWith("registry.Aspects")) && propKey.contains(aspect)) {
propertiesToRemove.add(propKey);
}
}
for (String propertyName : propertiesToRemove) {
resource.removeProperty(propertyName);
}
// This is needed as we are not removing all the aspects, which was removed when the current method is called
resource.removeAspect(aspect);
if(resource.getProperty("registry.LC.name").equals(aspect)) {
resource.removeProperty("registry.LC.name");
if(resource.getAspects().size() > 0) {
resource.setProperty("registry.LC.name", resource.getAspects().get(0));
}
}
registry.put(path, resource);
} catch (RegistryException e) {
String msg = "Failed to remove aspect " + aspect +
" on resource " + path + ". " + e.getMessage();
log.error(msg, e);
throw new RegistryException(msg, e);
}
}
    /** Returns the registered attribute (index) search service, or {@code null} if none is set. */
    public static AttributeSearchService getAttributeSearchService() {
        return attributeSearchService;
    }
    /** Registers the attribute (index) search service used by artifact searches. */
    public static void setAttributeSearchService(AttributeSearchService attributeSearchService) {
        GovernanceUtils.attributeSearchService = attributeSearchService;
    }
    /** Returns the registered terms (facet) search service, or {@code null} if none is set. */
    public static TermsSearchService getTermsSearchService() {
        return termsSearchService;
    }
    /** Registers the terms (facet) search service used by {@code getTermDataList}. */
    public static void setTermsSearchService(TermsSearchService termsSearchService) {
        GovernanceUtils.termsSearchService = termsSearchService;
    }
    /**
     * Method to make an aspect the default lifecycle of a resource by setting its
     * {@code registry.LC.name} property. Does nothing when the resource cannot
     * be retrieved.
     *
     * @param path path of the resource
     * @param aspect the aspect to be made default.
     * @param registry registry instance to be used
     * @throws RegistryException if retrieving or storing the resource fails.
     */
    public static void setDefaultLifeCycle(String path, String aspect, Registry registry) throws RegistryException {
        Resource resource = registry.get(path);
        if(resource != null) {
            resource.setProperty("registry.LC.name", aspect);
            registry.put(path, resource);
        }
    }
    /**
     * Returns a list of governance artifacts found by searching indexes. This method requires an instance of an
     * attribute search service.
     *
     * @param criteria the search criteria (attribute name to list of values; values
     *                 are lower-cased and joined with commas, spaces escaped)
     * @param registry the governance registry instance
     * @param mediaType optional media type filter; ignored when {@code null}
     * @return search result
     * @throws GovernanceException if the operation failed
     */
    public static List<GovernanceArtifact> findGovernanceArtifacts(Map<String, List<String>> criteria,
                                                                   Registry registry, String mediaType)
            throws GovernanceException {
        if (getAttributeSearchService() == null) {
            throw new GovernanceException("Attribute Search Service not Found");
        }
        List<GovernanceArtifact> artifacts = new ArrayList<GovernanceArtifact>();
        Map<String, String> fields = new HashMap<String, String>();
        if (mediaType != null) {
            fields.put("mediaType", mediaType);
        }
        // Flatten each criteria entry into a single comma-separated, lower-cased
        // field value; spaces are backslash-escaped for the indexer.
        for (Map.Entry<String, List<String>> e : criteria.entrySet()) {
            StringBuilder builder = new StringBuilder();
            for (String referenceValue : e.getValue()) {
                if (referenceValue != null && !"".equals(referenceValue)) {
                    String referenceValueModified = referenceValue;
                    if(referenceValueModified.contains(" ")) {
                        referenceValueModified = referenceValueModified.replace(" ", "\\ ");
                    }
                    builder.append(referenceValueModified.toLowerCase()).append(",");
                }
            }
            if (builder.length() > 0) {
                // Drop the trailing comma.
                fields.put(e.getKey(), builder.substring(0, builder.length() - 1));
            }
        }
        try {
            ResourceData[] results = getAttributeSearchService().search(fields);
            int errorCount = 0; // We use this to check how many errors occurred.
            for (ResourceData result : results) {
                GovernanceArtifact governanceArtifact = null;
                String path = result.getResourcePath().substring(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH.length());
                try {
                    governanceArtifact = retrieveGovernanceArtifactByPath(registry, path);
                } catch (GovernanceException e) {
                    // We do not throw any exception here. Only logging is done.
                    // We increase the error count for each error. If all the paths failed, then we throw an error
                    errorCount++;
                    log.error("Error occurred while retrieving governance artifact by path : " + path, e);
                }
                if (governanceArtifact != null) {
                    artifacts.add(governanceArtifact);
                }
            } if (errorCount != 0 && errorCount == results.length) {
                // This means that all the paths have failed. So we throw an error.
                throw new GovernanceException("Error occurred while retrieving all the governance artifacts");
            }
        } catch (RegistryException e) {
            throw new GovernanceException("Unable to search by attribute", e);
        }
        return artifacts;
    }
    /**
     * Searches governance artifacts using an ampersand-separated {@code key=value}
     * query string (e.g. {@code name=foo&lcState=Testing}). Keys in the known list
     * map directly to index fields; {@code table:attribute} keys and unknown keys
     * map to overview attributes; a bare term (no '=') is treated as the artifact
     * name. When exactly one overview property remains, a second property-based
     * search is performed and the results are merged without duplicates.
     *
     * @param criteria query string that should be searched for
     * @param registry the governance registry instance
     * @param mediaType media type to be matched for search. Media type can be specified in the query string too
     * @return The list of artifacts. null if the media type and string is empty.
     * @throws GovernanceException thrown when an error occurs
     */
    public static List<GovernanceArtifact> findGovernanceArtifacts(String criteria,
                                                                   Registry registry, String mediaType)
            throws GovernanceException {
        Map<String, String> fields = new HashMap<String, String>();
        // Overview attributes that may need the secondary property search.
        Map<String, String> possibleProperties = new HashMap<String, String>();
        GovernanceArtifactConfiguration artifactConfiguration;
        if (mediaType != null && !"".equals(mediaType)) {
            fields.put("mediaType", mediaType);
        } else if("".equals(criteria)) {
            // Neither a media type nor a query: nothing to search for.
            return null;
        }
        try {
            artifactConfiguration = findGovernanceArtifactConfigurationByMediaType(mediaType, registry);
        } catch (RegistryException e) {
            throw new GovernanceException(e);
        }
        // Keys understood directly by the attribute search service.
        List<String> possibleKeys = Arrays.asList("createdAfter", "createdBefore", "updatedAfter", "updatedBefore", "author", "author!", "associationType", "associationDest",
                "updater", "updater!", "tags", "taxonomy", "content", "mediaType", "mediaType!", "lcName", "lcState");
        List<String> finalTempList = new ArrayList<>();
        // Split the query on '&' and URL-decode each clause.
        if (StringUtils.isNotEmpty(criteria)) {
            String[] tempList = criteria.split("&");
            for (int i = 0; i < tempList.length; i++) {
                try {
                    finalTempList.add(URLDecoder.decode(tempList[i], "utf-8"));
                } catch (UnsupportedEncodingException e) {
                    throw new GovernanceException("Error occurred while decoding the query params");
                }
            }
        }
        for(String temp : finalTempList) {
            String[] subParts = temp.split("=");
            if(subParts.length != 2) {
                // Bare term (no key=value): treat it as an artifact-name search.
                String value = subParts[0].toLowerCase();
                if(value.contains(" ")) {
                    value = value.replace(" ", "\\ ");
                }
                fields.put("overview_name", value);
            } else {
                if(possibleKeys.contains(subParts[0])) {
                    switch(subParts[0]) {
                        // "key!" variants negate the match; strip the '!' and set
                        // the corresponding *Negate flag.
                        case "author!":
                            fields.put(subParts[0].substring(0, subParts[0].length() - 1), subParts[1].toLowerCase());
                            fields.put("authorNameNegate", "on");
                            break;
                        case "updater!":
                            fields.put(subParts[0].substring(0, subParts[0].length() - 1), subParts[1].toLowerCase());
                            fields.put("updaterNameNegate", "on");
                            break;
                        case "mediaType!":
                            fields.put(subParts[0].substring(0, subParts[0].length() - 1), subParts[1].toLowerCase());
                            fields.put("mediaTypeNegate", "on");
                            break;
                        // These fields are case-sensitive: value kept as-is.
                        case "tags":
                        case "associationType":
                        case "taxonomy":
                        case "associationDest":
                            fields.put(subParts[0], subParts[1]);
                            break;
                        default:
                            fields.put(subParts[0], subParts[1].toLowerCase());
                            break;
                    }
                } else if(subParts[0].equals("comments")){
                    fields.put("commentWords", subParts[1].toLowerCase());
                } else {
                    if(subParts[0].contains(":")) {
                        // "table:attribute" key, e.g. "overview:version".
                        String value = subParts[1].toLowerCase();
                        if(value.contains(" or ")){
                            // Normalize "a or b" into the indexer's "a OR b" syntax.
                            String[] values = value.split(" or ");
                            for(int i=0; i<values.length; i++){
                                values[i] = values[i].trim().replace(" ", "\\ ");
                            }
                            value = StringUtils.join(values, " OR ");
                        } else if(value.contains(" ")) {
                            value = value.replace(" ", "\\ ");
                        }
                        String[] tableParts = subParts[0].split(":");
                        if ("overview".equals(tableParts[0])) {
                            possibleProperties.put(tableParts[1], value);
                        }
                        fields.put(subParts[0].replace(":", "_"), value);
                    } else {
                        // Unqualified key: assume it is an overview attribute.
                        String value = subParts[1].toLowerCase();
                        if(value.contains(" or ")){
                            String[] values = value.split(" or ");
                            for(int i=0; i<values.length; i++){
                                values[i] = values[i].trim().replace(" ", "\\ ");
                            }
                            value = StringUtils.join(values, " OR ");
                        } else if(value.contains(" ")) {
                            value = value.replace(" ", "\\ ");
                        }
                        if(!subParts[0].equals("name")) {
                            possibleProperties.put(subParts[0], value);
                            fields.put(OVERVIEW + UNDERSCORE + subParts[0], value.toLowerCase());
                        } else {
                            // "name" maps to the artifact type's configured name attribute.
                            if (artifactConfiguration != null) {
                                fields.put(artifactConfiguration.getArtifactNameAttribute(), value.toLowerCase());
                            } else {
                                fields.put(OVERVIEW + UNDERSCORE + subParts[0], value.toLowerCase());
                            }
                        }
                    }
                }
            }
        }
        List<GovernanceArtifact> attributeSearchResults = performAttributeSearch(fields, registry);
        // Following check is done since Attribute Search service only has a way to search one property at a time
        if(possibleProperties.size() == 1) {
            int paginationSizeAtt = 0;
            if (PaginationContext.getInstance() != null) {
                paginationSizeAtt = PaginationContext.getInstance().getLength();
            }
            // Re-target the search at the single property (propertyName/rightOp=eq).
            for(Map.Entry<String, String> entry : possibleProperties.entrySet()) {
                String propertyName = entry.getKey();
                fields.remove("overview_" + propertyName);
                fields.put("propertyName", propertyName);
                fields.put("rightPropertyValue", entry.getValue());
                fields.put("rightOp", "eq");
            }
            List<GovernanceArtifact> propertySearchResults = performAttributeSearch(fields, registry);
            // De-duplicate by artifact id while merging the two result sets.
            Set<GovernanceArtifact> set = new TreeSet<>(new Comparator<GovernanceArtifact>() {
                public int compare(GovernanceArtifact artifact1, GovernanceArtifact artifact2)
                {
                    return artifact1.getId().compareTo(artifact2.getId()) ;
                }
            });
            set.addAll(attributeSearchResults);
            set.addAll(propertySearchResults);
            List<GovernanceArtifact> mergeListWithoutDuplicates = new ArrayList<>();
            mergeListWithoutDuplicates.addAll(set);
            // Combine the pagination lengths reported by the two searches.
            if (paginationSizeAtt != 0 && PaginationContext.getInstance() != null){
                int paginationSizePros = PaginationContext.getInstance().getLength();
                PaginationContext.getInstance().setLength(paginationSizeAtt +paginationSizePros);
            }
            return mergeListWithoutDuplicates;
        }
        return attributeSearchResults;
    }
    /**
     * Runs the given field map through the attribute search service and resolves
     * each hit into a governance artifact. Individual path-resolution failures
     * are logged and skipped; only when every hit fails is an exception thrown.
     *
     * @param fields   index field map to search with
     * @param registry the governance registry instance
     * @return the resolved artifacts (possibly fewer than the raw hits)
     * @throws GovernanceException if the service is missing, the search fails,
     *                             or every resolved path fails
     */
    private static List<GovernanceArtifact> performAttributeSearch(Map<String, String> fields, Registry registry) throws GovernanceException {
        if (getAttributeSearchService() == null) {
            throw new GovernanceException("Attribute Search Service not Found");
        }
        List<GovernanceArtifact> artifacts = new ArrayList<GovernanceArtifact>();
        try {
            ResourceData[] results = getAttributeSearchService().search(fields);
            int errorCount = 0; // We use this to check how many errors occurred.
            for (ResourceData result : results) {
                GovernanceArtifact governanceArtifact = null;
                // Strip the governance base path to obtain a registry-relative path.
                String path = result.getResourcePath().substring(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH.length());
                try {
                    governanceArtifact = retrieveGovernanceArtifactByPath(registry, path);
                } catch (GovernanceException e) {
                    // We do not throw any exception here. Only logging is done.
                    // We increase the error count for each error. If all the paths failed, then we throw an error
                    errorCount++;
                    log.error("Error occurred while retrieving governance artifact by path : " + path, e);
                }
                if (governanceArtifact != null) {
                    artifacts.add(governanceArtifact);
                }
            } if (errorCount != 0 && errorCount == results.length) {
                // This means that all the paths have failed. So we throw an error.
                throw new GovernanceException("Error occurred while retrieving all the governance artifacts");
            }
        } catch (RegistryException e) {
            throw new GovernanceException("Unable to search by attribute", e);
        }
        return artifacts;
    }
    /**
     * Find all possible terms and its count for the given facet field and query criteria.
     *
     * @param criteria the filter criteria to be matched (attribute name to values;
     *                 values are lower-cased, space-escaped and comma-joined)
     * @param facetField field used for faceting : required
     * @param mediaType artifact type need to filter : optional
     * @param authRequired authorization required flag
     * @return term results
     * @throws GovernanceException if the terms search service is missing, the
     *                             facet field is null, or the search fails
     */
    public static List<TermData> getTermDataList(Map<String, List<String>> criteria, String facetField, String mediaType, boolean authRequired) throws GovernanceException {
        if (getTermsSearchService() == null) {
            throw new GovernanceException("Term Search Service not Found");
        }
        Map<String, String> fields = new HashMap<>();
        if (mediaType != null) {
            fields.put(IndexingConstants.FIELD_MEDIA_TYPE, mediaType);
        }
        // Flatten each criteria entry into a comma-separated, lower-cased value.
        for (Map.Entry<String, List<String>> e : criteria.entrySet()) {
            StringBuilder builder = new StringBuilder();
            for (String referenceValue : e.getValue()) {
                if (referenceValue != null && !"".equals(referenceValue)) {
                    String referenceValueModified = referenceValue.replace(" ", "\\ ");
                    builder.append(referenceValueModified.toLowerCase()).append(',');
                }
            }
            if (builder.length() > 0) {
                // Drop the trailing comma.
                fields.put(e.getKey(), builder.substring(0, builder.length() - 1));
            }
        }
        //set whether authorization is required for the facet search.
        fields.put(IndexingConstants.AUTH_REQUIRED, String.valueOf(authRequired));
        //setting the facet Field which needs grouping. Facet Field is required for searching.
        if (facetField != null) {
            fields.put(IndexingConstants.FACET_FIELD_NAME, facetField);
        } else {
            throw new GovernanceException("Facet field is required. field cannot be null");
        }
        try {
            TermData[] termData = getTermsSearchService().search(fields);
            return Arrays.asList(termData);
        } catch (RegistryException e) {
            throw new GovernanceException("Unable to get terms for the given field", e);
        }
    }
/**
* Method used to retrieve cache object for RXT Configs.
*
* @param name the name of the cache
* @return the cache object for the given cache manger and cache name
*/
public static Cache<String, Boolean> getRXTConfigCache(String name) {
CacheManager manager = getCacheManager();
Cache<String, Boolean> cache = (manager != null) ? manager.<String, Boolean>getCache(name) :
Caching.getCacheManager().<String, Boolean>getCache(name);
if (rxtCacheInitiated) {
cache.registerCacheEntryListener(entryCreatedListener);
cache.registerCacheEntryListener(entryUpdatedListener);
cache.registerCacheEntryListener(entryRemovedListener);
rxtCacheInitiated = true;
}
return cache;
}
    /**
     * Get the Cache Manager for Registry.
     *
     * @return the registry cache manager obtained from the Carbon caching
     *         framework; may be {@code null} depending on the factory —
     *         NOTE(review): callers here null-check the result, so presumably
     *         the factory can return null — confirm against the caching API.
     */
    private static CacheManager getCacheManager() {
        return Caching.getCacheManagerFactory().getCacheManager(
                RegistryConstants.REGISTRY_CACHE_MANAGER);
    }
/*
* This method is used to retrieve departments attached to a given artifact. Applicable to
* ProjectGroup and Person artifacts
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - ProjectGroup or Person artifact to which Departments are attached
* @return Department artifacts attached to <code>artifact</code>
* @throws GovernanceException If operation fails
*/
/* public static Department[] getAffiliatedDepartments(Registry registry, PeopleArtifact artifact)
throws GovernanceException {
List<Department> list = new ArrayList<Department>();
PeopleManager manager = new PeopleManager(registry);
String[] affiliations = artifact.getAttributes(GovernanceConstants.AFFILIATIONS_ATTRIBUTE);
if (affiliations != null) {
for (String deptText : affiliations) {
String deptName = deptText.split(GovernanceConstants.ENTRY_VALUE_SEPARATOR)[1];
*//* We are assuming data consistency at this point and hence, not checking the 0th
element of the above returned array *//*
PeopleArtifact pa = manager.getPeopleArtifactByName(deptName);
if (pa instanceof Department) {
list.add((Department) pa);
}
}
}
return list.toArray(new Department[list.size()]);
}*/
/*
* This method is used to retrieve organizations attached to a given artifact. Applicable to
* ProjectGroup and Person artifacts
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - ProjectGroup or Person artifact to which Organizations are attached
* @return Organization artifacts attached to <code>artifact</code>
* @throws GovernanceException If operation fails
*/
/*public static Organization[] getAffiliatedOrganizations(Registry registry,
PeopleArtifact artifact)
throws GovernanceException {
List<Organization> list = new ArrayList<Organization>();
PeopleManager manager = new PeopleManager(registry);
String[] affiliations = artifact.getAttributes(GovernanceConstants.AFFILIATIONS_ATTRIBUTE);
if (affiliations != null) {
for (String orgText : affiliations) {
String orgName = orgText.split(GovernanceConstants.ENTRY_VALUE_SEPARATOR)[1];
*//* We are assuming data consistency at this point and hence, not checking the 0th
element of the above returned array *//*
PeopleArtifact pa = manager.getPeopleArtifactByName(orgName);
if (pa instanceof Organization) {
list.add((Organization) pa);
}
}
}
return list.toArray(new Organization[list.size()]);
}*/
/*
* This method is used to retrieve project groups attached to a given artifact. Applicable to
* Person artifacts
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - Person artifact to which project groups are attached
* @return ProjectGroup artifacts attached to <code>artifact</code>
* @throws GovernanceException If operation fails
*/
/*public static ProjectGroup[] getAffiliatedProjectGroups(Registry registry,
PeopleArtifact artifact)
throws GovernanceException {
List<ProjectGroup> list = new ArrayList<ProjectGroup>();
PeopleManager manager = new PeopleManager(registry);
String[] affiliations = artifact.getAttributes(GovernanceConstants.AFFILIATIONS_ATTRIBUTE);
if (affiliations != null) {
for (String pgText : affiliations) {
String pgName = pgText.split(GovernanceConstants.ENTRY_VALUE_SEPARATOR)[1];
*//* We are assuming data consistency at this point and hence, not checking the 0th
element of the above returned array *//*
PeopleArtifact pa = manager.getPeopleArtifactByName(pgName);
if (pa instanceof ProjectGroup) {
list.add((ProjectGroup) pa);
}
}
}
return list.toArray(new ProjectGroup[list.size()]);
}*/
/*
* This method is used to retrieve project groups that have the given artifact (Organization or
* Department) as an affiliation
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - Organization/Department artifact
* @return ProjectGroups that have <code>artifact</code> as an affiliation
* @throws GovernanceException
*/
/*
public static ProjectGroup[] getAttachedProjectGroups(Registry registry, PeopleArtifact artifact)
throws GovernanceException {
ProjectGroup[] pgs = new PeopleManager(registry).getAllProjectGroups();
List<ProjectGroup> list = new ArrayList<ProjectGroup>();
for (ProjectGroup pg : pgs) {
for (Department department : pg.getDepartments()) {
if (artifact.getName().equals(department.getName())) {
list.add(pg);
}
}
}
return list.toArray(new ProjectGroup[list.size()]);
}
*/
/*
* This method is used to retrieve persons that have the given artifact (Organization or
* Department) as an affiliation
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - Organization/Department artifact
* @return Person artifacts that have <code>artifact</code> as an affiliation
* @throws GovernanceException
*/
/*
public static Person[] getAttachedPersons(Registry registry, PeopleArtifact artifact)
throws GovernanceException {
Person[] persons = new PeopleManager(registry).getAllPersons();
List<Person> list = new ArrayList<Person>();
for (Person person : persons) {
for (Department department : person.getDepartments()) {
if (artifact.getName().equals(department.getName())) {
list.add(person);
}
}
}
return list.toArray(new Person[list.size()]);
}
*/
/*
* This method writes sub-group associations contained within the given ProjectGroup to the
* registry. Existence of all the sub groups must be validated before calling this method.
*
* @param registry
* @param projectGroup
* @throws GovernanceException
*/
/*
public static void writeSubGroupAssociations(Registry registry, ProjectGroup projectGroup)
throws GovernanceException {
try {
if (!registry.resourceExists(projectGroup.getPath())) {
return;
}
ProjectGroup[] subGroups = projectGroup.getSubGroups();
// Get the existing association list which is related to the current operation
Set<String> existingSet = new HashSet<String>();
for (Association asso : registry.getAllAssociations(projectGroup.getPath())) {
if ((GovernanceConstants.SUB_GROUP.equals(asso.getAssociationType()) &&
asso.getSourcePath().equals(projectGroup.getPath()))
||
(GovernanceConstants.IS_PART_OF.equals(asso.getAssociationType()) &&
asso.getDestinationPath().equals(projectGroup.getPath()))) {
existingSet.add(asso.getSourcePath() + SEPARATOR + asso.getDestinationPath() +
SEPARATOR + asso.getAssociationType());
}
}
// Get the updated association list from the projectGroup object
Set<String> updatedSet = new HashSet<String>();
for (ProjectGroup subGroup : subGroups) {
updatedSet.add(projectGroup.getPath() + SEPARATOR + subGroup.getPath() +
SEPARATOR + GovernanceConstants.SUB_GROUP);
updatedSet.add(subGroup.getPath() + SEPARATOR + projectGroup.getPath() + SEPARATOR +
GovernanceConstants.IS_PART_OF);
}
updateAssociations(registry, existingSet, updatedSet);
} catch (RegistryException e) {
String msg = "Error in writing sub group associations, parent project-group id: " +
projectGroup.getId() + ", path: " + projectGroup.getPath();
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
*/
/*
* This method writes owner associations contained within the service object to the registry.
* Existence of all the owners as people artifacts must be validated before calling this method.
*
* @param registry
* @param service
* @throws GovernanceException
*/
/*
public static void writeOwnerAssociations(Registry registry, Service service)
throws GovernanceException {
try {
if (!registry.resourceExists(service.getPath())) {
return;
}
PeopleArtifact[] owners = service.getOwners();
// Remove associations that are not there anymore and add any new associations
Association[] oldAssociations = registry.getAllAssociations(service.getPath());
Set<String> oldSet = new HashSet<String>();
for (Association association : oldAssociations) {
if (GovernanceConstants.OWNED_BY.equals(association.getAssociationType()) ||
GovernanceConstants.OWNS.equals(association.getAssociationType())) {
oldSet.add(association.getSourcePath() + SEPARATOR +
association.getDestinationPath() + SEPARATOR +
association.getAssociationType());
}
}
Set<String> updatedSet = new HashSet<String>();
for (PeopleArtifact owner : owners) {
updatedSet.add(service.getPath() + SEPARATOR + owner.getPath() + SEPARATOR +
GovernanceConstants.OWNED_BY);
updatedSet.add(owner.getPath() + SEPARATOR + service.getPath() + SEPARATOR +
GovernanceConstants.OWNS);
}
updateAssociations(registry, oldSet, updatedSet);
} catch (RegistryException e) {
String msg = "Error in associating owners to service. Id: " + service.getId() +
", path: " + service.getPath();
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
*/
/*
* This method writes consumer associations contained within the service object to the registry.
* Existence of all the consumers as people artifacts must be validated before calling this
* method.
*
* @param registry
* @param service
* @throws GovernanceException
*/
/*
public static void writeConsumerAssociations(Registry registry, Service service)
throws GovernanceException {
try {
if (!registry.resourceExists(service.getPath())) {
return;
}
PeopleArtifact[] consumers = service.getConsumers();
// Remove associations that are not there anymore and add any new associations
Association[] oldAssociations = registry.getAllAssociations(service.getPath());
Set<String> oldSet = new HashSet<String>();
for (Association association : oldAssociations) {
if (GovernanceConstants.CONSUMED_BY.equals(association.getAssociationType()) ||
GovernanceConstants.CONSUMES.equals(association.getAssociationType())) {
oldSet.add(association.getSourcePath() + SEPARATOR +
association.getDestinationPath() + SEPARATOR +
association.getAssociationType());
}
}
Set<String> updatedSet = new HashSet<String>();
for (PeopleArtifact consumer : consumers) {
updatedSet.add(service.getPath() + SEPARATOR + consumer.getPath() + SEPARATOR +
GovernanceConstants.CONSUMED_BY);
updatedSet.add(consumer.getPath() + SEPARATOR + service.getPath() + SEPARATOR +
GovernanceConstants.CONSUMES);
}
updateAssociations(registry, oldSet, updatedSet);
} catch (RegistryException e) {
String msg = "Error in associating owners to service. Id: " + service.getId() +
", path: " + service.getPath();
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
*/
/*
* This method extracts people names from the given attribute of the given artifact and returns
* an array containing PeopleArtifacts represented by those names.
* Existence of people artifacts listed under the attribute name must be validated before
* calling this method.
*
* @param registry Associated registry
* @param artifact GovernanceArtifact which stores people list as an attribute
* @param attributeName Name of the attribute which stores people names
* @throws GovernanceException
*/
/*
public static PeopleArtifact[] extractPeopleFromAttribute(Registry registry,
GovernanceArtifact artifact,
String attributeName)
throws GovernanceException {
String[] peopleTexts = artifact.getAttributes(attributeName);
PeopleManager manager = new PeopleManager(registry);
List<PeopleArtifact> list = new ArrayList<PeopleArtifact>();
if (peopleTexts != null) {
for (String peopleText : peopleTexts) {
String name = peopleText.split(GovernanceConstants.ENTRY_VALUE_SEPARATOR)[1];
PeopleArtifact pa = manager.getPeopleArtifactByName(name);
if (pa == null) {
String msg = "Invalid people artifact name is found within the governance " +
"artifact. Path: " + artifact.getPath() + ", Invalid people artifact " +
"name:" + name;
log.error(msg);
throw new GovernanceException(msg);
} else {
list.add(pa);
}
}
}
return list.toArray(new PeopleArtifact[list.size()]);
}
*/
/*
private static void updateAssociations(Registry registry, Set<String> existingAssociationSet,
Set<String> updatedAssociationSet)
throws RegistryException {
Set<String> removedAssociations = new HashSet<String>(existingAssociationSet);
removedAssociations.removeAll(updatedAssociationSet);
Set<String> newAssociations = new HashSet<String>(updatedAssociationSet);
newAssociations.removeAll(existingAssociationSet);
for (String removedAssociation : removedAssociations) {
String[] params = removedAssociation.split(SEPARATOR);
try {
for (int i = 0; i < 2; i++) {
if (GovernanceUtils.retrieveGovernanceArtifactByPath(registry, params[i])
instanceof PeopleArtifact) {
registry.removeAssociation(params[0], params[1], params[2]);
break;
}
}
} catch (GovernanceException ignored) {
}
}
for (String newAssociation : newAssociations) {
String[] params = newAssociation.split(SEPARATOR);
registry.addAssociation(params[0], params[1], params[2]);
}
}
*/
/**
 * Deletes the resource at the given registry path (when it exists) and evicts
 * any cached artifact entry for that path from the tenant's artifact cache.
 *
 * @param registry the registry instance holding the artifact; must be a {@code UserRegistry}
 *                 so the tenant id can be resolved for cache eviction.
 * @param path     the registry path of the artifact to remove.
 * @throws GovernanceException if the underlying registry delete fails.
 */
public static void removeArtifactFromPath(Registry registry, String path)
        throws GovernanceException {
    try {
        if (registry.resourceExists(path)) {
            registry.delete(path);
        }
        ArtifactCache artifactCache =
                ArtifactCacheManager.getCacheManager().getTenantArtifactCache(((UserRegistry) registry).getTenantId());
        // Only invalidate when the path is actually cached; getArtifact is the cheap membership probe.
        if (artifactCache != null && path != null && artifactCache.getArtifact(path) != null) {
            artifactCache.invalidateArtifact(path);
        }
    } catch (RegistryException e) {
        // Fixed the doubled word in the original message ("deleting the the artifact").
        String msg = "Error in deleting the artifact path:" + path + ".";
        throw new GovernanceException(msg, e);
    }
}
/**
 * Looks up the unique-attribute names declared by the artifact configuration
 * registered for the given media type.
 *
 * @param registry  the registry instance to resolve the configuration from.
 * @param mediaType the media type identifying the artifact configuration.
 * @return the configured unique attribute names, or {@code null} when no
 *         configuration exists for the media type.
 * @throws GovernanceException if the configuration lookup fails.
 */
public static List<String> getUniqueAttributesNames(Registry registry,
                                                    String mediaType) throws GovernanceException {
    GovernanceArtifactConfiguration configuration;
    try {
        configuration = findGovernanceArtifactConfigurationByMediaType(mediaType, registry);
    } catch (RegistryException e) {
        throw new GovernanceException(e);
    }
    return (configuration == null) ? null : configuration.getUniqueAttributes();
}
/**
 * Validates a given artifact to ensure all the mandatory fields are filled.
 * If a mandatory field is left empty this method throws an exception naming
 * the field that must be provided.
 *
 * @param registry      the instance of the registry.
 * @param elementString the short name of the artifact type.
 * @param artifact      artifact to be checked for mandatory fields.
 * @throws GovernanceException if the RXT configuration cannot be resolved or a
 *                             mandatory field is missing.
 */
public static void CheckMandatoryFields(Registry registry, String elementString, GovernanceArtifact artifact)
        throws GovernanceException {
    // WSDL/schema/policy artifacts carry their own content; mandatory-field
    // validation does not apply to them.
    if (artifact instanceof WsdlImpl || artifact instanceof SchemaImpl || artifact instanceof PolicyImpl) {
        return;
    }
    GovernanceArtifactConfiguration configuration;
    try {
        configuration = GovernanceUtils.findGovernanceArtifactConfiguration(elementString, registry);
    } catch (RegistryException e) {
        throw new GovernanceException("Retrieving RXT configuration for type :" + elementString + "failed.", e);
    }
    if (configuration == null) {
        throw new GovernanceException("Could not find RXT configuration for type :" + elementString);
    }
    List<Map> mandatoryAttributes = configuration.getMandatoryAttributes();
    if (mandatoryAttributes == null) {
        return;
    }
    for (Map map : mandatoryAttributes) {
        String prop = (String) map.get("properties");
        @SuppressWarnings("unchecked")
        List<String> keys = (List<String>) map.get("keys");
        if ("unbounded".equals(prop)) {
            // Unbounded fields store multiple values under a single key
            // (the configuration is assumed to declare exactly one key here).
            String[] values = artifact.getAttributes(keys.get(0));
            if (values != null) {
                for (String value : values) {
                    if (value == null || value.isEmpty()) {
                        // Stop adding the artifact: a required repeated field is empty.
                        throw new GovernanceException((String) map.get("name") + " is a required field, " +
                                "Please provide a value for this parameter.");
                    }
                }
            }
        } else {
            // Bounded fields may span several keys; join them with ':' exactly as
            // the original did. StringBuilder avoids O(n^2) string concatenation,
            // and the dead "value == null" check was removed (it was initialized to "").
            StringBuilder value = new StringBuilder();
            for (int j = 0; j < keys.size(); ++j) {
                String v = artifact.getAttribute(keys.get(j));
                if (j != 0) {
                    value.append(':');
                }
                if (v != null) {
                    value.append(v);
                }
            }
            if (value.length() == 0) {
                // Stop adding the artifact: the required field has no value at all.
                throw new GovernanceException((String) map.get("name") + " is a required field, " +
                        "Please provide a value for this parameter.");
            }
        }
    }
}
}
| components/governance/org.wso2.carbon.governance.api/src/main/java/org/wso2/carbon/governance/api/util/GovernanceUtils.java | /*
* Copyright (c) 2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.governance.api.util;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.apache.axiom.om.util.AXIOMUtil;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jaxen.JaxenException;
import org.wso2.carbon.base.CarbonContextHolderBase;
import org.wso2.carbon.base.UnloadTenantTask;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.governance.api.cache.*;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifactImpl;
import org.wso2.carbon.governance.api.common.util.ApproveItemBean;
import org.wso2.carbon.governance.api.common.util.CheckListItemBean;
import org.wso2.carbon.governance.api.endpoints.dataobjects.Endpoint;
import org.wso2.carbon.governance.api.endpoints.dataobjects.EndpointImpl;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifactImpl;
import org.wso2.carbon.governance.api.policies.dataobjects.Policy;
import org.wso2.carbon.governance.api.policies.dataobjects.PolicyImpl;
import org.wso2.carbon.governance.api.schema.dataobjects.Schema;
import org.wso2.carbon.governance.api.schema.dataobjects.SchemaImpl;
import org.wso2.carbon.governance.api.wsdls.dataobjects.Wsdl;
import org.wso2.carbon.governance.api.wsdls.dataobjects.WsdlImpl;
import org.wso2.carbon.registry.common.AttributeSearchService;
import org.wso2.carbon.registry.common.ResourceData;
import org.wso2.carbon.registry.common.TermData;
import org.wso2.carbon.registry.core.Association;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.config.RegistryContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext;
import org.wso2.carbon.registry.core.pagination.PaginationContext;
import org.wso2.carbon.registry.core.secure.AuthorizationFailedException;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.registry.core.utils.MediaTypesUtils;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.registry.core.utils.UUIDGenerator;
import org.wso2.carbon.registry.extensions.utils.CommonUtil;
import org.wso2.carbon.registry.indexing.IndexingConstants;
import org.wso2.carbon.registry.indexing.service.TermsSearchService;
import org.wso2.carbon.utils.component.xml.config.ManagementPermission;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utilities used by various Governance API related functionality.
*/
public class GovernanceUtils {
private static final Log log = LogFactory.getLog(GovernanceUtils.class);
private static final String OVERVIEW = "overview";
private static final String UNDERSCORE = "_";
private static RegistryService registryService;
//private static final String SEPARATOR = ":";
private final static Map<Integer, List<GovernanceArtifactConfiguration>>
artifactConfigurations = new HashMap<Integer, List<GovernanceArtifactConfiguration>>();
private static Map<Integer, Map<String, Boolean>> lifecycleAspects =
new HashMap<Integer, Map<String, Boolean>>();
private static final Object ASPECT_MAP_LOCK = new Object();
private static AttributeSearchService attributeSearchService;
private static final ThreadLocal<Registry> tenantGovernanceSystemRegistry = new ThreadLocal<Registry>();
private final static RXTConfigCacheEntryCreatedListener<String, Boolean> entryCreatedListener = new RXTConfigCacheEntryCreatedListener<String, Boolean>();
private final static RXTConfigCacheEntryRemovedListener<String, Boolean> entryRemovedListener = new RXTConfigCacheEntryRemovedListener<String, Boolean>();
private final static RXTConfigCacheEntryUpdatedListener<String, Boolean> entryUpdatedListener = new RXTConfigCacheEntryUpdatedListener<String, Boolean>();
private static boolean rxtCacheInitiated = false;
private static TermsSearchService termsSearchService;
/**
 * Sets the {@link RegistryService} used by the governance utilities.
 * Expected to be invoked once during component start-up wiring.
 *
 * @param registryService the registry service instance to use.
 */
public static void setRegistryService(RegistryService registryService) {
    GovernanceUtils.registryService = registryService;
}
/**
 * Registers the artifact configurations for a tenant and installs an
 * unload-tenant hook that evicts the tenant's cached configurations and
 * lifecycle-aspect map when the tenant is unloaded.
 *
 * @param tenantId       the tenant whose configurations are being registered.
 * @param configurations the configurations to cache for that tenant.
 */
private static void registerArtifactConfigurations(int tenantId,
                                                   List<GovernanceArtifactConfiguration> configurations) {
    artifactConfigurations.put(tenantId, configurations);
    CarbonContextHolderBase.registerUnloadTenantTask(new UnloadTenantTask() {
        public void register(int tenantId, Object registration) {
            // Nothing to register in here.
        }
        public void cleanup(int tenantId) {
            // Map.remove is a no-op for absent keys, so the get()-then-remove
            // double lookups the original performed were redundant.
            artifactConfigurations.remove(tenantId);
            lifecycleAspects.remove(tenantId);
        }
    });
}
/**
 * Parses the artifact configuration stored at the given registry path and adds
 * it to the tenant's cached configuration list.
 *
 * @param registry registry instance
 * @param tenantId tenant id
 * @param path     path of the resource holding the RXT definition
 * @throws RegistryException if the resource cannot be read
 */
public static void registerArtifactConfigurationByPath(Registry registry, int tenantId, String path) throws RegistryException {
    Object content = registry.get(path).getContent();
    // The resource content is either a raw String or a byte[] payload.
    String elementString = (content instanceof String)
            ? (String) content
            : RegistryUtils.decodeBytes((byte[]) content);
    GovernanceArtifactConfiguration parsedConfiguration =
            getGovernanceArtifactConfiguration(elementString);
    List<GovernanceArtifactConfiguration> tenantConfigurations = artifactConfigurations.get(tenantId);
    if (tenantConfigurations == null) {
        tenantConfigurations = new LinkedList<GovernanceArtifactConfiguration>();
    }
    tenantConfigurations.add(parsedConfiguration);
    artifactConfigurations.put(tenantId, tenantConfigurations);
}
/**
 * Query to search for governance artifacts of the given media type.
 * The returned paths are ordered case-insensitively by resource name,
 * with the full path as a case-insensitive tie-breaker.
 *
 * @param mediaType the media type of the artifacts to be searched for.
 * @param registry  the registry instance to run the query on.
 * @return the sorted list of artifact paths (never {@code null}).
 * @throws RegistryException if the operation failed.
 */
public static String[] findGovernanceArtifacts(String mediaType, Registry registry)
        throws RegistryException {
    String[] artifactPaths = MediaTypesUtils.getResultPaths(registry, mediaType);
    if (artifactPaths == null) {
        return new String[0];
    }
    Arrays.sort(artifactPaths, new Comparator<String>() {
        public int compare(String left, String right) {
            int byName = RegistryUtils.getResourceName(left)
                    .compareToIgnoreCase(RegistryUtils.getResourceName(right));
            return (byName != 0) ? byName : left.compareToIgnoreCase(right);
        }
    });
    return artifactPaths;
}
/**
 * Query to search for a governance artifact configuration by media type.
 *
 * @param mediaType the media type of the artifact configuration.
 * @param registry  the registry instance to run the query on.
 * @return the matching artifact configuration, or {@code null} if none matches.
 * @throws RegistryException if the operation failed.
 */
public static GovernanceArtifactConfiguration findGovernanceArtifactConfigurationByMediaType(
        String mediaType, Registry registry)
        throws RegistryException {
    int tenantId = ((UserRegistry) registry).getTenantId();
    List<GovernanceArtifactConfiguration> configurations = artifactConfigurations.get(tenantId);
    if (configurations == null) {
        // Nothing cached for this tenant yet; discover configurations from the registry.
        configurations = findGovernanceArtifactConfigurations(registry);
    }
    for (GovernanceArtifactConfiguration candidate : configurations) {
        if (mediaType.equals(candidate.getMediaType())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Query to search for a governance artifact configuration by its key
 * (the RXT short name).
 *
 * @param key      the key of the artifact configuration.
 * @param registry the registry instance to run the query on.
 * @return the matching artifact configuration, or {@code null} if none matches.
 * @throws RegistryException if the operation failed.
 */
public static GovernanceArtifactConfiguration findGovernanceArtifactConfiguration(
        String key, Registry registry)
        throws RegistryException {
    int tenantId = ((UserRegistry) registry).getTenantId();
    List<GovernanceArtifactConfiguration> configurations = artifactConfigurations.get(tenantId);
    if (configurations == null) {
        // Nothing cached for this tenant yet; discover configurations from the registry.
        configurations = findGovernanceArtifactConfigurations(registry);
    }
    for (GovernanceArtifactConfiguration candidate : configurations) {
        if (key.equals(candidate.getKey())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Resolves the lifecycle name bound to a context for the artifact type
 * identified by the given key.
 *
 * @param key         short name of the artifact type.
 * @param contextName context name of which lifecycle is needed.
 * @param registry    registry instance.
 * @return lifecycle of the name associated with the context, or {@code null}
 *         when the artifact type or context is unknown.
 * @throws RegistryException if the operation failed.
 */
public static String getLifeCycleOfContext(String key, String contextName, Registry registry) throws RegistryException {
    GovernanceArtifactConfiguration configuration = findGovernanceArtifactConfiguration(key, registry);
    return (configuration == null) ? null : configuration.getLifeCycleOfContext(contextName);
}
/**
 * Method to obtain a list of paths having resources of the given media type.
 * Thin wrapper over {@link MediaTypesUtils#getResultPaths} that converts the
 * checked {@code RegistryException} into the API's {@code GovernanceException}.
 *
 * @param registry  the registry instance to run query on.
 * @param mediaType the media type.
 * @return an array of resource paths.
 * @throws GovernanceException if the operation failed.
 */
public static String[] getResultPaths(Registry registry, String mediaType) throws GovernanceException {
    try {
        return MediaTypesUtils.getResultPaths(registry, mediaType);
    } catch (RegistryException e) {
        // Log here as well as rethrow: callers historically rely on this error log.
        String msg = "Error in getting the result for media type: " + mediaType + ".";
        log.error(msg, e);
        throw new GovernanceException(msg, e);
    }
}
/**
 * Removes symbolic links from a set of search-result paths, keeping only
 * "real" resources: a path survives when its resource either is not a registry
 * link or carries an explicit real-path property, and the path is not under
 * the system mount collection.
 *
 * @param paths              candidate result paths; {@code null} is treated as empty.
 * @param governanceRegistry registry used to inspect each resource's properties.
 * @return the filtered paths (never {@code null}).
 */
@SuppressWarnings("unused")
private static String[] removeSymbolicLinks(String[] paths, Registry governanceRegistry) {
    if (paths == null) {
        return new String[0];
    }
    List<String> fixedPaths = new LinkedList<String>();
    for (String path : paths) {
        try {
            // Fetch the resource once; the original called governanceRegistry.get(path)
            // twice per path, doubling what can be a database/remote round trip.
            Resource resource = governanceRegistry.get(path);
            if ((resource.getProperty(RegistryConstants.REGISTRY_LINK) == null ||
                    resource.getProperty(RegistryConstants.REGISTRY_REAL_PATH) != null) &&
                    !path.contains(RegistryConstants.SYSTEM_MOUNT_PATH)) {
                fixedPaths.add(path);
            }
        } catch (RegistryException ignored) {
            // Best-effort filter: unreadable resources are simply dropped from the result.
        }
    }
    return fixedPaths.toArray(new String[fixedPaths.size()]);
}
/**
 * Method to load the Governance Artifacts to be used by the API operations.
 *
 * @param registry       the registry instance used to search for artifacts.
 * @param configurations the artifact configurations to load.
 * @throws RegistryException if the operation failed.
 */
public static void loadGovernanceArtifacts(UserRegistry registry,
                                           List<GovernanceArtifactConfiguration> configurations)
        throws RegistryException {
    // Caches the supplied configurations against the registry's tenant id and
    // installs the tenant-unload cleanup hook.
    registerArtifactConfigurations(registry.getTenantId(), configurations);
}
/**
 * Method to load the Governance Artifacts to be used by the API operations.
 * Configurations are discovered from the registry itself and loaded at most
 * once per tenant; subsequent calls for an already-loaded tenant are no-ops.
 *
 * @param registry the registry instance used to search for artifacts.
 * @throws RegistryException if the operation failed.
 */
public static void loadGovernanceArtifacts(UserRegistry registry) throws RegistryException {
    if (!artifactConfigurations.containsKey(registry.getTenantId())) {
        // Wrapped in unmodifiableList so later callers cannot mutate the shared cache entry.
        loadGovernanceArtifacts(registry, Collections.unmodifiableList(findGovernanceArtifactConfigurations(registry)));
    }
}
    /**
     * Parses a serialized governance artifact configuration element (XML, e.g. the
     * content of an artifact-type definition — presumably an RXT document, TODO confirm)
     * into a {@code GovernanceArtifactConfiguration} bean.
     * <p>
     * Root-element attributes read: {@code shortName}, {@code singularLabel},
     * {@code pluralLabel}, {@code type}, {@code fileExtension}, {@code iconSet},
     * {@code hasNamespace}. Optional child elements read: {@code nameAttribute},
     * {@code namespaceAttribute}, {@code elementRoot}, {@code elementNamespace},
     * {@code storagePath}, {@code lifecycle}, {@code groupingAttribute},
     * {@code lifecycleContexts}, {@code content}, {@code relationships}, {@code ui},
     * {@code permissions}.
     * <p>
     * Malformed XML results in a {@code null} return; a non-numeric {@code iconSet}
     * aborts parsing but still returns the partially-populated configuration.
     *
     * @param elementString the configuration content as an XML string.
     * @return the populated configuration, or {@code null} if the XML could not be parsed.
     */
    public static GovernanceArtifactConfiguration getGovernanceArtifactConfiguration(String elementString) {
        GovernanceArtifactConfiguration configuration = null;
        try {
            OMElement configElement = AXIOMUtil.stringToOM(elementString);
            if (configElement != null) {
                configuration = new GovernanceArtifactConfiguration();
                // Which payload attribute holds the artifact's name.
                OMElement artifactNameAttributeElement = configElement.getFirstChildWithName(
                        new QName("nameAttribute"));
                if (artifactNameAttributeElement != null) {
                    configuration.setArtifactNameAttribute(
                            artifactNameAttributeElement.getText());
                }
                // Namespace handling: an explicit <namespaceAttribute> wins; otherwise
                // hasNamespace="false" disables namespaces; the default is "has one".
                OMElement artifactNamespaceAttributeElement =
                        configElement.getFirstChildWithName(
                                new QName("namespaceAttribute"));
                if (artifactNamespaceAttributeElement != null) {
                    configuration.setArtifactNamespaceAttribute(
                            artifactNamespaceAttributeElement.getText());
                } else if (Boolean.toString(false).equals(
                        configElement.getAttributeValue(new QName("hasNamespace")))) {
                    configuration.setArtifactNamespaceAttribute(null);
                } else {
                    configuration.setHasNamespace(true);
                }
                OMElement artifactElementRootElement = configElement.getFirstChildWithName(
                        new QName("elementRoot"));
                if (artifactElementRootElement != null) {
                    configuration.setArtifactElementRoot(
                            artifactElementRootElement.getText());
                }
                OMElement artifactElementNamespaceElement = configElement.getFirstChildWithName(
                        new QName("elementNamespace"));
                if (artifactElementNamespaceElement != null) {
                    configuration.setArtifactElementNamespace(
                            artifactElementNamespaceElement.getText());
                }
                // Simple identity attributes copied straight off the root element.
                configuration.setKey(configElement.getAttributeValue(new QName("shortName")));
                configuration.setSingularLabel(
                        configElement.getAttributeValue(new QName("singularLabel")));
                configuration.setPluralLabel(
                        configElement.getAttributeValue(new QName("pluralLabel")));
                configuration.setMediaType(
                        configElement.getAttributeValue(new QName("type")));
                configuration.setExtension(
                        configElement.getAttributeValue(new QName("fileExtension")));
                String iconSetString = configElement.getAttributeValue(new QName("iconSet"));
                if (iconSetString != null) {
                    // May throw NumberFormatException, caught (and swallowed) below.
                    configuration.setIconSet(Integer.parseInt(iconSetString));
                }
                // Storage-path template; defaults to "/@{name}" when not specified.
                OMElement pathExpressionElement = configElement.getFirstChildWithName(
                        new QName("storagePath"));
                if (pathExpressionElement != null) {
                    configuration.setPathExpression(pathExpressionElement.getText());
                } else {
                    configuration.setPathExpression("/@{name}");
                }
                OMElement lifecycleElement = configElement.getFirstChildWithName(
                        new QName("lifecycle"));
                if (lifecycleElement != null) {
                    configuration.setLifecycle(lifecycleElement.getText());
                }
                OMElement groupingAttributeElement = configElement.getFirstChildWithName(
                        new QName("groupingAttribute"));
                if (groupingAttributeElement != null) {
                    configuration.setGroupingAttribute(groupingAttributeElement.getText());
                }
                // <lifecycleContexts>: each <lifecycleContext lcName="..."> maps a
                // comma-separated list of context names to that lifecycle.
                OMElement lifecycleContextsElement = configElement.getFirstChildWithName(new QName("lifecycleContexts"));
                if(lifecycleContextsElement != null) {
                    Iterator lifecycleContextsIterator = lifecycleContextsElement.getChildrenWithName(new QName("lifecycleContext"));
                    while(lifecycleContextsIterator.hasNext()) {
                        OMElement lifecycleContextElement = (OMElement) lifecycleContextsIterator.next();
                        String lcName = lifecycleContextElement.getAttributeValue(new QName("lcName"));
                        String contextsStr = lifecycleContextElement.getText();
                        String[] contexts = contextsStr.split(",");
                        for(String context : contexts) {
                            configuration.addLifeCycleToContext(context, lcName);
                        }
                    }
                }
                // <content>: optional href attribute points at an external content URL;
                // the element itself is kept as the content definition.
                OMElement contentDefinition = configElement.getFirstChildWithName(
                        new QName("content"));
                if (contentDefinition != null) {
                    String href = contentDefinition.getAttributeValue(new QName("href"));
                    if (href != null) {
                        configuration.setContentURL(href);
                    }
                    configuration.setContentDefinition(contentDefinition);
                }
                // <relationships>: typed <association> entries plus <dependency> entries,
                // the latter normalized into associations of type "depends".
                OMElement associationDefinitions = configElement.getFirstChildWithName(
                        new QName("relationships"));
                if (associationDefinitions != null) {
                    List<Association> associations =
                            new LinkedList<Association>();
                    Iterator associationElements =
                            associationDefinitions.getChildrenWithName(
                                    new QName("association"));
                    while (associationElements.hasNext()) {
                        OMElement associationElement = (OMElement) associationElements.next();
                        String type = associationElement.getAttributeValue(new QName("type"));
                        String source =
                                associationElement.getAttributeValue(new QName("source"));
                        String target =
                                associationElement.getAttributeValue(new QName("target"));
                        associations.add(new Association(source, target, type));
                    }
                    associationElements =
                            associationDefinitions.getChildrenWithName(
                                    new QName("dependency"));
                    while (associationElements.hasNext()) {
                        OMElement associationElement = (OMElement) associationElements.next();
                        String source =
                                associationElement.getAttributeValue(new QName("source"));
                        String target =
                                associationElement.getAttributeValue(new QName("target"));
                        associations.add(new Association(source, target, "depends"));
                    }
                    configuration.setRelationshipDefinitions(associations.toArray(
                            new Association[associations.size()]));
                }
                // <ui>: stored wholesale; its <list> child, when present, is kept separately.
                OMElement uiConfigurations = configElement.getFirstChildWithName(
                        new QName("ui"));
                if (uiConfigurations != null) {
                    configuration.setUIConfigurations(uiConfigurations);
                    OMElement uiListConfigurations = uiConfigurations.getFirstChildWithName(
                            new QName("list"));
                    if (uiListConfigurations != null) {
                        configuration.setUIListConfigurations(uiListConfigurations);
                    }
                }
                // <permissions>: use declared <permission> (name + id) entries when present;
                // otherwise synthesize the default govern/add/list/configure permission set.
                OMElement uiPermissions = configElement.getFirstChildWithName(
                        new QName("permissions"));
                if (uiPermissions != null) {
                    Iterator permissionElements =
                            uiPermissions.getChildrenWithName(
                                    new QName("permission"));
                    List<ManagementPermission> managementPermissions =
                            new LinkedList<ManagementPermission>();
                    while (permissionElements.hasNext()) {
                        OMElement permissionElement = (OMElement) permissionElements.next();
                        OMElement nameElement =
                                permissionElement.getFirstChildWithName(
                                        new QName("name"));
                        String name = (nameElement != null) ? nameElement.getText() : null;
                        OMElement idElement =
                                permissionElement.getFirstChildWithName(
                                        new QName("id"));
                        String id = (idElement != null) ? idElement.getText() : null;
                        // Entries missing either a name or an id are skipped.
                        if (name != null && id != null) {
                            managementPermissions.add(new ManagementPermission(name, id));
                        }
                    }
                    configuration.setUIPermissions(managementPermissions.toArray(
                            new ManagementPermission[managementPermissions.size()]));
                } else {
                    // if no permission definitions were present, define the default ones.
                    List<ManagementPermission> managementPermissions =
                            new LinkedList<ManagementPermission>();
                    String idPrefix = "/permission/admin/manage/resources/govern/" +
                            configuration.getKey();
                    managementPermissions.add(
                            new ManagementPermission(configuration.getPluralLabel(), idPrefix));
                    managementPermissions.add(
                            new ManagementPermission("Add", idPrefix + "/add"));
                    managementPermissions.add(
                            new ManagementPermission("List", idPrefix + "/list"));
                    managementPermissions.add(
                            new ManagementPermission(configuration.getPluralLabel(),
                                    "/permission/admin/configure/governance/" +
                                            configuration.getKey() + "-ui"
                            )
                    );
                    configuration.setUIPermissions(managementPermissions.toArray(
                            new ManagementPermission[managementPermissions.size()]));
                }
            }
        } catch (XMLStreamException ignored) {
            // Malformed XML: treated as "no configuration"; null is returned.
        } catch (NumberFormatException ignored) {
            // Non-numeric iconSet: parsing stops; partially-populated configuration is returned.
        }
        return configuration;
    }
/**
* Method to locate Governance Artifact configurations.
*
* @param registry the registry instance to run query on.
* @return an array of resource paths.
* @throws GovernanceException if the operation failed.
*/
public static List<GovernanceArtifactConfiguration> findGovernanceArtifactConfigurations(
Registry registry) throws RegistryException {
String[] artifactConfigurations = findGovernanceArtifacts(
GovernanceConstants.GOVERNANCE_ARTIFACT_CONFIGURATION_MEDIA_TYPE, registry);
List<GovernanceArtifactConfiguration> configurations =
new LinkedList<GovernanceArtifactConfiguration>();
for (String artifactConfiguration : artifactConfigurations) {
Resource resource = registry.get(artifactConfiguration);
Object content = resource.getContent();
String elementString;
if (content instanceof String) {
elementString = (String) content;
} else {
elementString = RegistryUtils.decodeBytes((byte[]) content);
}
configurations.add(getGovernanceArtifactConfiguration(elementString));
}
return configurations;
}
/**
* Method to return the GovernanceArtifactConfiguration for a given media type
*
* @param registry the registry instance to run query on.
* @param mediaType mediatype of the needed artifact configuration
* @return GovernanceArtifactConfiguration
* @throws RegistryException exception thorown if something goes wrong
*/
public static GovernanceArtifactConfiguration getArtifactConfigurationByMediaType(Registry registry, String mediaType) throws RegistryException {
List<GovernanceArtifactConfiguration> configurations = findGovernanceArtifactConfigurations(registry);
for(GovernanceArtifactConfiguration configuration : configurations) {
if(configuration.getMediaType().equals(mediaType)) {
return configuration;
}
}
return null;
}
@SuppressWarnings("unused")
public static void setTenantGovernanceSystemRegistry(final int tenantId) throws RegistryException {
if (registryService != null) {
tenantGovernanceSystemRegistry.set(
registryService.getGovernanceSystemRegistry(tenantId));
}
}
@SuppressWarnings("unused")
public static void unsetTenantGovernanceSystemRegistry() throws RegistryException {
tenantGovernanceSystemRegistry.remove();
}
/**
* Returns the system governance registry.
*
* @param registry the user registry.
* @return the system registry.
* @throws RegistryException throws if an error occurs
*/
public static Registry getGovernanceSystemRegistry(Registry registry) throws RegistryException {
if (tenantGovernanceSystemRegistry.get() != null) {
return tenantGovernanceSystemRegistry.get();
}
if (registryService == null) {
return null;
}
UserRegistry userRegistry;
if (!(registry instanceof UserRegistry)) {
return null;
}
userRegistry = (UserRegistry) registry;
return registryService.getGovernanceSystemRegistry(userRegistry.getTenantId());
}
/**
* Obtains the governance user registry from the given root registry instance. This is useful
* when creating a governance user registry out of a remote client registry instance.
*
* @param registry the remote client registry instance.
* @param username the name of the user to connect as.
* @return the system registry.
* @throws RegistryException throws if an error occurs
*/
@SuppressWarnings("unused")
public static Registry getGovernanceUserRegistry(Registry registry, String username)
throws RegistryException {
if (RegistryContext.getBaseInstance() == null) {
RegistryContext.getBaseInstance(null, false);
}
return new UserRegistry(username, MultitenantConstants.SUPER_TENANT_ID, registry, null,
RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
}
/**
* Obtains the governance user registry from the given root registry instance. This is useful
* when creating a tenant aware governance user registry out of a remote client registry instance.
*
* @param registry registry the remote client registry instance.
* @param username username the name of the user to connect as.
* @param tenantId tenant id
* @return the system registry
* @throws RegistryException throws if an error occurs
*/
public static Registry getGovernanceUserRegistry(Registry registry, String username, int tenantId)
throws RegistryException {
if (RegistryContext.getBaseInstance() == null) {
RegistryContext.getBaseInstance(null, false);
}
return new UserRegistry(username, tenantId, registry, null,
RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
}
public static String parameterizeString(RequestContext requestContext, String parameterString) {
String parameterizedString = parameterString;
Pattern pattern = Pattern.compile("\\{@(\\w)*\\}");
Matcher matcher = pattern.matcher(parameterString);
GovernanceArtifact governanceArtifact;
Registry registry = requestContext.getRegistry();
String resourcePath = requestContext.getResourcePath().getPath();
Set<String> matchSet = new HashSet<String>();
while (matcher.find()) {
matchSet.add(matcher.group());
}
for (String current : matchSet) {
String name = current.substring(2, current.length() - 1);
//To replace special values such as {@resourcePath}
if (name.equals("resourcePath")) {
parameterizedString = parameterizedString.replaceAll("\\" + current.replace("}", "\\}"), resourcePath);
}
try {
governanceArtifact = GovernanceUtils.retrieveGovernanceArtifactByPath(requestContext.getSystemRegistry(), resourcePath);
if (governanceArtifact != null && governanceArtifact.getAttribute(name) != null) {
parameterizedString = parameterizedString.replaceAll("\\" + current.replace("}", "\\}"), governanceArtifact.getAttribute(name));
} else if (registry.get(resourcePath).getProperty(name) != null) {
parameterizedString = parameterizedString.replaceAll("\\" + current.replace("}", "\\}"), registry.get(resourcePath).getProperty(name));
} else {
log.error("Unable to locate the given value in properties or attributes");
}
} catch (RegistryException e) {
log.error(e.getMessage(), e);
}
}
return parameterizedString;
}
/**
* Method to remove a governance artifact from the registry.
*
* @param registry the registry instance.
* @param artifactId the identifier of the artifact.
* @throws GovernanceException if the operation failed.
*/
public static void removeArtifact(Registry registry, String artifactId)
throws GovernanceException {
try {
String path = getArtifactPath(registry, artifactId);
if (registry.resourceExists(path)) {
registry.delete(path);
}
ArtifactCache artifactCache =
ArtifactCacheManager.getCacheManager().getTenantArtifactCache(((UserRegistry) registry).getTenantId());
if (artifactCache != null && path != null) {
artifactCache.invalidateArtifact(path);
}
} catch (RegistryException e) {
String msg = "Error in deleting the the artifact id:" + artifactId + ".";
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
    /**
     * Method to obtain the artifact path of a governance artifact on the registry.
     * <p>
     * Resolution is UUID-based: results are served from the registry UUID cache when
     * possible, falling back to a direct SQL lookup against {@code REG_RESOURCE} and
     * priming the cache. Remote (realm-less) registries bypass the cache entirely.
     *
     * @param registry the registry instance.
     * @param artifactId the identifier of the artifact.
     * @return the artifact path, or {@code null} when the id does not resolve to
     *         exactly one resource.
     * @throws GovernanceException if the operation failed.
     */
    public static String getArtifactPath(Registry registry, String artifactId)
            throws GovernanceException {
        Cache<String, String> cache;
        UserRegistry userRegistry = (UserRegistry) registry;
        //This is temp fix to identify remote calls. Will move cache initialization logic into registry core
        // with next major carbon(ex:4.5.0) release.
        if (userRegistry.getUserRealm() == null) {
            return getDirectArtifactPath(registry, artifactId);
        }
        try{
            // Switch the thread to the tenant that owns this registry before touching
            // the tenant-scoped UUID cache.
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(userRegistry.getTenantId());
            String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(true);
            if (tenantDomain == null) {
                // Fall back to deriving the tenant domain from the logged-in user's name.
                tenantDomain = MultitenantUtils.getTenantDomain(((UserRegistry) registry).getUserName());
            }
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain);
            cache = RegistryUtils.getUUIDCache(RegistryConstants.UUID_CACHE_ID);
            if(cache.containsKey(artifactId)){
                return cache.get(artifactId);
            }
            try {
                // Cache miss: resolve the UUID with a custom query and prime the cache.
                String sql = "SELECT REG_PATH_ID, REG_NAME FROM REG_RESOURCE WHERE REG_UUID = ?";
                String[] result;
                Map<String, String> parameter = new HashMap<String, String>();
                parameter.put("1", artifactId);
                parameter.put("query", sql);
                result = registry.executeQuery(null, parameter).getChildren();
                if (result != null && result.length == 1) {
                    cache.put(artifactId, result[0]);
                    return result[0];
                }
                // Zero or multiple matches: the id cannot be resolved to a single path.
                return null;
            } catch (RegistryException e) {
                String msg =
                        "Error in getting the path from the registry. Execute query failed with message : " +
                                e.getMessage();
                log.error(msg, e);
                throw new GovernanceException(msg, e);
            }
        } finally {
            // Always restore the caller's tenant context.
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
/**
* Method to obtain the artifact path of a governance artifact on the registry.
* without going through the UUID cache
*
* @param registry the registry instance.
* @param artifactId the identifier of the artifact.
* @return the artifact path.
* @throws GovernanceException if the operation failed.
* TODO: This method is added since UUID cache cannot be properly implemented without proper
* TODO: changes in the registry core. getArtifactPath needs to be moved into the registry core
* TODO: and UUID caching should be handled by the cacheBackedRegistry and cachingHandler
*/
public static String getDirectArtifactPath(Registry registry, String artifactId)
throws GovernanceException {
try {
String sql = "SELECT REG_PATH_ID, REG_NAME FROM REG_RESOURCE WHERE REG_UUID = ?";
String[] result;
Map<String, String> parameter = new HashMap<String, String>();
parameter.put("1", artifactId);
parameter.put("query", sql);
result = registry.executeQuery(null, parameter).getChildren();
if (result != null && result.length == 1) {
return result[0];
}
return null;
} catch (RegistryException e) {
String msg = "Error in getting the path from the registry. Execute query failed with message : "
+ e.getMessage();
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
/**
* Retrieve all the governance artifact paths which associated with the given lifecycle
*
* @param registry registry instance
* @param lcName lifecycle name
* @param mediaType mediatype of the artifacts
* @return String array of all the artifact paths
* @throws GovernanceException if the operation failed.
*/
public static String[] getAllArtifactPathsByLifecycle(Registry registry, String lcName, String mediaType) throws GovernanceException {
String sql = "SELECT R.REG_PATH_ID, R.REG_NAME FROM REG_RESOURCE R, REG_PROPERTY PP, " +
"REG_RESOURCE_PROPERTY RP WHERE R.REG_VERSION=RP.REG_VERSION AND RP.REG_PROPERTY_ID=PP.REG_ID " +
"AND PP.REG_NAME = ? AND PP.REG_VALUE = ? AND R.REG_MEDIA_TYPE = ?";
Map<String, String> parameter = new HashMap<String, String>();
parameter.put("1", "registry.LC.name");
parameter.put("2", lcName);
parameter.put("3", mediaType);
parameter.put("query", sql);
try {
return (String[]) registry.executeQuery(null, parameter).getContent();
} catch (RegistryException e) {
String msg = "Error occured while executing custom query";
throw new GovernanceException(msg, e);
}
}
/**
* Retrieve all the governance artifact paths which associated with the given lifecycle in the given lifecycle state
*
* @param registry registry instance
* @param lcName lifecycle name
* @param lcState lifecycle state
* @param mediaType mediatype of the artifacts
* @return String array of all the artifact paths
* @throws GovernanceException if the operation failed.
*/
public static String[] getAllArtifactPathsByLifecycleState(
Registry registry, String lcName, String lcState, String mediaType) throws GovernanceException {
String sql = "SELECT R.REG_PATH_ID, R.REG_NAME FROM REG_RESOURCE R, REG_PROPERTY PP, " +
"REG_RESOURCE_PROPERTY RP WHERE R.REG_VERSION=RP.REG_VERSION AND RP.REG_PROPERTY_ID=PP.REG_ID " +
"AND PP.REG_NAME = ? AND PP.REG_VALUE = ? AND R.REG_MEDIA_TYPE = ?";
Map<String, String> parameter = new HashMap<String, String>();
parameter.put("1", "registry.lifecycle." + lcName + ".state");
parameter.put("2", lcState);
parameter.put("3", mediaType);
parameter.put("query", sql);
try {
return (String[]) registry.executeQuery(null, parameter).getContent();
} catch (RegistryException e) {
String msg = "Error occured while executing custom query";
throw new GovernanceException(msg, e);
}
}
/**
* Method to obtain the value of a governance attribute.
*
* @param element the payload element.
* @param name the attribute name.
* @param namespace the namespace of the payload element.
* @return the value of the attribute by the given name if it exists or an empty string.
*/
public static String getAttributeValue(OMElement element, String name, String namespace) {
String[] parts = name.split("_");
OMElement attributeElement = element;
for (String part : parts) {
attributeElement = attributeElement.getFirstChildWithName(new QName(namespace, part));
if (attributeElement == null) {
return "";
}
}
return attributeElement.getText();
}
/**
* @param registry the registry instance.
* @return list of governance artifact identifiers.
* @throws GovernanceException if the operation failed.
* @deprecated Method to obtain all indexed governance artifact identifiers on the provided registry
* instance.
*/
public static String[] getAllArtifactIds(Registry registry)
throws GovernanceException {
throw new UnsupportedOperationException();
}
/**
* @param registry the registry instance.
* @return list of governance artifacts
* @throws GovernanceException if the operation failed.
* @deprecated Method to obtain all indexed governance artifacts on the provided registry instance.
*/
@SuppressWarnings("unused")
public static GovernanceArtifact[] getAllArtifacts(Registry registry)
throws GovernanceException {
throw new UnsupportedOperationException();
}
/**
* Method to obtain a governance artifact on the registry.
*
* @param registry the registry instance.
* @param artifactId the identifier of the artifact.
* @return the artifact.
* @throws GovernanceException if the operation failed.
*/
public static GovernanceArtifact retrieveGovernanceArtifactById(Registry registry,
String artifactId)
throws GovernanceException {
String artifactPath = getArtifactPath(registry, artifactId);
if (artifactPath == null) {
String msg = "Governance artifact is not found for id: " + artifactId + ".";
if (log.isDebugEnabled()) {
log.debug(msg);
}
return null;
}
return retrieveGovernanceArtifactByPath(registry, artifactPath);
}
    /**
     * Method to obtain a governance artifact on the registry by the artifact path.
     * <p>
     * A tenant-scoped artifact cache is consulted first. On a miss the resource is
     * loaded, its lifecycle name/state properties are read, and a typed artifact
     * (WSDL, schema, policy, endpoint, or a tenant-configured generic artifact) is
     * built according to the resource's media type and cached for subsequent lookups.
     * Missing or inaccessible paths yield {@code null} rather than an error.
     *
     * @param registry the registry instance.
     * @param artifactPath the path of the artifact.
     * @return the artifact, or {@code null} when the path is missing, inaccessible,
     *         or carries an unrecognized media type.
     * @throws GovernanceException if the operation failed.
     */
    public static GovernanceArtifact retrieveGovernanceArtifactByPath(Registry registry,
                                                                      String artifactPath)
            throws GovernanceException {
        UserRegistry userRegistry = (UserRegistry) registry;
        // NOTE(review): currentUser is never read below — candidate for removal.
        String currentUser = userRegistry.getUserName();
        ArtifactCache artifactCache =
                ArtifactCacheManager.getCacheManager().getTenantArtifactCache(userRegistry.getTenantId());
        // Fast path: serve from the tenant's artifact cache when possible.
        if (artifactPath != null && artifactCache != null) {
            GovernanceArtifact governanceArtifact = artifactCache.getArtifact(artifactPath);
            if (governanceArtifact != null) {
                return governanceArtifact;
            }
        }
        String artifactLC;
        String artifactLCState = null;
        try {
            Resource artifactResource;
            if (registry.resourceExists(artifactPath)) {
                try {
                    artifactResource = registry.get(artifactPath);
                } catch (AuthorizationFailedException e) {
                    // if the user does not have access to the specified path, we are returning null.
                    if (log.isDebugEnabled()) {
                        String msg = "User does not have access to path " + artifactPath + ".";
                        log.debug(msg);
                    }
                    return null;
                }
                // Lifecycle name and (when a lifecycle is attached) its current state.
                artifactLC = artifactResource.getProperty("registry.LC.name");
                if (artifactLC != null) {
                    artifactLCState = artifactResource.getProperty("registry.lifecycle." + artifactLC + ".state");
                }
            } else {
                // if the artifact path doesn't exist we are returning null.
                if (log.isDebugEnabled()) {
                    String msg = "The artifact path doesn't exists at " + artifactPath + ".";
                    log.debug(msg);
                }
                return null;
            }
            String artifactId =
                    artifactResource.getUUID();
            String mediaType = artifactResource.getMediaType();
            List<String> uniqueAttributes = getUniqueAttributesNames(registry, mediaType);
            // Dispatch on media type to build the concrete artifact implementation.
            if (GovernanceConstants.WSDL_MEDIA_TYPE
                    .equals(mediaType)) {
                Wsdl wsdl = new WsdlImpl(artifactId, registry);
                ((WsdlImpl) wsdl).setLcName(artifactLC);
                ((WsdlImpl) wsdl).setLcState(artifactLCState);
                ((WsdlImpl) wsdl).setArtifactPath(artifactPath);
                ((WsdlImpl) wsdl).setUniqueAttributes(uniqueAttributes);
                if (artifactCache != null) {
                    artifactCache.addArtifact(artifactPath, wsdl);
                }
                return wsdl;
            } else if (GovernanceConstants.SCHEMA_MEDIA_TYPE
                    .equals(mediaType)) {
                Schema schema = new SchemaImpl(artifactId, registry);
                ((SchemaImpl) schema).setLcName(artifactLC);
                ((SchemaImpl) schema).setLcState(artifactLCState);
                ((SchemaImpl) schema).setArtifactPath(artifactPath);
                ((SchemaImpl) schema).setUniqueAttributes(uniqueAttributes);
                if (artifactCache != null) {
                    artifactCache.addArtifact(artifactPath, schema);
                }
                return schema;
            } else if (GovernanceConstants.POLICY_XML_MEDIA_TYPE
                    .equals(mediaType)) {
                Policy policy = new PolicyImpl(artifactId, registry);
                ((PolicyImpl) policy).setLcName(artifactLC);
                ((PolicyImpl) policy).setLcState(artifactLCState);
                ((PolicyImpl) policy).setArtifactPath(artifactPath);
                ((PolicyImpl) policy).setUniqueAttributes(uniqueAttributes);
                if (artifactCache != null) {
                    artifactCache.addArtifact(artifactPath, policy);
                }
                return policy;
            } else if (GovernanceConstants.ENDPOINT_MEDIA_TYPE
                    .equals(mediaType)) {
                Endpoint endpoint = new EndpointImpl(artifactId, registry);
                ((EndpointImpl) endpoint).setLcName(artifactLC);
                ((EndpointImpl) endpoint).setLcState(artifactLCState);
                ((EndpointImpl) endpoint).setArtifactPath(artifactPath);
                ((EndpointImpl) endpoint).setUniqueAttributes(uniqueAttributes);
                if (artifactCache != null) {
                    artifactCache.addArtifact(artifactPath, endpoint);
                }
                return endpoint;
            } else if (mediaType != null && mediaType.matches("application/[a-zA-Z0-9.+-]+")) {
                // Tenant-configured (generic) artifact types: match the media type
                // against the configurations registered for this tenant.
                if (registry instanceof UserRegistry) {
                    List<GovernanceArtifactConfiguration> configurations =
                            artifactConfigurations.get(((UserRegistry) registry).getTenantId());
                    if (configurations != null) {
                        for (GovernanceArtifactConfiguration configuration :
                                configurations) {
                            if (mediaType.equals(configuration.getMediaType())) {
                                GenericArtifactImpl artifact;
                                // XML-content types ("application/vnd.*+xml") must carry a payload;
                                // the payload is parsed and used to build the artifact directly.
                                if (mediaType.matches("application/vnd\\.[a-zA-Z0-9.-]+\\+xml")) {
                                    byte[] contentBytes = (byte[]) artifactResource.getContent();
                                    if (contentBytes == null || contentBytes.length == 0) {
                                        throw new GovernanceException(
                                                "Unable to read payload of governance artifact " +
                                                        "at path: " + artifactPath
                                        );
                                    }
                                    OMElement contentElement = buildOMElement(contentBytes);
                                    artifact = new GenericArtifactImpl(
                                            artifactId, contentElement,
                                            configuration.getArtifactNameAttribute(),
                                            configuration.getArtifactNamespaceAttribute(),
                                            configuration.getArtifactElementNamespace(),
                                            configuration.getMediaType());
                                    artifact.associateRegistry(registry);
                                    artifact.setArtifactPath(artifactPath);
                                } else {
                                    // Non-XML content: lazily bound to the registry instead.
                                    artifact = new GenericArtifactImpl(artifactId, registry);
                                }
                                artifact.setLcState(artifactLCState);
                                artifact.setLcName(artifactLC);
                                artifact.setUniqueAttributes(uniqueAttributes);
                                if (artifactCache != null) {
                                    artifactCache.addArtifact(artifactPath, artifact);
                                }
                                return artifact;
                            }
                        }
                    }
                }
            }
            /*else if (GovernanceConstants.PEOPLE_MEDIA_TYPE.
                    equals(artifactResource.getMediaType())) {
                // it is a peopleArtifact
                byte[] contentBytes = (byte[]) artifactResource.getContent();
                OMElement contentElement = null;
                if (contentBytes != null) {
                    contentElement = buildOMElement(contentBytes);
                }
                String peopleGroup = CommonUtil.getPeopleGroup(contentElement);
                PeopleArtifact peopleArtifact = null;
                switch (PeopleGroup.valueOf(peopleGroup.toUpperCase())) {
                    case ORGANIZATION:
                        peopleArtifact = new Organization(artifactId, contentElement);
                        break;
                    case DEPARTMENT:
                        peopleArtifact = new Department(artifactId, contentElement);
                        break;
                    case PROJECT_GROUP:
                        peopleArtifact = new ProjectGroup(artifactId, contentElement);
                        break;
                    case PERSON:
                        peopleArtifact = new Person(artifactId, contentElement);
                        break;
                    default:
                        assert false;
                }
                peopleArtifact.associateRegistry(registry);
                return peopleArtifact;
            }*/
        } catch (RegistryException e) {
            String msg = "Error in retrieving governance artifact by path. path: " + artifactPath + ".";
            //log.error(msg, e);
            throw new GovernanceException(msg, e);
        }
        // Unrecognized media type (or no matching tenant configuration).
        return null;
    }
    /**
     * Extracts all CheckListItemBeans from the resource
     * and updates the lifecycle state of the artifact when the given aspect is the
     * resource's default lifecycle.
     *
     * @param artifactResource resource related to the artifact
     * @param artifact artifact which related to the resource
     * @param artifactLC aspect name of which check list item bean is needed
     * @return CheckListItemBean array extracted from the resource, ordered by each
     *         item's declared order, or {@code null} when no checklist items exist
     * @throws GovernanceException GovernanceException if the operation failed.
     */
    public static CheckListItemBean[] getAllCheckListItemBeans(Resource artifactResource, GovernanceArtifact artifact,
                                                               String artifactLC) throws GovernanceException {
        String defaultLC = artifactResource.getProperty("registry.LC.name");
        String artifactLCState = artifactResource.getProperty("registry.lifecycle." + artifactLC + ".state");
        // Only sync the artifact's state when the requested aspect is the default lifecycle.
        if (artifactLC.equals(defaultLC)) {
            ((GovernanceArtifactImpl) artifact).setLcState(artifactLCState);
        }
        ArrayList<CheckListItemBean> checkListItemList = new ArrayList<CheckListItemBean>();
        Properties lifecycleProps = artifactResource.getProperties();
        Set propertyKeys = lifecycleProps.keySet();
        for (Object propertyObj : propertyKeys) {
            String propertyKey = (String) propertyObj;
            // Checklist entries are stored as properties whose keys start with the
            // prefix, end with ".item", and mention the requested aspect name.
            String checkListPrefix = "registry.custom_lifecycle.checklist.";
            String checkListSuffix = ".item";
            if (propertyKey.startsWith(checkListPrefix) && propertyKey.endsWith(checkListSuffix) && propertyKey
                    .contains(GovernanceConstants.DOT + artifactLC + GovernanceConstants.DOT)) {
                // Each property value is a list of "key:value" strings
                // (status:/name:/value:/order:); substring offsets skip the key prefix.
                List<String> propValues = (List<String>) lifecycleProps.get(propertyKey);
                CheckListItemBean checkListItem = new CheckListItemBean();
                if (propValues != null && propValues.size() > 2) {
                    for (String param : propValues) {
                        if ((param.startsWith("status:"))) {
                            checkListItem.setStatus(param.substring(7));
                        } else if ((param.startsWith("name:"))) {
                            checkListItem.setName(param.substring(5));
                        } else if ((param.startsWith("value:"))) {
                            checkListItem.setValue(Boolean.parseBoolean(param.substring(6)));
                        } else if ((param.startsWith("order:"))) {
                            checkListItem.setOrder(Integer.parseInt(param.substring(6)));
                        }
                    }
                }
                checkListItemList.add(checkListItem);
            }
        }
        // Re-order the beans by their declared "order" value.
        // NOTE(review): assumes the order values form a 0-based permutation of the
        // list indices; an out-of-range order would throw
        // ArrayIndexOutOfBoundsException — confirm against the lifecycle executor.
        CheckListItemBean[] checkListItemBeans = new CheckListItemBean[checkListItemList.size()];
        if (checkListItemBeans.length > 0) {
            for (CheckListItemBean checkListItemBean : checkListItemList) {
                checkListItemBeans[checkListItemBean.getOrder()] = checkListItemBean;
            }
            return checkListItemBeans;
        }
        return null;
    }
/**
 * Extracts all ApproveItemBeans (voting check-items) from the resource's
 * life-cycle properties.
 *
 * @param currentUser      current registry user (used to mark items this user has voted on)
 * @param artifactResource resource related to the artifact
 * @param artifact         artifact related to the resource
 * @return ApproveItemBean array indexed by each item's order, or null when the
 *         resource carries no voting properties
 * @throws GovernanceException if the resource has no associated lifecycle.
 */
public static ApproveItemBean[] getAllApproveItemBeans(
        String currentUser, Resource artifactResource, GovernanceArtifact artifact) throws GovernanceException {
    String artifactLC = artifactResource.getProperty("registry.LC.name");
    if (artifactLC == null) {
        throw new GovernanceException("No lifecycle associated with the artifact path " +
                artifactResource.getPath());
    }
    // Propagate the current life-cycle state onto the artifact object.
    String artifactLCState = artifactResource.getProperty("registry.lifecycle." + artifactLC + ".state");
    ((GovernanceArtifactImpl) artifact).setLcState(artifactLCState);
    ArrayList<ApproveItemBean> approveItemList = new ArrayList<ApproveItemBean>();
    Properties lifecycleProps = artifactResource.getProperties();
    Set propertyKeys = lifecycleProps.keySet();
    for (Object propertyObj : propertyKeys) {
        String propertyKey = (String) propertyObj;
        // Voting items are stored as "registry.custom_lifecycle.votes.<...>.vote" properties.
        String votingPrefix = "registry.custom_lifecycle.votes.";
        String votingSuffix = ".vote";
        if (propertyKey.startsWith(votingPrefix) && propertyKey.endsWith(votingSuffix)) {
            List<String> propValues = (List<String>) lifecycleProps.get(propertyKey);
            ApproveItemBean approveItemBean = new ApproveItemBean();
            if (propValues != null && propValues.size() > 2) {
                // Each value entry is a "<field>:<value>" encoded string.
                for (String param : propValues) {
                    if ((param.startsWith("status:"))) {
                        approveItemBean.setStatus(param.substring(7));
                    } else if ((param.startsWith("name:"))) {
                        approveItemBean.setName(param.substring(5));
                    } else if ((param.startsWith("votes:"))) {
                        approveItemBean.setRequiredVotes(Integer.parseInt(param.substring(6)));
                    } else if ((param.startsWith("current:"))) {
                        approveItemBean.setVotes(Integer.parseInt(param.substring(8)));
                    } else if ((param.startsWith("order:"))) {
                        approveItemBean.setOrder(Integer.parseInt(param.substring(6)));
                    } else if ((param.startsWith("users:"))) {
                        String users = param.substring(6);
                        if (!users.equals("")) {
                            // Comma-separated list of users who already voted.
                            List<String> votedUsers = Arrays.asList(users.split(","));
                            approveItemBean.setVoters(votedUsers);
                            approveItemBean.setValue(votedUsers.contains(currentUser));
                        }
                    }
                }
            }
            approveItemList.add(approveItemBean);
        }
    }
    // NOTE(review): assumes the "order" values are contiguous 0..n-1; a gap or
    // duplicate would cause an ArrayIndexOutOfBoundsException or a dropped item
    // here — TODO confirm against the lifecycle configuration writer.
    ApproveItemBean[] approveItemBeans = new ApproveItemBean[approveItemList.size()];
    if (approveItemBeans.length > 0) {
        for (ApproveItemBean approveItemBean : approveItemList) {
            approveItemBeans[approveItemBean.getOrder()] = approveItemBean;
        }
        return approveItemBeans;
    }
    return null;
}
/*public static String retrieveGovernanceArtifactPath(Registry registry,
String artifactId) throws GovernanceException {
try {
Resource govIndexResource = registry.get(GovernanceConstants.GOVERNANCE_ARTIFACT_INDEX_PATH);
return govIndexResource.getProperty(artifactId);
} catch (RegistryException e) {
String msg = "Error in adding an entry for the governance artifact. uuid: " + artifactId + ".";
log.error(msg);
throw new GovernanceException(msg, e);
}
}*/
/**
* Method to register a governance artifact.
*
* @param registry the registry instance.
* @param artifactId the identifier of the artifact.
* @param artifactPath the path of the artifact.
* @throws GovernanceException if the operation failed.
*/
/*
public static void addGovernanceArtifactEntry(Registry registry,
String artifactId,
String artifactPath) throws GovernanceException {
try {
Registry systemGovernanceRegistry = getGovernanceSystemRegistry(registry);
if (systemGovernanceRegistry == null) {
systemGovernanceRegistry = registry;
}
Resource govIndexResource;
if (systemGovernanceRegistry.resourceExists(
GovernanceConstants.GOVERNANCE_ARTIFACT_INDEX_PATH)) {
govIndexResource = systemGovernanceRegistry.get(
GovernanceConstants.GOVERNANCE_ARTIFACT_INDEX_PATH);
} else {
govIndexResource = systemGovernanceRegistry.newResource();
}
govIndexResource.setProperty(artifactId, artifactPath);
govIndexResource.setVersionableChange(false);
systemGovernanceRegistry.put(GovernanceConstants.GOVERNANCE_ARTIFACT_INDEX_PATH,
govIndexResource);
} catch (RegistryException e) {
String msg =
"Error in adding an entry for the governance artifact. path: " + artifactPath +
", uuid: " + artifactId + ".";
log.error(msg);
throw new GovernanceException(msg, e);
}
}
*/
/**
 * Method to build an AXIOM element from a byte stream.
 *
 * @param content the stream of bytes.
 * @return the AXIOM element (the document's root element).
 * @throws RegistryException if the content could not be parsed as XML.
 */
public static OMElement buildOMElement(byte[] content) throws RegistryException {
    XMLStreamReader parser;
    try {
        XMLInputFactory factory = XMLInputFactory.newInstance();
        // Coalesce adjacent character data into single text events.
        // Boolean.TRUE replaces the deprecated new Boolean(true) constructor.
        factory.setProperty(XMLInputFactory.IS_COALESCING, Boolean.TRUE);
        // NOTE(review): DTD/external-entity support is not disabled here; if the
        // content can come from an untrusted source, consider setting
        // XMLInputFactory.SUPPORT_DTD to false to prevent XXE.
        parser = factory.createXMLStreamReader(new StringReader(
                RegistryUtils.decodeBytes(content)));
    } catch (XMLStreamException e) {
        String msg = "Error in initializing the parser to build the OMElement.";
        log.error(msg, e);
        throw new GovernanceException(msg, e);
    }
    // Create the builder and return the root element of the parsed document.
    StAXOMBuilder builder = new StAXOMBuilder(parser);
    return builder.getDocumentElement();
}
/**
 * Serializes an XML element into its string representation. The element is
 * consumed in the process and cannot be reused afterwards.
 *
 * @param element the XML element to serialize.
 * @return the corresponding String representation.
 * @throws GovernanceException if serialization failed.
 */
public static String serializeOMElement(OMElement element) throws GovernanceException {
    try {
        return element.toStringWithConsume();
    } catch (XMLStreamException e) {
        final String message = "Error in serializing the OMElement.";
        log.error(message, e);
        throw new GovernanceException(message, e);
    }
}
/**
 * Resolves a storage path expression into a concrete path, substituting the
 * {@code @{storagePath}} and {@code @{uuid}} placeholders before delegating the
 * remaining attribute placeholders to the two-argument overload.
 *
 * @param pathExpression the expression specified for storing the path
 * @param artifact       the governance artifact
 * @param storagePath    the storage path of the artifact
 * @return the path with corresponding values where the artifact is stored
 * @throws GovernanceException if the operation failed.
 */
public static String getPathFromPathExpression(String pathExpression,
                                               GovernanceArtifact artifact,
                                               String storagePath) throws GovernanceException {
    String withStoragePath = pathExpression.replace("@{storagePath}", storagePath);
    String withUuid = withStoragePath.replace("@{uuid}", artifact.getId());
    return getPathFromPathExpression(withUuid, artifact);
}
/**
 * Resolves a storage path expression into a concrete path by substituting every
 * {@code @{attribute}} placeholder with the artifact's attribute value. The
 * {@code @{name}} and {@code @{namespace}} placeholders are handled first.
 *
 * @param pathExpression the expression specified for storing the path
 * @param artifact       the governance artifact
 * @return the path with corresponding values where the artifact is stored
 * @throws GovernanceException if a referenced attribute has no value.
 */
public static String getPathFromPathExpression(String pathExpression,
                                               GovernanceArtifact artifact)
        throws GovernanceException {
    String result = replaceNameAndNamespace(pathExpression, artifact);
    String[] segments = result.split("@");
    for (int index = 1; index < segments.length; index++) {
        String segment = segments[index];
        // Only segments of the form "{attributeName}..." are placeholders.
        if (segment.indexOf('{') != 0 || segment.indexOf('}') <= 0) {
            continue;
        }
        String attributeName = segment.split("}")[0].substring(1);
        String attributeValue = artifact.getAttribute(attributeName);
        if (attributeValue == null) {
            String msg = "Value for required attribute " + attributeName + " found empty.";
            log.error(msg);
            throw new GovernanceException(msg);
        }
        result = result.replace("@{" + attributeName + "}", attributeValue);
    }
    return result;
}
/**
 * Checks whether the old and new artifacts resolve to the same storage path
 * under the given path expression, i.e. whether every attribute referenced by
 * the expression has the same value on both artifacts.
 *
 * @param pathExpression the expression specified for storing the path
 * @param newArtifact    updated artifact
 * @param oldArtifact    existing artifact
 * @return true when the resolved paths are identical
 * @throws GovernanceException if a referenced attribute is missing on the new artifact.
 */
@SuppressWarnings("unused")
public static boolean hasSamePath(String pathExpression,
                                  GovernanceArtifact newArtifact, GovernanceArtifact oldArtifact)
        throws GovernanceException {
    String expanded = replaceNameAndNamespace(pathExpression, newArtifact);
    String[] segments = expanded.split("@");
    for (int index = 1; index < segments.length; index++) {
        String segment = segments[index];
        // Skip anything that is not a "{attributeName}..." placeholder.
        if (segment.indexOf('{') != 0 || segment.indexOf('}') <= 0) {
            continue;
        }
        String attributeName = segment.split("}")[0].substring(1);
        String oldValue = oldArtifact.getAttribute(attributeName);
        String newValue = newArtifact.getAttribute(attributeName);
        if (newValue == null) {
            String msg = "Value for required attribute " + attributeName + " found empty.";
            log.error(msg);
            throw new GovernanceException(msg);
        }
        if (!newValue.equals(oldValue)) {
            return false;
        }
    }
    return true;
}
/**
 * Resolves a storage path expression into one or more concrete paths. Plain
 * {@code @{attribute}} placeholders are substituted in place; placeholders of
 * the form {@code @{attribute:key}} / {@code @{attribute:value}} fan out into
 * multiple paths via {@link #fixExpressionForMultiplePaths}.
 *
 * @param pathExpression the expression specified for storing the path
 * @param artifact       the governance artifact
 * @return the paths with corresponding values where the artifact is stored
 * @throws GovernanceException if the operation failed.
 */
public static String[] getPathsFromPathExpression(String pathExpression,
                                                  GovernanceArtifact artifact)
        throws GovernanceException {
    String expanded = replaceNameAndNamespace(pathExpression, artifact);
    String[] segments = expanded.split("@");
    for (int index = 1; index < segments.length; index++) {
        String segment = segments[index];
        // A "plain" placeholder has no ':' qualifier; missing attributes are
        // silently left unresolved (unlike getPathFromPathExpression).
        boolean plainPlaceholder = !(segment.indexOf(':') > 0)
                && segment.indexOf('{') == 0 && segment.indexOf('}') > 0;
        if (plainPlaceholder) {
            String attributeName = segment.split("}")[0].substring(1);
            String attributeValue = artifact.getAttribute(attributeName);
            if (attributeValue != null) {
                expanded = expanded.replace("@{" + attributeName + "}", attributeValue);
            }
        }
    }
    List<String> paths = fixExpressionForMultiplePaths(artifact, expanded);
    return paths.toArray(new String[paths.size()]);
}
/**
 * Recursively expands {@code @{attribute:key}} / {@code @{attribute:value}}
 * placeholder pairs into one path per attribute entry. Only the FIRST
 * placeholder found is expanded at each recursion level (note the break);
 * deeper placeholders are handled by the recursive calls.
 *
 * @param artifact   artifact supplying the multi-valued attributes
 * @param expression expression possibly containing qualified placeholders
 * @return all fully-expanded paths; a singleton list when no '@' remains
 */
private static List<String> fixExpressionForMultiplePaths(GovernanceArtifact artifact,
                                                          String expression)
        throws GovernanceException {
    if (!expression.contains("@")) {
        // Base case: nothing left to expand.
        return Collections.singletonList(expression);
    }
    List<String> output = new LinkedList<String>();
    String[] elements = expression.split("@");
    for (int i = 1; i < elements.length; i++) {
        if (elements[i].indexOf("}") > 0 && elements[i].indexOf("{") == 0) {
            // Strip the braces and any ":key"/":value" qualifier to get the attribute name.
            String key = elements[i].split("}")[0].substring(1).split(":")[0];
            String[] artifactAttributes = artifact.getAttributes(key);
            if (artifactAttributes != null) {
                for (String artifactAttribute : artifactAttributes) {
                    // Each attribute entry is expected to be "<key>:<value>";
                    // entries without a ':' are silently skipped.
                    String[] parts = artifactAttribute.split(":");
                    if (parts.length > 1) {
                        output.addAll(fixExpressionForMultiplePaths(artifact,
                                expression.replace("@{" + key + ":key}", parts[0])
                                        .replace("@{" + key + ":value}", parts[1])));
                    }
                }
            }
            // Only the first placeholder is processed here; recursion handles the rest.
            break;
        }
    }
    return output;
}
/**
 * Substitutes the {@code @{name}} and {@code @{namespace}} placeholders in a
 * path expression using the artifact's qualified name. When the artifact has
 * no QName the expression is returned unchanged.
 *
 * @param pathExpression expression possibly containing name/namespace placeholders
 * @param artifact       artifact supplying the qualified name
 * @return the expression with name and namespace placeholders resolved
 */
private static String replaceNameAndNamespace(String pathExpression,
                                              GovernanceArtifact artifact) {
    QName qName = artifact.getQName();
    if (qName == null) {
        return pathExpression;
    }
    String result = pathExpression.replace("@{name}", qName.getLocalPart());
    String namespaceFragment =
            CommonUtil.derivePathFragmentFromNamespace(qName.getNamespaceURI());
    // Trim a single leading and trailing slash so the fragment splices cleanly.
    if (namespaceFragment.startsWith("/")) {
        namespaceFragment = namespaceFragment.substring(1);
    }
    if (namespaceFragment.endsWith("/")) {
        namespaceFragment = namespaceFragment.substring(0, namespaceFragment.length() - 1);
    }
    return result.replace("@{namespace}", namespaceFragment);
}
/**
 * Method to obtain all available life-cycle aspects for the current tenant.
 * An aspect counts as a "lifecycle" aspect when associating it with a probe
 * resource produces registry.lifecycle.* / checklist properties; the result of
 * that probe is cached per tenant in {@code lifecycleAspects}.
 *
 * @return list of available lifecycle aspects.
 * @throws RegistryException if the operation failed.
 */
public static String[] getAvailableAspects() throws RegistryException {
    int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
    Registry systemRegistry = registryService.getConfigSystemRegistry(tenantId);
    String[] aspectsToAdd = systemRegistry.getAvailableAspects();
    if (aspectsToAdd == null) {
        return new String[0];
    }
    List<String> lifecycleAspectsToAdd = new LinkedList<String>();
    boolean isTransactionStarted = false;
    // Temporary probe resource; created only when an aspect must be tested.
    String tempResourcePath = "/governance/lcm/" + UUIDGenerator.generateUUID();
    for (String aspectToAdd : aspectsToAdd) {
        if (systemRegistry.getRegistryContext().isReadOnly()) {
            // Cannot probe a read-only registry; assume the aspect qualifies.
            lifecycleAspectsToAdd.add(aspectToAdd);
            continue;
        }
        // Double-checked locking around the per-tenant cache map.
        Map<String, Boolean> aspectsMap;
        if (!lifecycleAspects.containsKey(tenantId)) {
            synchronized (ASPECT_MAP_LOCK) {
                if (!lifecycleAspects.containsKey(tenantId)) {
                    aspectsMap = new HashMap<String, Boolean>();
                    lifecycleAspects.put(tenantId, aspectsMap);
                } else {
                    aspectsMap = lifecycleAspects.get(tenantId);
                }
            }
        } else {
            aspectsMap = lifecycleAspects.get(tenantId);
        }
        Boolean isLifecycleAspect = aspectsMap.get(aspectToAdd);
        if (isLifecycleAspect == null) {
            // Cache miss: associate the aspect with the probe resource and
            // inspect which properties it writes.
            if (!isTransactionStarted) {
                systemRegistry.beginTransaction();
                isTransactionStarted = true;
            }
            systemRegistry.put(tempResourcePath, systemRegistry.newResource());
            systemRegistry.associateAspect(tempResourcePath, aspectToAdd);
            Resource r = systemRegistry.get(tempResourcePath);
            Properties props = r.getProperties();
            Set keys = props.keySet();
            for (Object key : keys) {
                String propKey = (String) key;
                if (propKey.startsWith("registry.lifecycle.")
                        || propKey.startsWith("registry.custom_lifecycle.checklist.")) {
                    isLifecycleAspect = Boolean.TRUE;
                    break;
                }
            }
            if (isLifecycleAspect == null) {
                isLifecycleAspect = Boolean.FALSE;
            }
            aspectsMap.put(aspectToAdd, isLifecycleAspect);
        }
        if (isLifecycleAspect) {
            lifecycleAspectsToAdd.add(aspectToAdd);
        }
    }
    if (isTransactionStarted) {
        // Roll back so the probe resource and its aspect associations never persist.
        systemRegistry.delete(tempResourcePath);
        systemRegistry.rollbackTransaction();
    }
    return lifecycleAspectsToAdd.toArray(new String[lifecycleAspectsToAdd.size()]);
}
/**
 * Method to obtain a path from a qualified name: the namespace-derived path
 * fragment followed by the local part.
 *
 * @param qName the qualified name.
 * @return the corresponding path.
 */
@SuppressWarnings("unused")
public static String derivePathFromQName(QName qName) {
    String localPart = qName.getLocalPart();
    String namespaceUri = qName.getNamespaceURI();
    String prefix = (namespaceUri == null)
            ? "" : CommonUtil.derivePathFragmentFromNamespace(namespaceUri);
    return prefix + localPart;
}
/**
 * Obtain a name that can represent a URL: the final path segment, or the URL
 * itself with any single trailing slash removed.
 *
 * @param url the URL.
 * @return the derived name.
 */
public static String getNameFromUrl(String url) {
    int lastSlash = url.lastIndexOf('/');
    if (lastSlash < 0) {
        // No separator at all: the whole URL is the name.
        return url;
    }
    int lastIndex = url.length() - 1;
    if (lastSlash == lastIndex) {
        // Trailing slash: drop it but keep the rest of the URL intact.
        return url.substring(0, lastIndex);
    }
    return url.substring(lastSlash + 1);
}
/**
 * Evaluates an XPath expression against the given root element with the common
 * WSDL/XSD/SOAP namespace prefixes pre-registered.
 *
 * @param expression the XPath expression to evaluate
 * @param root       the element to evaluate against
 * @return the matching elements
 * @throws JaxenException if the expression is invalid or evaluation fails
 */
@SuppressWarnings("unchecked")
public static List<OMElement> evaluateXPathToElements(String expression,
                                                      OMElement root) throws JaxenException {
    String[] wsdlPrefixes = {
            "wsdl", "http://schemas.xmlsoap.org/wsdl/",
            "wsdl2", "http://www.w3.org/ns/wsdl",
            "xsd", "http://www.w3.org/2001/XMLSchema",
            "soap", "http://schemas.xmlsoap.org/wsdl/soap/",
            "soap12", "http://schemas.xmlsoap.org/wsdl/soap12/",
            "http", "http://schemas.xmlsoap.org/wsdl/http/",
    };
    AXIOMXPath xpath = new AXIOMXPath(expression);
    // The array holds (prefix, namespace URI) pairs, so step two entries at a time.
    for (int i = 0; i < wsdlPrefixes.length; i += 2) {
        xpath.addNamespace(wsdlPrefixes[i], wsdlPrefixes[i + 1]);
    }
    return (List<OMElement>) xpath.selectNodes(root);
}
/**
 * Method to associate an aspect with a given resource on the registry. The
 * first life-cycle attached to a resource is promoted to its default.
 *
 * @param path     the path of the resource.
 * @param aspect   the aspect to add.
 * @param registry the registry instance on which the resource is available.
 * @throws RegistryException if the operation failed.
 */
public static void associateAspect(String path, String aspect, Registry registry)
        throws RegistryException {
    try {
        registry.associateAspect(path, aspect);
        Resource associated = registry.get(path);
        if (associated.getAspects().size() == 1) {
            // This is the very first life-cycle, so make it the default.
            associated.setProperty("registry.LC.name", aspect);
            registry.put(path, associated);
        }
    } catch (RegistryException e) {
        String msg = "Failed to associate aspect with the resource " +
                path + ". " + e.getMessage();
        log.error(msg, e);
        throw new RegistryException(msg, e);
    }
}
/**
 * Method to remove an aspect from a given resource on the registry, along with
 * every life-cycle property belonging to that aspect. If the removed aspect was
 * the default life-cycle, the first remaining aspect (if any) becomes the default.
 *
 * @param path     the path of the resource.
 * @param aspect   the aspect to be removed.
 * @param registry the registry instance on which the resource is available.
 * @throws RegistryException if the operation failed.
 */
public static void removeAspect(String path, String aspect, Registry registry)
        throws RegistryException {
    try {
        Resource resource = registry.get(path);
        Properties props = resource.getProperties();
        // Collect matching property keys first; removing properties while
        // iterating over the key set would be unsafe.
        ArrayList<String> propertiesToRemove = new ArrayList<String>();
        for (Object keyObj : props.keySet()) {
            String propKey = (String) keyObj;
            if ((propKey.startsWith("registry.custom_lifecycle.votes.")
                    || propKey.startsWith("registry.custom_lifecycle.user.")
                    || propKey.startsWith("registry.custom_lifecycle.checklist.")
                    || propKey.startsWith("registry.LC.name")
                    || propKey.startsWith("registry.lifecycle.")
                    || propKey.startsWith("registry.Aspects")) && propKey.contains(aspect)) {
                propertiesToRemove.add(propKey);
            }
        }
        for (String propertyName : propertiesToRemove) {
            resource.removeProperty(propertyName);
        }
        // This is needed as we are not removing all the aspects, only the one
        // passed to this method.
        resource.removeAspect(aspect);
        // Null-safe comparison: a resource may carry no default life-cycle
        // property at all (the original getProperty(...).equals(...) order
        // threw a NullPointerException in that case).
        if (aspect.equals(resource.getProperty("registry.LC.name"))) {
            resource.removeProperty("registry.LC.name");
            if (resource.getAspects().size() > 0) {
                // Promote the first remaining aspect to be the new default.
                resource.setProperty("registry.LC.name", resource.getAspects().get(0));
            }
        }
        registry.put(path, resource);
    } catch (RegistryException e) {
        String msg = "Failed to remove aspect " + aspect +
                " on resource " + path + ". " + e.getMessage();
        log.error(msg, e);
        throw new RegistryException(msg, e);
    }
}
/** Returns the attribute-based search service used for artifact searches; may be null if none has been registered yet. */
public static AttributeSearchService getAttributeSearchService() {
    return attributeSearchService;
}
/** Registers the attribute-based search service used for artifact searches. */
public static void setAttributeSearchService(AttributeSearchService attributeSearchService) {
    GovernanceUtils.attributeSearchService = attributeSearchService;
}
/** Returns the terms (facet) search service; may be null if none has been registered yet. */
public static TermsSearchService getTermsSearchService() {
    return termsSearchService;
}
/** Registers the terms (facet) search service. */
public static void setTermsSearchService(TermsSearchService termsSearchService) {
    GovernanceUtils.termsSearchService = termsSearchService;
}
/**
 * Makes the given aspect the default life-cycle of a resource. Does nothing
 * when the resource does not exist.
 *
 * @param path     path of the resource
 * @param aspect   the aspect to make the default.
 * @param registry registry instance to be used
 * @throws RegistryException if reading or writing the resource failed.
 */
public static void setDefaultLifeCycle(String path, String aspect, Registry registry) throws RegistryException {
    Resource resource = registry.get(path);
    if (resource == null) {
        return;
    }
    resource.setProperty("registry.LC.name", aspect);
    registry.put(path, resource);
}
/**
 * Returns a list of governance artifacts found by searching indexes. This method requires an instance of an
 * attribute search service.
 *
 * @param criteria  the search criteria (field name to candidate values; values are lower-cased
 *                  and joined with commas before being handed to the search service)
 * @param registry  the governance registry instance
 * @param mediaType media type filter, or null for no filtering
 * @return search result
 * @throws GovernanceException if the operation failed
 */
public static List<GovernanceArtifact> findGovernanceArtifacts(Map<String, List<String>> criteria,
                                                               Registry registry, String mediaType)
        throws GovernanceException {
    if (getAttributeSearchService() == null) {
        throw new GovernanceException("Attribute Search Service not Found");
    }
    List<GovernanceArtifact> artifacts = new ArrayList<GovernanceArtifact>();
    Map<String, String> fields = new HashMap<String, String>();
    if (mediaType != null) {
        fields.put("mediaType", mediaType);
    }
    // Build a comma-separated, lower-cased value list per criterion; spaces are
    // escaped because the underlying index treats them as term separators.
    for (Map.Entry<String, List<String>> e : criteria.entrySet()) {
        StringBuilder builder = new StringBuilder();
        for (String referenceValue : e.getValue()) {
            if (referenceValue != null && !"".equals(referenceValue)) {
                String referenceValueModified = referenceValue;
                if(referenceValueModified.contains(" ")) {
                    referenceValueModified = referenceValueModified.replace(" ", "\\ ");
                }
                builder.append(referenceValueModified.toLowerCase()).append(",");
            }
        }
        if (builder.length() > 0) {
            // Drop the trailing comma.
            fields.put(e.getKey(), builder.substring(0, builder.length() - 1));
        }
    }
    try {
        ResourceData[] results = getAttributeSearchService().search(fields);
        int errorCount = 0; // We use this to check how many errors occurred.
        for (ResourceData result : results) {
            GovernanceArtifact governanceArtifact = null;
            String path = result.getResourcePath().substring(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH.length());
            try {
                governanceArtifact = retrieveGovernanceArtifactByPath(registry, path);
            } catch (GovernanceException e) {
                // We do not throw any exception here. Only logging is done.
                // We increase the error count for each error. If all the paths failed, then we throw an error
                errorCount++;
                log.error("Error occurred while retrieving governance artifact by path : " + path, e);
            }
            if (governanceArtifact != null) {
                artifacts.add(governanceArtifact);
            }
        } if (errorCount != 0 && errorCount == results.length) {
            // This means that all the paths have failed. So we throw an error.
            throw new GovernanceException("Error occurred while retrieving all the governance artifacts");
        }
    } catch (RegistryException e) {
        throw new GovernanceException("Unable to search by attribute", e);
    }
    return artifacts;
}
/**
 * Searches for governance artifacts matching a query string of '&'-separated,
 * URL-encoded "key=value" pairs (a bare token is treated as a name search).
 * Recognised keys are routed to dedicated index fields; unknown keys are
 * treated as overview attributes and, additionally, as resource properties.
 *
 * @param criteria  query string that should be searched for
 * @param registry  the governance registry instance
 * @param mediaType media type to be matched for search. Media type can be specified in the query string too
 * @return The list of artifacts. null if the media type and string is empty.
 * @throws GovernanceException thrown when an error occurs
 */
public static List<GovernanceArtifact> findGovernanceArtifacts(String criteria,
                                                               Registry registry, String mediaType)
        throws GovernanceException {
    Map<String, String> fields = new HashMap<String, String>();
    // Keys that may also denote resource properties; searched separately below.
    Map<String, String> possibleProperties = new HashMap<String, String>();
    GovernanceArtifactConfiguration artifactConfiguration;
    if (mediaType != null && !"".equals(mediaType)) {
        fields.put("mediaType", mediaType);
    } else if("".equals(criteria)) {
        return null;
    }
    try {
        artifactConfiguration = findGovernanceArtifactConfigurationByMediaType(mediaType, registry);
    } catch (RegistryException e) {
        throw new GovernanceException(e);
    }
    // Query keys with dedicated index-field handling.
    List<String> possibleKeys = Arrays.asList("createdAfter", "createdBefore", "updatedAfter", "updatedBefore", "author", "author!", "associationType", "associationDest",
            "updater", "updater!", "tags", "taxonomy", "content", "mediaType", "mediaType!", "lcName", "lcState");
    List<String> finalTempList = new ArrayList<>();
    if (StringUtils.isNotEmpty(criteria)) {
        // Split into individual "key=value" tokens and URL-decode each one.
        String[] tempList = criteria.split("&");
        for (int i = 0; i < tempList.length; i++) {
            try {
                finalTempList.add(URLDecoder.decode(tempList[i], "utf-8"));
            } catch (UnsupportedEncodingException e) {
                throw new GovernanceException("Error occurred while decoding the query params");
            }
        }
    }
    for(String temp : finalTempList) {
        String[] subParts = temp.split("=");
        if(subParts.length != 2) {
            // Bare token (no "key=value" form): treat it as an overview-name search.
            String value = subParts[0].toLowerCase();
            if(value.contains(" ")) {
                value = value.replace(" ", "\\ ");
            }
            fields.put("overview_name", value);
        } else {
            if(possibleKeys.contains(subParts[0])) {
                switch(subParts[0]) {
                    // Keys ending in '!' negate the match: strip the '!' and set
                    // the corresponding "...Negate" flag.
                    case "author!":
                        fields.put(subParts[0].substring(0, subParts[0].length() - 1), subParts[1].toLowerCase());
                        fields.put("authorNameNegate", "on");
                        break;
                    case "updater!":
                        fields.put(subParts[0].substring(0, subParts[0].length() - 1), subParts[1].toLowerCase());
                        fields.put("updaterNameNegate", "on");
                        break;
                    case "mediaType!":
                        fields.put(subParts[0].substring(0, subParts[0].length() - 1), subParts[1].toLowerCase());
                        fields.put("mediaTypeNegate", "on");
                        break;
                    // These keys are case-sensitive in the index, so keep them as-is.
                    case "tags":
                    case "associationType":
                    case "taxonomy":
                    case "associationDest":
                        fields.put(subParts[0], subParts[1]);
                        break;
                    default:
                        fields.put(subParts[0], subParts[1].toLowerCase());
                        break;
                }
            } else if(subParts[0].equals("comments")){
                fields.put("commentWords", subParts[1].toLowerCase());
            } else {
                if(subParts[0].contains(":")) {
                    // Qualified key such as "table:column" -> index field "table_column".
                    String value = subParts[1].toLowerCase();
                    if(value.contains(" or ")){
                        // OR-joined alternatives; escape embedded spaces per alternative.
                        String[] values = value.split(" or ");
                        for(int i=0; i<values.length; i++){
                            values[i] = values[i].trim().replace(" ", "\\ ");
                        }
                        value = StringUtils.join(values, " OR ");
                    } else if(value.contains(" ")) {
                        value = value.replace(" ", "\\ ");
                    }
                    String[] tableParts = subParts[0].split(":");
                    if ("overview".equals(tableParts[0])) {
                        // Overview attributes may also exist as resource properties.
                        possibleProperties.put(tableParts[1], value);
                    }
                    fields.put(subParts[0].replace(":", "_"), value);
                } else {
                    String value = subParts[1].toLowerCase();
                    if(value.contains(" or ")){
                        String[] values = value.split(" or ");
                        for(int i=0; i<values.length; i++){
                            values[i] = values[i].trim().replace(" ", "\\ ");
                        }
                        value = StringUtils.join(values, " OR ");
                    } else if(value.contains(" ")) {
                        value = value.replace(" ", "\\ ");
                    }
                    if(!subParts[0].equals("name")) {
                        possibleProperties.put(subParts[0], value);
                        fields.put(OVERVIEW + UNDERSCORE + subParts[0], value.toLowerCase());
                    } else {
                        // "name" maps to the artifact type's configured name attribute when known.
                        if (artifactConfiguration != null) {
                            fields.put(artifactConfiguration.getArtifactNameAttribute(), value.toLowerCase());
                        } else {
                            fields.put(OVERVIEW + UNDERSCORE + subParts[0], value.toLowerCase());
                        }
                    }
                }
            }
        }
    }
    List<GovernanceArtifact> attributeSearchResults = performAttributeSearch(fields, registry);
    // Following check is done since Attribute Search service only has a way to search one property at a time
    if(possibleProperties.size() == 1) {
        int paginationSizeAtts = PaginationContext.getInstance().getLength();
        // Re-run the search with the single candidate treated as a resource property.
        for(Map.Entry<String, String> entry : possibleProperties.entrySet()) {
            String propertyName = entry.getKey();
            fields.remove("overview_" + propertyName);
            fields.put("propertyName", propertyName);
            fields.put("rightPropertyValue", entry.getValue());
            fields.put("rightOp", "eq");
        }
        List<GovernanceArtifact> propertySearchResults = performAttributeSearch(fields, registry);
        // Merge the two result sets, de-duplicating by artifact id.
        Set<GovernanceArtifact> set = new TreeSet<>(new Comparator<GovernanceArtifact>() {
            public int compare(GovernanceArtifact artifact1, GovernanceArtifact artifact2)
            {
                return artifact1.getId().compareTo(artifact2.getId()) ;
            }
        });
        set.addAll(attributeSearchResults);
        set.addAll(propertySearchResults);
        List<GovernanceArtifact> mergeListWithoutDuplicates = new ArrayList<>();
        mergeListWithoutDuplicates.addAll(set);
        int paginationSizePros = PaginationContext.getInstance().getLength();
        PaginationContext.getInstance().setLength(paginationSizeAtts +paginationSizePros);
        return mergeListWithoutDuplicates;
    }
    return attributeSearchResults;
}
/**
 * Runs an index search over the given field map and resolves each hit back
 * into a governance artifact. Individual retrieval failures are logged and
 * skipped; an exception is raised only when every hit fails to resolve.
 *
 * @param fields   index field name to value map for the search
 * @param registry the governance registry instance
 * @return the resolved artifacts (never null)
 * @throws GovernanceException when the service is missing, the search fails,
 *                             or all hits fail to resolve
 */
private static List<GovernanceArtifact> performAttributeSearch(Map<String, String> fields, Registry registry) throws GovernanceException {
    if (getAttributeSearchService() == null) {
        throw new GovernanceException("Attribute Search Service not Found");
    }
    List<GovernanceArtifact> artifacts = new ArrayList<GovernanceArtifact>();
    try {
        ResourceData[] results = getAttributeSearchService().search(fields);
        int failureCount = 0;
        for (ResourceData result : results) {
            String path = result.getResourcePath().substring(
                    RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH.length());
            try {
                GovernanceArtifact governanceArtifact = retrieveGovernanceArtifactByPath(registry, path);
                if (governanceArtifact != null) {
                    artifacts.add(governanceArtifact);
                }
            } catch (GovernanceException e) {
                // Log-and-continue: count failures so we can detect the
                // everything-failed case below.
                failureCount++;
                log.error("Error occurred while retrieving governance artifact by path : " + path, e);
            }
        }
        if (failureCount != 0 && failureCount == results.length) {
            // Every single path failed to resolve, so surface an error.
            throw new GovernanceException("Error occurred while retrieving all the governance artifacts");
        }
    } catch (RegistryException e) {
        throw new GovernanceException("Unable to search by attribute", e);
    }
    return artifacts;
}
/**
 * Find all possible terms and their counts for the given facet field and query criteria.
 *
 * @param criteria     the filter criteria to be matched
 * @param facetField   field used for faceting : required
 * @param mediaType    artifact type to filter on : optional
 * @param authRequired whether authorization should be applied to the facet search
 * @return term results
 * @throws GovernanceException if the service is missing, the facet field is
 *                             null, or the underlying search fails
 */
public static List<TermData> getTermDataList(Map<String, List<String>> criteria, String facetField, String mediaType, boolean authRequired) throws GovernanceException {
    if (getTermsSearchService() == null) {
        throw new GovernanceException("Term Search Service not Found");
    }
    Map<String, String> fields = new HashMap<>();
    if (mediaType != null) {
        fields.put(IndexingConstants.FIELD_MEDIA_TYPE, mediaType);
    }
    // Fold each criterion's values into one comma-separated, lower-cased,
    // space-escaped string per field.
    for (Map.Entry<String, List<String>> criterion : criteria.entrySet()) {
        StringBuilder joined = new StringBuilder();
        for (String value : criterion.getValue()) {
            if (value == null || "".equals(value)) {
                continue;
            }
            joined.append(value.replace(" ", "\\ ").toLowerCase()).append(',');
        }
        if (joined.length() > 0) {
            fields.put(criterion.getKey(), joined.substring(0, joined.length() - 1));
        }
    }
    //set whether authorization is required for the facet search.
    fields.put(IndexingConstants.AUTH_REQUIRED, String.valueOf(authRequired));
    //setting the facet Field which needs grouping. Facet Field is required for searching.
    if (facetField != null) {
        fields.put(IndexingConstants.FACET_FIELD_NAME, facetField);
    } else {
        throw new GovernanceException("Facet field is required. field cannot be null");
    }
    try {
        return Arrays.asList(getTermsSearchService().search(fields));
    } catch (RegistryException e) {
        throw new GovernanceException("Unable to get terms for the given field", e);
    }
}
/**
 * Method used to retrieve the cache object for RXT configs, registering the
 * cache entry listeners on first use.
 *
 * @param name the name of the cache
 * @return the cache object for the given cache manager and cache name
 */
public static Cache<String, Boolean> getRXTConfigCache(String name) {
    CacheManager manager = getCacheManager();
    Cache<String, Boolean> cache = (manager != null) ? manager.<String, Boolean>getCache(name) :
            Caching.getCacheManager().<String, Boolean>getCache(name);
    // Register the entry listeners exactly once. The previous check was
    // inverted ("if (rxtCacheInitiated)"): the flag starts out false and was
    // only ever set inside that branch, so the listeners could never be
    // registered.
    if (!rxtCacheInitiated) {
        cache.registerCacheEntryListener(entryCreatedListener);
        cache.registerCacheEntryListener(entryUpdatedListener);
        cache.registerCacheEntryListener(entryRemovedListener);
        rxtCacheInitiated = true;
    }
    return cache;
}
/**
 * Get the Cache Manager for the Registry cache-manager namespace.
 *
 * @return the registry CacheManager instance
 */
private static CacheManager getCacheManager() {
    return Caching.getCacheManagerFactory().getCacheManager(
            RegistryConstants.REGISTRY_CACHE_MANAGER);
}
/*
* This method is used to retrieve departments attached to a given artifact. Applicable to
* ProjectGroup and Person artifacts
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - ProjectGroup or Person artifact to which Departments are attached
* @return Department artifacts attached to <code>artifact</code>
* @throws GovernanceException If operation fails
*/
/* public static Department[] getAffiliatedDepartments(Registry registry, PeopleArtifact artifact)
throws GovernanceException {
List<Department> list = new ArrayList<Department>();
PeopleManager manager = new PeopleManager(registry);
String[] affiliations = artifact.getAttributes(GovernanceConstants.AFFILIATIONS_ATTRIBUTE);
if (affiliations != null) {
for (String deptText : affiliations) {
String deptName = deptText.split(GovernanceConstants.ENTRY_VALUE_SEPARATOR)[1];
*//* We are assuming data consistency at this point and hence, not checking the 0th
element of the above returned array *//*
PeopleArtifact pa = manager.getPeopleArtifactByName(deptName);
if (pa instanceof Department) {
list.add((Department) pa);
}
}
}
return list.toArray(new Department[list.size()]);
}*/
/*
* This method is used to retrieve organizations attached to a given artifact. Applicable to
* ProjectGroup and Person artifacts
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - ProjectGroup or Person artifact to which Organizations are attached
* @return Organization artifacts attached to <code>artifact</code>
* @throws GovernanceException If operation fails
*/
/*public static Organization[] getAffiliatedOrganizations(Registry registry,
PeopleArtifact artifact)
throws GovernanceException {
List<Organization> list = new ArrayList<Organization>();
PeopleManager manager = new PeopleManager(registry);
String[] affiliations = artifact.getAttributes(GovernanceConstants.AFFILIATIONS_ATTRIBUTE);
if (affiliations != null) {
for (String orgText : affiliations) {
String orgName = orgText.split(GovernanceConstants.ENTRY_VALUE_SEPARATOR)[1];
*//* We are assuming data consistency at this point and hence, not checking the 0th
element of the above returned array *//*
PeopleArtifact pa = manager.getPeopleArtifactByName(orgName);
if (pa instanceof Organization) {
list.add((Organization) pa);
}
}
}
return list.toArray(new Organization[list.size()]);
}*/
/*
* This method is used to retrieve project groups attached to a given artifact. Applicable to
* Person artifacts
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - Person artifact to which project groups are attached
* @return ProjectGroup artifacts attached to <code>artifact</code>
* @throws GovernanceException If operation fails
*/
/*public static ProjectGroup[] getAffiliatedProjectGroups(Registry registry,
PeopleArtifact artifact)
throws GovernanceException {
List<ProjectGroup> list = new ArrayList<ProjectGroup>();
PeopleManager manager = new PeopleManager(registry);
String[] affiliations = artifact.getAttributes(GovernanceConstants.AFFILIATIONS_ATTRIBUTE);
if (affiliations != null) {
for (String pgText : affiliations) {
String pgName = pgText.split(GovernanceConstants.ENTRY_VALUE_SEPARATOR)[1];
*//* We are assuming data consistency at this point and hence, not checking the 0th
element of the above returned array *//*
PeopleArtifact pa = manager.getPeopleArtifactByName(pgName);
if (pa instanceof ProjectGroup) {
list.add((ProjectGroup) pa);
}
}
}
return list.toArray(new ProjectGroup[list.size()]);
}*/
/*
* This method is used to retrieve project groups that have the given artifact (Organization or
* Department) as an affiliation
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - Organization/Department artifact
* @return ProjectGroups that have <code>artifact</code> as an affiliation
* @throws GovernanceException
*/
/*
public static ProjectGroup[] getAttachedProjectGroups(Registry registry, PeopleArtifact artifact)
throws GovernanceException {
ProjectGroup[] pgs = new PeopleManager(registry).getAllProjectGroups();
List<ProjectGroup> list = new ArrayList<ProjectGroup>();
for (ProjectGroup pg : pgs) {
for (Department department : pg.getDepartments()) {
if (artifact.getName().equals(department.getName())) {
list.add(pg);
}
}
}
return list.toArray(new ProjectGroup[list.size()]);
}
*/
/*
* This method is used to retrieve persons that have the given artifact (Organization or
* Department) as an affiliation
*
* @param registry - Registry associated with <code>artifact</code>
* @param artifact - Organization/Department artifact
* @return Person artifacts that have <code>artifact</code> as an affiliation
* @throws GovernanceException
*/
/*
public static Person[] getAttachedPersons(Registry registry, PeopleArtifact artifact)
throws GovernanceException {
Person[] persons = new PeopleManager(registry).getAllPersons();
List<Person> list = new ArrayList<Person>();
for (Person person : persons) {
for (Department department : person.getDepartments()) {
if (artifact.getName().equals(department.getName())) {
list.add(person);
}
}
}
return list.toArray(new Person[list.size()]);
}
*/
/*
* This method writes sub-group associations contained within the given ProjectGroup to the
* registry. Existence of all the sub groups must be validated before calling this method.
*
* @param registry
* @param projectGroup
* @throws GovernanceException
*/
/*
public static void writeSubGroupAssociations(Registry registry, ProjectGroup projectGroup)
throws GovernanceException {
try {
if (!registry.resourceExists(projectGroup.getPath())) {
return;
}
ProjectGroup[] subGroups = projectGroup.getSubGroups();
// Get the existing association list which is related to the current operation
Set<String> existingSet = new HashSet<String>();
for (Association asso : registry.getAllAssociations(projectGroup.getPath())) {
if ((GovernanceConstants.SUB_GROUP.equals(asso.getAssociationType()) &&
asso.getSourcePath().equals(projectGroup.getPath()))
||
(GovernanceConstants.IS_PART_OF.equals(asso.getAssociationType()) &&
asso.getDestinationPath().equals(projectGroup.getPath()))) {
existingSet.add(asso.getSourcePath() + SEPARATOR + asso.getDestinationPath() +
SEPARATOR + asso.getAssociationType());
}
}
// Get the updated association list from the projectGroup object
Set<String> updatedSet = new HashSet<String>();
for (ProjectGroup subGroup : subGroups) {
updatedSet.add(projectGroup.getPath() + SEPARATOR + subGroup.getPath() +
SEPARATOR + GovernanceConstants.SUB_GROUP);
updatedSet.add(subGroup.getPath() + SEPARATOR + projectGroup.getPath() + SEPARATOR +
GovernanceConstants.IS_PART_OF);
}
updateAssociations(registry, existingSet, updatedSet);
} catch (RegistryException e) {
String msg = "Error in writing sub group associations, parent project-group id: " +
projectGroup.getId() + ", path: " + projectGroup.getPath();
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
*/
/*
* This method writes owner associations contained within the service object to the registry.
* Existence of all the owners as people artifacts must be validated before calling this method.
*
* @param registry
* @param service
* @throws GovernanceException
*/
/*
public static void writeOwnerAssociations(Registry registry, Service service)
throws GovernanceException {
try {
if (!registry.resourceExists(service.getPath())) {
return;
}
PeopleArtifact[] owners = service.getOwners();
// Remove associations that are not there anymore and add any new associations
Association[] oldAssociations = registry.getAllAssociations(service.getPath());
Set<String> oldSet = new HashSet<String>();
for (Association association : oldAssociations) {
if (GovernanceConstants.OWNED_BY.equals(association.getAssociationType()) ||
GovernanceConstants.OWNS.equals(association.getAssociationType())) {
oldSet.add(association.getSourcePath() + SEPARATOR +
association.getDestinationPath() + SEPARATOR +
association.getAssociationType());
}
}
Set<String> updatedSet = new HashSet<String>();
for (PeopleArtifact owner : owners) {
updatedSet.add(service.getPath() + SEPARATOR + owner.getPath() + SEPARATOR +
GovernanceConstants.OWNED_BY);
updatedSet.add(owner.getPath() + SEPARATOR + service.getPath() + SEPARATOR +
GovernanceConstants.OWNS);
}
updateAssociations(registry, oldSet, updatedSet);
} catch (RegistryException e) {
String msg = "Error in associating owners to service. Id: " + service.getId() +
", path: " + service.getPath();
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
*/
/*
* This method writes consumer associations contained within the service object to the registry.
* Existence of all the consumers as people artifacts must be validated before calling this
* method.
*
* @param registry
* @param service
* @throws GovernanceException
*/
/*
public static void writeConsumerAssociations(Registry registry, Service service)
throws GovernanceException {
try {
if (!registry.resourceExists(service.getPath())) {
return;
}
PeopleArtifact[] consumers = service.getConsumers();
// Remove associations that are not there anymore and add any new associations
Association[] oldAssociations = registry.getAllAssociations(service.getPath());
Set<String> oldSet = new HashSet<String>();
for (Association association : oldAssociations) {
if (GovernanceConstants.CONSUMED_BY.equals(association.getAssociationType()) ||
GovernanceConstants.CONSUMES.equals(association.getAssociationType())) {
oldSet.add(association.getSourcePath() + SEPARATOR +
association.getDestinationPath() + SEPARATOR +
association.getAssociationType());
}
}
Set<String> updatedSet = new HashSet<String>();
for (PeopleArtifact consumer : consumers) {
updatedSet.add(service.getPath() + SEPARATOR + consumer.getPath() + SEPARATOR +
GovernanceConstants.CONSUMED_BY);
updatedSet.add(consumer.getPath() + SEPARATOR + service.getPath() + SEPARATOR +
GovernanceConstants.CONSUMES);
}
updateAssociations(registry, oldSet, updatedSet);
} catch (RegistryException e) {
String msg = "Error in associating owners to service. Id: " + service.getId() +
", path: " + service.getPath();
log.error(msg, e);
throw new GovernanceException(msg, e);
}
}
*/
/*
* This method extracts people names from the given attribute of the given artifact and returns
* an array containing PeopleArtifacts represented by those names.
* Existence of people artifacts listed under the atrribute name must be validated before
* calling this method.
*
* @param registry Associated registry
* @param artifact GovernanceArtifact which stores people list as an attribute
* @param attributeName Name of the attribute which stores people names
* @throws GovernanceException
*/
/*
public static PeopleArtifact[] extractPeopleFromAttribute(Registry registry,
GovernanceArtifact artifact,
String attributeName)
throws GovernanceException {
String[] peopleTexts = artifact.getAttributes(attributeName);
PeopleManager manager = new PeopleManager(registry);
List<PeopleArtifact> list = new ArrayList<PeopleArtifact>();
if (peopleTexts != null) {
for (String peopleText : peopleTexts) {
String name = peopleText.split(GovernanceConstants.ENTRY_VALUE_SEPARATOR)[1];
PeopleArtifact pa = manager.getPeopleArtifactByName(name);
if (pa == null) {
String msg = "Invalid people artifact name is found within the governance " +
"artifact. Path: " + artifact.getPath() + ", Invalid people artifact " +
"name:" + name;
log.error(msg);
throw new GovernanceException(msg);
} else {
list.add(pa);
}
}
}
return list.toArray(new PeopleArtifact[list.size()]);
}
*/
/*
private static void updateAssociations(Registry registry, Set<String> existingAssociationSet,
Set<String> updatedAssociationSet)
throws RegistryException {
Set<String> removedAssociations = new HashSet<String>(existingAssociationSet);
removedAssociations.removeAll(updatedAssociationSet);
Set<String> newAssociations = new HashSet<String>(updatedAssociationSet);
newAssociations.removeAll(existingAssociationSet);
for (String removedAssociation : removedAssociations) {
String[] params = removedAssociation.split(SEPARATOR);
try {
for (int i = 0; i < 2; i++) {
if (GovernanceUtils.retrieveGovernanceArtifactByPath(registry, params[i])
instanceof PeopleArtifact) {
registry.removeAssociation(params[0], params[1], params[2]);
break;
}
}
} catch (GovernanceException ignored) {
}
}
for (String newAssociation : newAssociations) {
String[] params = newAssociation.split(SEPARATOR);
registry.addAssociation(params[0], params[1], params[2]);
}
}
*/
/**
 * Deletes the resource at the given registry path (if it exists) and evicts any
 * cached copy of the artifact from the tenant's artifact cache so stale entries
 * are not served after the resource is gone.
 *
 * @param registry the registry holding the artifact; must be a {@code UserRegistry}
 *                 so the tenant id can be resolved for cache eviction
 * @param path     the registry path of the artifact to remove
 * @throws GovernanceException if the registry delete operation fails
 */
public static void removeArtifactFromPath(Registry registry, String path)
        throws GovernanceException {
    try {
        if (registry.resourceExists(path)) {
            registry.delete(path);
        }
        ArtifactCache artifactCache =
                ArtifactCacheManager.getCacheManager().getTenantArtifactCache(((UserRegistry) registry).getTenantId());
        if (artifactCache != null && path != null && artifactCache.getArtifact(path) != null) {
            artifactCache.invalidateArtifact(path);
        }
    } catch (RegistryException e) {
        // Fixed doubled word in the original message ("deleting the the artifact").
        String msg = "Error in deleting the artifact path:" + path + ".";
        throw new GovernanceException(msg, e);
    }
}
/**
 * Returns the names of the unique attributes declared by the RXT configuration
 * matching the given media type, or {@code null} when no configuration is found.
 *
 * @param registry  the registry to resolve the artifact configuration from
 * @param mediaType the media type identifying the artifact configuration
 * @return the unique attribute names, or {@code null} if no configuration matches
 * @throws GovernanceException if the configuration lookup fails
 */
public static List<String> getUniqueAttributesNames(Registry registry,
                                                    String mediaType) throws GovernanceException {
    GovernanceArtifactConfiguration configuration;
    try {
        configuration = findGovernanceArtifactConfigurationByMediaType(mediaType, registry);
    } catch (RegistryException e) {
        throw new GovernanceException(e);
    }
    return (configuration == null) ? null : configuration.getUniqueAttributes();
}
/**
 * Validates a given artifact to ensure all the mandatory fields are filled.
 * If a mandatory field is left empty this check method will throw an exception
 * indicating the field name to be filled.
 * <p>
 * Note: the method name violates Java naming conventions but is part of the
 * public API and therefore kept as-is.
 *
 * @param registry      the instance of the registry.
 * @param elementString the short name of the artifact type.
 * @param artifact      artifact to be checked for mandatory fields.
 * @throws GovernanceException if a mandatory field is empty or the RXT
 *                             configuration cannot be resolved
 */
public static void CheckMandatoryFields(Registry registry, String elementString, GovernanceArtifact artifact)
        throws GovernanceException {
    // WSDL/Schema/Policy artifacts carry no RXT-defined mandatory fields.
    if (artifact instanceof WsdlImpl || artifact instanceof SchemaImpl || artifact instanceof PolicyImpl) {
        return;
    }
    GovernanceArtifactConfiguration configuration;
    try {
        configuration = GovernanceUtils.findGovernanceArtifactConfiguration(elementString, registry);
    } catch (RegistryException e) {
        throw new GovernanceException("Retrieving RXT configuration for type :" + elementString + "failed.", e);
    }
    if (configuration == null) {
        throw new GovernanceException("Could not find RXT configuration for type :" + elementString);
    }
    List<Map> mandatoryAttributes = configuration.getMandatoryAttributes();
    if (mandatoryAttributes == null) {
        return;
    }
    for (Map map : mandatoryAttributes) {
        String prop = (String) map.get("properties");
        @SuppressWarnings("unchecked")
        List<String> keys = (List<String>) map.get("keys");
        if ("unbounded".equals(prop)) {
            // Unbounded fields store repeated values under a single key; every entry must be filled.
            String[] values = artifact.getAttributes(keys.get(0));
            if (values != null) {
                for (String value : values) {
                    if (value == null || "".equals(value)) {
                        // Stop adding the artifact: a required repeated value is missing.
                        throw new GovernanceException((String) map.get("name") + " is a required field, " +
                                "Please provide a value for this parameter.");
                    }
                }
            }
        } else {
            // Bounded fields may span several keys; join them with ':' and require a
            // non-empty result. (Removed the dead 'value == null' check — the joined
            // string can never be null.)
            StringBuilder value = new StringBuilder();
            for (int j = 0; j < keys.size(); ++j) {
                String v = artifact.getAttribute(keys.get(j));
                if (j != 0) {
                    value.append(':');
                }
                value.append(v == null ? "" : v);
            }
            if ("".equals(value.toString())) {
                // Stop adding the artifact: the required field is empty.
                throw new GovernanceException((String) map.get("name") + " is a required field, " +
                        "Please provide a value for this parameter.");
            }
        }
    }
}
}
| Fix integration test failures
| components/governance/org.wso2.carbon.governance.api/src/main/java/org/wso2/carbon/governance/api/util/GovernanceUtils.java | Fix integration test failures |
|
Java | apache-2.0 | 62c8bd4fcf4af87bd28bc1e71f930eca148e76c6 | 0 | webanno/webanno,webanno/webanno,webanno/webanno,webanno/webanno | /*******************************************************************************
* Copyright 2012
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.tudarmstadt.ukp.clarin.webanno.brat.dao;
import static org.apache.commons.io.IOUtils.closeQuietly;
import static org.apache.commons.io.IOUtils.copyLarge;
import static org.uimafit.factory.AnalysisEngineFactory.createPrimitiveDescription;
import static org.uimafit.pipeline.SimplePipeline.runPipeline;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Resource;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.comparator.LastModifiedFileComparator;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.uima.UIMAException;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.CAS;
import org.apache.uima.collection.CollectionReader;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.PropertyAccessorFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.transaction.annotation.Transactional;
import org.uimafit.factory.AnalysisEngineFactory;
import org.uimafit.factory.CollectionReaderFactory;
import org.uimafit.factory.JCasFactory;
import de.tudarmstadt.ukp.clarin.webanno.api.AnnotationService;
import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocumentState;
import de.tudarmstadt.ukp.clarin.webanno.model.Authority;
import de.tudarmstadt.ukp.clarin.webanno.model.Project;
import de.tudarmstadt.ukp.clarin.webanno.model.ProjectPermissions;
import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument;
import de.tudarmstadt.ukp.clarin.webanno.model.TagSet;
import de.tudarmstadt.ukp.clarin.webanno.model.User;
import de.tudarmstadt.ukp.dkpro.core.api.io.JCasFileWriter_ImplBase;
import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.DocumentMetaData;
import de.tudarmstadt.ukp.dkpro.core.io.bincas.SerializedCasReader;
import de.tudarmstadt.ukp.dkpro.core.io.bincas.SerializedCasWriter;
import eu.clarin.weblicht.wlfxb.io.WLFormatException;
public class RepositoryServiceDbData
implements RepositoryService
{
/**
 * Creates a logger that writes to the per-project log file, prefixing every
 * message with a timestamp and the acting user's name.
 * <p>
 * NOTE: each call attaches a fresh {@link FileAppender} to the shared
 * {@code RepositoryService} logger; callers must invoke
 * {@code removeAllAppenders()} on the returned logger when done, otherwise
 * appenders accumulate and messages are duplicated.
 *
 * @param aProject the project whose log file is targeted
 * @param aUser the user performing the logged action
 * @return the shared logger with the project file appender attached
 * @throws IOException if the log file cannot be opened
 */
public static Logger createLog(Project aProject, User aUser)
    throws IOException
{
    Logger logger = Logger.getLogger(RepositoryService.class);
    String targetLog = dir.getAbsolutePath() + PROJECT + "project-" + aProject.getId() + ".log";
    FileAppender apndr = new FileAppender(new PatternLayout("%d [" + aUser.getUsername()
            + "] %m%n"), targetLog, true);
    logger.addAppender(apndr);
    // Level.ALL is already a Level; the cast in the original was redundant.
    logger.setLevel(Level.ALL);
    return logger;
}
// Provides access to annotation type/tagset metadata (Spring-injected).
@Resource(name = "annotationService")
private AnnotationService annotationService;

// Maximum age (ms) of history backups; files older than this are deleted
// during history pruning. <= 0 disables time-based pruning.
@Value(value = "${backup.keep.time}")
private long backupKeepTime;

// Minimum interval (ms) between two history snapshots of an annotation file;
// <= 0 disables history keeping entirely.
@Value(value = "${backup.interval}")
private long backupInterval;

// Maximum number of history backups kept per user/document; <= 0 means unlimited.
@Value(value = "${backup.keep.number}")
private int backupKeepNumber;

// Configured read/write file format mappings (Spring-injected).
// NOTE(review): not referenced in this chunk — usage presumably elsewhere in the class.
@Resource(name = "formats")
private Properties readWriteFileFormats;

// Path segments used to build repository file-system locations under 'dir'.
private static final String PROJECT = "/project/";
private static final String DOCUMENT = "/document/";
private static final String SOURCE = "/source";
private static final String GUIDELINE = "/guideline/";
private static final String ANNOTATION = "/annotation";
private static final String SETTINGS = "/settings/";

@PersistenceContext
private EntityManager entityManager;

// Repository base directory on disk. NOTE(review): initialization is not visible
// in this chunk — presumably set via a setter or injection elsewhere.
private static File dir;

// The annotation preference properties File name
String annotationPreferencePropertiesFileName;

/*
 * @Resource(name = "formats") private Properties readWriteFileFormats;
 */

// Guards file-system reads/writes of serialized annotation documents
// (see the synchronized blocks in createAnnotationDocumentContent and
// getAnnotationDocumentContent).
private Object lock = new Object();

// Default constructor required for Spring instantiation.
public RepositoryServiceDbData()
{
}
/**
 * Stores the given annotation document: new instances (negative id) are
 * persisted, already-persisted instances are merged.
 */
@Override
@Transactional
public void createAnnotationDocument(AnnotationDocument aAnnotationDocument)
{
    boolean notPersistedYet = aAnnotationDocument.getId() < 0;
    if (notPersistedYet) {
        entityManager.persist(aAnnotationDocument);
    }
    else {
        entityManager.merge(aAnnotationDocument);
    }
}
/**
 * Renames a file.
 *
 * @param aFrom the file to rename.
 * @param aTo the target location.
 * @throws IOException
 *             if the file cannot be renamed.
 * @return the target file.
 */
private File renameFile(File aFrom, File aTo)
    throws IOException
{
    if (!aFrom.renameTo(aTo)) {
        // Fixed grammar in the original message ("Cannot renamed file").
        throw new IOException("Cannot rename file [" + aFrom + "] to [" + aTo + "]");
    }
    // We are not sure if File is mutable. This makes sure we get a new file in any case.
    return new File(aTo.getPath());
}
/**
 * Resolves the folder where the annotations for the given document are stored,
 * creating it on demand.
 *
 * @param aAnnotationDocument the annotation document whose folder is wanted.
 * @return the (existing) annotation folder.
 * @throws IOException
 *             if the folder cannot be created.
 */
private File getAnnotationFolder(AnnotationDocument aAnnotationDocument)
    throws IOException
{
    String relativePath = PROJECT + aAnnotationDocument.getProject().getId() + DOCUMENT
            + aAnnotationDocument.getDocument().getId() + ANNOTATION;
    File annotationFolder = new File(dir, relativePath);
    FileUtils.forceMkdir(annotationFolder);
    return annotationFolder;
}
/**
 * Writes the given CAS to the per-user annotation file ("&lt;username&gt;.ser"),
 * keeping a one-step ".ser.old" backup for crash recovery and maintaining a
 * bounded history of timestamped ".ser.&lt;millis&gt;.bak" snapshots governed by
 * backupInterval, backupKeepNumber and backupKeepTime.
 *
 * @param aJcas the annotation content to store
 * @param aAnnotationDocument the document (determines folder and file name)
 * @param aUser the acting user (for logging only)
 * @throws IOException if writing or backup management fails
 */
@Override
@Transactional
public void createAnnotationDocumentContent(JCas aJcas, AnnotationDocument aAnnotationDocument,
        User aUser)
    throws IOException
{
    // Serialize all file-system access to the annotation folder.
    synchronized (lock) {
        File annotationFolder = getAnnotationFolder(aAnnotationDocument);
        FileUtils.forceMkdir(annotationFolder);

        final String username = aAnnotationDocument.getUser().getUsername();

        File currentVersion = new File(annotationFolder, username + ".ser");
        File oldVersion = new File(annotationFolder, username + ".ser.old");

        // Save current version
        try {
            // Make a backup of the current version of the file before overwriting
            if (currentVersion.exists()) {
                renameFile(currentVersion, oldVersion);
            }

            // Now write the new version to "<username>.ser"
            writeContent(aAnnotationDocument, aJcas);
            createLog(aAnnotationDocument.getProject(), aUser).info(
                    " Updated annotation file [" + aAnnotationDocument.getName() + "] "
                            + "with ID [" + aAnnotationDocument.getDocument().getId()
                            + "] in project ID [" + aAnnotationDocument.getProject().getId()
                            + "]");
            createLog(aAnnotationDocument.getProject(), aUser).removeAllAppenders();

            // If the saving was successful, we delete the old version
            if (oldVersion.exists()) {
                FileUtils.forceDelete(oldVersion);
            }
        }
        catch (IOException e) {
            // If we could not save the new version, restore the old one.
            FileUtils.forceDelete(currentVersion);
            // If this is the first version, there is no old version, so do not restore anything
            if (oldVersion.exists()) {
                renameFile(oldVersion, currentVersion);
            }
            // Now abort anyway
            throw e;
        }

        // Manage history
        if (backupInterval > 0) {
            // Determine the reference point in time based on the current version
            long now = currentVersion.lastModified();

            // Get all history files for the current user
            File[] history = annotationFolder.listFiles(new FileFilter()
            {
                private Matcher matcher = Pattern.compile(
                        Pattern.quote(username) + "\\.ser\\.[0-9]+\\.bak").matcher("");

                @Override
                public boolean accept(File aFile)
                {
                    // Check if the filename matches the pattern given above.
                    return matcher.reset(aFile.getName()).matches();
                }
            });

            // Sort the files (oldest one first)
            Arrays.sort(history, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR);

            // Check if we need to make a new history file
            boolean historyFileCreated = false;
            File historyFile = new File(annotationFolder, username + ".ser." + now + ".bak");
            if (history.length == 0) {
                // If there is no history yet but we should keep history, then we create a
                // history file in any case.
                FileUtils.copyFile(currentVersion, historyFile);
                historyFileCreated = true;
            }
            else {
                // Check if the newest history file is significantly older than the current one
                File latestHistory = history[history.length - 1];
                if (latestHistory.lastModified() + backupInterval < now) {
                    FileUtils.copyFile(currentVersion, historyFile);
                    historyFileCreated = true;
                }
            }

            // Prune history based on number of backup
            if (historyFileCreated) {
                // The new version is not in the history, so we keep that in any case. That
                // means we need to keep one less.
                int toKeep = Math.max(backupKeepNumber - 1, 0);
                if ((backupKeepNumber > 0) && (toKeep < history.length)) {
                    // Copy the oldest files to a new array
                    File[] toRemove = new File[history.length - toKeep];
                    System.arraycopy(history, 0, toRemove, 0, toRemove.length);

                    // Restrict the history to what is left
                    File[] newHistory = new File[toKeep];
                    if (toKeep > 0) {
                        System.arraycopy(history, toRemove.length, newHistory, 0,
                                newHistory.length);
                    }
                    history = newHistory;

                    // Remove these old files
                    for (File file : toRemove) {
                        FileUtils.forceDelete(file);
                        createLog(aAnnotationDocument.getProject(), aUser).info(
                                "Removed surplus history file [" + file.getName() + "] "
                                        + " for document with ID ["
                                        + aAnnotationDocument.getDocument().getId()
                                        + "] in project ID ["
                                        + aAnnotationDocument.getProject().getId() + "]");
                        createLog(aAnnotationDocument.getProject(), aUser).removeAllAppenders();
                    }
                }

                // Prune history based on time
                if (backupKeepTime > 0) {
                    for (File file : history) {
                        if ((file.lastModified() + backupKeepTime) < now) {
                            FileUtils.forceDelete(file);
                            createLog(aAnnotationDocument.getProject(), aUser).info(
                                    "Removed outdated history file [" + file.getName() + "] "
                                            + " for document with ID ["
                                            + aAnnotationDocument.getDocument().getId()
                                            + "] in project ID ["
                                            + aAnnotationDocument.getProject().getId() + "]");
                            createLog(aAnnotationDocument.getProject(), aUser)
                                    .removeAllAppenders();
                        }
                    }
                }
            }
        }
    }
}
/**
 * Persists a new project and creates its directory on disk.
 *
 * @param aProject the project to create
 * @param aUser the acting user (for logging)
 * @throws IOException if the project directory cannot be created or logging fails
 */
@Override
@Transactional
public void createProject(Project aProject, User aUser)
    throws IOException
{
    entityManager.persist(aProject);
    String path = dir.getAbsolutePath() + PROJECT + aProject.getId();
    FileUtils.forceMkdir(new File(path));
    // Reuse a single logger instance: every createLog() call opens a fresh
    // FileAppender, so calling it twice (as before) opened a surplus appender
    // just to remove it again.
    Logger log = createLog(aProject, aUser);
    log.info(" Created Project [" + aProject.getName() + "] with ID [" + aProject.getId()
            + "]");
    log.removeAllAppenders();
}
/**
 * Persists a new project permission and logs the grant.
 *
 * @param aPermission the permission (project, user, level) to store
 * @throws IOException if logging fails
 */
@Override
@Transactional
public void createProjectPermission(ProjectPermissions aPermission)
    throws IOException
{
    entityManager.persist(aPermission);
    // Reuse a single logger instance instead of opening a second FileAppender
    // via a repeated createLog() call. Also dropped the stray duplicated "]"
    // at the end of the original message.
    Logger log = createLog(aPermission.getProject(), aPermission.getUser());
    log.info(" New Permission created on Project[" + aPermission.getProject().getName()
            + "] for user [" + aPermission.getUser().getUsername()
            + "] with permission [" + aPermission.getLevel() + "]");
    log.removeAllAppenders();
}
/**
 * Persists a new source document.
 * <p>
 * NOTE(review): {@code aUser} is not used in this implementation; it is
 * presumably part of the interface contract for logging in other
 * implementations — confirm against {@code RepositoryService}.
 */
@Override
@Transactional
public void createSourceDocument(SourceDocument aDocument, User aUser)
    throws IOException
{
    entityManager.persist(aDocument);
}
/**
 * Checks whether an annotation document exists for the given source document
 * and user (scoped to the document's project).
 *
 * @return {@code true} if a matching annotation document is persisted.
 */
@Override
@Transactional
public boolean existsAnnotationDocument(SourceDocument aDocument, User aUser)
{
    String query = "FROM AnnotationDocument WHERE project = :project "
            + " AND document = :document AND user = :user";
    try {
        // getSingleResult() throws NoResultException when nothing matches.
        entityManager.createQuery(query, AnnotationDocument.class)
                .setParameter("project", aDocument.getProject())
                .setParameter("document", aDocument)
                .setParameter("user", aUser)
                .getSingleResult();
        return true;
    }
    catch (NoResultException ex) {
        return false;
    }
}
/**
 * Checks whether a project with the given name already exists.
 *
 * @return {@code true} if a project named {@code aName} is persisted.
 */
@Override
@Transactional
public boolean existsProject(String aName)
{
    try {
        // getSingleResult() throws NoResultException when no project matches.
        entityManager.createQuery("FROM Project WHERE name = :name", Project.class)
                .setParameter("name", aName)
                .getSingleResult();
        return true;
    }
    catch (NoResultException ex) {
        return false;
    }
}
/**
 * A new directory is created using UUID so that every exported file will reside in its own
 * directory. This is useful as the written file can have multiple extensions based on the
 * Writer class used.
 *
 * NOTE(review): {@code aWriter} is a raw {@code Class}; presumably it must be a
 * JCasFileWriter_ImplBase subclass — confirm and consider a bounded type.
 */
@Override
@Transactional
public File exportAnnotationDocument(SourceDocument aDocument, Project aProject, User aUser,
        Class aWriter, String aFileName)
    throws UIMAException, IOException, WLFormatException, ClassNotFoundException
{
    // createTempFile + delete + mkdirs turns the unique temp *file* name into a
    // directory. NOTE(review): this is a known race-prone idiom — confirm whether
    // a safer temp-directory mechanism is available in the target JDK.
    File exportTempDir = File.createTempFile("webanno", "export");
    exportTempDir.delete();
    exportTempDir.mkdirs();

    File annotationFolder = getAnnotationFolder(getAnnotationDocument(aDocument, aUser));
    String serializedCaseFileName = aUser.getUsername() + ".ser";

    // Read the user's serialized CAS from the annotation folder.
    CollectionReader reader = CollectionReaderFactory
            .createCollectionReader(SerializedCasReader.class, SerializedCasReader.PARAM_PATH,
                    annotationFolder, SerializedCasReader.PARAM_PATTERNS, new String[] { "[+]"
                            + serializedCaseFileName });
    if (!reader.hasNext()) {
        throw new FileNotFoundException("Annotation file [" + serializedCaseFileName
                + "] not found in [" + annotationFolder + "]");
    }
    // Configure the requested writer to emit into the temp directory.
    AnalysisEngineDescription writer = createPrimitiveDescription(aWriter,
            JCasFileWriter_ImplBase.PARAM_PATH, exportTempDir,
            JCasFileWriter_ImplBase.PARAM_STRIP_EXTENSION, true);

    CAS cas = JCasFactory.createJCas().getCas();
    reader.getNext(cas);

    // Get the original TCF file and preserve it
    DocumentMetaData documentMetadata = DocumentMetaData.get(cas.getJCas());
    // Update the source file name in case it is changed for some reason
    File currentDocumentUri = new File(dir.getAbsolutePath() + PROJECT + aProject.getId()
            + DOCUMENT + aDocument.getId() + SOURCE);
    // NOTE(review): setDocumentUri is called twice; only the second value survives.
    documentMetadata.setDocumentUri(new File(currentDocumentUri, aFileName).toURI().toURL()
            .toExternalForm());
    documentMetadata.setDocumentBaseUri(currentDocumentUri.toURI().toURL().toExternalForm());
    documentMetadata.setCollectionId(currentDocumentUri.toURI().toURL().toExternalForm());
    documentMetadata.setDocumentUri(new File(dir.getAbsolutePath() + PROJECT + aProject.getId()
            + DOCUMENT + aDocument.getId() + SOURCE + "/" + aFileName).toURI().toURL()
            .toExternalForm());

    runPipeline(cas, writer);
    createLog(aProject, aUser).info(
            " Exported file [" + aDocument.getName() + "] with ID [" + aDocument.getId()
                    + "] from Project[" + aProject.getId() + "]");
    createLog(aProject, aUser).removeAllAppenders();

    // If the writer produced multiple files, bundle them into a zip.
    // NOTE(review): the zip is created relative to the current working directory
    // (getName(), not getAbsolutePath()), and on failure the method still returns
    // the (then nonexistent) zip file — verify intended behavior.
    if (exportTempDir.listFiles().length > 1) {
        try {
            DaoUtils.zipFolder(exportTempDir, new File(exportTempDir.getName() + ".zip"));
        }
        catch (Exception e) {
            createLog(aProject, aUser).info("Unable to create Zip File");
        }
        return new File(exportTempDir.getName() + ".zip");
    }
    return exportTempDir.listFiles()[0];
}
/**
 * Fetches the annotation document for the given source document and user,
 * scoped to the document's project.
 *
 * @return the matching annotation document (NoResultException does not
 *         trigger a rollback).
 */
@Override
@Transactional(noRollbackFor = NoResultException.class)
public AnnotationDocument getAnnotationDocument(SourceDocument aDocument, User aUser)
{
    String query = "FROM AnnotationDocument WHERE document = :document AND " + "user =:user"
            + " AND project = :project";
    return entityManager.createQuery(query, AnnotationDocument.class)
            .setParameter("document", aDocument)
            .setParameter("user", aUser)
            .setParameter("project", aDocument.getProject())
            .getSingleResult();
}
/**
 * Loads the serialized CAS ("&lt;username&gt;.ser") for the given annotation
 * document from its annotation folder.
 *
 * @return the deserialized JCas.
 * @throws IOException declared but in practice wrapped (see below)
 * @throws UIMAException declared but in practice wrapped (see below)
 */
@Override
@Transactional
public JCas getAnnotationDocumentContent(AnnotationDocument aAnnotationDocument)
    throws IOException, UIMAException, ClassNotFoundException
{
    // Serialize file-system access with the writers (same lock as
    // createAnnotationDocumentContent).
    synchronized (lock) {
        File annotationFolder = getAnnotationFolder(aAnnotationDocument);

        String file = aAnnotationDocument.getUser().getUsername() + ".ser";

        try {
            CAS cas = JCasFactory.createJCas().getCas();
            CollectionReader reader = CollectionReaderFactory.createCollectionReader(
                    SerializedCasReader.class, SerializedCasReader.PARAM_PATH,
                    annotationFolder, SerializedCasReader.PARAM_PATTERNS, new String[] { "[+]"
                            + file });
            if (!reader.hasNext()) {
                // NOTE(review): FileNotFoundException is an IOException, so this is
                // caught below and rethrown wrapped in DataRetrievalFailureException —
                // the original message is preserved only as the cause.
                throw new FileNotFoundException("Annotation file [" + file + "] not found in ["
                        + annotationFolder
                        + "]. Report the incident to the Project Administrator");
            }
            reader.getNext(cas);
            return cas.getJCas();
        }
        catch (IOException e) {
            throw new DataRetrievalFailureException("Unable to parse annotation", e);
        }
        catch (UIMAException e) {
            throw new DataRetrievalFailureException("Unable to parse annotation", e);
        }
        /*
         * catch (SAXException e) { throw new
         * DataRetrievalFailureException("Unable to parse annotation", e); }
         */
    }
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public List<Authority> getAuthorities(User aUser)
{
return entityManager.createQuery("FROM Authority where user =:user", Authority.class)
.setParameter("user", aUser).getResultList();
}
    /**
     * Returns the repository root directory under which all projects, documents and settings
     * are stored.
     */
    @Override
    public File getDir()
    {
        return dir;
    }
@Override
public File getGuideline(Project aProject, String aFilename)
{
return new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE + aFilename);
}
    /**
     * Returns the single permission level the user holds on the project.
     *
     * Throws {@code NoResultException} when the user has no permission on the project
     * (no rollback for that case).
     */
    @Override
    @Transactional(noRollbackFor = NoResultException.class)
    public String getPermisionLevel(User aUser, Project aProject)
    {
        return entityManager
                .createQuery(
                        "Select level FROM ProjectPermissions WHERE user =:user AND "
                                + "project =:project", String.class).setParameter("user", aUser)
                .setParameter("project", aProject).getSingleResult();
    }
    /**
     * Returns all permission levels the user holds on the project (a user may hold several,
     * e.g. admin and annotator). Empty list when the user has none.
     */
    @Override
    @Transactional(noRollbackFor = NoResultException.class)
    public List<String> listProjectPermisionLevels(User aUser, Project aProject)
    {
        return entityManager
                .createQuery(
                        "Select level FROM ProjectPermissions WHERE user =:user AND "
                                + "project =:project", String.class).setParameter("user", aUser)
                .setParameter("project", aProject).getResultList();
    }
    /**
     * Returns the {@link ProjectPermissions} entity linking the user to the project.
     *
     * NOTE(review): unlike the other single-result lookups in this class, this one is not
     * annotated with {@code noRollbackFor = NoResultException.class} — a missing permission
     * will mark the transaction rollback-only; confirm this is intended.
     */
    @Override
    @Transactional
    public ProjectPermissions getProjectPermission(User aUser, Project aProject)
    {
        return entityManager
                .createQuery(
                        "FROM ProjectPermissions WHERE user =:user AND " + "project =:project",
                        ProjectPermissions.class).setParameter("user", aUser)
                .setParameter("project", aProject).getSingleResult();
    }
@Override
@Transactional
public Project getProject(String aName)
{
return entityManager.createQuery("FROM Project WHERE name = :name", Project.class)
.setParameter("name", aName).getSingleResult();
}
    /**
     * Looks up a project by its database ID. Throws {@code NoResultException} when absent.
     *
     * NOTE(review): unlike {@link #getProject(String)} this overload carries no
     * {@code @Transactional} annotation and no {@code @Override} — confirm whether that is
     * intentional or an omission.
     */
    public Project getProject(long aId)
    {
        return entityManager.createQuery("FROM Project WHERE id = :id", Project.class)
                .setParameter("id", aId).getSingleResult();
    }
@Override
public void writeGuideline(Project aProject, File aContent, String aFileName)
throws IOException
{
String guidelinePath = dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE;
FileUtils.forceMkdir(new File(guidelinePath));
copyLarge(new FileInputStream(aContent), new FileOutputStream(new File(guidelinePath
+ aFileName)));
}
    /**
     * Lists all permission entries (all users, all levels) attached to the project.
     */
    @Override
    @Transactional(noRollbackFor = NoResultException.class)
    public List<ProjectPermissions> getProjectPermisions(Project aProject)
    {
        return entityManager
                .createQuery("FROM ProjectPermissions WHERE project =:project",
                        ProjectPermissions.class).setParameter("project", aProject).getResultList();
    }
    /**
     * Looks up a source document by name within a project. Throws {@code NoResultException}
     * when absent (no rollback for that case).
     */
    @Override
    @Transactional(noRollbackFor = NoResultException.class)
    public SourceDocument getSourceDocument(String aDocumentName, Project aProject)
    {
        return entityManager
                .createQuery("FROM SourceDocument WHERE name = :name AND project =:project",
                        SourceDocument.class).setParameter("name", aDocumentName)
                .setParameter("project", aProject).getSingleResult();
    }
@Override
@Transactional
public File getSourceDocumentContent(Project aProject, SourceDocument aDocument)
{
String path = dir.getAbsolutePath() + PROJECT + aProject.getId() + DOCUMENT
+ aDocument.getId() + SOURCE;
return new File(path + "/" + aDocument.getName());
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public User getUser(String aUsername)
{
return entityManager.createQuery("FROM User WHERE username =:username", User.class)
.setParameter("username", aUsername).getSingleResult();
}
    /**
     * Lists all annotation documents (one per annotating user) for the given source document.
     */
    @Override
    @Transactional(noRollbackFor = NoResultException.class)
    public List<AnnotationDocument> listAnnotationDocument(SourceDocument aDocument)
    {
        return entityManager
                .createQuery("FROM AnnotationDocument WHERE document = :document",
                        AnnotationDocument.class).setParameter("document", aDocument)
                .getResultList();
    }
@Override
public List<String> listAnnotationGuidelineDocument(Project aProject)
{
// list all guideline files
File[] files = new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE)
.listFiles();
// Name of the guideline files
List<String> annotationGuidelineFiles = new ArrayList<String>();
if (files != null) {
for (File file : files) {
annotationGuidelineFiles.add(file.getName());
}
}
return annotationGuidelineFiles;
}
    /**
     * Lists every annotation document in the repository, across all projects and users.
     */
    @Override
    @Transactional
    public List<AnnotationDocument> listAnnotationDocument()
    {
        return entityManager.createQuery("From AnnotationDocument", AnnotationDocument.class)
                .getResultList();
    }
    /**
     * Lists all annotation documents belonging to the given project.
     */
    @Override
    @Transactional
    public List<AnnotationDocument> listAnnotationDocument(Project aProject)
    {
        return entityManager
                .createQuery("FROM AnnotationDocument WHERE project = :project",
                        AnnotationDocument.class).setParameter("project", aProject).getResultList();
    }
    /**
     * Returns the names of the user's annotation documents in the project that are in the
     * given state (despite the method name, the state is a parameter, not fixed to "finished").
     */
    @Override
    @Transactional
    public List<String> listFinishedAnnotationDocuments(Project aProject, User aUser,
            AnnotationDocumentState aState)
    {
        return entityManager
                .createQuery(
                        "SELECT name FROM AnnotationDocument WHERE project = :project AND "
                                + "user =:user AND state =:state", String.class)
                .setParameter("project", aProject).setParameter("user", aUser)
                .setParameter("state", aState).getResultList();
    }
@Override
@Transactional
public List<Project> listProjects()
{
return entityManager.createQuery("FROM Project", Project.class).getResultList();
}
@Override
@Transactional
public List<String> listProjectUserNames(Project aproject)
{
List<String> users = entityManager
.createQuery(
"SELECT i.username FROM Project s JOIN s.users i WHERE s.id = :projectId",
String.class).setParameter("projectId", aproject.getId()).getResultList();
return users;
}
    /**
     * Returns the {@link User} entities that are members of the given project.
     */
    @Override
    @Transactional
    public List<User> listProjectUsers(Project aproject)
    {
        return entityManager
                .createQuery("SELECT i FROM Project s JOIN s.users i WHERE s.id = :projectId",
                        User.class).setParameter("projectId", aproject.getId()).getResultList();
    }
    /**
     * Lists all source documents uploaded to the given project.
     */
    @Override
    @Transactional(noRollbackFor = NoResultException.class)
    public List<SourceDocument> listSourceDocuments(Project aProject)
    {
        return entityManager
                .createQuery("FROM SourceDocument where project =:project", SourceDocument.class)
                .setParameter("project", aProject).getResultList();
    }
    /**
     * Lists every user known to the system.
     */
    @Override
    @Transactional
    public List<User> listUsers()
    {
        return entityManager.createQuery("FROM User", User.class).getResultList();
    }
@Override
public Properties loadUserSettings(String aUsername, Project aProject, String aSubject)
throws FileNotFoundException, IOException
{
Properties property = new Properties();
property.load(new FileInputStream(new File(dir.getAbsolutePath() + PROJECT
+ aProject.getId() + SETTINGS + aUsername + "/"
+ annotationPreferencePropertiesFileName)));
return property;
}
    /**
     * Removes a project entirely: all its source documents (and their annotation documents),
     * all tag sets, the project directory on disk, all permission entries, and finally the
     * project entity itself. The order matters: children are removed before the parent entity.
     *
     * @throws IOException if a document or directory cannot be deleted.
     */
    @Override
    @Transactional
    public void removeProject(Project aProject, User aUser)
        throws IOException
    {
        // Remove dependent entities first so no orphans remain in the database.
        for (SourceDocument document : listSourceDocuments(aProject)) {
            removeSourceDocument(document, aUser);
            // removeAnnotationDocument(document);
        }

        for (TagSet tagset : annotationService.listTagSets(aProject)) {
            annotationService.removeTagSet(tagset);
        }
        // remove the project directory from the file system
        String path = dir.getAbsolutePath() + PROJECT + aProject.getId();
        try {
            FileUtils.forceDelete(new File(path));
        }
        catch (FileNotFoundException e) {
            // Directory may never have been created (e.g. empty project) - not fatal.
            createLog(aProject, aUser).warn(
                    "Project directory to be deleted was not found: [" + path + "]. Ignoring.");
        }

        for (ProjectPermissions permisions : getProjectPermisions(aProject)) {
            entityManager.remove(permisions);
        }
        // remove metadata from DB
        entityManager.remove(aProject);
        // NOTE(review): logging uses the entity after remove(); the in-memory object still
        // carries its id/name, and the log file is recreated by createLog - confirm intended.
        createLog(aProject, aUser).info(
                " Removed Project [" + aProject.getName() + "] with ID [" + aProject.getId() + "]");
        createLog(aProject, aUser).removeAllAppenders();
    }
@Override
public void removeAnnotationGuideline(Project aProject, String aFileName)
throws IOException
{
FileUtils.forceDelete(new File(dir.getAbsolutePath() + PROJECT + aProject.getId()
+ GUIDELINE + aFileName));
}
    /**
     * Removes a single permission entry and writes an audit line to the project log.
     */
    @Override
    @Transactional
    public void removeProjectPermission(ProjectPermissions projectPermission)
        throws IOException
    {
        entityManager.remove(projectPermission);
        createLog(projectPermission.getProject(), projectPermission.getUser()).info(
                " Removed Project Permission [" + projectPermission.getLevel() + "] for the USer ["
                        + projectPermission.getUser().getUsername() + "] From project ["
                        + projectPermission.getProject().getId() + "]");
        createLog(projectPermission.getProject(), projectPermission.getUser()).removeAllAppenders();
    }
    /**
     * Removes a source document: the user's annotation document entity (if any), the source
     * document entity, and the whole on-disk document folder (source file plus annotations).
     *
     * NOTE(review): only aUser's annotation document is removed from the DB; annotation
     * document entities of other users survive even though their files are deleted - confirm.
     *
     * @throws IOException if the document folder cannot be deleted.
     */
    @Override
    @Transactional
    public void removeSourceDocument(SourceDocument aDocument, User aUser)
        throws IOException
    {
        // remove metadata from DB
        if (existsAnnotationDocument(aDocument, aUser)) {
            entityManager.remove(getAnnotationDocument(aDocument, aUser));
        }
        entityManager.remove(aDocument);

        String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT
                + aDocument.getId();
        // remove from file both source and related annotation file
        FileUtils.forceDelete(new File(path));

        createLog(aDocument.getProject(), aUser).info(
                " Removed Document [" + aDocument.getName() + "] with ID [" + aDocument.getId()
                        + "] from Project [" + aDocument.getProject().getId() + "]");
        createLog(aDocument.getProject(), aUser).removeAllAppenders();
    }
    /**
     * Sets the repository root directory (injected from configuration; note the target
     * field is static).
     */
    public void setDir(File aDir)
    {
        dir = aDir;
    }
@Override
public <T> void saveUserSettings(String aUsername, Project aProject, String aSubject,
T aConfigurationObject)
throws IOException
{
BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(aConfigurationObject);
Properties property = new Properties();
for (PropertyDescriptor value : wrapper.getPropertyDescriptors()) {
property.setProperty(aSubject + "." + value.getName(),
wrapper.getPropertyValue(value.getName()).toString());
}
String propertiesPath = dir.getAbsolutePath() + PROJECT + aProject.getId() + SETTINGS
+ aUsername;
FileUtils.forceMkdir(new File(propertiesPath));
property.save(new FileOutputStream(new File(propertiesPath,
annotationPreferencePropertiesFileName)), null);
createLog(aProject, getUser(aUsername)).info(
" Saved preferences file [" + annotationPreferencePropertiesFileName
+ "] for project [" + aProject.getName() + "] with ID [" + aProject.getId()
+ "] to location: [" + propertiesPath + "]");
createLog(aProject, getUser(aUsername)).removeAllAppenders();
}
    /**
     * Copies an uploaded source file into the repository at
     * {@code <repo>/project/<projectId>/document/<documentId>/source/<name>}, creating the
     * folder if needed, and writes an audit line to the project log.
     *
     * @param aFile the uploaded file to copy from.
     * @throws IOException if the folder cannot be created or the copy fails.
     */
    @Override
    @Transactional
    public void uploadSourceDocument(File aFile, SourceDocument aDocument, long aProjectId,
            User aUser)
        throws IOException
    {
        String path = dir.getAbsolutePath() + PROJECT + aProjectId + DOCUMENT + aDocument.getId()
                + SOURCE;
        FileUtils.forceMkdir(new File(path));
        File newTcfFile = new File(path, aDocument.getName());

        InputStream is = null;
        OutputStream os = null;
        try {
            os = new FileOutputStream(newTcfFile);
            is = new FileInputStream(aFile);
            copyLarge(is, os);
        }
        finally {
            // Always release both handles, even if the copy fails part-way.
            closeQuietly(os);
            closeQuietly(is);
        }

        createLog(aDocument.getProject(), aUser).info(
                " Imported file [" + aDocument.getName() + "] with ID [" + aDocument.getId()
                        + "] to Project [" + aDocument.getProject().getId() + "]");
        createLog(aDocument.getProject(), aUser).removeAllAppenders();
    }
    /**
     * Serializes the given JCas to the annotation folder as a binary CAS. The document id in
     * the metadata is set to the annotating user's name, so the SerializedCasWriter (configured
     * with PARAM_USE_DOCUMENT_ID) names the output file {@code <username>.ser}.
     *
     * @throws IOException wrapping any UIMA initialization or processing failure.
     */
    private void writeContent(AnnotationDocument aAnnotationDocument, JCas aJcas)
        throws IOException
    {
        try {
            File targetPath = getAnnotationFolder(aAnnotationDocument);
            AnalysisEngine writer = AnalysisEngineFactory.createPrimitive(
                    SerializedCasWriter.class, SerializedCasWriter.PARAM_PATH, targetPath,
                    SerializedCasWriter.PARAM_USE_DOCUMENT_ID, true);
            DocumentMetaData md;
            try {
                md = DocumentMetaData.get(aJcas);
            }
            catch (IllegalArgumentException e) {
                // No metadata annotation present yet - create one.
                md = DocumentMetaData.create(aJcas);
            }
            md.setDocumentId(aAnnotationDocument.getUser().getUsername());
            writer.process(aJcas);
        }
        catch (ResourceInitializationException e) {
            throw new IOException(e);
        }
        catch (AnalysisEngineProcessException e) {
            throw new IOException(e);
        }
    }
@Override
public Map<String, Class> getReadableFormats()
throws ClassNotFoundException
{
Map<String, Class> readableFormats = new HashMap<String, Class>();
Set<String> key = (Set) readWriteFileFormats.keySet();
for (String keyvalue : key) {
if (keyvalue.contains(".label")) {
String readerLabel = keyvalue.substring(0, keyvalue.lastIndexOf(".label"));
if (readWriteFileFormats.getProperty(readerLabel + ".reader") != null) {
readableFormats.put(readWriteFileFormats.getProperty(keyvalue), Class
.forName(readWriteFileFormats.getProperty(readerLabel + ".reader")));
}
}
}
return readableFormats;
}
@Override
public Map<String, Class> getWritableFormats()
throws ClassNotFoundException
{
Map<String, Class> writableFormats = new HashMap<String, Class>();
Set<String> keys = (Set) readWriteFileFormats.keySet();
for (String key : keys) {
if (key.contains(".label")) {
String writerLabel = key.substring(0, key.lastIndexOf(".label"));
if (readWriteFileFormats.getProperty(writerLabel + ".writer") != null) {
writableFormats.put(readWriteFileFormats.getProperty(key), Class
.forName(readWriteFileFormats.getProperty(writerLabel + ".writer")));
}
}
}
return writableFormats;
}
    /**
     * Returns the configured file name for the per-user annotation preference properties.
     */
    public String getAnnotationPreferencePropertiesFileName()
    {
        return annotationPreferencePropertiesFileName;
    }

    /**
     * Sets the file name for the per-user annotation preference properties (injected from
     * configuration).
     */
    public void setAnnotationPreferencePropertiesFileName(
            String aAnnotationPreferencePropertiesFileName)
    {
        annotationPreferencePropertiesFileName = aAnnotationPreferencePropertiesFileName;
    }
}
| de.tudarmstadt.ukp.clarin.webanno.webapp/src/main/java/de/tudarmstadt/ukp/clarin/webanno/brat/dao/RepositoryServiceDbData.java | /*******************************************************************************
* Copyright 2012
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.tudarmstadt.ukp.clarin.webanno.brat.dao;
import static org.apache.commons.io.IOUtils.closeQuietly;
import static org.apache.commons.io.IOUtils.copyLarge;
import static org.uimafit.factory.AnalysisEngineFactory.createPrimitiveDescription;
import static org.uimafit.pipeline.SimplePipeline.runPipeline;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Resource;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.comparator.LastModifiedFileComparator;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.uima.UIMAException;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.CAS;
import org.apache.uima.collection.CollectionReader;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.PropertyAccessorFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.transaction.annotation.Transactional;
import org.uimafit.factory.AnalysisEngineFactory;
import org.uimafit.factory.CollectionReaderFactory;
import org.uimafit.factory.JCasFactory;
import de.tudarmstadt.ukp.clarin.webanno.api.AnnotationService;
import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocumentState;
import de.tudarmstadt.ukp.clarin.webanno.model.Authority;
import de.tudarmstadt.ukp.clarin.webanno.model.Project;
import de.tudarmstadt.ukp.clarin.webanno.model.ProjectPermissions;
import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument;
import de.tudarmstadt.ukp.clarin.webanno.model.TagSet;
import de.tudarmstadt.ukp.clarin.webanno.model.User;
import de.tudarmstadt.ukp.dkpro.core.api.io.JCasFileWriter_ImplBase;
import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.DocumentMetaData;
import de.tudarmstadt.ukp.dkpro.core.io.bincas.SerializedCasReader;
import de.tudarmstadt.ukp.dkpro.core.io.bincas.SerializedCasWriter;
import eu.clarin.weblicht.wlfxb.io.WLFormatException;
public class RepositoryServiceDbData
implements RepositoryService
{
public static Logger createLog(Project aProject, User aUser)
throws IOException
{
Logger logger = Logger.getLogger(RepositoryService.class);
String targetLog = dir.getAbsolutePath() + PROJECT + "project-" + aProject.getId() + ".log";
FileAppender apndr = new FileAppender(new PatternLayout("%d [" + aUser.getUsername()
+ "] %m%n"), targetLog, true);
logger.addAppender(apndr);
logger.setLevel((Level) Level.ALL);
return logger;
}
    @Resource(name = "annotationService")
    private AnnotationService annotationService;

    // Backup policy for annotation files, injected from configuration: how long backups
    // are kept, the minimum interval between backups, and how many backups to keep.
    @Value(value = "${backup.keep.time}")
    private long backupKeepTime;

    @Value(value = "${backup.interval}")
    private long backupInterval;

    @Value(value = "${backup.keep.number}")
    private int backupKeepNumber;

    // Maps file format ids to label/reader/writer entries; consumed by
    // getReadableFormats()/getWritableFormats().
    @Resource(name = "formats")
    private Properties readWriteFileFormats;

    // Relative path fragments used to build the on-disk repository layout.
    private static final String PROJECT = "/project/";
    private static final String DOCUMENT = "/document/";
    private static final String SOURCE = "/source";
    private static final String GUIDELINE = "/guideline/";
    private static final String ANNOTATION = "/annotation";
    private static final String SETTINGS = "/settings/";

    @PersistenceContext
    private EntityManager entityManager;

    // Repository root directory; static because the static createLog(...) reads it.
    private static File dir;

    // The annotation preference properties File name
    String annotationPreferencePropertiesFileName;

    /*
     * @Resource(name = "formats") private Properties readWriteFileFormats;
     */

    // Guards all reads/writes of annotation files on disk.
    private Object lock = new Object();

    public RepositoryServiceDbData()
    {

    }
    /**
     * Persists a new annotation document entity (metadata only; the CAS content is written
     * separately via createAnnotationDocumentContent).
     */
    @Override
    @Transactional
    public void createAnnotationDocument(AnnotationDocument aAnnotationDocument)
    {
        entityManager.persist(aAnnotationDocument);
    }
/**
* Renames a file.
*
* @throws IOException
* if the file cannot be renamed.
* @return the target file.
*/
private File renameFile(File aFrom, File aTo)
throws IOException
{
if (!aFrom.renameTo(aTo)) {
throw new IOException("Cannot renamed file [" + aFrom + "] to [" + aTo + "]");
}
// We are not sure if File is mutable. This makes sure we get a new file in any case.
return new File(aTo.getPath());
}
    /**
     * Get the folder where the annotations are stored. Creates the folder if necessary.
     * Layout: {@code <repo>/project/<projectId>/document/<documentId>/annotation}.
     *
     * @throws IOException
     *             if the folder cannot be created.
     */
    private File getAnnotationFolder(AnnotationDocument aAnnotationDocument)
        throws IOException
    {
        File annotationFolder = new File(dir, PROJECT + aAnnotationDocument.getProject().getId()
                + DOCUMENT + aAnnotationDocument.getDocument().getId() + ANNOTATION);
        FileUtils.forceMkdir(annotationFolder);
        return annotationFolder;
    }
    /**
     * Writes the annotation JCas for the given annotation document to disk as
     * {@code <username>.ser}. During the write the previous version is kept as
     * {@code <username>.ser.old} so it can be restored on failure. Afterwards, timestamped
     * {@code <username>.ser.<millis>.bak} history files are created and pruned according to
     * the configured backup policy (backupInterval / backupKeepNumber / backupKeepTime).
     *
     * @throws IOException
     *             if the file cannot be written, renamed, copied or deleted.
     */
    @Override
    @Transactional
    public void createAnnotationDocumentContent(JCas aJcas, AnnotationDocument aAnnotationDocument,
            User aUser)
        throws IOException
    {
        // All disk access to annotation files is serialized through this lock.
        synchronized (lock) {
            File annotationFolder = getAnnotationFolder(aAnnotationDocument);
            FileUtils.forceMkdir(annotationFolder);

            final String username = aAnnotationDocument.getUser().getUsername();

            File currentVersion = new File(annotationFolder, username + ".ser");
            File oldVersion = new File(annotationFolder, username + ".ser.old");

            // Save current version
            try {
                // Make a backup of the current version of the file before overwriting
                if (currentVersion.exists()) {
                    renameFile(currentVersion, oldVersion);
                }

                // Now write the new version to "<username>.ser"
                writeContent(aAnnotationDocument, aJcas);
                createLog(aAnnotationDocument.getProject(), aUser).info(
                        " Updated annotation file [" + aAnnotationDocument.getName() + "] "
                                + "with ID [" + aAnnotationDocument.getDocument().getId()
                                + "] in project ID [" + aAnnotationDocument.getProject().getId()
                                + "]");
                createLog(aAnnotationDocument.getProject(), aUser).removeAllAppenders();

                // If the saving was successful, we delete the old version
                if (oldVersion.exists()) {
                    FileUtils.forceDelete(oldVersion);
                }
            }
            catch (IOException e) {
                // If we could not save the new version, restore the old one.
                FileUtils.forceDelete(currentVersion);
                // If this is the first version, there is no old version, so do not restore anything
                if (oldVersion.exists()) {
                    renameFile(oldVersion, currentVersion);
                }
                // Now abort anyway
                throw e;
            }

            // Manage history
            if (backupInterval > 0) {
                // Determine the reference point in time based on the current version
                long now = currentVersion.lastModified();

                // Get all history files for the current user
                File[] history = annotationFolder.listFiles(new FileFilter()
                {
                    private Matcher matcher = Pattern.compile(
                            Pattern.quote(username) + "\\.ser\\.[0-9]+\\.bak").matcher("");

                    @Override
                    public boolean accept(File aFile)
                    {
                        // Check if the filename matches the pattern given above.
                        return matcher.reset(aFile.getName()).matches();
                    }
                });

                // Sort the files (oldest one first)
                Arrays.sort(history, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR);

                // Check if we need to make a new history file
                boolean historyFileCreated = false;
                File historyFile = new File(annotationFolder, username + ".ser." + now + ".bak");
                if (history.length == 0) {
                    // If there is no history yet but we should keep history, then we create a
                    // history file in any case.
                    FileUtils.copyFile(currentVersion, historyFile);
                    historyFileCreated = true;
                }
                else {
                    // Check if the newest history file is significantly older than the current one
                    File latestHistory = history[history.length - 1];
                    if (latestHistory.lastModified() + backupInterval < now) {
                        FileUtils.copyFile(currentVersion, historyFile);
                        historyFileCreated = true;
                    }
                }

                // Prune history based on number of backup
                if (historyFileCreated) {
                    // The new version is not in the history, so we keep that in any case. That
                    // means we need to keep one less.
                    int toKeep = Math.max(backupKeepNumber - 1, 0);
                    if ((backupKeepNumber > 0) && (toKeep < history.length)) {
                        // Copy the oldest files to a new array
                        File[] toRemove = new File[history.length - toKeep];
                        System.arraycopy(history, 0, toRemove, 0, toRemove.length);

                        // Restrict the history to what is left
                        File[] newHistory = new File[toKeep];
                        if (toKeep > 0) {
                            System.arraycopy(history, toRemove.length, newHistory, 0,
                                    newHistory.length);
                        }
                        history = newHistory;

                        // Remove these old files
                        for (File file : toRemove) {
                            FileUtils.forceDelete(file);
                            createLog(aAnnotationDocument.getProject(), aUser).info(
                                    "Removed surplus history file [" + file.getName() + "] "
                                            + " for document with ID ["
                                            + aAnnotationDocument.getDocument().getId()
                                            + "] in project ID ["
                                            + aAnnotationDocument.getProject().getId() + "]");
                            createLog(aAnnotationDocument.getProject(), aUser).removeAllAppenders();
                        }
                    }

                    // Prune history based on time
                    if (backupKeepTime > 0) {
                        for (File file : history) {
                            if ((file.lastModified() + backupKeepTime) < now) {
                                FileUtils.forceDelete(file);
                                createLog(aAnnotationDocument.getProject(), aUser).info(
                                        "Removed outdated history file [" + file.getName() + "] "
                                                + " for document with ID ["
                                                + aAnnotationDocument.getDocument().getId()
                                                + "] in project ID ["
                                                + aAnnotationDocument.getProject().getId() + "]");
                                createLog(aAnnotationDocument.getProject(), aUser)
                                        .removeAllAppenders();
                            }
                        }
                    }
                }
            }
        }
    }
    /**
     * Persists a new project entity, creates its directory on disk
     * ({@code <repo>/project/<id>}) and writes an audit line to the project log.
     *
     * @throws IOException if the project directory or log cannot be created.
     */
    @Override
    @Transactional
    public void createProject(Project aProject, User aUser)
        throws IOException
    {
        // Persist first so the generated project id is available for the directory name.
        entityManager.persist(aProject);
        String path = dir.getAbsolutePath() + PROJECT + aProject.getId();
        FileUtils.forceMkdir(new File(path));
        createLog(aProject, aUser)
                .info(" Created Project [" + aProject.getName() + "] with ID [" + aProject.getId()
                        + "]");
        createLog(aProject, aUser).removeAllAppenders();
    }
    /**
     * Persists a new project permission entry and writes an audit line to the project log.
     */
    @Override
    @Transactional
    public void createProjectPermission(ProjectPermissions aPermission)
        throws IOException
    {
        entityManager.persist(aPermission);
        createLog(aPermission.getProject(), aPermission.getUser()).info(
                " New Permission created on Project[" + aPermission.getProject().getName()
                        + "] for user [" + aPermission.getUser().getUsername()
                        + "] with permission [" + aPermission.getLevel() + "]" + "]");
        createLog(aPermission.getProject(), aPermission.getUser()).removeAllAppenders();
    }
    /**
     * Persists a new source document entity (metadata only; the file content is stored via
     * uploadSourceDocument). The {@code aUser} parameter is currently unused here.
     */
    @Override
    @Transactional
    public void createSourceDocument(SourceDocument aDocument, User aUser)
        throws IOException
    {
        entityManager.persist(aDocument);
    }
@Override
@Transactional
public boolean existsAnnotationDocument(SourceDocument aDocument, User aUser)
{
try {
entityManager
.createQuery(
"FROM AnnotationDocument WHERE project = :project "
+ " AND document = :document AND user = :user",
AnnotationDocument.class)
.setParameter("project", aDocument.getProject())
.setParameter("document", aDocument).setParameter("user", aUser)
.getSingleResult();
return true;
}
catch (NoResultException ex) {
return false;
}
}
@Override
@Transactional
public boolean existsProject(String aName)
{
try {
entityManager.createQuery("FROM Project WHERE name = :name", Project.class)
.setParameter("name", aName).getSingleResult();
return true;
}
catch (NoResultException ex) {
return false;
}
}
    /**
     * A new directory is created using UUID so that every exported file will reside in its own
     * directory. This is useful as the written file can have multiple extensions based on the
     * Writer class used.
     */
    @Override
    @Transactional
    public File exportAnnotationDocument(SourceDocument aDocument, Project aProject, User aUser,
            Class aWriter, String aFileName)
        throws UIMAException, IOException, WLFormatException, ClassNotFoundException
    {
        // Turn the unique temp *file* created by createTempFile into a temp *directory*.
        File exportTempDir = File.createTempFile("webanno", "export");
        exportTempDir.delete();
        exportTempDir.mkdirs();

        File annotationFolder = getAnnotationFolder(getAnnotationDocument(aDocument, aUser));
        String serializedCaseFileName = aUser.getUsername() + ".ser";

        CollectionReader reader = CollectionReaderFactory
                .createCollectionReader(SerializedCasReader.class, SerializedCasReader.PARAM_PATH,
                        annotationFolder, SerializedCasReader.PARAM_PATTERNS, new String[] { "[+]"
                                + serializedCaseFileName });
        if (!reader.hasNext()) {
            throw new FileNotFoundException("Annotation file [" + serializedCaseFileName
                    + "] not found in [" + annotationFolder + "]");
        }
        AnalysisEngineDescription writer = createPrimitiveDescription(aWriter,
                JCasFileWriter_ImplBase.PARAM_PATH, exportTempDir,
                JCasFileWriter_ImplBase.PARAM_STRIP_EXTENSION, true);

        CAS cas = JCasFactory.createJCas().getCas();
        reader.getNext(cas);

        // Get the original TCF file and preserve it
        DocumentMetaData documentMetadata = DocumentMetaData.get(cas.getJCas());
        // Update the source file name in case it is changed for some reason
        File currentDocumentUri = new File(dir.getAbsolutePath() + PROJECT + aProject.getId()
                + DOCUMENT + aDocument.getId() + SOURCE);
        // NOTE(review): this first setDocumentUri call is immediately overwritten by the
        // second call below, which builds an equivalent URI - one of the two appears redundant.
        documentMetadata.setDocumentUri(new File(currentDocumentUri, aFileName).toURI().toURL()
                .toExternalForm());
        documentMetadata.setDocumentBaseUri(currentDocumentUri.toURI().toURL().toExternalForm());
        documentMetadata.setCollectionId(currentDocumentUri.toURI().toURL().toExternalForm());
        documentMetadata.setDocumentUri(new File(dir.getAbsolutePath() + PROJECT + aProject.getId()
                + DOCUMENT + aDocument.getId() + SOURCE + "/" + aFileName).toURI().toURL()
                .toExternalForm());

        runPipeline(cas, writer);

        createLog(aProject, aUser).info(
                " Exported file [" + aDocument.getName() + "] with ID [" + aDocument.getId()
                        + "] from Project[" + aProject.getId() + "]");
        createLog(aProject, aUser).removeAllAppenders();

        // If the writer produced multiple files, bundle them into a single zip.
        if (exportTempDir.listFiles().length > 1) {
            try {
                DaoUtils.zipFolder(exportTempDir, new File(exportTempDir.getName() + ".zip"));
            }
            catch (Exception e) {
                // NOTE(review): on zip failure only a log entry is written, yet the zip File
                // is still returned below and may not exist - confirm intended behavior.
                createLog(aProject, aUser).info("Unable to create Zip File");
            }
            return new File(exportTempDir.getName() + ".zip");
        }
        return exportTempDir.listFiles()[0];
    }
@Override
@Transactional(noRollbackFor = NoResultException.class)
public AnnotationDocument getAnnotationDocument(SourceDocument aDocument, User aUser)
{
return entityManager
.createQuery(
"FROM AnnotationDocument WHERE document = :document AND " + "user =:user"
+ " AND project = :project", AnnotationDocument.class)
.setParameter("document", aDocument).setParameter("user", aUser)
.setParameter("project", aDocument.getProject()).getSingleResult();
}
@Override
@Transactional
public JCas getAnnotationDocumentContent(AnnotationDocument aAnnotationDocument)
throws IOException, UIMAException, ClassNotFoundException
{
synchronized (lock) {
File annotationFolder = getAnnotationFolder(aAnnotationDocument);
String file = aAnnotationDocument.getUser().getUsername() + ".ser";
try {
CAS cas = JCasFactory.createJCas().getCas();
CollectionReader reader = CollectionReaderFactory.createCollectionReader(
SerializedCasReader.class, SerializedCasReader.PARAM_PATH,
annotationFolder, SerializedCasReader.PARAM_PATTERNS, new String[] { "[+]"
+ file });
if (!reader.hasNext()) {
throw new FileNotFoundException("Annotation file [" + file + "] not found in ["
+ annotationFolder
+ "]. Report the incident to the Project Administrator");
}
reader.getNext(cas);
return cas.getJCas();
}
catch (IOException e) {
throw new DataRetrievalFailureException("Unable to parse annotation", e);
}
catch (UIMAException e) {
throw new DataRetrievalFailureException("Unable to parse annotation", e);
}
/*
* catch (SAXException e) { throw new
* DataRetrievalFailureException("Unable to parse annotation", e); }
*/
}
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public List<Authority> getAuthorities(User aUser)
{
return entityManager.createQuery("FROM Authority where user =:user", Authority.class)
.setParameter("user", aUser).getResultList();
}
@Override
public File getDir()
{
return dir;
}
@Override
public File getGuideline(Project aProject, String aFilename)
{
return new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE + aFilename);
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public String getPermisionLevel(User aUser, Project aProject)
{
return entityManager
.createQuery(
"Select level FROM ProjectPermissions WHERE user =:user AND "
+ "project =:project", String.class).setParameter("user", aUser)
.setParameter("project", aProject).getSingleResult();
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public List<String> listProjectPermisionLevels(User aUser, Project aProject)
{
return entityManager
.createQuery(
"Select level FROM ProjectPermissions WHERE user =:user AND "
+ "project =:project", String.class).setParameter("user", aUser)
.setParameter("project", aProject).getResultList();
}
@Override
@Transactional
public ProjectPermissions getProjectPermission(User aUser, Project aProject)
{
return entityManager
.createQuery(
"FROM ProjectPermissions WHERE user =:user AND " + "project =:project",
ProjectPermissions.class).setParameter("user", aUser)
.setParameter("project", aProject).getSingleResult();
}
@Override
@Transactional
public Project getProject(String aName)
{
return entityManager.createQuery("FROM Project WHERE name = :name", Project.class)
.setParameter("name", aName).getSingleResult();
}
public Project getProject(long aId)
{
return entityManager.createQuery("FROM Project WHERE id = :id", Project.class)
.setParameter("id", aId).getSingleResult();
}
@Override
public void writeGuideline(Project aProject, File aContent, String aFileName)
throws IOException
{
String guidelinePath = dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE;
FileUtils.forceMkdir(new File(guidelinePath));
copyLarge(new FileInputStream(aContent), new FileOutputStream(new File(guidelinePath
+ aFileName)));
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public List<ProjectPermissions> getProjectPermisions(Project aProject)
{
return entityManager
.createQuery("FROM ProjectPermissions WHERE project =:project",
ProjectPermissions.class).setParameter("project", aProject).getResultList();
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public SourceDocument getSourceDocument(String aDocumentName, Project aProject)
{
return entityManager
.createQuery("FROM SourceDocument WHERE name = :name AND project =:project",
SourceDocument.class).setParameter("name", aDocumentName)
.setParameter("project", aProject).getSingleResult();
}
@Override
@Transactional
public File getSourceDocumentContent(Project aProject, SourceDocument aDocument)
{
String path = dir.getAbsolutePath() + PROJECT + aProject.getId() + DOCUMENT
+ aDocument.getId() + SOURCE;
return new File(path + "/" + aDocument.getName());
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public User getUser(String aUsername)
{
return entityManager.createQuery("FROM User WHERE username =:username", User.class)
.setParameter("username", aUsername).getSingleResult();
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public List<AnnotationDocument> listAnnotationDocument(SourceDocument aDocument)
{
return entityManager
.createQuery("FROM AnnotationDocument WHERE document = :document",
AnnotationDocument.class).setParameter("document", aDocument)
.getResultList();
}
@Override
public List<String> listAnnotationGuidelineDocument(Project aProject)
{
// list all guideline files
File[] files = new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE)
.listFiles();
// Name of the guideline files
List<String> annotationGuidelineFiles = new ArrayList<String>();
if (files != null) {
for (File file : files) {
annotationGuidelineFiles.add(file.getName());
}
}
return annotationGuidelineFiles;
}
@Override
@Transactional
public List<AnnotationDocument> listAnnotationDocument()
{
return entityManager.createQuery("From AnnotationDocument", AnnotationDocument.class)
.getResultList();
}
@Override
@Transactional
public List<AnnotationDocument> listAnnotationDocument(Project aProject)
{
return entityManager
.createQuery("FROM AnnotationDocument WHERE project = :project",
AnnotationDocument.class).setParameter("project", aProject).getResultList();
}
@Override
@Transactional
public List<String> listFinishedAnnotationDocuments(Project aProject, User aUser,
AnnotationDocumentState aState)
{
return entityManager
.createQuery(
"SELECT name FROM AnnotationDocument WHERE project = :project AND "
+ "user =:user AND state =:state", String.class)
.setParameter("project", aProject).setParameter("user", aUser)
.setParameter("state", aState).getResultList();
}
@Override
@Transactional
public List<Project> listProjects()
{
return entityManager.createQuery("FROM Project", Project.class).getResultList();
}
@Override
@Transactional
public List<String> listProjectUserNames(Project aproject)
{
List<String> users = entityManager
.createQuery(
"SELECT i.username FROM Project s JOIN s.users i WHERE s.id = :projectId",
String.class).setParameter("projectId", aproject.getId()).getResultList();
return users;
}
@Override
@Transactional
public List<User> listProjectUsers(Project aproject)
{
return entityManager
.createQuery("SELECT i FROM Project s JOIN s.users i WHERE s.id = :projectId",
User.class).setParameter("projectId", aproject.getId()).getResultList();
}
@Override
@Transactional(noRollbackFor = NoResultException.class)
public List<SourceDocument> listSourceDocuments(Project aProject)
{
return entityManager
.createQuery("FROM SourceDocument where project =:project", SourceDocument.class)
.setParameter("project", aProject).getResultList();
}
@Override
@Transactional
public List<User> listUsers()
{
return entityManager.createQuery("FROM User", User.class).getResultList();
}
@Override
public Properties loadUserSettings(String aUsername, Project aProject, String aSubject)
throws FileNotFoundException, IOException
{
Properties property = new Properties();
property.load(new FileInputStream(new File(dir.getAbsolutePath() + PROJECT
+ aProject.getId() + SETTINGS + aUsername + "/"
+ annotationPreferencePropertiesFileName)));
return property;
}
    /**
     * Remove a project and everything attached to it: its source/annotation documents,
     * its tag sets, its directory on disk, its permission entries, and finally the
     * project record itself. The removal is logged to the project log before the
     * appenders are released.
     *
     * @param aProject the project to remove.
     * @param aUser the user performing the removal (used for logging).
     * @throws IOException if deleting documents or the project directory fails.
     */
    @Override
    @Transactional
    public void removeProject(Project aProject, User aUser)
        throws IOException
    {
        // Delete documents first so their per-document cleanup (DB + files) runs
        // while the project record still exists.
        for (SourceDocument document : listSourceDocuments(aProject)) {
            removeSourceDocument(document, aUser);
            // removeAnnotationDocument(document);
        }
        for (TagSet tagset : annotationService.listTagSets(aProject)) {
            annotationService.removeTagSet(tagset);
        }
        // remove the project directory from the file system
        String path = dir.getAbsolutePath() + PROJECT + aProject.getId();
        try {
            FileUtils.forceDelete(new File(path));
        }
        catch (FileNotFoundException e) {
            // A missing directory is not fatal — the goal is for it to be gone.
            createLog(aProject, aUser).warn(
                    "Project directory to be deleted was not found: [" + path + "]. Ignoring.");
        }
        for (ProjectPermissions permisions : getProjectPermisions(aProject)) {
            entityManager.remove(permisions);
        }
        // remove metadata from DB
        entityManager.remove(aProject);
        createLog(aProject, aUser).info(
                " Removed Project [" + aProject.getName() + "] with ID [" + aProject.getId() + "]");
        createLog(aProject, aUser).removeAllAppenders();
    }
@Override
public void removeAnnotationGuideline(Project aProject, String aFileName)
throws IOException
{
FileUtils.forceDelete(new File(dir.getAbsolutePath() + PROJECT + aProject.getId()
+ GUIDELINE + aFileName));
}
    /**
     * Remove a single project permission entry from the database and record the removal
     * in the project log, releasing the log appenders afterwards.
     *
     * @param projectPermission the permission entry to remove.
     * @throws IOException declared for interface compatibility.
     */
    @Override
    @Transactional
    public void removeProjectPermission(ProjectPermissions projectPermission)
        throws IOException
    {
        entityManager.remove(projectPermission);
        createLog(projectPermission.getProject(), projectPermission.getUser()).info(
                " Removed Project Permission [" + projectPermission.getLevel() + "] for the USer ["
                        + projectPermission.getUser().getUsername() + "] From project ["
                        + projectPermission.getProject().getId() + "]");
        createLog(projectPermission.getProject(), projectPermission.getUser()).removeAllAppenders();
    }
    /**
     * Remove a source document: its annotation document record for the given user
     * (if one exists), the source document record itself, and the document's
     * directory on disk (which holds both source and annotation files).
     *
     * @param aDocument the source document to remove.
     * @param aUser the user performing the removal (used for the annotation-document
     *            lookup and for logging).
     * @throws IOException if deleting the document directory fails.
     */
    @Override
    @Transactional
    public void removeSourceDocument(SourceDocument aDocument, User aUser)
        throws IOException
    {
        // remove metadata from DB
        if (existsAnnotationDocument(aDocument, aUser)) {
            entityManager.remove(getAnnotationDocument(aDocument, aUser));
        }
        entityManager.remove(aDocument);
        String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT
                + aDocument.getId();
        // remove from file both source and related annotation file
        FileUtils.forceDelete(new File(path));
        createLog(aDocument.getProject(), aUser).info(
                " Removed Document [" + aDocument.getName() + "] with ID [" + aDocument.getId()
                        + "] from Project [" + aDocument.getProject().getId() + "]");
        createLog(aDocument.getProject(), aUser).removeAllAppenders();
    }
    /**
     * Set the repository base directory on the local file system.
     */
    public void setDir(File aDir)
    {
        dir = aDir;
    }
@Override
public <T> void saveUserSettings(String aUsername, Project aProject, String aSubject,
T aConfigurationObject)
throws IOException
{
BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(aConfigurationObject);
Properties property = new Properties();
for (PropertyDescriptor value : wrapper.getPropertyDescriptors()) {
property.setProperty(aSubject + "." + value.getName(),
wrapper.getPropertyValue(value.getName()).toString());
}
String propertiesPath = dir.getAbsolutePath() + PROJECT + aProject.getId() + SETTINGS
+ aUsername;
FileUtils.forceMkdir(new File(propertiesPath));
property.save(new FileOutputStream(new File(propertiesPath,
annotationPreferencePropertiesFileName)), null);
createLog(aProject, getUser(aUsername)).info(
" Saved preferences file [" + annotationPreferencePropertiesFileName
+ "] for project [" + aProject.getName() + "] with ID [" + aProject.getId()
+ "] to location: [" + propertiesPath + "]");
createLog(aProject, getUser(aUsername)).removeAllAppenders();
}
    /**
     * Copy an uploaded source file into the repository layout
     * ({@code <repo>/project/<pid>/document/<did>/source/<name>}), creating the
     * directories as needed, and log the import to the project log.
     *
     * @param aFile the uploaded file to copy from.
     * @param aDocument the already-persisted document record (provides ID and name).
     * @param aProjectId the owning project's ID (used for the target path).
     * @param aUser the importing user (used for logging).
     * @throws IOException if the directories cannot be created or the copy fails.
     */
    @Override
    @Transactional
    public void uploadSourceDocument(File aFile, SourceDocument aDocument, long aProjectId,
            User aUser)
        throws IOException
    {
        String path = dir.getAbsolutePath() + PROJECT + aProjectId + DOCUMENT + aDocument.getId()
                + SOURCE;
        FileUtils.forceMkdir(new File(path));
        File newTcfFile = new File(path, aDocument.getName());
        InputStream is = null;
        OutputStream os = null;
        try {
            os = new FileOutputStream(newTcfFile);
            is = new FileInputStream(aFile);
            copyLarge(is, os);
        }
        finally {
            // Close in reverse open order; closeQuietly tolerates nulls.
            closeQuietly(os);
            closeQuietly(is);
        }
        createLog(aDocument.getProject(), aUser).info(
                " Imported file [" + aDocument.getName() + "] with ID [" + aDocument.getId()
                        + "] to Project [" + aDocument.getProject().getId() + "]");
        createLog(aDocument.getProject(), aUser).removeAllAppenders();
    }
    /**
     * Serialize the given CAS into the annotation folder of the annotation document,
     * using the annotating user's name as the document ID (and thus the file name used
     * by {@code SerializedCasWriter}).
     *
     * @param aAnnotationDocument determines the target folder and the user name.
     * @param aJcas the CAS to serialize.
     * @throws IOException wrapping any UIMA setup or processing failure.
     */
    private void writeContent(AnnotationDocument aAnnotationDocument, JCas aJcas)
        throws IOException
    {
        try {
            File targetPath = getAnnotationFolder(aAnnotationDocument);
            AnalysisEngine writer = AnalysisEngineFactory.createPrimitive(
                    SerializedCasWriter.class, SerializedCasWriter.PARAM_PATH, targetPath,
                    SerializedCasWriter.PARAM_USE_DOCUMENT_ID, true);
            DocumentMetaData md;
            try {
                md = DocumentMetaData.get(aJcas);
            }
            catch (IllegalArgumentException e) {
                // No metadata attached to this CAS yet — create it.
                md = DocumentMetaData.create(aJcas);
            }
            md.setDocumentId(aAnnotationDocument.getUser().getUsername());
            writer.process(aJcas);
        }
        catch (ResourceInitializationException e) {
            throw new IOException(e);
        }
        catch (AnalysisEngineProcessException e) {
            throw new IOException(e);
        }
    }
@Override
public Map<String, Class> getReadableFormats()
throws ClassNotFoundException
{
Map<String, Class> readableFormats = new HashMap<String, Class>();
Set<String> key = (Set) readWriteFileFormats.keySet();
for (String keyvalue : key) {
if (keyvalue.contains(".label")) {
String readerLabel = keyvalue.substring(0, keyvalue.lastIndexOf(".label"));
if (readWriteFileFormats.getProperty(readerLabel + ".reader") != null) {
readableFormats.put(readWriteFileFormats.getProperty(keyvalue), Class
.forName(readWriteFileFormats.getProperty(readerLabel + ".reader")));
}
}
}
return readableFormats;
}
@Override
public Map<String, Class> getWritableFormats()
throws ClassNotFoundException
{
Map<String, Class> writableFormats = new HashMap<String, Class>();
Set<String> keys = (Set) readWriteFileFormats.keySet();
for (String key : keys) {
if (key.contains(".label")) {
String writerLabel = key.substring(0, key.lastIndexOf(".label"));
if (readWriteFileFormats.getProperty(writerLabel + ".writer") != null) {
writableFormats.put(readWriteFileFormats.getProperty(key), Class
.forName(readWriteFileFormats.getProperty(writerLabel + ".writer")));
}
}
}
return writableFormats;
}
    /**
     * @return the file name used for per-user annotation preference properties.
     */
    public String getAnnotationPreferencePropertiesFileName()
    {
        return annotationPreferencePropertiesFileName;
    }
    /**
     * Set the file name used for per-user annotation preference properties.
     */
    public void setAnnotationPreferencePropertiesFileName(
            String aAnnotationPreferencePropertiesFileName)
    {
        annotationPreferencePropertiesFileName = aAnnotationPreferencePropertiesFileName;
    }
}
| NO ISSUE
- the setState() of AnnotationDocument is not saving content to the database from the dialog window. Manually merge result | de.tudarmstadt.ukp.clarin.webanno.webapp/src/main/java/de/tudarmstadt/ukp/clarin/webanno/brat/dao/RepositoryServiceDbData.java | NO ISSUE - the setState() of AnnotationDocument is not saving content to the database from the dialog window. Manually merge result |
|
Java | apache-2.0 | 523c001bb003cd13d51ddeca3e3ce11d71724f85 | 0 | kironapublic/vaadin,bmitc/vaadin,shahrzadmn/vaadin,mstahv/framework,bmitc/vaadin,Darsstar/framework,Scarlethue/vaadin,peterl1084/framework,Legioth/vaadin,Flamenco/vaadin,Darsstar/framework,jdahlstrom/vaadin.react,Legioth/vaadin,peterl1084/framework,asashour/framework,fireflyc/vaadin,Legioth/vaadin,fireflyc/vaadin,travisfw/vaadin,magi42/vaadin,sitexa/vaadin,carrchang/vaadin,synes/vaadin,kironapublic/vaadin,jdahlstrom/vaadin.react,magi42/vaadin,shahrzadmn/vaadin,travisfw/vaadin,Peppe/vaadin,Flamenco/vaadin,mittop/vaadin,carrchang/vaadin,kironapublic/vaadin,sitexa/vaadin,travisfw/vaadin,sitexa/vaadin,udayinfy/vaadin,Scarlethue/vaadin,Scarlethue/vaadin,mittop/vaadin,udayinfy/vaadin,cbmeeks/vaadin,Legioth/vaadin,asashour/framework,Scarlethue/vaadin,carrchang/vaadin,synes/vaadin,travisfw/vaadin,mittop/vaadin,magi42/vaadin,mstahv/framework,travisfw/vaadin,peterl1084/framework,asashour/framework,kironapublic/vaadin,Flamenco/vaadin,Peppe/vaadin,Darsstar/framework,udayinfy/vaadin,Darsstar/framework,Flamenco/vaadin,cbmeeks/vaadin,magi42/vaadin,Scarlethue/vaadin,oalles/vaadin,jdahlstrom/vaadin.react,fireflyc/vaadin,Peppe/vaadin,peterl1084/framework,sitexa/vaadin,mittop/vaadin,Legioth/vaadin,asashour/framework,bmitc/vaadin,magi42/vaadin,udayinfy/vaadin,udayinfy/vaadin,oalles/vaadin,cbmeeks/vaadin,sitexa/vaadin,asashour/framework,fireflyc/vaadin,peterl1084/framework,cbmeeks/vaadin,Darsstar/framework,synes/vaadin,jdahlstrom/vaadin.react,bmitc/vaadin,oalles/vaadin,shahrzadmn/vaadin,Peppe/vaadin,oalles/vaadin,shahrzadmn/vaadin,Peppe/vaadin,jdahlstrom/vaadin.react,mstahv/framework,shahrzadmn/vaadin,mstahv/framework,kironapublic/vaadin,synes/vaadin,carrchang/vaadin,synes/vaadin,fireflyc/vaadin,mstahv/framework,oalles/vaadin | /*
@VaadinApache2LicenseForJavaFiles@
*/
package com.vaadin.terminal.gwt.client.communication;
import java.io.Serializable;
import com.vaadin.terminal.gwt.client.Connector;
import com.vaadin.terminal.gwt.client.ServerConnector;
import com.vaadin.terminal.gwt.client.ui.AbstractComponentConnector;
/**
 * Base class for all shared state classes used to communicate basic information
 * about a {@link Connector} from server to client.
 *
 * Shared state classes have to be declared in client-side packages so they are
 * accessible to both server and client code. They can be static nested classes
 * of a {@link ServerConnector}.
 *
 * Shared state objects are only sent from the server to the client; any
 * modification from the client should be performed via an RPC call that
 * modifies the authoritative state on the server.
 *
 * A shared state class should be a bean with getters and setters for each
 * field. Supported data types are simple Java types, other beans, and maps and
 * arrays of these.
 *
 * On the client side the connector should override
 * {@link AbstractComponentConnector#createState()} to create the correct state
 * class, and override {@link AbstractComponentConnector#getState()} to narrow
 * the return type.
 *
 * Subclasses of a {@link Connector} using shared state should also provide a
 * subclass of the shared state class of the parent class to extend the state. A
 * single {@link Connector} can only have one shared state object.
 *
 * @since 7.0
 */
public class SharedState implements Serializable {
    // Intentionally empty: this base class only marks state beans as Serializable;
    // subclasses add the actual state fields.
}
| src/com/vaadin/terminal/gwt/client/communication/SharedState.java | /*
@VaadinApache2LicenseForJavaFiles@
*/
package com.vaadin.terminal.gwt.client.communication;
import java.io.Serializable;
import com.vaadin.terminal.gwt.client.ui.AbstractComponentConnector;
/**
 * Base class for all shared state classes used to communicate basic information
 * about a paintable from server to client. These typically replace most of the
 * semi-static information sent via the paintContent() and updateFromUIDL()
 * mechanism in Vaadin 6 (component sizes, captions, tooltips, etc.).
 *
 * Shared state classes have to be declared in client-side packages so they are
 * accessible to both server and client code. They can be static nested classes
 * of the client-side widget.
 *
 * Shared state objects are only sent from the server to the client; any
 * modification from the client should be performed via an RPC call that
 * modifies the authoritative state on the server.
 *
 * In current Vaadin versions, the whole shared state is sent every time the
 * component is painted. Future versions may optimize this so that only the
 * necessary (changed or missing on the client side) parts are re-sent to the
 * client, but the client will have access to the whole state.
 *
 * A shared state class should be a bean with getters and setters for each
 * field, and should only contain simple data types, or arrays or maps of
 * supported data types.
 *
 * On the client side, for most widgets,
 * {@link AbstractComponentConnector#createState()} and
 * {@link AbstractComponentConnector#getState()} should be overridden to
 * create and use a shared state instance of the correct type.
 *
 * Subclasses of a paintable using shared state should also provide a subclass
 * of the shared state class of the parent class to extend the state - a single
 * paintable can only have one shared state object.
 *
 * Future versions of the shared state mechanism may also support custom data
 * types as fields of a shared state class.
 *
 * @since 7.0
 */
public class SharedState implements Serializable {
    // Intentionally empty marker base class; subclasses add the state fields.
}
| Javadoc | src/com/vaadin/terminal/gwt/client/communication/SharedState.java | Javadoc |
|
Java | apache-2.0 | 3c58a19b2054773841d823855af7a10469d855e9 | 0 | dhutchis/accumulo,mjwall/accumulo,mjwall/accumulo,dhutchis/accumulo,milleruntime/accumulo,keith-turner/accumulo,lstav/accumulo,apache/accumulo,adamjshook/accumulo,adamjshook/accumulo,adamjshook/accumulo,milleruntime/accumulo,adamjshook/accumulo,ctubbsii/accumulo,lstav/accumulo,phrocker/accumulo-1,mikewalch/accumulo,milleruntime/accumulo,apache/accumulo,phrocker/accumulo-1,phrocker/accumulo-1,mikewalch/accumulo,lstav/accumulo,ctubbsii/accumulo,keith-turner/accumulo,dhutchis/accumulo,phrocker/accumulo-1,keith-turner/accumulo,mjwall/accumulo,lstav/accumulo,milleruntime/accumulo,ivakegg/accumulo,dhutchis/accumulo,phrocker/accumulo-1,ivakegg/accumulo,mikewalch/accumulo,apache/accumulo,dhutchis/accumulo,mjwall/accumulo,ctubbsii/accumulo,apache/accumulo,ctubbsii/accumulo,ctubbsii/accumulo,dhutchis/accumulo,lstav/accumulo,phrocker/accumulo-1,keith-turner/accumulo,mikewalch/accumulo,mikewalch/accumulo,ivakegg/accumulo,ctubbsii/accumulo,phrocker/accumulo-1,mjwall/accumulo,apache/accumulo,dhutchis/accumulo,apache/accumulo,ivakegg/accumulo,ctubbsii/accumulo,keith-turner/accumulo,milleruntime/accumulo,mjwall/accumulo,adamjshook/accumulo,adamjshook/accumulo,adamjshook/accumulo,ivakegg/accumulo,ivakegg/accumulo,adamjshook/accumulo,adamjshook/accumulo,mikewalch/accumulo,milleruntime/accumulo,keith-turner/accumulo,mjwall/accumulo,ivakegg/accumulo,keith-turner/accumulo,dhutchis/accumulo,mikewalch/accumulo,lstav/accumulo,mikewalch/accumulo,lstav/accumulo,apache/accumulo,dhutchis/accumulo,milleruntime/accumulo | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.monitor.servlets;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.BatchScanner;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.client.replication.ReplicaSystem;
import org.apache.accumulo.core.client.replication.ReplicaSystemFactory;
import org.apache.accumulo.core.client.replication.ReplicationTable;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.metadata.RootTable;
import org.apache.accumulo.core.replication.ReplicationSchema.WorkSection;
import org.apache.accumulo.core.replication.ReplicationTarget;
import org.apache.accumulo.core.replication.StatusUtil;
import org.apache.accumulo.core.replication.proto.Replication.Status;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.Credentials;
import org.apache.accumulo.core.zookeeper.ZooUtil;
import org.apache.accumulo.monitor.util.Table;
import org.apache.accumulo.monitor.util.celltypes.NumberType;
import org.apache.accumulo.server.client.HdfsZooInstance;
import org.apache.accumulo.server.conf.ServerConfiguration;
import org.apache.accumulo.server.replication.AbstractWorkAssigner;
import org.apache.accumulo.server.security.SystemCredentials;
import org.apache.accumulo.server.zookeeper.DistributedWorkQueue;
import org.apache.accumulo.server.zookeeper.ZooCache;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.protobuf.InvalidProtocolBufferException;
/**
*
*/
/**
 * Monitor servlet that renders an overview of Accumulo replication: a summary table of
 * configured replication targets with the number of files still awaiting replication,
 * and a second table of replication work currently queued in ZooKeeper.
 */
public class ReplicationServlet extends BasicServlet {
  private static final Logger log = LoggerFactory.getLogger(ReplicationServlet.class);
  private static final long serialVersionUID = 1L;
  // Cache of ZooKeeper reads used to look up queued work entries.
  private ZooCache zooCache = new ZooCache();
  @Override
  protected String getTitle(HttpServletRequest req) {
    return "Replication Overview";
  }
  /**
   * Render the page body: connect as the system user, then build the
   * "Replication Status" table (configured targets + pending file counts read from the
   * replication table's work section) and the "In-Progress Replication" table (entries
   * from the distributed work queue in ZooKeeper, with per-file progress decoded from
   * the protobuf Status records).
   */
  @Override
  protected void pageBody(HttpServletRequest req, HttpServletResponse response, StringBuilder sb) throws Exception {
    Instance inst = HdfsZooInstance.getInstance();
    Credentials creds = SystemCredentials.get();
    Connector conn = inst.getConnector(creds.getPrincipal(), creds.getToken());
    TableOperations tops = conn.tableOperations();
    // Nothing to show until the replication table has been created.
    if (!tops.exists(ReplicationTable.NAME)) {
      banner(sb, "", "Replication table does not yet exist");
      return;
    }
    Table replicationStats = new Table("replicationStats", "Replication Status");
    replicationStats.addSortableColumn("Table");
    replicationStats.addSortableColumn("Peer");
    replicationStats.addSortableColumn("Remote Identifier");
    replicationStats.addSortableColumn("ReplicaSystem Type");
    replicationStats.addSortableColumn("Files needing replication", new NumberType<Long>(), null);
    Map<String,String> properties = conn.instanceOperations().getSystemConfiguration();
    // peer name -> ReplicaSystem implementation class name
    Map<String,String> peers = new HashMap<>();
    String definedPeersPrefix = Property.REPLICATION_PEERS.getKey();
    // Get the defined peers and what ReplicaSystem impl they're using
    for (Entry<String,String> property : properties.entrySet()) {
      if (property.getKey().startsWith(definedPeersPrefix)) {
        String peerName = property.getKey().substring(definedPeersPrefix.length());
        ReplicaSystem replica;
        try {
          replica = ReplicaSystemFactory.get(property.getValue());
        } catch (Exception e) {
          // A misconfigured peer should not break the whole page; skip it.
          log.warn("Could not instantiate ReplicaSystem for {} with configuration {}", property.getKey(), property.getValue(), e);
          continue;
        }
        peers.put(peerName, replica.getClass().getName());
      }
    }
    final String targetPrefix = Property.TABLE_REPLICATION_TARGETS.getKey();
    // The total set of configured targets
    Set<ReplicationTarget> allConfiguredTargets = new HashSet<>();
    // Number of files per target we have to replicate
    Map<ReplicationTarget,Long> targetCounts = new HashMap<>();
    Map<String,String> tableNameToId = tops.tableIdMap();
    Map<String,String> tableIdToName = invert(tableNameToId);
    // Collect the replication targets configured on each user table.
    for (String table : tops.list()) {
      if (MetadataTable.NAME.equals(table) || RootTable.NAME.equals(table)) {
        continue;
      }
      String localId = tableNameToId.get(table);
      if (null == localId) {
        log.trace("Could not determine ID for {}", table);
        continue;
      }
      Iterable<Entry<String,String>> propertiesForTable = tops.getProperties(table);
      for (Entry<String,String> prop : propertiesForTable) {
        if (prop.getKey().startsWith(targetPrefix)) {
          String peerName = prop.getKey().substring(targetPrefix.length());
          String remoteIdentifier = prop.getValue();
          ReplicationTarget target = new ReplicationTarget(peerName, remoteIdentifier, localId);
          allConfiguredTargets.add(target);
        }
      }
    }
    // Read over the queued work
    BatchScanner bs = conn.createBatchScanner(ReplicationTable.NAME, Authorizations.EMPTY, 4);
    bs.setRanges(Collections.singleton(new Range()));
    WorkSection.limit(bs);
    try {
      Text buffer = new Text();
      for (Entry<Key,Value> entry : bs) {
        Key k = entry.getKey();
        k.getColumnQualifier(buffer);
        ReplicationTarget target = ReplicationTarget.from(buffer);
        // TODO ACCUMULO-2835 once explicit lengths are tracked, we can give size-based estimates instead of just file-based
        Long count = targetCounts.get(target);
        if (null == count) {
          targetCounts.put(target, Long.valueOf(1l));
        } else {
          targetCounts.put(target, count + 1);
        }
      }
    } finally {
      bs.close();
    }
    // Emit one row per configured target that we can fully resolve.
    for (ReplicationTarget configuredTarget : allConfiguredTargets) {
      String tableName = tableIdToName.get(configuredTarget.getSourceTableId());
      if (null == tableName) {
        log.trace("Could not determine table name from id {}", configuredTarget.getSourceTableId());
        continue;
      }
      String replicaSystemClass = peers.get(configuredTarget.getPeerName());
      if (null == replicaSystemClass) {
        log.trace("Could not determine configured ReplicaSystem for {}", configuredTarget.getPeerName());
        continue;
      }
      Long numFiles = targetCounts.get(configuredTarget);
      replicationStats.addRow(tableName, configuredTarget.getPeerName(), configuredTarget.getRemoteIdentifier(), replicaSystemClass, (null == numFiles) ? 0 : numFiles);
    }
    replicationStats.generate(req, sb);
    // Make a table for the replication data in progress
    Table replicationInProgress = new Table("replicationInProgress", "In-Progress Replication");
    replicationInProgress.addSortableColumn("File");
    replicationInProgress.addSortableColumn("Peer");
    replicationInProgress.addSortableColumn("Source Table ID");
    replicationInProgress.addSortableColumn("Peer Identifier");
    replicationInProgress.addUnsortableColumn("Status");
    // Read the files from the workqueue in zk
    String zkRoot = ZooUtil.getRoot(inst);
    final String workQueuePath = zkRoot + Constants.ZREPLICATION_WORK_QUEUE;
    DistributedWorkQueue workQueue = new DistributedWorkQueue(workQueuePath, ServerConfiguration.getSystemConfiguration(inst));
    for (String queueKey : workQueue.getWorkQueued()) {
      // Queue keys encode (filename, target); decode both halves.
      Entry<String,ReplicationTarget> queueKeyPair = AbstractWorkAssigner.fromQueueKey(queueKey);
      String filename = queueKeyPair.getKey();
      ReplicationTarget target = queueKeyPair.getValue();
      byte[] data = zooCache.get(workQueuePath + "/" + queueKey);
      // We could try to grep over the table, but without knowing the full file path, we
      // can't find the status quickly
      String status = "Unknown";
      if (null != data) {
        String path = new String(filename);
        Scanner s = ReplicationTable.getScanner(conn);
        s.setRange(Range.exact(path));
        s.fetchColumn(WorkSection.NAME, target.toText());
        // Fetch the work entry for this item
        Entry<Key,Value> kv = null;
        try {
          kv = Iterables.getOnlyElement(s);
        } catch (NoSuchElementException e) {
          log.trace("Could not find status of {} replicating to {}", filename, target);
          status = "Unknown";
        } finally {
          s.close();
        }
        // If we found the work entry for it, try to compute some progress
        if (null != kv) {
          try {
            Status stat = Status.parseFrom(kv.getValue().get());
            if (StatusUtil.isFullyReplicated(stat)) {
              status = "Finished";
            } else {
              if (stat.getInfiniteEnd()) {
                status = stat.getBegin() + "/∞";
              } else {
                status = stat.getBegin() + "/" + stat.getEnd();
              }
            }
          } catch (InvalidProtocolBufferException e) {
            log.warn("Could not deserialize protobuf for {}", kv.getKey(), e);
            status = "Unknown";
          }
        }
      }
      // Add a row in the table
      replicationInProgress.addRow(filename, target.getPeerName(), target.getSourceTableId(), target.getRemoteIdentifier(), status);
    }
    replicationInProgress.generate(req, sb);
  }
  /**
   * Return a new map with the keys and values of the argument swapped.
   * Duplicate values in the input collapse to a single key in the result.
   */
  protected Map<String,String> invert(Map<String,String> map) {
    Map<String,String> newMap = Maps.newHashMapWithExpectedSize(map.size());
    for(Entry<String,String> entry : map.entrySet()) {
      newMap.put(entry.getValue(), entry.getKey());
    }
    return newMap;
  }
}
| server/monitor/src/main/java/org/apache/accumulo/monitor/servlets/ReplicationServlet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.monitor.servlets;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.BatchScanner;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.client.replication.ReplicaSystem;
import org.apache.accumulo.core.client.replication.ReplicaSystemFactory;
import org.apache.accumulo.core.client.replication.ReplicationTable;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.metadata.RootTable;
import org.apache.accumulo.core.replication.ReplicationSchema.WorkSection;
import org.apache.accumulo.core.replication.ReplicationTarget;
import org.apache.accumulo.core.replication.StatusUtil;
import org.apache.accumulo.core.replication.proto.Replication.Status;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.Credentials;
import org.apache.accumulo.core.zookeeper.ZooUtil;
import org.apache.accumulo.monitor.util.Table;
import org.apache.accumulo.monitor.util.celltypes.NumberType;
import org.apache.accumulo.server.client.HdfsZooInstance;
import org.apache.accumulo.server.conf.ServerConfiguration;
import org.apache.accumulo.server.replication.AbstractWorkAssigner;
import org.apache.accumulo.server.security.SystemCredentials;
import org.apache.accumulo.server.zookeeper.DistributedWorkQueue;
import org.apache.accumulo.server.zookeeper.ZooCache;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.protobuf.InvalidProtocolBufferException;
/**
 * Monitor servlet that renders an overview of data replication for this
 * Accumulo instance: a "Replication Status" summary table (one row per
 * configured replication target, with the number of files still needing
 * replication) and an "In-Progress Replication" table driven by the
 * distributed work queue in ZooKeeper.
 */
public class ReplicationServlet extends BasicServlet {
  private static final Logger log = LoggerFactory.getLogger(ReplicationServlet.class);

  private static final long serialVersionUID = 1L;

  // NOTE(review): this field is never read within this class -- confirm
  // whether it is still needed before removing it.
  private ZooCache zooCache = new ZooCache();

  @Override
  protected String getTitle(HttpServletRequest req) {
    return "Replication Overview";
  }

  /**
   * Builds the page body into {@code sb}: first the per-target replication
   * status table, then the table of queued/in-progress replication work.
   *
   * @param req the incoming request (used for table generation)
   * @param response unused here; required by the BasicServlet contract
   * @param sb buffer the HTML is appended to
   * @throws Exception if connecting to the instance or scanning fails
   */
  @Override
  protected void pageBody(HttpServletRequest req, HttpServletResponse response, StringBuilder sb) throws Exception {
    Instance inst = HdfsZooInstance.getInstance();
    Credentials creds = SystemCredentials.get();
    Connector conn = inst.getConnector(creds.getPrincipal(), creds.getToken());
    TableOperations tops = conn.tableOperations();
    // Without the replication table there is nothing to report yet.
    if (!tops.exists(ReplicationTable.NAME)) {
      banner(sb, "", "Replication table does not yet exist");
      return;
    }

    Table replicationStats = new Table("replicationStats", "Replication Status");
    replicationStats.addSortableColumn("Table");
    replicationStats.addSortableColumn("Peer");
    replicationStats.addSortableColumn("Remote Identifier");
    replicationStats.addSortableColumn("ReplicaSystem Type");
    replicationStats.addSortableColumn("Files needing replication", new NumberType<Long>(), null);

    Map<String,String> properties = conn.instanceOperations().getSystemConfiguration();
    Map<String,String> peers = new HashMap<>();
    String definedPeersPrefix = Property.REPLICATION_PEERS.getKey();

    // Get the defined peers and what ReplicaSystem impl they're using
    for (Entry<String,String> property : properties.entrySet()) {
      if (property.getKey().startsWith(definedPeersPrefix)) {
        String peerName = property.getKey().substring(definedPeersPrefix.length());
        ReplicaSystem replica;
        try {
          replica = ReplicaSystemFactory.get(property.getValue());
        } catch (Exception e) {
          // A misconfigured peer must not break the whole page; log and skip it.
          log.warn("Could not instantiate ReplicaSystem for {} with configuration {}", property.getKey(), property.getValue(), e);
          continue;
        }
        peers.put(peerName, replica.getClass().getName());
      }
    }

    final String targetPrefix = Property.TABLE_REPLICATION_TARGETS.getKey();
    // The total set of configured targets
    Set<ReplicationTarget> allConfiguredTargets = new HashSet<>();
    // Number of files per target we have to replicate
    Map<ReplicationTarget,Long> targetCounts = new HashMap<>();

    Map<String,String> tableNameToId = tops.tableIdMap();
    Map<String,String> tableIdToName = invert(tableNameToId);

    // Collect every replication target configured on a non-system table.
    for (String table : tops.list()) {
      if (MetadataTable.NAME.equals(table) || RootTable.NAME.equals(table)) {
        continue;
      }
      String localId = tableNameToId.get(table);
      if (null == localId) {
        log.trace("Could not determine ID for {}", table);
        continue;
      }
      Iterable<Entry<String,String>> propertiesForTable = tops.getProperties(table);
      for (Entry<String,String> prop : propertiesForTable) {
        if (prop.getKey().startsWith(targetPrefix)) {
          String peerName = prop.getKey().substring(targetPrefix.length());
          String remoteIdentifier = prop.getValue();
          ReplicationTarget target = new ReplicationTarget(peerName, remoteIdentifier, localId);
          allConfiguredTargets.add(target);
        }
      }
    }

    // Read over the queued work, counting files per target.
    BatchScanner bs = conn.createBatchScanner(ReplicationTable.NAME, Authorizations.EMPTY, 4);
    bs.setRanges(Collections.singleton(new Range()));
    WorkSection.limit(bs);
    try {
      Text buffer = new Text();
      for (Entry<Key,Value> entry : bs) {
        Key k = entry.getKey();
        // Column qualifier holds the serialized target for this work entry.
        k.getColumnQualifier(buffer);
        ReplicationTarget target = ReplicationTarget.from(buffer);
        // TODO ACCUMULO-2835 once explicit lengths are tracked, we can give size-based estimates instead of just file-based
        Long count = targetCounts.get(target);
        if (null == count) {
          targetCounts.put(target, Long.valueOf(1l));
        } else {
          targetCounts.put(target, count + 1);
        }
      }
    } finally {
      bs.close();
    }

    // Emit one row per configured target that we can fully resolve back to a
    // table name and a known ReplicaSystem; unresolvable targets are skipped.
    for (ReplicationTarget configuredTarget : allConfiguredTargets) {
      String tableName = tableIdToName.get(configuredTarget.getSourceTableId());
      if (null == tableName) {
        log.trace("Could not determine table name from id {}", configuredTarget.getSourceTableId());
        continue;
      }
      String replicaSystemClass = peers.get(configuredTarget.getPeerName());
      if (null == replicaSystemClass) {
        log.trace("Could not determine configured ReplicaSystem for {}", configuredTarget.getPeerName());
        continue;
      }
      Long numFiles = targetCounts.get(configuredTarget);
      // A target with no queued work shows 0 files needing replication.
      replicationStats.addRow(tableName, configuredTarget.getPeerName(), configuredTarget.getRemoteIdentifier(), replicaSystemClass, (null == numFiles) ? 0 : numFiles);
    }
    replicationStats.generate(req, sb);

    // Make a table for the replication data in progress
    Table replicationInProgress = new Table("replicationInProgress", "In-Progress Replication");
    replicationInProgress.addSortableColumn("File");
    replicationInProgress.addSortableColumn("Peer");
    replicationInProgress.addSortableColumn("Source Table ID");
    replicationInProgress.addSortableColumn("Peer Identifier");
    replicationInProgress.addUnsortableColumn("Status");

    // Read the files from the workqueue in zk
    String zkRoot = ZooUtil.getRoot(inst);
    final String workQueuePath = zkRoot + Constants.ZREPLICATION_WORK_QUEUE;
    DistributedWorkQueue workQueue = new DistributedWorkQueue(workQueuePath, ServerConfiguration.getSystemConfiguration(inst));
    for (String queueKey : workQueue.getWorkQueued()) {
      Entry<String,ReplicationTarget> queueKeyPair = AbstractWorkAssigner.fromQueueKey(queueKey);
      String filename = queueKeyPair.getKey();
      ReplicationTarget target = queueKeyPair.getValue();

      Scanner s = ReplicationTable.getScanner(conn);
      s.setRange(Range.exact(filename));
      s.fetchColumn(WorkSection.NAME, target.toText());

      // Fetch the work entry for this item
      String status = "Unknown";
      Entry<Key,Value> kv = null;
      try {
        // NOTE(review): getOnlyElement can also throw IllegalArgumentException
        // if multiple entries match; only the empty case is handled here.
        kv = Iterables.getOnlyElement(s);
      } catch (NoSuchElementException e) {
        log.trace("Could not find status of {} replicating to {}", filename, target);
        status = "Unknown";
      } finally {
        s.close();
      }

      // If we found the work entry for it, try to compute some progress
      if (null != kv) {
        try {
          Status stat = Status.parseFrom(kv.getValue().get());
          if (StatusUtil.isFullyReplicated(stat)) {
            status = "Finished";
          } else {
            // Show progress as begin/end, with an infinity sign for open-ended work.
            if (stat.getInfiniteEnd()) {
              status = stat.getBegin() + "/∞";
            } else {
              status = stat.getBegin() + "/" + stat.getEnd();
            }
          }
        } catch (InvalidProtocolBufferException e) {
          log.warn("Could not deserialize protobuf for {}", kv.getKey(), e);
          status = "Unknown";
        }
      }

      // Add a row in the table
      replicationInProgress.addRow(filename, target.getPeerName(), target.getSourceTableId(), target.getRemoteIdentifier(), status);
    }
    replicationInProgress.generate(req, sb);
  }

  /**
   * Returns a new map whose keys are {@code map}'s values and vice versa.
   * If the input contains duplicate values, later entries overwrite earlier
   * ones in the result.
   *
   * @param map map to invert; not modified
   * @return inverted copy of the map
   */
  protected Map<String,String> invert(Map<String,String> map) {
    Map<String,String> newMap = Maps.newHashMapWithExpectedSize(map.size());
    for(Entry<String,String> entry : map.entrySet()) {
      newMap.put(entry.getValue(), entry.getKey());
    }
    return newMap;
  }
}
| ACCUMULO-2582 Use the full path, not just the file name.
| server/monitor/src/main/java/org/apache/accumulo/monitor/servlets/ReplicationServlet.java | ACCUMULO-2582 Use the full path, not just the file name. |
|
Java | apache-2.0 | 217e93d06d55c13eda982ca51e2180b5c62881d7 | 0 | joansmith/orientdb,allanmoso/orientdb,joansmith/orientdb,wyzssw/orientdb,intfrr/orientdb,wouterv/orientdb,mmacfadden/orientdb,tempbottle/orientdb,giastfader/orientdb,orientechnologies/orientdb,alonsod86/orientdb,jdillon/orientdb,orientechnologies/orientdb,wouterv/orientdb,jdillon/orientdb,mbhulin/orientdb,joansmith/orientdb,mbhulin/orientdb,cstamas/orientdb,giastfader/orientdb,mmacfadden/orientdb,sanyaade-g2g-repos/orientdb,wyzssw/orientdb,sanyaade-g2g-repos/orientdb,mmacfadden/orientdb,sanyaade-g2g-repos/orientdb,allanmoso/orientdb,wyzssw/orientdb,cstamas/orientdb,intfrr/orientdb,cstamas/orientdb,joansmith/orientdb,wouterv/orientdb,orientechnologies/orientdb,rprabhat/orientdb,alonsod86/orientdb,orientechnologies/orientdb,mmacfadden/orientdb,giastfader/orientdb,tempbottle/orientdb,alonsod86/orientdb,rprabhat/orientdb,alonsod86/orientdb,jdillon/orientdb,tempbottle/orientdb,intfrr/orientdb,allanmoso/orientdb,tempbottle/orientdb,mbhulin/orientdb,mbhulin/orientdb,cstamas/orientdb,rprabhat/orientdb,wouterv/orientdb,rprabhat/orientdb,allanmoso/orientdb,intfrr/orientdb,wyzssw/orientdb,sanyaade-g2g-repos/orientdb,giastfader/orientdb | package com.orientechnologies.orient.object.jpa;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import javax.persistence.PersistenceException;
import javax.persistence.SharedCacheMode;
import javax.persistence.ValidationMode;
import javax.persistence.spi.ClassTransformer;
import javax.persistence.spi.PersistenceUnitInfo;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;
import com.orientechnologies.orient.object.jpa.parsing.JPAVersion;
/**
 * An implementation of {@link PersistenceUnitInfo} for parsed persistence unit
 * metadata, populated while reading a persistence.xml descriptor.
 */
public class OJPAPersistenceUnitInfo implements PersistenceUnitInfo {
  /** the name of the persistence unit */
  private final String unitName;

  /** transaction type of the entity managers created by the EntityManagerFactory */
  private final PersistenceUnitTransactionType transactionType;

  /**
   * The JAR file or directory whose META-INF directory contains persistence.xml is called the root of the persistence unit. The
   * scope of the persistence unit is determined by the persistence unit's root.
   */
  private final URL unitRootUrl;

  /**
   * the list of mapping file names that the persistence provider must load to determine the mappings for the entity classes
   */
  private final List<String> mappingFileNames = new ArrayList<String>();

  /**
   * the list of the names of the classes that the persistence provider must add to its set of managed classes
   */
  private final List<String> managedClassNames = new ArrayList<String>();

  /**
   * whether classes in the root of the persistence unit that have not been explicitly listed are to be included in the set of
   * managed classes. When set to true then only listed classes and jars will be scanned for persistent classes, otherwise the
   * enclosing jar or directory will also be scanned. Not applicable to Java SE persistence units.
   *
   * @see 'Note' http://static.springsource.org/spring/docs/4.0.x/spring-framework-reference/html/orm.html#orm-jpa-setup-lcemfb The
   *      exclude-unlisted-classes element always indicates that no scanning for annotated entity classes is supposed to occur, in
   *      order to support the <exclude-unlisted-classes/> shortcut. This is in line with the JPA specification, which suggests that
   *      shortcut, but unfortunately is in conflict with the JPA XSD, which implies false for that shortcut. Consequently,
   *      <exclude-unlisted-classes> false </exclude-unlisted-classes/> is not supported. Simply omit the exclude-unlisted-classes
   *      element if you want entity class scanning to occur.
   */
  private boolean excludeUnlistedClasses = false;

  /** the second-level cache mode that must be used by the provider for the persistence unit */
  private SharedCacheMode sharedCacheMode = SharedCacheMode.UNSPECIFIED;

  /** the validation mode to be used by the persistence provider for the persistence unit */
  private ValidationMode validationMode = ValidationMode.AUTO;

  /** OrientDB Properties object */
  private final Properties properties = new OJPAProperties();

  /**
   * TODO: implement transformer provider-supplied transformer that the container invokes at class-(re)definition time
   */
  private final Set<ClassTransformer> classTransformers = new HashSet<ClassTransformer>();

  /** URLs of jar files the provider must examine for managed classes */
  private final List<URL> jarFileUrls = new ArrayList<URL>();

  /** fully qualified name of the persistence provider implementation class */
  private String providerClassName;

  /** the JPA schema version declared in persistence.xml */
  private final JPAVersion xmlSchemaVersion;

  /**
   * Create a new persistence unit with the given name, transaction type, location and defining bundle
   *
   * @param unitName must not be null or empty
   * @param transactionType may be null
   * @param unitRootUrl root of the persistence unit
   * @param xmlSchemaVersion the version of the JPA schema used in persistence.xml
   * @throws IllegalStateException if unitName is null or empty
   */
  public OJPAPersistenceUnitInfo(String unitName, String transactionType, URL unitRootUrl, String xmlSchemaVersion) {
    // Fail fast before initializing any state.
    if (unitName == null || unitName.isEmpty()) {
      throw new IllegalStateException("PersistenceUnitName for entity manager should not be null or empty");
    }
    this.unitName = unitName;
    this.unitRootUrl = unitRootUrl;
    this.xmlSchemaVersion = JPAVersion.parse(xmlSchemaVersion);
    this.transactionType = initTransactionType(transactionType);
  }

  /**
   * @param providerClassName fully qualified provider class name
   */
  public void setProviderClassName(String providerClassName) {
    this.providerClassName = providerClassName;
  }

  /**
   * @param jtaDataSource JNDI name of the JTA data source
   */
  public void setJtaDataSource(String jtaDataSource) {
    // TODO: implement
  }

  /**
   * @param nonJtaDataSource JNDI name of the non-JTA data source
   */
  public void setNonJtaDataSource(String nonJtaDataSource) {
    // TODO: implement
  }

  /**
   * @param mappingFileName mapping file to add to this unit
   */
  public void addMappingFileName(String mappingFileName) {
    mappingFileNames.add(mappingFileName);
  }

  /**
   * @param jarFileName path of a jar file to add to this unit
   */
  public void addJarFileName(String jarFileName) {
    jarFileUrls.add(initJarFile(jarFileName));
  }

  /**
   * @param className managed class name to add to this unit
   */
  public void addClassName(String className) {
    managedClassNames.add(className);
  }

  /**
   * @param exclude whether unlisted classes are excluded from scanning
   */
  public void setExcludeUnlisted(boolean exclude) {
    excludeUnlistedClasses = exclude;
  }

  /**
   * @param name property name
   * @param value property value
   */
  public void addProperty(String name, String value) {
    properties.setProperty(name, value);
  }

  /**
   * @param sharedCacheMode textual shared-cache mode from persistence.xml
   */
  public void setSharedCacheMode(String sharedCacheMode) {
    this.sharedCacheMode = initSharedCacheMode(sharedCacheMode);
  }

  /**
   * @param validationMode textual validation mode from persistence.xml
   */
  public void setValidationMode(String validationMode) {
    this.validationMode = initValidationMode(validationMode);
  }

  @Override
  public String toString() {
    return "PersistenceUnit@" + unitName + " " + super.toString();
  }

  @Override
  public String getPersistenceUnitName() {
    return unitName;
  }

  @Override
  public String getPersistenceProviderClassName() {
    return providerClassName;
  }

  @Override
  public PersistenceUnitTransactionType getTransactionType() {
    return transactionType;
  }

  @Override
  public DataSource getJtaDataSource() {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public DataSource getNonJtaDataSource() {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public List<String> getMappingFileNames() {
    return mappingFileNames;
  }

  @Override
  public List<URL> getJarFileUrls() {
    return jarFileUrls;
  }

  @Override
  public URL getPersistenceUnitRootUrl() {
    return unitRootUrl;
  }

  @Override
  public List<String> getManagedClassNames() {
    return managedClassNames;
  }

  @Override
  public boolean excludeUnlistedClasses() {
    return excludeUnlistedClasses;
  }

  @Override
  public SharedCacheMode getSharedCacheMode() {
    return sharedCacheMode;
  }

  @Override
  public ValidationMode getValidationMode() {
    return validationMode;
  }

  @Override
  public Properties getProperties() {
    return properties;
  }

  @Override
  public String getPersistenceXMLSchemaVersion() {
    return xmlSchemaVersion.getVersion();
  }

  @Override
  public ClassLoader getClassLoader() {
    // NOTE(review): ThreadLocal is loaded by the bootstrap loader, so this
    // typically returns null -- confirm the context class loader is not
    // wanted here instead.
    return ThreadLocal.class.getClassLoader();
  }

  @Override
  public void addTransformer(ClassTransformer transformer) {
    classTransformers.add(transformer);
  }

  @Override
  public ClassLoader getNewTempClassLoader() {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public int hashCode() {
    return unitName.hashCode();
  }

  /** Two persistence units are equal iff they have the same unit name. */
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    return unitName.equals(((OJPAPersistenceUnitInfo) obj).getPersistenceUnitName());
  }

  // ------------- helpers

  /**
   * TODO: init default value In a Java EE environment, if this element is not specified, the default is JTA. In a Java SE
   * environment, if this element is not specified, a default of RESOURCE_LOCAL may be assumed.
   *
   * @param elementContent textual transaction type; may be null or empty
   * @return parsed transaction type, or null when no value was given
   * @throws PersistenceException if the value is not a valid transaction type
   */
  public static PersistenceUnitTransactionType initTransactionType(String elementContent) {
    if (elementContent == null || elementContent.isEmpty()) {
      return null;
    }
    try {
      return PersistenceUnitTransactionType.valueOf(elementContent.toUpperCase());
    } catch (IllegalArgumentException ex) {
      throw new PersistenceException("Unknown TransactionType: " + elementContent, ex);
    }
  }

  /**
   * @param validationMode textual validation mode; may be null or empty
   * @return parsed mode, defaulting to {@link ValidationMode#AUTO}
   * @throws PersistenceException if the value is not a valid validation mode
   */
  public static ValidationMode initValidationMode(String validationMode) {
    if (validationMode == null || validationMode.isEmpty()) {
      return ValidationMode.AUTO;
    }
    try {
      return ValidationMode.valueOf(validationMode.toUpperCase());
    } catch (IllegalArgumentException ex) {
      throw new PersistenceException("Unknown ValidationMode: " + validationMode, ex);
    }
  }

  /**
   * @param sharedCacheMode textual shared-cache mode; may be null or empty
   * @return parsed mode, defaulting to {@link SharedCacheMode#UNSPECIFIED}
   * @throws PersistenceException if the value is not a valid shared-cache mode
   */
  public static SharedCacheMode initSharedCacheMode(String sharedCacheMode) {
    if (sharedCacheMode == null || sharedCacheMode.isEmpty()) {
      return SharedCacheMode.UNSPECIFIED;
    }
    try {
      return SharedCacheMode.valueOf(sharedCacheMode.toUpperCase());
    } catch (IllegalArgumentException ex) {
      // Fixed copy-pasted message: this parses a SharedCacheMode, not a ValidationMode.
      throw new PersistenceException("Unknown SharedCacheMode: " + sharedCacheMode, ex);
    }
  }

  /**
   * Converts a jar file path into a {@code file:} URL.
   *
   * @param jarFileName absolute or relative path of the jar file
   * @return URL for the jar file
   * @throws PersistenceException if the path cannot be converted to a URL
   */
  public static URL initJarFile(String jarFileName) {
    try {
      // Go through File -> URI -> URL so relative names and platform-specific
      // paths produce a valid file: URL; the previous "file://" + name form
      // treated a relative jar name as the URL authority component.
      return new java.io.File(jarFileName).toURI().toURL();
    } catch (MalformedURLException e) {
      throw new PersistenceException("Unknown jar file name: " + jarFileName, e);
    }
  }
}
| object/src/main/java/com/orientechnologies/orient/object/jpa/OJPAPersistenceUnitInfo.java | package com.orientechnologies.orient.object.jpa;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import javax.persistence.PersistenceException;
import javax.persistence.SharedCacheMode;
import javax.persistence.ValidationMode;
import javax.persistence.spi.ClassTransformer;
import javax.persistence.spi.PersistenceUnitInfo;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;
import com.orientechnologies.orient.object.jpa.parsing.JPAVersion;
/**
 * An implementation of {@link PersistenceUnitInfo} for parsed persistence unit
 * metadata, populated while reading a persistence.xml descriptor.
 */
public class OJPAPersistenceUnitInfo implements PersistenceUnitInfo {
  /** the name of the persistence unit */
  private final String unitName;

  /** transaction type of the entity managers created by the EntityManagerFactory */
  private final PersistenceUnitTransactionType transactionType;

  /**
   * The JAR file or directory whose META-INF directory contains persistence.xml is called the root of the persistence unit. The
   * scope of the persistence unit is determined by the persistence unit's root.
   */
  private final URL unitRootUrl;

  /**
   * the list of mapping file names that the persistence provider must load to determine the mappings for the entity classes
   */
  private final List<String> mappingFileNames = new ArrayList<String>();

  /**
   * the list of the names of the classes that the persistence provider must add to its set of managed classes
   */
  private final List<String> managedClassNames = new ArrayList<String>();

  /**
   * whether classes in the root of the persistence unit that have not been explicitly listed are to be included in the set of
   * managed classes. When set to true then only listed classes and jars will be scanned for persistent classes, otherwise the
   * enclosing jar or directory will also be scanned. Not applicable to Java SE persistence units.
   *
   * @see 'Note' http://static.springsource.org/spring/docs/4.0.x/spring-framework-reference/html/orm.html#orm-jpa-setup-lcemfb The
   *      exclude-unlisted-classes element always indicates that no scanning for annotated entity classes is supposed to occur, in
   *      order to support the <exclude-unlisted-classes/> shortcut. This is in line with the JPA specification, which suggests that
   *      shortcut, but unfortunately is in conflict with the JPA XSD, which implies false for that shortcut. Consequently,
   *      <exclude-unlisted-classes> false </exclude-unlisted-classes/> is not supported. Simply omit the exclude-unlisted-classes
   *      element if you want entity class scanning to occur.
   */
  private boolean excludeUnlistedClasses = false;

  /** the second-level cache mode that must be used by the provider for the persistence unit */
  private SharedCacheMode sharedCacheMode = SharedCacheMode.UNSPECIFIED;

  /** the validation mode to be used by the persistence provider for the persistence unit */
  private ValidationMode validationMode = ValidationMode.AUTO;

  /** OrientDB Properties object */
  private final Properties properties = new OJPAProperties();

  /**
   * TODO: implement transformer provider-supplied transformer that the container invokes at class-(re)definition time
   */
  private final Set<ClassTransformer> classTransformers = new HashSet<ClassTransformer>();

  /** URLs of jar files the provider must examine for managed classes */
  private final List<URL> jarFileUrls = new ArrayList<URL>();

  /** fully qualified name of the persistence provider implementation class */
  private String providerClassName;

  /** the JPA schema version declared in persistence.xml */
  private final JPAVersion xmlSchemaVersion;

  /**
   * Create a new persistence unit with the given name, transaction type, location and defining bundle
   *
   * @param unitName must not be null or empty
   * @param transactionType may be null
   * @param unitRootUrl root of the persistence unit
   * @param xmlSchemaVersion the version of the JPA schema used in persistence.xml
   * @throws IllegalStateException if unitName is null or empty
   */
  public OJPAPersistenceUnitInfo(String unitName, String transactionType, URL unitRootUrl, String xmlSchemaVersion) {
    // Fail fast before initializing any state.
    if (unitName == null || unitName.isEmpty()) {
      throw new IllegalStateException("PersistenceUnitName for entity manager should not be null or empty");
    }
    this.unitName = unitName;
    this.unitRootUrl = unitRootUrl;
    this.xmlSchemaVersion = JPAVersion.parse(xmlSchemaVersion);
    this.transactionType = initTransactionType(transactionType);
  }

  /**
   * @param providerClassName fully qualified provider class name
   */
  public void setProviderClassName(String providerClassName) {
    this.providerClassName = providerClassName;
  }

  /**
   * @param jtaDataSource JNDI name of the JTA data source
   */
  public void setJtaDataSource(String jtaDataSource) {
    // TODO: implement
  }

  /**
   * @param nonJtaDataSource JNDI name of the non-JTA data source
   */
  public void setNonJtaDataSource(String nonJtaDataSource) {
    // TODO: implement
  }

  /**
   * @param mappingFileName mapping file to add to this unit
   */
  public void addMappingFileName(String mappingFileName) {
    mappingFileNames.add(mappingFileName);
  }

  /**
   * @param jarFileName path of a jar file to add to this unit
   */
  public void addJarFileName(String jarFileName) {
    jarFileUrls.add(initJarFile(jarFileName));
  }

  /**
   * @param className managed class name to add to this unit
   */
  public void addClassName(String className) {
    managedClassNames.add(className);
  }

  /**
   * @param exclude whether unlisted classes are excluded from scanning
   */
  public void setExcludeUnlisted(boolean exclude) {
    excludeUnlistedClasses = exclude;
  }

  /**
   * @param name property name
   * @param value property value
   */
  public void addProperty(String name, String value) {
    properties.setProperty(name, value);
  }

  /**
   * @param sharedCacheMode textual shared-cache mode from persistence.xml
   */
  public void setSharedCacheMode(String sharedCacheMode) {
    this.sharedCacheMode = initSharedCacheMode(sharedCacheMode);
  }

  /**
   * @param validationMode textual validation mode from persistence.xml
   */
  public void setValidationMode(String validationMode) {
    this.validationMode = initValidationMode(validationMode);
  }

  @Override
  public String toString() {
    return "PersistenceUnit@" + unitName + " " + super.toString();
  }

  @Override
  public String getPersistenceUnitName() {
    return unitName;
  }

  @Override
  public String getPersistenceProviderClassName() {
    return providerClassName;
  }

  @Override
  public PersistenceUnitTransactionType getTransactionType() {
    return transactionType;
  }

  @Override
  public DataSource getJtaDataSource() {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public DataSource getNonJtaDataSource() {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public List<String> getMappingFileNames() {
    return mappingFileNames;
  }

  @Override
  public List<URL> getJarFileUrls() {
    return jarFileUrls;
  }

  @Override
  public URL getPersistenceUnitRootUrl() {
    return unitRootUrl;
  }

  @Override
  public List<String> getManagedClassNames() {
    return managedClassNames;
  }

  @Override
  public boolean excludeUnlistedClasses() {
    return excludeUnlistedClasses;
  }

  @Override
  public SharedCacheMode getSharedCacheMode() {
    return sharedCacheMode;
  }

  @Override
  public ValidationMode getValidationMode() {
    return validationMode;
  }

  @Override
  public Properties getProperties() {
    return properties;
  }

  @Override
  public String getPersistenceXMLSchemaVersion() {
    return xmlSchemaVersion.getVersion();
  }

  @Override
  public ClassLoader getClassLoader() {
    // NOTE(review): ThreadLocal is loaded by the bootstrap loader, so this
    // typically returns null -- confirm the context class loader is not
    // wanted here instead.
    return ThreadLocal.class.getClassLoader();
  }

  @Override
  public void addTransformer(ClassTransformer transformer) {
    classTransformers.add(transformer);
  }

  @Override
  public ClassLoader getNewTempClassLoader() {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public int hashCode() {
    return unitName.hashCode();
  }

  /** Two persistence units are equal iff they have the same unit name. */
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    return unitName.equals(((OJPAPersistenceUnitInfo) obj).getPersistenceUnitName());
  }

  // ------------- helpers

  /**
   * TODO: init default value In a Java EE environment, if this element is not specified, the default is JTA. In a Java SE
   * environment, if this element is not specified, a default of RESOURCE_LOCAL may be assumed.
   *
   * @param elementContent textual transaction type; may be null or empty
   * @return parsed transaction type, or null when no value was given
   * @throws PersistenceException if the value is not a valid transaction type
   */
  public static PersistenceUnitTransactionType initTransactionType(String elementContent) {
    if (elementContent == null || elementContent.isEmpty()) {
      return null;
    }
    try {
      return PersistenceUnitTransactionType.valueOf(elementContent.toUpperCase());
    } catch (IllegalArgumentException ex) {
      throw new PersistenceException("Unknown TransactionType: " + elementContent, ex);
    }
  }

  /**
   * @param validationMode textual validation mode; may be null or empty
   * @return parsed mode, defaulting to {@link ValidationMode#AUTO}
   * @throws PersistenceException if the value is not a valid validation mode
   */
  public static ValidationMode initValidationMode(String validationMode) {
    if (validationMode == null || validationMode.isEmpty()) {
      return ValidationMode.AUTO;
    }
    try {
      return ValidationMode.valueOf(validationMode.toUpperCase());
    } catch (IllegalArgumentException ex) {
      throw new PersistenceException("Unknown ValidationMode: " + validationMode, ex);
    }
  }

  /**
   * @param sharedCacheMode textual shared-cache mode; may be null or empty
   * @return parsed mode, defaulting to {@link SharedCacheMode#UNSPECIFIED}
   * @throws PersistenceException if the value is not a valid shared-cache mode
   */
  public static SharedCacheMode initSharedCacheMode(String sharedCacheMode) {
    if (sharedCacheMode == null || sharedCacheMode.isEmpty()) {
      return SharedCacheMode.UNSPECIFIED;
    }
    try {
      return SharedCacheMode.valueOf(sharedCacheMode.toUpperCase());
    } catch (IllegalArgumentException ex) {
      // Fixed copy-pasted message: this parses a SharedCacheMode, not a ValidationMode.
      throw new PersistenceException("Unknown SharedCacheMode: " + sharedCacheMode, ex);
    }
  }

  /**
   * Converts a jar file path into a {@code file:} URL.
   *
   * @param jarFileName absolute or relative path of the jar file
   * @return URL for the jar file
   * @throws PersistenceException if the path cannot be converted to a URL
   */
  public static URL initJarFile(String jarFileName) {
    try {
      // Go through File -> URI -> URL so relative names and platform-specific
      // paths produce a valid file: URL; the previous "file://" + name form
      // treated a relative jar name as the URL authority component.
      return new java.io.File(jarFileName).toURI().toURL();
    } catch (MalformedURLException e) {
      throw new PersistenceException("Unknown jar file name: " + jarFileName, e);
    }
  }
}
| fix comment to make Mr. Jenkins happy | object/src/main/java/com/orientechnologies/orient/object/jpa/OJPAPersistenceUnitInfo.java | fix comment to make Mr. Jenkins happy |
|
Java | apache-2.0 | 0fdbe8d7203a722ebd9191ddb6820093658dc269 | 0 | leviathan747/bridgepoint,travislondon/bridgepoint,jason-rhodes/bridgepoint,TypeFox/bridgepoint,HebaKhaled/bridgepoint,rmulvey/bridgepoint,jason-rhodes/bridgepoint,nmohamad/bridgepoint,lwriemen/bridgepoint,cortlandstarrett/bridgepoint,leviathan747/bridgepoint,cortlandstarrett/bridgepoint,keithbrown/bridgepoint,nmohamad/bridgepoint,jmvachon/bridgepoint,john-tornblom/bridgepoint,rmulvey/bridgepoint,kirisma/bridgepoint,lwriemen/bridgepoint,perojonsson/bridgepoint,cortlandstarrett/bridgepoint,travislondon/bridgepoint,HebaKhaled/bridgepoint,perojonsson/bridgepoint,keithbrown/bridgepoint,travislondon/bridgepoint,nmohamad/bridgepoint,rmulvey/bridgepoint,kirisma/bridgepoint,TypeFox/bridgepoint,jmvachon/bridgepoint,kirisma/bridgepoint,HebaKhaled/bridgepoint,kirisma/bridgepoint,nmohamad/bridgepoint,xtuml/bridgepoint,john-tornblom/bridgepoint,xtuml/bptest,jmvachon/bridgepoint,HebaKhaled/bridgepoint,rmulvey/bptest,TypeFox/bridgepoint,xtuml/bptest,jmvachon/bridgepoint,cortlandstarrett/bridgepoint,keithbrown/bridgepoint,keithbrown/bptest,lwriemen/bridgepoint,cortlandstarrett/bridgepoint,john-tornblom/bridgepoint,kirisma/bridgepoint,cortlandstarrett/bridgepoint,xtuml/bptest,lwriemen/bridgepoint,jmvachon/bridgepoint,keithbrown/bptest,leviathan747/bridgepoint,cortlandstarrett/bridgepoint,lwriemen/bridgepoint,xtuml/bridgepoint,john-tornblom/bridgepoint,leviathan747/bridgepoint,leviathan747/bridgepoint,nmohamad/bridgepoint,TypeFox/bridgepoint,perojonsson/bridgepoint,leviathan747/bridgepoint,lwriemen/bridgepoint,xtuml/bridgepoint,TypeFox/bridgepoint,nmohamad/bridgepoint,cortlandstarrett/bridgepoint,jason-rhodes/bridgepoint,leviathan747/bridgepoint,john-tornblom/bridgepoint,travislondon/bridgepoint,jmvachon/bridgepoint,john-tornblom/bridgepoint,rmulvey/bptest,HebaKhaled/bridgepoint,perojonsson/bridgepoint,perojonsson/bridgepoint,keithbrown/bridgepoint,rmulvey/bridgepoint,keithbrown/bptest,john-tornblom/bridgepoint,jason-r
hodes/bridgepoint,lwriemen/bridgepoint,jason-rhodes/bridgepoint,HebaKhaled/bridgepoint,nmohamad/bridgepoint,xtuml/bridgepoint,leviathan747/bridgepoint,travislondon/bridgepoint,keithbrown/bridgepoint,perojonsson/bridgepoint,rmulvey/bridgepoint,travislondon/bridgepoint,keithbrown/bridgepoint,kirisma/bridgepoint,kirisma/bridgepoint,xtuml/bridgepoint,rmulvey/bridgepoint,rmulvey/bridgepoint,xtuml/bridgepoint,jason-rhodes/bridgepoint,TypeFox/bridgepoint,HebaKhaled/bridgepoint,jason-rhodes/bridgepoint,travislondon/bridgepoint,jmvachon/bridgepoint,xtuml/bridgepoint,keithbrown/bridgepoint,xtuml/bridgepoint,perojonsson/bridgepoint,TypeFox/bridgepoint,lwriemen/bridgepoint,travislondon/bridgepoint,rmulvey/bptest |
//=====================================================================
//
//File: $RCSfile: VerifierTestSuite2.java,v $
//Version: $Revision: 1.3 $
//Modified: $Date: 2013/05/10 04:28:44 $
//
//(c) Copyright 2006-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
import junit.framework.Test;
import junit.framework.TestSuite;
import org.eclipse.core.runtime.CoreException;
import com.mentor.nucleus.bp.core.CorePlugin;
import com.mentor.nucleus.bp.core.common.BridgePointPreferencesStore;
import com.mentor.nucleus.bp.core.util.WorkspaceUtil;
import com.mentor.nucleus.bp.debug.engine.VerifierMessageTestGlobals;
import com.mentor.nucleus.bp.debug.test.VariableViewTests;
import com.mentor.nucleus.bp.debug.ui.launch.DLLRelaunchTest;
import com.mentor.nucleus.bp.debug.ui.test.execute.BlockedComponentExecutionTest;
import com.mentor.nucleus.bp.debug.ui.test.execute.RecursionExecutionTest;
/**
 * Second Verifier test suite: bundles the debug/Verifier execution test
 * classes into one suite so they can be run together under the JUnit
 * launcher.
 */
public class VerifierTestSuite2 extends TestSuite {
  /**
   * Returns the suite. This is required to
   * use the JUnit Launcher.
   * @return the assembled suite
   * @throws CoreException if workspace configuration fails
   */
  public static Test suite() throws CoreException {
    return new VerifierTestSuite2();
  }

  /**
   * Construct the test suite.
   * @throws CoreException if autobuilding cannot be disabled
   */
  public VerifierTestSuite2() throws CoreException {
    // turn off autobuild to stop MC-3020 builders from running
    WorkspaceUtil.setAutobuilding(false); // throws CoreException
    // Use default names for newly created elements while the tests run.
    CorePlugin.getDefault().getPreferenceStore().
        setValue(BridgePointPreferencesStore.
            USE_DEFAULT_NAME_FOR_CREATION, true);
    // Register each test class as its own nested suite; order here is the
    // order the groups execute in.
    addTest(new TestSuite(VerifierMessageTestGlobals.class));
    addTest(new TestSuite(DLLRelaunchTest.class));
    addTest(new TestSuite(RecursionExecutionTest.class));
    addTest(new TestSuite(BlockedComponentExecutionTest.class));
    addTest(new TestSuite(VariableViewTests.class));
  }
}
| src/com.mentor.nucleus.bp.debug.ui.test/src/VerifierTestSuite2.java |
//=====================================================================
//
//File: $RCSfile: VerifierTestSuite2.java,v $
//Version: $Revision: 1.3 $
//Modified: $Date: 2013/05/10 04:28:44 $
//
//(c) Copyright 2006-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
import junit.framework.Test;
import junit.framework.TestSuite;
import org.eclipse.core.runtime.CoreException;
import com.mentor.nucleus.bp.core.CorePlugin;
import com.mentor.nucleus.bp.core.common.BridgePointPreferencesStore;
import com.mentor.nucleus.bp.core.util.WorkspaceUtil;
import com.mentor.nucleus.bp.debug.engine.VerifierMessageTestGlobals;
import com.mentor.nucleus.bp.debug.test.VariableViewTests;
import com.mentor.nucleus.bp.debug.ui.launch.DLLRelaunchTest;
/**
* Test all areas of the core
*/
public class VerifierTestSuite2 extends TestSuite {
    /**
     * Returns the suite. This is required to
     * use the JUnit Launcher.
     * @return the fully-populated suite
     * @throws CoreException if workspace configuration fails
     */
    public static Test suite() throws CoreException {
        return new VerifierTestSuite2();
    }
    /**
     * Construct the test suite. Disables workspace autobuild and forces
     * default element names before registering the test classes; tests
     * run in the order they are added below.
     */
    public VerifierTestSuite2() throws CoreException {
        // turn off autobuild to stop MC-3020 builders from running
        WorkspaceUtil.setAutobuilding(false); // throws CoreException
        // use default names on creation so tests need not dismiss naming dialogs
        CorePlugin.getDefault().getPreferenceStore().
        setValue(BridgePointPreferencesStore.
        USE_DEFAULT_NAME_FOR_CREATION, true);
        addTest(new TestSuite(VerifierMessageTestGlobals.class));
        addTest(new TestSuite(DLLRelaunchTest.class));
        addTest(new TestSuite(VariableViewTests.class));
    }
}
| job: #182
resolve conflict with head | src/com.mentor.nucleus.bp.debug.ui.test/src/VerifierTestSuite2.java | job: #182 resolve conflict with head |
|
Java | apache-2.0 | 318f37dabd4cede266c9610167c8383f56e720c7 | 0 | Commit451/GitLabAndroid,Commit451/LabCoat | package com.commit451.gitlab.widget;
import android.appwidget.AppWidgetManager;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.widget.RemoteViews;
import android.widget.RemoteViewsService;
import com.commit451.gitlab.R;
import com.commit451.gitlab.api.GitLabClient;
import com.commit451.gitlab.api.GitLabRss;
import com.commit451.gitlab.model.Account;
import com.commit451.gitlab.model.rss.Entry;
import com.commit451.gitlab.model.rss.Feed;
import com.commit451.gitlab.transformation.CircleTransformation;
import com.squareup.picasso.Picasso;
import java.io.IOException;
import java.util.ArrayList;
import retrofit2.Response;
/**
* Remote all the views
*/
/**
 * RemoteViewsFactory that supplies the feed app-widget's list rows from the
 * user's GitLab activity feed. Entries are fetched synchronously in
 * {@link #onDataSetChanged()} (permitted for widget factories) and cached in
 * memory.
 */
public class FeedRemoteViewsFactory implements RemoteViewsService.RemoteViewsFactory {

    /** Maximum number of feed entries the widget will display. */
    private static final int MAX_COUNT = 10;

    private Context mContext;
    private int mAppWidgetId;
    private ArrayList<Entry> mEntries;

    public FeedRemoteViewsFactory(Context context, Intent intent) {
        mContext = context;
        mAppWidgetId = intent.getIntExtra(AppWidgetManager.EXTRA_APPWIDGET_ID,
                AppWidgetManager.INVALID_APPWIDGET_ID);
    }

    @Override
    public void onCreate() {
        mEntries = new ArrayList<>();
    }

    @Override
    public void onDestroy() {
        // In onDestroy() you should tear down anything that was setup for your data source,
        // eg. cursors, connections, etc.
        mEntries.clear();
    }

    @Override
    public int getCount() {
        // Report only as many rows as we actually hold (capped at MAX_COUNT);
        // previously this returned the constant 10 even when fewer entries
        // were loaded, producing empty/null rows.
        return Math.min(MAX_COUNT, mEntries.size());
    }

    @Override
    public RemoteViews getViewAt(int position) {
        // position will always range from 0 to getCount() - 1, but guard
        // anyway in case the widget host asks for a stale position.
        if (position >= mEntries.size()) {
            return null;
        }
        Entry entry = mEntries.get(position);
        RemoteViews rv = new RemoteViews(mContext.getPackageName(), R.layout.widget_item_entry);
        rv.setTextViewText(R.id.title, entry.getTitle());
        rv.setTextViewText(R.id.summary, entry.getSummary());

        // Set a fill-intent which will be used to fill in the pending intent
        // template that FeedWidgetProvider put on the collection view.
        Intent fillInIntent = new Intent();
        fillInIntent.putExtra(FeedWidgetProvider.EXTRA_LINK, entry.getLink().getHref().toString());
        rv.setOnClickFillInIntent(R.id.root, fillInIntent);

        try {
            // Synchronous load is acceptable here; the factory runs off the UI thread.
            Bitmap image = Picasso.with(mContext)
                    .load(entry.getThumbnail().getUrl())
                    .transform(new CircleTransformation())
                    .get();
            rv.setImageViewBitmap(R.id.image, image);
        } catch (IOException e) {
            // image load failed - row still renders, just without a thumbnail
        }
        return rv;
    }

    @Override
    public RemoteViews getLoadingView() {
        // Returning null yields the default loading view.
        return null;
    }

    @Override
    public int getViewTypeCount() {
        return 1;
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public boolean hasStableIds() {
        return true;
    }

    @Override
    public void onDataSetChanged() {
        // Triggered by AppWidgetManager.notifyAppWidgetViewDataChanged().
        // Heavy, synchronous work (network fetch) is allowed here.
        Account account = FeedWidgetPrefs.getAccount(mContext, mAppWidgetId);
        if (account == null || account.getUser() == null || account.getUser().getFeedUrl() == null) {
            //TODO show error state?
            return;
        }
        GitLabRss rssClient = GitLabClient.rssInstance(account);
        try {
            Response<Feed> feedResponse = rssClient.getFeed(account.getUser().getFeedUrl().toString()).execute();
            if (feedResponse.isSuccessful()) {
                if (feedResponse.body().getEntries() != null) {
                    // Replace, don't append - otherwise entries accumulate on
                    // every widget refresh.
                    mEntries.clear();
                    mEntries.addAll(feedResponse.body().getEntries());
                }
            }
        } catch (IOException e) {
            //maybe let the user know somehow?
        }
    }
}
| app/src/main/java/com/commit451/gitlab/widget/FeedRemoteViewsFactory.java | package com.commit451.gitlab.widget;
import android.appwidget.AppWidgetManager;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.widget.RemoteViews;
import android.widget.RemoteViewsService;
import com.commit451.gitlab.R;
import com.commit451.gitlab.api.GitLabClient;
import com.commit451.gitlab.api.GitLabRss;
import com.commit451.gitlab.model.Account;
import com.commit451.gitlab.model.rss.Entry;
import com.commit451.gitlab.model.rss.Feed;
import com.commit451.gitlab.transformation.CircleTransformation;
import com.squareup.picasso.Picasso;
import java.io.IOException;
import java.util.ArrayList;
import retrofit2.Response;
/**
* Remote all the views
*/
/**
 * RemoteViewsFactory that supplies the feed app-widget's list rows from the
 * user's GitLab activity feed. Entries are fetched synchronously in
 * {@link #onDataSetChanged()} (permitted for widget factories) and cached in
 * memory.
 */
public class FeedRemoteViewsFactory implements RemoteViewsService.RemoteViewsFactory {

    /** Maximum number of feed entries the widget will display. */
    private static final int mCount = 10;

    private Context mContext;
    private int mAppWidgetId;
    private ArrayList<Entry> mEntries;

    public FeedRemoteViewsFactory(Context context, Intent intent) {
        mContext = context;
        mAppWidgetId = intent.getIntExtra(AppWidgetManager.EXTRA_APPWIDGET_ID,
                AppWidgetManager.INVALID_APPWIDGET_ID);
    }

    @Override
    public void onCreate() {
        mEntries = new ArrayList<>();
    }

    @Override
    public void onDestroy() {
        // In onDestroy() you should tear down anything that was setup for your data source,
        // eg. cursors, connections, etc.
        mEntries.clear();
    }

    @Override
    public int getCount() {
        return mCount;
    }

    @Override
    public RemoteViews getViewAt(int position) {
        // getCount() reports the constant mCount, so position can exceed the
        // number of entries actually loaded - guard to avoid an
        // IndexOutOfBoundsException when fewer than mCount entries exist.
        if (position >= mEntries.size()) {
            return null;
        }
        Entry entry = mEntries.get(position);
        RemoteViews rv = new RemoteViews(mContext.getPackageName(), R.layout.widget_item_entry);
        rv.setTextViewText(R.id.title, entry.getTitle());
        rv.setTextViewText(R.id.summary, entry.getSummary());

        // Set a fill-intent which will be used to fill in the pending intent
        // template that FeedWidgetProvider put on the collection view.
        Intent fillInIntent = new Intent();
        fillInIntent.putExtra(FeedWidgetProvider.EXTRA_LINK, entry.getLink().getHref().toString());
        rv.setOnClickFillInIntent(R.id.root, fillInIntent);

        try {
            // Synchronous load is acceptable here; the factory runs off the UI thread.
            Bitmap image = Picasso.with(mContext)
                    .load(entry.getThumbnail().getUrl())
                    .transform(new CircleTransformation())
                    .get();
            rv.setImageViewBitmap(R.id.image, image);
        } catch (IOException e) {
            // image load failed - row still renders, just without a thumbnail
        }
        return rv;
    }

    @Override
    public RemoteViews getLoadingView() {
        // Returning null yields the default loading view.
        return null;
    }

    @Override
    public int getViewTypeCount() {
        return 1;
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public boolean hasStableIds() {
        return true;
    }

    @Override
    public void onDataSetChanged() {
        // Triggered by AppWidgetManager.notifyAppWidgetViewDataChanged().
        // Heavy, synchronous work (network fetch) is allowed here.
        Account account = FeedWidgetPrefs.getAccount(mContext, mAppWidgetId);
        if (account == null || account.getUser() == null || account.getUser().getFeedUrl() == null) {
            //TODO show error state?
            return;
        }
        GitLabRss rssClient = GitLabClient.rssInstance(account);
        try {
            Response<Feed> feedResponse = rssClient.getFeed(account.getUser().getFeedUrl().toString()).execute();
            if (feedResponse.isSuccessful()) {
                if (feedResponse.body().getEntries() != null) {
                    // Replace, don't append - otherwise entries accumulate on
                    // every widget refresh.
                    mEntries.clear();
                    mEntries.addAll(feedResponse.body().getEntries());
                }
            }
        } catch (IOException e) {
            //maybe let the user know somehow?
        }
    }
}
| Don't crash if we have less than 10 feed entries
| app/src/main/java/com/commit451/gitlab/widget/FeedRemoteViewsFactory.java | Don't crash if we have less than 10 feed entries |
|
Java | mit | 0a3d2736ead7f4f51ddd9e4de7da0fe6d8be9855 | 0 | CenturyLinkCloud/mdw,CenturyLinkCloud/mdw,CenturyLinkCloud/mdw,CenturyLinkCloud/mdw,CenturyLinkCloud/mdw,CenturyLinkCloud/mdw | /*
* Copyright (C) 2017 CenturyLink, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.centurylink.mdw.service.data;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.centurylink.mdw.common.service.Query;
import com.centurylink.mdw.constant.OwnerType;
import com.centurylink.mdw.dataaccess.DataAccess;
import com.centurylink.mdw.dataaccess.DataAccessException;
import com.centurylink.mdw.dataaccess.DatabaseAccess;
import com.centurylink.mdw.dataaccess.db.CommonDataAccess;
import com.centurylink.mdw.model.workflow.ProcessInstance;
import com.centurylink.mdw.model.workflow.ProcessList;
import com.centurylink.mdw.model.workflow.WorkStatus;
import com.centurylink.mdw.model.workflow.WorkStatuses;
public class WorkflowDataAccess extends CommonDataAccess {
    /**
     * Constructs with the current/supported schema versions; no explicit
     * DatabaseAccess is supplied (null), so the superclass default is used.
     */
    public WorkflowDataAccess() {
        super(null, DataAccess.currentSchemaVersion, DataAccess.supportedSchemaVersion);
    }
public ProcessList getProcessInstances(Query query) throws DataAccessException {
try {
List<ProcessInstance> procInsts = new ArrayList<ProcessInstance>();
db.openConnection();
long count = -1;
String where;
if (query.getFind() != null) {
try {
// numeric value means instance id or master request id
long findInstId = Long.parseLong(query.getFind());
where = "where (pi.process_instance_id like '" + findInstId
+ "%' or pi.master_request_id like '" + query.getFind() + "%')\n";
}
catch (NumberFormatException ex) {
// otherwise master request id
where = "where pi.master_request_id like '" + query.getFind() + "%'\n";
}
}
else {
where = buildWhere(query);
}
String countSql = "select count(process_instance_id) from process_instance pi\n" + where;
ResultSet rs = db.runSelect(countSql);
if (rs.next())
count = rs.getLong(1);
String orderBy = buildOrderBy(query);
StringBuilder sql = new StringBuilder();
if (query.getMax() != Query.MAX_ALL)
sql.append(db.pagingQueryPrefix());
sql.append("select ").append(PROC_INST_COLS).append(" from process_instance pi\n").append(where).append(orderBy);
if (query.getMax() != Query.MAX_ALL)
sql.append(db.pagingQuerySuffix(query.getStart(), query.getMax()));
rs = db.runSelect(sql.toString());
while (rs.next())
procInsts.add(buildProcessInstance(rs));
ProcessList list = new ProcessList(ProcessList.PROCESS_INSTANCES, procInsts);
list.setTotal(count);
list.setRetrieveDate(DatabaseAccess.getDbDate());
return list;
}
catch (SQLException ex) {
throw new DataAccessException("Failed to retrieve Processes", ex);
}
finally {
db.closeConnection();
}
}
    /**
     * Builds the SQL WHERE clause from the query's filters. An "instanceId"
     * filter short-circuits all other criteria.
     *
     * NOTE(review): filter values (masterRequestId, owner, template, variable
     * values, ...) are concatenated directly into the SQL text. Presumably
     * these come from trusted callers, but a value containing a quote yields
     * broken SQL / injection - consider escaping or bind parameters. TODO confirm.
     *
     * @param query filter container
     * @return clause starting with "where "
     * @throws DataAccessException if the startDate filter cannot be parsed
     */
    private String buildWhere(Query query) throws DataAccessException {
        long instanceId = query.getLongFilter("instanceId");
        if (instanceId > 0)
            return "where pi.process_instance_id = " + instanceId + "\n"; // ignore other criteria
        StringBuilder sb = new StringBuilder();
        // "1 = 1" lets every later condition be appended uniformly with " and"
        sb.append("where 1 = 1 ");
        // masterRequestId
        String masterRequestId = query.getFilter("masterRequestId");
        if (masterRequestId != null)
            sb.append(" and pi.master_request_id = '" + masterRequestId + "'\n");
        String owner = query.getFilter("owner");
        if (owner == null) {
            // default excludes embedded subprocs - unless searching for activityInstanceId
            if (!(query.getLongFilter("activityInstanceId") > 0L))
                sb.append(" and pi.owner != '").append(OwnerType.MAIN_PROCESS_INSTANCE).append("'\n");
            // "master" filter restricts to top-level (non-child, non-error) instances
            if ("true".equals(query.getFilter("master")))
                sb.append(" and pi.owner NOT IN ( '").append(OwnerType.PROCESS_INSTANCE).append("' , '").append(OwnerType.ERROR).append("' )\n");
        }
        else {
            String ownerId = query.getFilter("ownerId");
            sb.append(" and pi.owner = '").append(owner).append("' and pi.owner_id = ").append(ownerId).append("\n");
        }
        // processId (single) takes precedence over processIds (list)
        String processId = query.getFilter("processId");
        if (processId != null) {
            sb.append(" and pi.process_id = ").append(processId).append("\n");
        }
        else {
            // processIds
            String[] processIds = query.getArrayFilter("processIds");
            if (processIds != null && processIds.length > 0) {
                sb.append(" and pi.process_id in (");
                for (int i = 0; i < processIds.length; i++) {
                    sb.append(processIds[i]);
                    if (i < processIds.length - 1)
                        sb.append(",");
                }
                sb.append(")\n");
            }
        }
        // secondaryOwnerId - also matches rows with no secondary owner
        long secondaryOwnerId = query.getLongFilter("secondaryOwnerId");
        if (secondaryOwnerId > 0) {
            sb.append(" and (pi.secondary_owner_id is null or pi.secondary_owner_id = ");
            sb.append(secondaryOwnerId).append(")\n");
        }
        // activityInstanceId - subselect maps activity instance to its process instance
        long activityInstanceId = query.getLongFilter("activityInstanceId");
        if (activityInstanceId > 0) {
            sb.append(" and pi.process_instance_id in (select process_instance_id from activity_instance where activity_instance_id =");
            sb.append(activityInstanceId).append(")\n");
        }
        // status ("Active" is a pseudo-status meaning not in any terminal state)
        String status = query.getFilter("status");
        if (status != null && !status.equals("[Any]")) {
            if (status.equals(WorkStatus.STATUSNAME_ACTIVE)) {
                sb.append(" and pi.status_cd not in (")
                  .append(WorkStatus.STATUS_COMPLETED)
                  .append(",").append(WorkStatus.STATUS_FAILED)
                  .append(",").append(WorkStatus.STATUS_CANCELLED)
                  .append(",").append(WorkStatus.STATUS_PURGE)
                  .append(")\n");
            }
            else {
                sb.append(" and pi.status_cd = ").append(WorkStatuses.getCode(status)).append("\n");
            }
        }
        // startDate - MySQL needs explicit STR_TO_DATE conversion
        try {
            Date startDate = query.getDateFilter("startDate");
            if (startDate != null) {
                String start = getDateFormat().format(startDate);
                if (db.isMySQL())
                    sb.append(" and pi.start_dt >= STR_TO_DATE('").append(start).append("','%d-%M-%Y')\n");
                else
                    sb.append(" and pi.start_dt >= '").append(start).append("'\n");
            }
        }
        catch (ParseException ex) {
            throw new DataAccessException(ex.getMessage(), ex);
        }
        // template
        String template = query.getFilter("template");
        if (template != null)
            sb.append(" and template = '" + template + "'");
        // values - one EXISTS subselect per required variable name/value pair
        Map<String,String> values = query.getMapFilter("values");
        if (values != null) {
            for (String varName : values.keySet()) {
                String varValue = values.get(varName);
                sb.append("\n and exists (select vi.variable_inst_id from variable_instance vi ");
                sb.append(" where vi.process_inst_id = pi.process_instance_id and vi.variable_name = '").append(varName).append("'");
                sb.append(" and vi.variable_value = '").append(varValue).append("')");
            }
        }
        return sb.toString();
    }
private String buildOrderBy(Query query) {
StringBuilder sb = new StringBuilder();
sb.append(" order by process_instance_id");
if (query.isDescending())
sb.append(" desc");
sb.append("\n");
return sb.toString();
}
} | mdw-services/src/com/centurylink/mdw/service/data/WorkflowDataAccess.java | /*
* Copyright (C) 2017 CenturyLink, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.centurylink.mdw.service.data;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.centurylink.mdw.common.service.Query;
import com.centurylink.mdw.constant.OwnerType;
import com.centurylink.mdw.dataaccess.DataAccess;
import com.centurylink.mdw.dataaccess.DataAccessException;
import com.centurylink.mdw.dataaccess.DatabaseAccess;
import com.centurylink.mdw.dataaccess.db.CommonDataAccess;
import com.centurylink.mdw.model.workflow.ProcessInstance;
import com.centurylink.mdw.model.workflow.ProcessList;
import com.centurylink.mdw.model.workflow.WorkStatus;
import com.centurylink.mdw.model.workflow.WorkStatuses;
public class WorkflowDataAccess extends CommonDataAccess {
    /**
     * Constructs with the current/supported schema versions; no explicit
     * DatabaseAccess is supplied (null), so the superclass default is used.
     */
    public WorkflowDataAccess() {
        super(null, DataAccess.currentSchemaVersion, DataAccess.supportedSchemaVersion);
    }
    /**
     * Retrieves a page of process instances matching the query.
     * A non-null "find" term is matched as a prefix against the instance id
     * (numeric terms only) and the master request id; otherwise the WHERE
     * clause is built from the query's individual filters.
     *
     * NOTE(review): the find term is concatenated into the SQL unescaped -
     * a quote in the term produces broken SQL / injection; consider escaping.
     *
     * @param query paging/filter criteria
     * @return list of matching instances plus the total (unpaged) count
     * @throws DataAccessException on any SQL failure
     */
    public ProcessList getProcessInstances(Query query) throws DataAccessException {
        try {
            List<ProcessInstance> procInsts = new ArrayList<ProcessInstance>();
            db.openConnection();
            long count = -1;
            String where;
            if (query.getFind() != null) {
                try {
                    // numeric value means instance id or master request id
                    long findInstId = Long.parseLong(query.getFind());
                    where = "where (pi.process_instance_id like '" + findInstId
                            + "%' or pi.master_request_id like '" + query.getFind() + "%')\n";
                }
                catch (NumberFormatException ex) {
                    // otherwise master request id
                    where = "where pi.master_request_id like '" + query.getFind() + "%'\n";
                }
            }
            else {
                where = buildWhere(query);
            }
            // total (unpaged) row count for the same criteria
            String countSql = "select count(process_instance_id) from process_instance pi\n" + where;
            ResultSet rs = db.runSelect(countSql);
            if (rs.next())
                count = rs.getLong(1);
            String orderBy = buildOrderBy(query);
            StringBuilder sql = new StringBuilder();
            // wrap with paging syntax only when a page size was requested
            if (query.getMax() != Query.MAX_ALL)
                sql.append(db.pagingQueryPrefix());
            sql.append("select ").append(PROC_INST_COLS).append(" from process_instance pi\n").append(where).append(orderBy);
            if (query.getMax() != Query.MAX_ALL)
                sql.append(db.pagingQuerySuffix(query.getStart(), query.getMax()));
            rs = db.runSelect(sql.toString());
            while (rs.next())
                procInsts.add(buildProcessInstance(rs));
            ProcessList list = new ProcessList(ProcessList.PROCESS_INSTANCES, procInsts);
            list.setTotal(count);
            list.setRetrieveDate(DatabaseAccess.getDbDate());
            return list;
        }
        catch (SQLException ex) {
            throw new DataAccessException("Failed to retrieve Processes", ex);
        }
        finally {
            db.closeConnection();
        }
    }
    /**
     * Builds the SQL WHERE clause from the query's filters. An "instanceId"
     * filter short-circuits all other criteria.
     *
     * NOTE(review): filter values are concatenated directly into the SQL
     * text - a value containing a quote yields broken SQL / injection;
     * consider escaping or bind parameters. TODO confirm caller trust.
     *
     * @param query filter container
     * @return clause starting with "where "
     * @throws DataAccessException if the startDate filter cannot be parsed
     */
    private String buildWhere(Query query) throws DataAccessException {
        long instanceId = query.getLongFilter("instanceId");
        if (instanceId > 0)
            return "where pi.process_instance_id = " + instanceId + "\n"; // ignore other criteria
        StringBuilder sb = new StringBuilder();
        // "1 = 1" lets every later condition be appended uniformly with " and"
        sb.append("where 1 = 1 ");
        // masterRequestId
        String masterRequestId = query.getFilter("masterRequestId");
        if (masterRequestId != null)
            sb.append(" and pi.master_request_id = '" + masterRequestId + "'\n");
        String owner = query.getFilter("owner");
        if (owner == null) {
            // default excludes embedded subprocs - unless searching for activityInstanceId
            if (!(query.getLongFilter("activityInstanceId") > 0L))
                sb.append(" and pi.owner != '").append(OwnerType.MAIN_PROCESS_INSTANCE).append("'\n");
            // "master" filter restricts to top-level (non-child, non-error) instances
            if ("true".equals(query.getFilter("master")))
                sb.append(" and pi.owner NOT IN ( '").append(OwnerType.PROCESS_INSTANCE).append("' , '").append(OwnerType.ERROR).append("' )\n");
        }
        else {
            String ownerId = query.getFilter("ownerId");
            sb.append(" and pi.owner = '").append(owner).append("' and pi.owner_id = ").append(ownerId).append("\n");
        }
        // processId (single) takes precedence over processIds (list)
        String processId = query.getFilter("processId");
        if (processId != null) {
            sb.append(" and pi.process_id = ").append(processId).append("\n");
        }
        else {
            // processIds
            String[] processIds = query.getArrayFilter("processIds");
            if (processIds != null && processIds.length > 0) {
                sb.append(" and pi.process_id in (");
                for (int i = 0; i < processIds.length; i++) {
                    sb.append(processIds[i]);
                    if (i < processIds.length - 1)
                        sb.append(",");
                }
                sb.append(")\n");
            }
        }
        // secondaryOwnerId - exact match only (no null-secondary-owner rows;
        // NOTE(review): the newer file version relaxes this to also match null)
        long secondaryOwnerId = query.getLongFilter("secondaryOwnerId");
        if (secondaryOwnerId > 0) {
            sb.append(" and pi.secondary_owner_id =");
            sb.append(secondaryOwnerId).append("\n");
        }
        // activityInstanceId - subselect maps activity instance to its process instance
        long activityInstanceId = query.getLongFilter("activityInstanceId");
        if (activityInstanceId > 0) {
            sb.append(" and pi.process_instance_id in (select process_instance_id from activity_instance where activity_instance_id =");
            sb.append(activityInstanceId).append(")\n");
        }
        // status ("Active" is a pseudo-status meaning not in any terminal state)
        String status = query.getFilter("status");
        if (status != null && !status.equals("[Any]")) {
            if (status.equals(WorkStatus.STATUSNAME_ACTIVE)) {
                sb.append(" and pi.status_cd not in (")
                  .append(WorkStatus.STATUS_COMPLETED)
                  .append(",").append(WorkStatus.STATUS_FAILED)
                  .append(",").append(WorkStatus.STATUS_CANCELLED)
                  .append(",").append(WorkStatus.STATUS_PURGE)
                  .append(")\n");
            }
            else {
                sb.append(" and pi.status_cd = ").append(WorkStatuses.getCode(status)).append("\n");
            }
        }
        // startDate - MySQL needs explicit STR_TO_DATE conversion
        try {
            Date startDate = query.getDateFilter("startDate");
            if (startDate != null) {
                String start = getDateFormat().format(startDate);
                if (db.isMySQL())
                    sb.append(" and pi.start_dt >= STR_TO_DATE('").append(start).append("','%d-%M-%Y')\n");
                else
                    sb.append(" and pi.start_dt >= '").append(start).append("'\n");
            }
        }
        catch (ParseException ex) {
            throw new DataAccessException(ex.getMessage(), ex);
        }
        // template
        String template = query.getFilter("template");
        if (template != null)
            sb.append(" and template = '" + template + "'");
        // values - one EXISTS subselect per required variable name/value pair
        Map<String,String> values = query.getMapFilter("values");
        if (values != null) {
            for (String varName : values.keySet()) {
                String varValue = values.get(varName);
                sb.append("\n and exists (select vi.variable_inst_id from variable_instance vi ");
                sb.append(" where vi.process_inst_id = pi.process_instance_id and vi.variable_name = '").append(varName).append("'");
                sb.append(" and vi.variable_value = '").append(varValue).append("')");
            }
        }
        return sb.toString();
    }
    /**
     * Builds the ORDER BY clause: always keyed on process_instance_id,
     * descending when the query requests it.
     */
    private String buildOrderBy(Query query) {
        StringBuilder sb = new StringBuilder();
        sb.append(" order by process_instance_id");
        if (query.isDescending())
            sb.append(" desc");
        sb.append("\n");
        return sb.toString();
    }
} | Issue #576
| mdw-services/src/com/centurylink/mdw/service/data/WorkflowDataAccess.java | Issue #576 |
|
Java | mit | 6a082d6eddb13e42127f465b00060d57d2eb1d11 | 0 | mzmine/mzmine3,mzmine/mzmine3 | package net.sf.mzmine.modules.rawdatamethods.rawdataimport.fileformats;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.google.common.collect.Range;
import de.unijena.bioinf.ChemistryBase.ms.SpectrumProperty;
import net.sf.mzmine.datamodel.DataPoint;
import net.sf.mzmine.datamodel.MZmineProject;
import net.sf.mzmine.datamodel.MassSpectrumType;
import net.sf.mzmine.datamodel.PolarityType;
import net.sf.mzmine.datamodel.RawDataFile;
import net.sf.mzmine.datamodel.RawDataFileWriter;
import net.sf.mzmine.datamodel.Scan;
import net.sf.mzmine.datamodel.impl.SimpleDataPoint;
import net.sf.mzmine.datamodel.impl.SimpleScan;
import net.sf.mzmine.project.impl.RawDataFileImpl;
import net.sf.mzmine.taskcontrol.AbstractTask;
import net.sf.mzmine.taskcontrol.TaskStatus;
/**
 * Task that imports an ICP-MS CSV export as an MZmine raw data file.
 * Expects a header section containing the original file path and an
 * "Acquired : ..." date line, followed by a "Time [Sec],&lt;ion&gt;,..." axis
 * row and one intensity row per scan.
 */
public class CsvReadTask extends AbstractTask {

  private Logger logger = Logger.getLogger(CsvReadTask.class.getName());

  protected String dataSource;
  private File file;
  private MZmineProject project;
  private RawDataFileImpl newMZmineFile;
  private RawDataFile finalRawDataFile;

  private int totalScans, parsedScans;

  public CsvReadTask(MZmineProject project, File fileToOpen, RawDataFileWriter newMZmineFile) {
    this.project = project;
    this.file = fileToOpen;
    this.newMZmineFile = (RawDataFileImpl) newMZmineFile;
  }

  @Override
  public String getTaskDescription() {
    // previously a TODO stub returning null
    return "Reading CSV file " + (file != null ? file.getName() : "");
  }

  @Override
  public double getFinishedPercentage() {
    // The total number of scans is unknown until the whole file has been
    // read, so no meaningful progress can be reported.
    return 0;
  }

  @Override
  public void run() {
    setStatus(TaskStatus.PROCESSING);
    logger.setLevel(Level.ALL);

    // try-with-resources ensures the Scanner (and its file handle) is
    // closed on every exit path; the previous code never closed it.
    try (Scanner scanner = new Scanner(file)) {
      dataSource = getFileName(scanner);
      if (dataSource == null) {
        setErrorMessage("Could not open data file " + file.getAbsolutePath());
        setStatus(TaskStatus.ERROR);
        return;
      }
      logger.info("opening raw file " + dataSource);

      String acquisitionDate = getAcqusitionDate(scanner);
      if (acquisitionDate == null) {
        setErrorMessage("Could not find acquisition date in file " + file.getAbsolutePath());
        setStatus(TaskStatus.ERROR);
        return;
      }
      logger.info("Date of acquisition " + acquisitionDate);

      // Header row "Time [Sec],<a> -> <mz>,..." names one scanned m/z per column.
      List<String> mzsList = new ArrayList<String>();
      String mstype = "";
      String ions = "";
      while (scanner.hasNextLine()) {
        String line = scanner.nextLine();
        if (line.startsWith("Time [Sec]")) {
          String[] axes = line.split(",");
          logger.fine("Found axes" + Arrays.toString(axes));
          for (int i = 1; i < axes.length; i++) {
            String axis = axes[i];
            ions += axis + ", ";
            if (axis.contains("->")) {
              mstype = "MS/MS";
              // the part after "-> " is the scanned m/z; trim() result must be
              // kept (Strings are immutable - the old code discarded it)
              String mz = axis.substring(axis.indexOf("-> ") + 3).trim();
              logger.fine("Axis " + axis + " was scanned at m/z = '" + mz + "'");
              mzsList.add(mz);
            } else {
              logger.severe("Invalid axis labelling, please contact the developers.");
            }
          }
          break;
        }
      }

      if (mzsList.isEmpty()) {
        setErrorMessage("No m/z axes found in file " + file.getAbsolutePath());
        setStatus(TaskStatus.ERROR);
        return;
      }

      int[] mzs = new int[mzsList.size()];
      for (int i = 0; i < mzsList.size(); i++)
        mzs[i] = Integer.valueOf(mzsList.get(i));

      // m/z range padded by 10 around the smallest/largest scanned mass.
      // (The old code used mzs[1] as the upper bound, which was wrong for
      // anything but exactly two columns and crashed with one column.)
      int minMz = mzs[0], maxMz = mzs[0];
      for (int mz : mzs) {
        if (mz < minMz)
          minMz = mz;
        if (mz > maxMz)
          maxMz = mz;
      }
      Range<Double> mzRange = Range.closed((double) minMz - 10, (double) maxMz + 10);

      int scanNumber = 1;
      while (scanner.hasNextLine()) {
        String line = scanner.nextLine();
        if (line == null || line.trim().equals(""))
          continue;
        String[] columns = line.split(",");
        // each data row must hold a retention time plus one intensity per m/z
        if (columns == null || columns.length != mzs.length + 1)
          continue;

        double rt = Double.valueOf(columns[0]);
        DataPoint dataPoints[] = new SimpleDataPoint[mzs.length];
        for (int i = 0; i < dataPoints.length; i++) {
          String intensity = columns[i + 1];
          dataPoints[i] = new SimpleDataPoint(mzs[i], Double.valueOf(intensity));
        }
        Scan scan = new SimpleScan(null, scanNumber, 1, rt, 0.0, 1, null, dataPoints,
            MassSpectrumType.CENTROIDED, PolarityType.POSITIVE,
            "ICP-" + mstype + " " + ions.substring(0, ions.length() - 2), mzRange);
        newMZmineFile.addScan(scan);
        scanNumber++;
        parsedScans++;
      }

      finalRawDataFile = newMZmineFile.finishWriting();
      project.addFile(finalRawDataFile);
    } catch (Exception e) {
      setErrorMessage(e.getMessage());
      setStatus(TaskStatus.ERROR);
      return;
    }

    this.setStatus(TaskStatus.FINISHED);
  }

  /**
   * Scans forward to the first line that looks like a Windows path
   * (contains both ':' and '\'), which is taken as the original data
   * file name from the CSV header.
   */
  private @Nullable String getFileName(@Nonnull Scanner scanner) {
    while (scanner.hasNextLine()) {
      String line = scanner.nextLine();
      if (line.contains(":") && line.contains("\\")) {
        return line;
      }
    }
    return null;
  }

  /**
   * Scans forward to the "Acquired ..." header line and returns the date
   * portion after the colon (fixed-width "00/00/0000 00:00:00" slice).
   * The old code computed this substring but discarded it and returned
   * the whole line.
   */
  private @Nullable String getAcqusitionDate(@Nonnull Scanner scanner) {
    while (scanner.hasNextLine()) {
      String line = scanner.nextLine();
      if (line.startsWith("Acquired")) {
        int begin = line.indexOf(":") + 2;
        int end = begin + "00/00/0000 00:00:00".length();
        // guard against malformed/short header lines
        if (line.indexOf(":") >= 0 && end <= line.length())
          return line.substring(begin, end);
        return line;
      }
    }
    return null;
  }
}
| src/main/java/net/sf/mzmine/modules/rawdatamethods/rawdataimport/fileformats/CsvReadTask.java | package net.sf.mzmine.modules.rawdatamethods.rawdataimport.fileformats;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.google.common.collect.Range;
import de.unijena.bioinf.ChemistryBase.ms.SpectrumProperty;
import net.sf.mzmine.datamodel.DataPoint;
import net.sf.mzmine.datamodel.MZmineProject;
import net.sf.mzmine.datamodel.MassSpectrumType;
import net.sf.mzmine.datamodel.PolarityType;
import net.sf.mzmine.datamodel.RawDataFile;
import net.sf.mzmine.datamodel.RawDataFileWriter;
import net.sf.mzmine.datamodel.Scan;
import net.sf.mzmine.datamodel.impl.SimpleDataPoint;
import net.sf.mzmine.datamodel.impl.SimpleScan;
import net.sf.mzmine.project.impl.RawDataFileImpl;
import net.sf.mzmine.taskcontrol.AbstractTask;
import net.sf.mzmine.taskcontrol.TaskStatus;
public class CsvReadTask extends AbstractTask {
private Logger logger = Logger.getLogger(CsvReadTask.class.getName());
protected String dataSource;
private File file;
private MZmineProject project;
private RawDataFileImpl newMZmineFile;
private RawDataFile finalRawDataFile;
private int totalScans, parsedScans;
  /**
   * @param project project the imported raw data file is added to
   * @param fileToOpen the CSV file to read
   * @param newMZmineFile writer for the resulting raw data file
   *        (cast to RawDataFileImpl - assumes that concrete type; TODO confirm)
   */
  public CsvReadTask(MZmineProject project, File fileToOpen, RawDataFileWriter newMZmineFile) {
    this.project = project;
    this.file = fileToOpen;
    this.newMZmineFile = (RawDataFileImpl) newMZmineFile;
  }
  /**
   * Auto-generated stub: no description is provided yet, so callers
   * receive null.
   */
  @Override
  public String getTaskDescription() {
    // TODO Auto-generated method stub
    return null;
  }
  /**
   * Auto-generated stub: progress is not tracked, so this always
   * reports 0.
   */
  @Override
  public double getFinishedPercentage() {
    // TODO Auto-generated method stub
    return 0;
  }
@Override
public void run() {
setStatus(TaskStatus.PROCESSING);
Scanner scanner;
logger.setLevel(Level.ALL);
try {
scanner = new Scanner(file);
dataSource = getFileName(scanner);
if (dataSource == null) {
setErrorMessage("Could not open data file " + file.getAbsolutePath());
setStatus(TaskStatus.ERROR);
return;
}
logger.info("opening raw file " + dataSource);
String acquisitionDate = getAcqusitionDate(scanner);
if (acquisitionDate == null) {
setErrorMessage("Could not find acquisition date in file " + file.getAbsolutePath());
setStatus(TaskStatus.ERROR);
return;
}
logger.info("Date of acquisition " + acquisitionDate);
// scanner.useDelimiter(",");
List<String> mzsList = new ArrayList<String>();
String mstype = "";
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
logger.fine("checking line: " + line + " for 'Time [sec]'...");
if (line.startsWith("Time [Sec]")) {
String[] axes = line.split(",");
logger.fine("Found axes" + Arrays.toString(axes));
for (int i = 1; i < axes.length; i++) {
String axis = axes[i];
if (axis.contains("->")) {
mstype = "MS/MS";
logger.fine("axis " + axis + " is an ms^2 scan");
String mz = axis.substring(axis.indexOf("-> ") + 3);
mz.trim();
logger.fine("Axis " + axis + " was scanned at m/z = '" + mz + "'");
mzsList.add(mz);
} else {
logger.severe("Invalid axis labelling, please contact the developers.");
}
}
break;
}
}
int[] mzs = new int[mzsList.size()];
for (int i = 0; i < mzsList.size(); i++)
mzs[i] = Integer.valueOf(mzsList.get(i));
Range<Double> mzRange = Range.closed((double)mzs[0]-10, (double)mzs[1]+10);
int scanNumber = 1;
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
if(line == null || line.trim().equals(""))
continue;
String[] columns = line.split(",");
if(columns == null || columns.length != mzs.length + 1)
continue;
// if(columns.length != mzs.length) {
// logger.info("ended with " + scanNumber + " scans.");
// continue;
// }
double rt = Double.valueOf(columns[0]);
DataPoint dataPoints[] = new SimpleDataPoint[mzs.length];
for (int i = 0; i < dataPoints.length; i++) {
String intensity = columns[i+1];
dataPoints[i] = new SimpleDataPoint(mzs[i], Double.valueOf(intensity));
// logger.info("added data point dp " + dataPoints[i]);
}
Scan scan = new SimpleScan(null, scanNumber, 1, rt, 0.0, 1, null, dataPoints,
MassSpectrumType.CENTROIDED, PolarityType.POSITIVE, "ICP-" + mstype, mzRange);
newMZmineFile.addScan(scan);
scanNumber++;
logger.info("scan #" + scanNumber + " with " + scan.getDataPoints().length);
}
finalRawDataFile = newMZmineFile.finishWriting();
project.addFile(finalRawDataFile);
} catch (Exception e) {
setErrorMessage(e.getMessage());
setStatus(TaskStatus.ERROR);
return;
}
this.setStatus(TaskStatus.FINISHED);
}
private @Nullable String getFileName(@Nonnull Scanner scanner) {
String path = null;
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
if (line.contains(":") && line.contains("\\")) {
path = line;
return path;
}
}
return path;
}
private @Nullable String getAcqusitionDate(@Nonnull Scanner scanner) {
String acquisitionDate = null;
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
if (line.startsWith("Acquired")) {
int begin = line.indexOf(":") + 2;
line.subSequence(begin, begin + (new String("00/00/0000 00:00:00")).length());
return line;
}
}
return acquisitionDate;
}
}
| added column description to scan definition | src/main/java/net/sf/mzmine/modules/rawdatamethods/rawdataimport/fileformats/CsvReadTask.java | added column description to scan definition |
|
Java | mit | 45a1b34c1d1f2c61242fe9efa894d384e1ec88f1 | 0 | cacheflowe/haxademic,cacheflowe/haxademic,cacheflowe/haxademic,cacheflowe/haxademic,cacheflowe/haxademic,cacheflowe/haxademic | package com.haxademic.app.haxmapper.mappers;
import java.util.ArrayList;
import oscP5.OscMessage;
import processing.core.PApplet;
import com.haxademic.app.haxmapper.HaxMapper;
import com.haxademic.app.haxmapper.distribution.AudioPixelInterface;
import com.haxademic.app.haxmapper.overlays.MeshLines.MODE;
import com.haxademic.app.haxmapper.polygons.IMappedPolygon;
import com.haxademic.app.haxmapper.textures.TextureColorAudioSlide;
import com.haxademic.app.haxmapper.textures.TextureEQColumns;
import com.haxademic.app.haxmapper.textures.TextureEQConcentricCircles;
import com.haxademic.app.haxmapper.textures.TextureEQGrid;
import com.haxademic.app.haxmapper.textures.TextureImageTimeStepper;
import com.haxademic.app.haxmapper.textures.TextureScrollingColumns;
import com.haxademic.app.haxmapper.textures.TextureShaderTimeStepper;
import com.haxademic.app.haxmapper.textures.TextureSphereRotate;
import com.haxademic.app.haxmapper.textures.TextureTwistingSquares;
import com.haxademic.app.haxmapper.textures.TextureVideoPlayer;
import com.haxademic.app.haxmapper.textures.TextureWaveformSimple;
import com.haxademic.core.math.MathUtil;
import com.haxademic.core.system.FileUtil;
/**
 * Beat-reactive projection-mapping app built on HaxMapper. Rotates a pool of
 * shader/video/audio-reactive textures across mapping groups, triggering visual
 * changes (color, rotation, traversal, texture swaps, line modes) at fixed
 * beat intervals. State such as _mappingGroups, _texturePool, _curTexturePool,
 * _activeTextures, numBeatsDetected and the MAX_ACTIVE_* limits is inherited
 * from HaxMapper (declared outside this file).
 */
@SuppressWarnings("serial")
public class CMKY2014Mapper
extends HaxMapper{

	// Bridge to the AudioPixel LED hardware plus the per-group eased colors sent to it.
	protected AudioPixelInterface _audioPixel;
	protected int[] _audioPixelColors;

	// Beat counts between the different visual triggers below; raising
	// BEAT_DIVISOR shortens every interval proportionally for faster testing.
	protected float BEAT_DIVISOR = 1; // 10 to test
	protected int BEAT_INTERVAL_COLOR = (int) Math.ceil(6f / BEAT_DIVISOR);
	protected int BEAT_INTERVAL_ROTATION = (int) Math.ceil(8f / BEAT_DIVISOR);
	protected int BEAT_INTERVAL_TRAVERSE = (int) Math.ceil(20f / BEAT_DIVISOR);
	protected int BEAT_INTERVAL_ALL_SAME = (int) Math.ceil(150f / BEAT_DIVISOR);
	protected int BEAT_INTERVAL_NEW_TIMING = (int) Math.ceil(40f / BEAT_DIVISOR);
	protected int BEAT_INTERVAL_BIG_CHANGE = (int) Math.ceil(400f / BEAT_DIVISOR);

	// Standard Processing entry point: launches the sketch windowed.
	public static void main(String args[]) {
		_isFullScreen = false;
		PApplet.main(new String[] { "--hide-stop", "--bgcolor=000000", CMKY2014Mapper.class.getName() });
	}

	// Overrides app config in code instead of the external properties file:
	// mapping coordinates file, window size/fullscreen flags, OSC and audio debug.
	protected void overridePropsFile() {
		super.overridePropsFile();
		_appConfig.setProperty( "mapping_file", FileUtil.getHaxademicDataPath() + "text/mapping/mapping-2015-04-18-21-34-34.txt" );
		_appConfig.setProperty( "rendering", "false" );
		_appConfig.setProperty( "fullscreen", "false" );
		_appConfig.setProperty( "fills_screen", "false" );
		_appConfig.setProperty( "osc_active", "true" );
		_appConfig.setProperty( "audio_debug", "true" );
		_appConfig.setProperty( "width", "1200" );
		_appConfig.setProperty( "height", "1000" );
	}

	// OSC messages are handled entirely by the superclass; kept as an explicit hook.
	public void oscEvent(OscMessage theOscMessage) {
		super.oscEvent(theOscMessage);
	}

	protected void buildMappingGroups() {
		// give each group a texture to start with
		for( int i=0; i < _mappingGroups.size(); i++ ) {
			_mappingGroups.get(i).pushTexture( _texturePool.get(0) );
			_mappingGroups.get(i).pushTexture( _texturePool.get(1) );
		}
		// set initial mapping properties - make all fully contain their textures
		for(int i=0; i < _mappingGroups.size(); i++ ) {
			ArrayList<IMappedPolygon> polygons = _mappingGroups.get(i).polygons();
			for(int j=0; j < polygons.size(); j++ ) {
				IMappedPolygon polygon = polygons.get(j);
				polygon.setTextureStyle( IMappedPolygon.MAP_STYLE_MASK );
			}
		}
	}

	// Builds the full pool of available textures. Video textures are commented
	// out for this configuration; the active pool is shaders + audio-reactive
	// generators. Also seeds _curTexturePool with one random texture.
	protected void addTexturesToPool() {
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/smoke-loop.mov" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/tree-loop.mp4" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/ink-in-water.mp4" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/ink-grow-shrink.mp4" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/fire.mp4" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/bubbles.mp4" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/clouds-timelapse.mov" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/water.mp4" ));
//
//
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-00-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-01-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-02-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-03-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-04-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-08-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-09-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-10-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-11-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-12-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-13-desktop.m4v" ));
//
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/crystal-growth-2.mp4" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/crystal-growth-3-desktop.m4v" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/crystal-growth-4.mp4" ));
//		_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/crystal-growth-desktop.m4v" ));

		// shared render size for the shader-driven textures
		int shaderW = 300;
		int shaderH = 300;

		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "basic-checker.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "basic-diagonal-stripes.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "bw-eye-jacker-01.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "bw-eye-jacker-02.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "bw-clouds.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "bw-expand-loop.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "bw-kaleido.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "bw-motion-illusion.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "bw-simple-sin.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "circle-parts-rotate.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "cog-tunnel.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "dots-orbit.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "flexi-spiral.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "glowwave.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "gradient-line.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "hex-alphanumerics.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "iq-iterations-shiny.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "light-leak.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "lines-scroll-diag.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "matrix-rain.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "morphing-bokeh-shape.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "sin-grey.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "sin-waves.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "space-swirl.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "square-fade.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "square-twist.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "star-field.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "stars-screensaver.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "stars-scroll.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "stars-fractal-field.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "supershape-2d.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "swirl.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "warped-tunnel.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "water-smoke.glsl" ));
		_texturePool.add( new TextureShaderTimeStepper( shaderW, shaderH, "wavy-checker-planes.glsl" ));

		_texturePool.add( new TextureScrollingColumns( 100, 100 ));
		_texturePool.add( new TextureTwistingSquares( shaderW, shaderH ));
		_texturePool.add( new TextureImageTimeStepper( 600, 600 ));

		// audio-reactive textures; some added twice to weight random selection toward them
		_texturePool.add( new TextureEQColumns( shaderW, shaderH ));
		_texturePool.add( new TextureEQColumns( shaderW, shaderH ));
		_texturePool.add( new TextureEQGrid( shaderW, shaderH ));
		_texturePool.add( new TextureEQGrid( shaderW, shaderH ));
		_texturePool.add( new TextureWaveformSimple( shaderW, shaderH ));
		_texturePool.add( new TextureWaveformSimple( shaderW, shaderH ));
//		_texturePool.add( new TextureColorAudioFade( 200, 200 ));
//		_texturePool.add( new TextureColorAudioFade( 200, 200 ));
		_texturePool.add( new TextureColorAudioSlide( 200, 200 ));
//		_texturePool.add( new TextureColorAudioSlide( 200, 200 ));
		_texturePool.add( new TextureSphereRotate( shaderW, shaderH ));
		_texturePool.add( new TextureEQConcentricCircles( shaderW, shaderH ) );

//		_texturePool.add( new TextureWebCam() );

		// store just movies to restrain the number of concurrent movies
		for( int i=0; i < _texturePool.size(); i++ ) {
			if( _texturePool.get(i) instanceof TextureVideoPlayer ) {
				_movieTexturePool.add( _texturePool.get(i) );
			}
		}

		// add 1 inital texture to current array
		_curTexturePool.add( _texturePool.get( MathUtil.randRange(0, _texturePool.size()-1 ) ) );
	}

	// Counts how many of the currently-active textures are video players.
	protected int numMovieTextures() {
		int numMovieTextures = 0;
		for( int i=0; i < _curTexturePool.size(); i++ ) {
			if( _curTexturePool.get(i) instanceof TextureVideoPlayer ) numMovieTextures++;
		}
		return numMovieTextures;
	}

	// Removes the first (oldest) video texture from the active pool, if any.
	protected void removeOldestMovieTexture() {
		for( int i=0; i < _curTexturePool.size(); i++ ) {
			if( _curTexturePool.get(i) instanceof TextureVideoPlayer ) {
				_curTexturePool.remove(i);
				return;
			}
		}
	}

	// Creates the AudioPixel bridge and allocates one output color slot per mapping group.
	public void setup() {
		super.setup();
		_audioPixel = new AudioPixelInterface();
		_audioPixelColors = new int[ _mappingGroups.size() ];
	}

	// Per-frame: refreshes each group's eased AudioPixel color into _audioPixelColors.
	public void drawApp() {
		super.drawApp();
		for(int i=0; i < _mappingGroups.size(); i++ ) {
			_mappingGroups.get(i).getAudioPixelColor();
			_audioPixelColors[i] = _mappingGroups.get(i).colorEaseInt();
		}
	}

	// Drives updateTiming() from the audio beat detector, but only while in
	// auto (beat-detect) mode — see isBeatDetectMode().
	protected void checkBeat() {
		if( audioIn.isBeat() == true && isBeatDetectMode() == true ) {
			updateTiming();
		}
	}

	// Auto mode is active once 10 seconds have passed with no manual input.
	protected boolean isBeatDetectMode() {
		return ( p.millis() - 10000 > _lastInputMillis );
	}

	protected void updateColor() {
		// sometimes do all groups, but mostly pick a random one to change
		if( MathUtil.randRange(0, 100) > 80 ) {
			super.updateColor();
		} else {
			int randGroup = MathUtil.randRange( 0, _mappingGroups.size() - 1 );
			_mappingGroups.get(randGroup).newColor();
			_mappingGroups.get(randGroup).pulseColor();
		}
	}

	protected void updateLineMode() {
		// sometimes do all groups, but mostly pick a random one to change
		if( MathUtil.randRange(0, 100) > 80 ) {
			super.updateLineMode();
		} else {
			int randGroup = MathUtil.randRange( 0, _mappingGroups.size() - 1 );
			_mappingGroups.get(randGroup).newLineMode();
		}
	}

	// Central per-beat dispatcher: counts beats and fires the different visual
	// triggers when the beat count hits each BEAT_INTERVAL_* multiple.
	protected void updateTiming() {
		super.updateTiming();
		if( isBeatDetectMode() == true ) numBeatsDetected++;
		changeGroupsRandomPolygonMapStyle();
		// make sure textures are timed to the beat
		for( int i=0; i < _activeTextures.size(); i++ ) {
			_activeTextures.get(i).updateTiming();
		}
		if( numBeatsDetected % BEAT_INTERVAL_COLOR == 0 ) {
//			P.println("BEAT_INTERVAL_COLOR");
			updateColor();
		}
		if( numBeatsDetected % BEAT_INTERVAL_ROTATION == 0 ) {
//			P.println("BEAT_INTERVAL_ROTATION");
			updateRotation();
		}
		if( numBeatsDetected % BEAT_INTERVAL_TRAVERSE == 0 ) {
//			P.println("BEAT_INTERVAL_TRAVERSE");
			traverseTrigger();
		}
//		updateColor();
		for(int i=0; i < _mappingGroups.size(); i++ ) {
			_mappingGroups.get(i).newAudioPixelColor();
		}
		if( numBeatsDetected % BEAT_INTERVAL_ALL_SAME == 0 ) {
//			P.println("BEAT_INTERVAL_ALL_SAME");
			setGroupsMappingStylesToTheSame();
			setGroupsTextureToTheSameMaybe();
			updateLineMode();
		}
		if( numBeatsDetected % BEAT_INTERVAL_NEW_TIMING == 0 ) {
//			P.println("BEAT_INTERVAL_NEW_TIMING");
			updateTimingSection();
		}
		// every 40 beats, do something bigger
		if( numBeatsDetected % BEAT_INTERVAL_BIG_CHANGE == 0 ) {
//			P.println("BEAT_INTERVAL_BIG_CHANGE");
			bigChangeTrigger();
		}
	}

	// Section-level change: new line mode on one random group.
	protected void updateTimingSection() {
		super.updateTimingSection();
		newLineModeForRandomGroup();
		// cycleANewTexture();
	}

	// Biggest change: cycle the texture pool, re-texture and re-style every
	// group, refresh colors and reset all rotations.
	protected void bigChangeTrigger() {
		cycleANewTexture();
		newTexturesForAllGroups();
		newLineModesForAllGroups();
		// set longer timing updates
		updateTimingSection();
		updateColor();
		// reset rotations
		for(int i=0; i < _mappingGroups.size(); i++ ) {
			_mappingGroups.get(i).resetRotation();
		}
	}

	// cool rules =========================================================

	protected void setGroupsTextureToTheSameMaybe() {
		// maybe also set a group to all to be the same texture
		for(int i=0; i < _mappingGroups.size(); i++ ) {
			if( MathUtil.randRange(0, 100) < 25 ) {
				_mappingGroups.get(i).setAllPolygonsToSameRandomTexture();
			}
		}
	}

	protected void setGroupsMappingStylesToTheSame() {
		// every once in a while, set all polygons' styles to be the same per group
		for(int i=0; i < _mappingGroups.size(); i++ ) {
			if( MathUtil.randRange(0, 100) < 90 ) {
				_mappingGroups.get(i).setAllPolygonsTextureStyle( MathUtil.randRange(0, 2) );
			} else {
				_mappingGroups.get(i).setAllPolygonsTextureStyle( IMappedPolygon.MAP_STYLE_EQ );	// less likely to go to EQ fill
			}
			_mappingGroups.get(i).newColor();
		}
	}

	protected void changeGroupsRandomPolygonMapStyle() {
		// every beat, change a polygon mapping style or texture
		for(int i=0; i < _mappingGroups.size(); i++ ) {
			if( MathUtil.randBoolean(p) == true ) {
				_mappingGroups.get(i).randomTextureToRandomPolygon();
			} else {
				_mappingGroups.get(i).randomPolygonRandomMappingStyle();
			}
		}
	}

	// Picks one random group and gives it a new mesh-line overlay mode.
	protected void newLineModeForRandomGroup() {
		int randGroup = MathUtil.randRange( 0, _mappingGroups.size() - 1 );
		_mappingGroups.get(randGroup).newLineMode();
	}

	protected void newLineModesForAllGroups() {
		// set new line mode
		for(int i=0; i < _mappingGroups.size(); i++ ) {
			_mappingGroups.get(i).newLineMode();
		}
		// once in a while, reset all mesh lines to the same random mode
		if( MathUtil.randRange(0, 100) < 10 ) {
			int newLineMode = MathUtil.randRange(0, MODE.values().length - 1);
			for(int i=0; i < _mappingGroups.size(); i++ ) {
				_mappingGroups.get(i).resetLineModeToIndex( newLineMode );
			}
		}
	}

	protected void newTexturesForAllGroups() {
		// give each group a new texture
		for( int i=0; i < _mappingGroups.size(); i++ ) {
			_mappingGroups.get(i).shiftTexture();
			_mappingGroups.get(i).pushTexture( _curTexturePool.get( MathUtil.randRange(0, _curTexturePool.size()-1 )) );
			_mappingGroups.get(i).reloadTextureAtIndex();
		}
	}

	// Rotates a new random texture into the active pool while enforcing the
	// MAX_ACTIVE_MOVIE_TEXTURES and MAX_ACTIVE_TEXTURES limits, then makes
	// every group refresh its texture assignment.
	protected void cycleANewTexture() {
		// rebuild the array of currently-available textures
		// check number of movie textures, and make sure we never have more than 2
		_curTexturePool.add( _texturePool.get( MathUtil.randRange(0, _texturePool.size()-1 ) ) );
		while( numMovieTextures() > MAX_ACTIVE_MOVIE_TEXTURES ) {
			removeOldestMovieTexture();
			_curTexturePool.add( _texturePool.get( MathUtil.randRange(0, _texturePool.size()-1 ) ) );
		}
		// remove oldest texture if more than max
		if( _curTexturePool.size() >= MAX_ACTIVE_TEXTURES ) {
//			P.println(_curTexturePool.size());
			_curTexturePool.remove(0);
		}
		// make sure polygons update their textures
		for( int i=0; i < _mappingGroups.size(); i++ ) {
			_mappingGroups.get(i).shiftTexture();
			_mappingGroups.get(i).pushTexture( _curTexturePool.get( MathUtil.randRange(0, _curTexturePool.size()-1 )) );
			_mappingGroups.get(i).reloadTextureAtIndex();
		}
	}

}
| src/com/haxademic/app/haxmapper/mappers/CMKY2014Mapper.java | package com.haxademic.app.haxmapper.mappers;
import java.util.ArrayList;
import oscP5.OscMessage;
import processing.core.PApplet;
import com.haxademic.app.haxmapper.HaxMapper;
import com.haxademic.app.haxmapper.distribution.AudioPixelInterface;
import com.haxademic.app.haxmapper.overlays.MeshLines.MODE;
import com.haxademic.app.haxmapper.polygons.IMappedPolygon;
import com.haxademic.app.haxmapper.textures.TextureColorAudioSlide;
import com.haxademic.app.haxmapper.textures.TextureEQColumns;
import com.haxademic.app.haxmapper.textures.TextureEQConcentricCircles;
import com.haxademic.app.haxmapper.textures.TextureEQGrid;
import com.haxademic.app.haxmapper.textures.TextureImageTimeStepper;
import com.haxademic.app.haxmapper.textures.TextureScrollingColumns;
import com.haxademic.app.haxmapper.textures.TextureShaderTimeStepper;
import com.haxademic.app.haxmapper.textures.TextureSphereRotate;
import com.haxademic.app.haxmapper.textures.TextureTwistingSquares;
import com.haxademic.app.haxmapper.textures.TextureVideoPlayer;
import com.haxademic.app.haxmapper.textures.TextureWaveformSimple;
import com.haxademic.core.math.MathUtil;
import com.haxademic.core.system.FileUtil;
@SuppressWarnings("serial")
public class CMKY2014Mapper
extends HaxMapper{
protected AudioPixelInterface _audioPixel;
protected int[] _audioPixelColors;
protected float BEAT_DIVISOR = 1; // 10 to test
protected int BEAT_INTERVAL_COLOR = (int) Math.ceil(6f / BEAT_DIVISOR);
protected int BEAT_INTERVAL_ROTATION = (int) Math.ceil(8f / BEAT_DIVISOR);
protected int BEAT_INTERVAL_TRAVERSE = (int) Math.ceil(20f / BEAT_DIVISOR);
protected int BEAT_INTERVAL_ALL_SAME = (int) Math.ceil(150f / BEAT_DIVISOR);
protected int BEAT_INTERVAL_NEW_TIMING = (int) Math.ceil(40f / BEAT_DIVISOR);
protected int BEAT_INTERVAL_BIG_CHANGE = (int) Math.ceil(400f / BEAT_DIVISOR);
public static void main(String args[]) {
_isFullScreen = true;
PApplet.main(new String[] { "--hide-stop", "--bgcolor=000000", CMKY2014Mapper.class.getName() });
}
protected void overridePropsFile() {
super.overridePropsFile();
_appConfig.setProperty( "mapping_file", FileUtil.getHaxademicDataPath() + "text/mapping/mapping-2014-08-09-20-40-59.txt" );
_appConfig.setProperty( "rendering", "false" );
_appConfig.setProperty( "fullscreen", "true" );
_appConfig.setProperty( "fills_screen", "true" );
_appConfig.setProperty( "osc_active", "true" );
}
public void oscEvent(OscMessage theOscMessage) {
super.oscEvent(theOscMessage);
}
protected void buildMappingGroups() {
// give each group a texture to start with
for( int i=0; i < _mappingGroups.size(); i++ ) {
_mappingGroups.get(i).pushTexture( _texturePool.get(0) );
_mappingGroups.get(i).pushTexture( _texturePool.get(1) );
}
// set initial mapping properties - make all fully contain their textures
for(int i=0; i < _mappingGroups.size(); i++ ) {
ArrayList<IMappedPolygon> polygons = _mappingGroups.get(i).polygons();
for(int j=0; j < polygons.size(); j++ ) {
IMappedPolygon polygon = polygons.get(j);
polygon.setTextureStyle( IMappedPolygon.MAP_STYLE_MASK );
}
}
}
protected void addTexturesToPool() {
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/smoke-loop.mov" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/tree-loop.mp4" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/ink-in-water.mp4" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/ink-grow-shrink.mp4" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/fire.mp4" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/bubbles.mp4" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/clouds-timelapse.mov" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/loops/water.mp4" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-00-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-01-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-02-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-03-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-04-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-08-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-09-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-10-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-11-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-12-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/LL-13-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/crystal-growth-2.mp4" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/crystal-growth-3-desktop.m4v" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/crystal-growth-4.mp4" ));
_texturePool.add( new TextureVideoPlayer( 640, 360, "video/lunar-lodge/crystal-growth-desktop.m4v" ));
_texturePool.add( new TextureScrollingColumns( 100, 100 ));
_texturePool.add( new TextureTwistingSquares( 500, 500 ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "bw-eye-jacker-01.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "bw-eye-jacker-02.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "bw-clouds.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "bw-expand-loop.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "bw-kaleido.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "bw-motion-illusion.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "bw-simple-sin.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "circle-parts-rotate.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "cog-tunnel.glsl" ));
// _texturePool.add( new TextureShaderTimeStepper( 500, 500, "dots-orbit.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "glowwave.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "gradient-line.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "hex-alphanumerics.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "iq-iterations-shiny.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "lines-scroll-diag.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "matrix-rain.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "sin-grey.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "sin-waves.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "space-swirl.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "square-fade.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "square-twist.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "star-field.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "stars-screensaver.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "stars-scroll.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "supershape-2d.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 600, 600, "swirl.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "warped-tunnel.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "water-smoke.glsl" ));
_texturePool.add( new TextureShaderTimeStepper( 500, 500, "wavy-checker-planes.glsl" ));
_texturePool.add( new TextureImageTimeStepper( 600, 600 ));
_texturePool.add( new TextureEQColumns( 200, 100 ));
_texturePool.add( new TextureEQColumns( 200, 100 ));
_texturePool.add( new TextureEQGrid( 320, 160 ));
_texturePool.add( new TextureEQGrid( 320, 160 ));
_texturePool.add( new TextureWaveformSimple( 500, 500 ));
// _texturePool.add( new TextureColorAudioFade( 200, 200 ));
// _texturePool.add( new TextureColorAudioFade( 200, 200 ));
_texturePool.add( new TextureColorAudioSlide( 200, 200 ));
// _texturePool.add( new TextureColorAudioSlide( 200, 200 ));
_texturePool.add( new TextureSphereRotate( 500, 500 ));
_texturePool.add( new TextureEQConcentricCircles( 500, 500 ) );
// _texturePool.add( new TextureWebCam() );
// store just movies to restrain the number of concurrent movies
for( int i=0; i < _texturePool.size(); i++ ) {
if( _texturePool.get(i) instanceof TextureVideoPlayer ) {
_movieTexturePool.add( _texturePool.get(i) );
}
}
// add 1 inital texture to current array
_curTexturePool.add( _texturePool.get( MathUtil.randRange(0, _texturePool.size()-1 ) ) );
}
protected int numMovieTextures() {
int numMovieTextures = 0;
for( int i=0; i < _curTexturePool.size(); i++ ) {
if( _curTexturePool.get(i) instanceof TextureVideoPlayer ) numMovieTextures++;
}
return numMovieTextures;
}
protected void removeOldestMovieTexture() {
for( int i=0; i < _curTexturePool.size(); i++ ) {
if( _curTexturePool.get(i) instanceof TextureVideoPlayer ) {
_curTexturePool.remove(i);
return;
}
}
}
public void setup() {
super.setup();
_audioPixel = new AudioPixelInterface();
_audioPixelColors = new int[ _mappingGroups.size() ];
}
public void drawApp() {
super.drawApp();
for(int i=0; i < _mappingGroups.size(); i++ ) {
_mappingGroups.get(i).getAudioPixelColor();
_audioPixelColors[i] = _mappingGroups.get(i).colorEaseInt();
}
}
protected void checkBeat() {
if( audioIn.isBeat() == true && isBeatDetectMode() == true ) {
updateTiming();
}
}
protected boolean isBeatDetectMode() {
return ( p.millis() - 10000 > _lastInputMillis );
}
protected void updateColor() {
// sometimes do all groups, but mostly pick a random one to change
if( MathUtil.randRange(0, 100) > 80 ) {
super.updateColor();
} else {
int randGroup = MathUtil.randRange( 0, _mappingGroups.size() - 1 );
_mappingGroups.get(randGroup).newColor();
_mappingGroups.get(randGroup).pulseColor();
}
}
protected void updateLineMode() {
// sometimes do all groups, but mostly pick a random one to change
if( MathUtil.randRange(0, 100) > 80 ) {
super.updateLineMode();
} else {
int randGroup = MathUtil.randRange( 0, _mappingGroups.size() - 1 );
_mappingGroups.get(randGroup).newLineMode();
}
}
protected void updateTiming() {
super.updateTiming();
if( isBeatDetectMode() == true ) numBeatsDetected++;
changeGroupsRandomPolygonMapStyle();
// make sure textures are timed to the beat
for( int i=0; i < _activeTextures.size(); i++ ) {
_activeTextures.get(i).updateTiming();
}
if( numBeatsDetected % BEAT_INTERVAL_COLOR == 0 ) {
// P.println("BEAT_INTERVAL_COLOR");
updateColor();
}
if( numBeatsDetected % BEAT_INTERVAL_ROTATION == 0 ) {
// P.println("BEAT_INTERVAL_ROTATION");
updateRotation();
}
if( numBeatsDetected % BEAT_INTERVAL_TRAVERSE == 0 ) {
// P.println("BEAT_INTERVAL_TRAVERSE");
traverseTrigger();
}
// updateColor();
for(int i=0; i < _mappingGroups.size(); i++ ) {
_mappingGroups.get(i).newAudioPixelColor();
}
if( numBeatsDetected % BEAT_INTERVAL_ALL_SAME == 0 ) {
// P.println("BEAT_INTERVAL_ALL_SAME");
setGroupsMappingStylesToTheSame();
setGroupsTextureToTheSameMaybe();
updateLineMode();
}
if( numBeatsDetected % BEAT_INTERVAL_NEW_TIMING == 0 ) {
// P.println("BEAT_INTERVAL_NEW_TIMING");
updateTimingSection();
}
// every 40 beats, do something bigger
if( numBeatsDetected % BEAT_INTERVAL_BIG_CHANGE == 0 ) {
// P.println("BEAT_INTERVAL_BIG_CHANGE");
bigChangeTrigger();
}
}
protected void updateTimingSection() {
super.updateTimingSection();
newLineModeForRandomGroup();
// cycleANewTexture();
}
protected void bigChangeTrigger() {
cycleANewTexture();
newTexturesForAllGroups();
newLineModesForAllGroups();
// set longer timing updates
updateTimingSection();
updateColor();
// reset rotations
for(int i=0; i < _mappingGroups.size(); i++ ) {
_mappingGroups.get(i).resetRotation();
}
}
// cool rules =========================================================
protected void setGroupsTextureToTheSameMaybe() {
// maybe also set a group to all to be the same texture
for(int i=0; i < _mappingGroups.size(); i++ ) {
if( MathUtil.randRange(0, 100) < 25 ) {
_mappingGroups.get(i).setAllPolygonsToSameRandomTexture();
}
}
}
protected void setGroupsMappingStylesToTheSame() {
// every once in a while, set all polygons' styles to be the same per group
for(int i=0; i < _mappingGroups.size(); i++ ) {
if( MathUtil.randRange(0, 100) < 90 ) {
_mappingGroups.get(i).setAllPolygonsTextureStyle( MathUtil.randRange(0, 2) );
} else {
_mappingGroups.get(i).setAllPolygonsTextureStyle( IMappedPolygon.MAP_STYLE_EQ ); // less likely to go to EQ fill
}
_mappingGroups.get(i).newColor();
}
}
// Per beat: for every group, flip a coin and either swap a random texture
// onto a random polygon, or give a random polygon a new mapping style.
protected void changeGroupsRandomPolygonMapStyle() {
    // every beat, change a polygon mapping style or texture
    for (int i = 0; i < _mappingGroups.size(); i++) {
        // `== true` was redundant — randBoolean already yields a boolean
        if (MathUtil.randBoolean(p)) {
            _mappingGroups.get(i).randomTextureToRandomPolygon();
        } else {
            _mappingGroups.get(i).randomPolygonRandomMappingStyle();
        }
    }
}
// Picks one mapping group at random and gives it a new line mode.
// NOTE(review): assumes MathUtil.randRange is inclusive of both bounds — confirm.
protected void newLineModeForRandomGroup() {
int randGroup = MathUtil.randRange( 0, _mappingGroups.size() - 1 );
_mappingGroups.get(randGroup).newLineMode();
}
// Gives every group a freshly-picked line mode, then — one roll in ten —
// overrides all groups with a single shared mode so the whole scene syncs.
protected void newLineModesForAllGroups() {
    final int groupCount = _mappingGroups.size();
    for (int g = 0; g < groupCount; g++) {
        _mappingGroups.get(g).newLineMode();
    }
    // once in a while, reset all mesh lines to the same random mode
    if (MathUtil.randRange(0, 100) < 10) {
        final int sharedLineMode = MathUtil.randRange(0, MODE.values().length - 1);
        for (int g = 0; g < groupCount; g++) {
            _mappingGroups.get(g).resetLineModeToIndex(sharedLineMode);
        }
    }
}
// Rotates a randomly chosen texture from the current pool onto every group:
// shift out the group's oldest texture, push the new pick, then reload.
protected void newTexturesForAllGroups() {
    for (int g = 0; g < _mappingGroups.size(); g++) {
        _mappingGroups.get(g).shiftTexture();
        final int poolPick = MathUtil.randRange(0, _curTexturePool.size() - 1);
        _mappingGroups.get(g).pushTexture(_curTexturePool.get(poolPick));
        _mappingGroups.get(g).reloadTextureAtIndex();
    }
}
// Adds a random texture from the master pool into the active pool, caps the
// number of simultaneously playing movie textures, trims the active pool to
// its max size, and pushes a (random) active texture onto every group.
protected void cycleANewTexture() {
// rebuild the array of currently-available textures
// check number of movie textures, and make sure we never have more than 2
_curTexturePool.add( _texturePool.get( MathUtil.randRange(0, _texturePool.size()-1 ) ) );
// Each removed movie texture is replaced by a fresh random pick, so the
// pool keeps growing here and is trimmed below.
while( numMovieTextures() > MAX_ACTIVE_MOVIE_TEXTURES ) {
removeOldestMovieTexture();
_curTexturePool.add( _texturePool.get( MathUtil.randRange(0, _texturePool.size()-1 ) ) );
}
// remove oldest texture if more than max
if( _curTexturePool.size() >= MAX_ACTIVE_TEXTURES ) {
// P.println(_curTexturePool.size());
_curTexturePool.remove(0);
}
// make sure polygons update their textures
for( int i=0; i < _mappingGroups.size(); i++ ) {
_mappingGroups.get(i).shiftTexture();
_mappingGroups.get(i).pushTexture( _curTexturePool.get( MathUtil.randRange(0, _curTexturePool.size()-1 )) );
_mappingGroups.get(i).reloadTextureAtIndex();
}
}
}
| CMKY 2015 updates
| src/com/haxademic/app/haxmapper/mappers/CMKY2014Mapper.java | CMKY 2015 updates |
|
Java | mit | 7b76629f4f02b76f9865e3aa2a4a9c0438dfccf6 | 0 | IgorGee/PendantCreator3D,IgorGee/3D-Image-Creator,IgorGee/Carbonizr | package xyz.igorgee.imagecreator3d;
import android.app.Activity;
import android.app.Fragment;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.provider.MediaStore;
import android.support.design.widget.Snackbar;
import android.support.v4.content.FileProvider;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.soundcloud.android.crop.Crop;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
import xyz.igorgee.Api.ServerInterface;
import xyz.igorgee.floatingactionbutton.FloatingActionsMenu;
import xyz.igorgee.shapejs.ShapeJS;
import xyz.igorgee.utilities.ImageHelper;
import xyz.igorgee.utilities.JavaUtilities;
import static xyz.igorgee.utilities.UIUtilities.makeAlertDialog;
import static xyz.igorgee.utilities.UIUtilities.makeSnackbar;
public class HomePageFragment extends Fragment {
private final static int TAKE_PICTURE = 7428873;
private final static String MODELS_DIRECTORY_NAME = "models";
public static final String MODEL_NAMES = "ModelNames";
private static final String CAPTURE_IMAGE_FILE_PROVIDER = "xyz.igorgee.carbonizr.fileprovider";
public static final String TEMPORARY_IMAGE_FOLDER = "temp";
public static final String TEMPORARY_IMAGE_NAME = "image.jpg";
public static SharedPreferences sharedPreferences;
public static File filesDirectory;
public static File modelsDirectory;
@Bind(R.id.empty_home_page_text) TextView textView;
@Bind(R.id.image_options_fam) FloatingActionsMenu fam;
@Bind(R.id.list) RecyclerView list;
ArrayList<Model> models;
CustomAdapter adapter;
RecyclerView.LayoutManager linearLayoutManager;
public static final String BASE_URL = "http://52.90.86.247/";
private static Retrofit retrofit = new Retrofit.Builder()
.baseUrl(BASE_URL)
.addConverterFactory(GsonConverterFactory.create())
.build();
public static final ServerInterface apiService = retrofit.create(ServerInterface.class);
/**
 * Inflates the home-page layout, prepares the models directory on external
 * storage, and wires up the RecyclerView, adapter, and FAB scroll behavior.
 */
@Override // was missing — this overrides Fragment.onCreateView
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View view = inflater.inflate(R.layout.fragment_homepage, container, false);
    ButterKnife.bind(this, view);
    setHasOptionsMenu(true);
    // Generated models live under <external storage>/models/.
    filesDirectory = Environment.getExternalStorageDirectory();
    modelsDirectory = new File(filesDirectory, MODELS_DIRECTORY_NAME);
    modelsDirectory.mkdirs(); // best-effort; checkExistingFiles tolerates absence
    list.setHasFixedSize(true);
    linearLayoutManager = new GridLayoutManager(getActivity(), 1);
    list.setLayoutManager(linearLayoutManager);
    models = new ArrayList<>();
    adapter = CustomAdapter.getInstance(getActivity(), models);
    list.setAdapter(adapter);
    // Hide the floating action menu while scrolling down, show it on scroll up.
    list.addOnScrollListener(new RecyclerView.OnScrollListener() {
        @Override
        public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
            if (dy > 0) {
                fam.hide();
            } else {
                fam.show();
            }
        }
    });
    sharedPreferences = getActivity().getSharedPreferences
            (HomePageFragment.MODEL_NAMES, Context.MODE_PRIVATE);
    ((AppCompatActivity) getActivity()).getSupportActionBar().setSubtitle("Powered by Shapeways");
    return view;
}
// Pushes the current model list into the RecyclerView adapter.
public void refresh() {
adapter.updateList(models);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// Populate the list with models generated in previous sessions.
checkExistingFiles();
}
/**
 * Scans {@code modelsDirectory} for previously generated models (one
 * subdirectory per model, identified by a {@code .stl} file) and adds them
 * to the list, newest first.
 */
private void checkExistingFiles() {
    final File[] directories = modelsDirectory.listFiles();
    if (directories == null) {
        return; // models directory missing or unreadable
    }
    for (final File directory : directories) {
        final File[] files = directory.listFiles();
        if (files == null) {
            continue; // stray non-directory entry — previously an NPE
        }
        for (final File file : files) {
            if (file.getName().endsWith(".stl")) {
                // Model name = stl filename without the ".stl" extension.
                String fileName = file.getName().substring(0, file.getName().length() - 4);
                models.add(new Model(fileName, directory));
                textView.setVisibility(View.GONE);
            }
        }
    }
    // Reverse ONCE, after scanning all directories. The previous code
    // reversed inside the loop, re-reversing earlier entries whenever more
    // than one model directory existed, scrambling the ordering.
    Collections.reverse(models);
    refresh();
}
// Gallery FAB: launches the image picker (result handled in onActivityResult
// under Crop.REQUEST_PICK) and collapses the floating action menu.
@OnClick(R.id.gallery_fab)
public void selectImage(View view) {
Crop.pickImage(getActivity(), this);
fam.collapse();
}
// Camera FAB: launches the system camera, writing the capture to a fixed
// temp file exposed via FileProvider (result handled in onActivityResult
// under TAKE_PICTURE), then collapses the floating action menu.
@OnClick(R.id.camera_fab)
public void takePicture(View view) {
File path = new File(getActivity().getFilesDir(), TEMPORARY_IMAGE_FOLDER);
if (!path.exists())
path.mkdirs();
File image = new File(path, TEMPORARY_IMAGE_NAME);
Uri imageUri = FileProvider.getUriForFile(getActivity(), CAPTURE_IMAGE_FILE_PROVIDER, image);
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, imageUri);
startActivityForResult(intent, TAKE_PICTURE);
fam.collapse();
}
// Routes the three flows this fragment starts:
//   camera capture -> square crop, gallery pick -> square crop,
//   and finished crop -> 3D model generation.
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == Activity.RESULT_OK) {
if (requestCode == TAKE_PICTURE) {
// Camera wrote to the fixed temp file; hand it to the cropper.
File path = new File(getActivity().getFilesDir(), TEMPORARY_IMAGE_FOLDER);
if (!path.exists())
path.mkdirs();
File imageFile = new File(path, TEMPORARY_IMAGE_NAME);
Uri tempUri = Uri.fromFile(imageFile);
Uri destination = Uri.fromFile(new File(getActivity().getCacheDir(), imageFile.getName()));
Crop.of(tempUri, destination).asSquare().start(getActivity(), this);
} else if (requestCode == Crop.REQUEST_PICK) {
// Gallery pick: copy the content URI into a cache file, then crop.
Uri pickedImage = data.getData();
File imageFile;
try {
imageFile = getImageFileFromUri(pickedImage);
Uri destination = Uri.fromFile(new File(getActivity().getCacheDir(), imageFile.getName()));
Crop.of(pickedImage, destination).asSquare().start(getActivity(), this);
} catch (IOException e) {
e.printStackTrace();
}
} else if (requestCode == Crop.REQUEST_CROP) {
// Crop finished: kick off the background model generation.
Uri croppedImage = Crop.getOutput(data);
File imagePath = new File(croppedImage.getPath());
new GenerateObject(imagePath, getActivity()).execute();
textView.setVisibility(View.GONE);
}
}
}
/**
 * Copies the image behind a content {@link Uri} into a cache-dir file named
 * after the content's display name, and returns that file.
 *
 * @param uri content URI of the picked image
 * @return a readable PNG-encoded copy in the app cache directory
 * @throws IOException if the descriptor or output file cannot be handled
 */
private File getImageFileFromUri(Uri uri) throws IOException {
    // Decode the image via its file descriptor; close it even on failure
    // (previously the descriptor leaked if decoding threw).
    Bitmap image = null;
    ParcelFileDescriptor parcelFileDescriptor =
            getActivity().getContentResolver().openFileDescriptor(uri, "r");
    if (parcelFileDescriptor != null) {
        try {
            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
            image = BitmapFactory.decodeFileDescriptor(fileDescriptor);
        } finally {
            parcelFileDescriptor.close();
        }
    }
    // Look up the display name to use as the cache file name.
    String fileName = null;
    String[] projection = {MediaStore.MediaColumns.DISPLAY_NAME};
    Cursor metaCursor = getActivity().getContentResolver().query(uri, projection, null, null, null);
    if (metaCursor != null) {
        try {
            if (metaCursor.moveToFirst()) {
                fileName = metaCursor.getString(0);
            }
        } finally {
            metaCursor.close();
        }
    }
    if (fileName == null) {
        // Provider returned no display name — fall back to a fixed name
        // rather than passing null to the File constructor (NPE).
        fileName = TEMPORARY_IMAGE_NAME;
    }
    File imageFile = new File(getActivity().getCacheDir(), fileName);
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    if (image != null) {
        // Quality is ignored for lossless PNG; kept for API compliance.
        image.compress(Bitmap.CompressFormat.PNG, 0, bos);
    }
    // FileOutputStream creates the file itself, so createNewFile() was
    // redundant; close the stream even if write() throws.
    FileOutputStream fileOutputStream = new FileOutputStream(imageFile);
    try {
        fileOutputStream.write(bos.toByteArray());
        fileOutputStream.flush();
    } finally {
        fileOutputStream.close();
    }
    return imageFile;
}
// Background task: uploads a cropped image to ShapeJS, downloads the
// resulting zip of model files into a per-model directory, unzips and
// renames them, saves the source bitmap alongside, and finally adds the
// new Model to the top of the list (or shows an error dialog).
private class GenerateObject extends AsyncTask<Void, Void, Void> {
File file;               // cropped source image
String filename;         // image name without extension; used for model files
File modelDirectory;     // unique directory for this model's output
Bitmap bitmap;           // decoded source image, saved as <filename>.jpg
Context context;         // for the error dialog in onPostExecute
ShapeJS shapeJS = new ShapeJS();
boolean error = false;   // set in doInBackground, read in onPostExecute
GenerateObject(File file, Context context) {
this.file = file;
// Strip the extension (if any) to get the base model name.
if (file.getName().contains("."))
filename = file.getName().substring(0, file.getName().indexOf('.'));
else
filename = file.getName();
this.context = context;
Log.d("FILENAMEDATE", filename);
// Unique directory name = filename + timestamp, with spaces and
// filesystem-illegal characters removed.
String modelDirectoryName = (filename + new Date().toString()).replace(" ", "");
for (Character c : JavaUtilities.ILLEGAL_CHARACTERS)
modelDirectoryName = modelDirectoryName.replace(c.toString(), "");
modelDirectory = new File(modelsDirectory, modelDirectoryName);
modelDirectory.mkdirs();
Log.d("FILELOCATION", modelDirectory.getAbsolutePath());
Log.d("FILENAMEDATE", modelDirectory.getName());
bitmap = ImageHelper.decodeSampledBitmapFromResource(file.getAbsoluteFile());
}
@Override
protected Void doInBackground(Void... params) {
InputStream inputStream = null;
FileOutputStream outputStream = null;
FileOutputStream bitmapOutputStream = null;
File zipFile = new File(modelDirectory, filename + ".zip");
try {
makeSnackbar(textView,
"Uploaded!\nGenerating model. Please wait.",
Snackbar.LENGTH_LONG);
// Stream the server's zip response to disk in 100 KB chunks,
// flushing roughly every 1 MB.
inputStream = shapeJS.uploadImage(file);
outputStream = new FileOutputStream(zipFile);
int numRead;
int total = 0;
byte[] buffer = new byte[102400];
while ((numRead = inputStream.read(buffer)) >= 0) {
total += numRead;
outputStream.write(buffer, 0, numRead);
if (total > 1024 * 1024) {
total = 0;
outputStream.flush();
}
}
JavaUtilities.unzip(zipFile, modelDirectory);
// Normalize names: the zip's jpg becomes preview.jpg, every other
// file keeps its extension but takes the model's base name.
for (File file : modelDirectory.listFiles()) {
if (file.getName().endsWith(".jpg")) {
file.renameTo(new File(modelDirectory, "preview.jpg"));
} else {
file.renameTo(new File(modelDirectory,
filename + file.getName().substring(file.getName().indexOf('.'))));
}
}
// Save the original (cropped) bitmap next to the model files.
bitmapOutputStream = new FileOutputStream(
new File(modelDirectory, filename + ".jpg"));
bitmap.compress(Bitmap.CompressFormat.PNG, 100, bitmapOutputStream);
} catch (IOException e) {
error = true; // surfaced to the user in onPostExecute
e.printStackTrace();
} finally {
// The zip is only an intermediate artifact — delete it, then close
// all streams (best-effort).
try {
if (!zipFile.delete())
Log.e("GENERATEOBJECT", "Zip file wasn't deleted");
if (inputStream != null)
inputStream.close();
if (outputStream != null)
outputStream.close();
if (bitmapOutputStream != null)
bitmapOutputStream.close();
} catch (IOException e ) {
e.printStackTrace();
}
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
if (error) {
makeAlertDialog(context, "Error", "Sorry, something went wrong. Try again in a few minutes.");
} else {
// Insert the new model at the top and scroll it into view.
final Model model = new Model(filename, modelDirectory);
models.add(0, model);
list.smoothScrollToPosition(0);
refresh();
}
}
}
// Inflates the home-page toolbar menu (cart + FAQ items).
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.homepage_toolbar, menu);
super.onCreateOptionsMenu(menu, inflater);
}
/**
 * Handles toolbar actions: opens the Shapeways shop (preferring Chrome)
 * for the cart item, or swaps in the FAQ fragment.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    switch (id) {
        case R.id.cart:
            String urlString = "https://shapeways.com/shops/carbonizr";
            Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(urlString));
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            intent.setPackage("com.android.chrome");
            try {
                this.startActivity(intent);
            } catch (ActivityNotFoundException ex) {
                // Chrome browser presumably not installed so allow user to choose instead
                intent.setPackage(null);
                this.startActivity(intent);
            }
            break;
        case R.id.faq:
            getFragmentManager()
                    .beginTransaction()
                    .replace(R.id.fragmentPlaceholder, new FAQFragment(), "FAQ")
                    .addToBackStack(null)
                    .commit();
            MainActivity.drawerToggle.setDrawerIndicatorEnabled(false);
            break; // explicit break — previously relied on being the last case
    }
    return super.onOptionsItemSelected(item);
}
}
| android/src/xyz/igorgee/imagecreator3d/HomePageFragment.java | package xyz.igorgee.imagecreator3d;
import android.app.Activity;
import android.app.Fragment;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.provider.MediaStore;
import android.support.design.widget.Snackbar;
import android.support.v4.content.FileProvider;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.soundcloud.android.crop.Crop;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
import xyz.igorgee.Api.ServerInterface;
import xyz.igorgee.floatingactionbutton.FloatingActionsMenu;
import xyz.igorgee.shapejs.ShapeJS;
import xyz.igorgee.utilities.ImageHelper;
import xyz.igorgee.utilities.JavaUtilities;
import static xyz.igorgee.utilities.UIUtilities.makeAlertDialog;
import static xyz.igorgee.utilities.UIUtilities.makeSnackbar;
public class HomePageFragment extends Fragment {
private final static int TAKE_PICTURE = 7428873;
private final static String MODELS_DIRECTORY_NAME = "models";
public static final String MODEL_NAMES = "ModelNames";
private static final String CAPTURE_IMAGE_FILE_PROVIDER = "xyz.igorgee.carbonizr.fileprovider";
public static final String TEMPORARY_IMAGE_FOLDER = "temp";
public static final String TEMPORARY_IMAGE_NAME = "image.jpg";
public static SharedPreferences sharedPreferences;
public static File filesDirectory;
public static File modelsDirectory;
@Bind(R.id.empty_home_page_text) TextView textView;
@Bind(R.id.image_options_fam) FloatingActionsMenu fam;
@Bind(R.id.list) RecyclerView list;
ArrayList<Model> models;
CustomAdapter adapter;
RecyclerView.LayoutManager linearLayoutManager;
public static final String BASE_URL = "http://52.90.86.247/";
private static Retrofit retrofit = new Retrofit.Builder()
.baseUrl(BASE_URL)
.addConverterFactory(GsonConverterFactory.create())
.build();
public static final ServerInterface apiService = retrofit.create(ServerInterface.class);
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_homepage, container, false);
ButterKnife.bind(this, view);
setHasOptionsMenu(true);
filesDirectory = Environment.getExternalStorageDirectory();
modelsDirectory = new File(filesDirectory, MODELS_DIRECTORY_NAME);
modelsDirectory.mkdirs();
list.setHasFixedSize(true);
linearLayoutManager = new GridLayoutManager(getActivity(), 1);
list.setLayoutManager(linearLayoutManager);
models = new ArrayList<>();
adapter = CustomAdapter.getInstance(getActivity(), models);
list.setAdapter(adapter);
list.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
if (dy > 0) {
fam.hide();
} else {
fam.show();
}
}
});
sharedPreferences = getActivity().getSharedPreferences
(HomePageFragment.MODEL_NAMES, Context.MODE_PRIVATE);
((AppCompatActivity) getActivity()).getSupportActionBar().setSubtitle("Powered by Shapeways");
return view;
}
public void refresh() {
adapter.updateList(models);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
checkExistingFiles();
}
private void checkExistingFiles() {
if (modelsDirectory.listFiles() != null) {
for (final File directory : modelsDirectory.listFiles()) {
for (final File file : directory.listFiles()) {
if (file.getName().endsWith(".stl")) {
String fileName = file.getName().substring(0, file.getName().length() - 4);
models.add(new Model(fileName, directory));
textView.setVisibility(View.GONE);
}
}
Collections.reverse(models);
refresh();
}
}
}
@OnClick(R.id.gallery_fab)
public void selectImage(View view) {
Crop.pickImage(getActivity(), this);
fam.collapse();
}
@OnClick(R.id.camera_fab)
public void takePicture(View view) {
File path = new File(getActivity().getFilesDir(), TEMPORARY_IMAGE_FOLDER);
if (!path.exists())
path.mkdirs();
File image = new File(path, TEMPORARY_IMAGE_NAME);
Uri imageUri = FileProvider.getUriForFile(getActivity(), CAPTURE_IMAGE_FILE_PROVIDER, image);
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, imageUri);
startActivityForResult(intent, TAKE_PICTURE);
fam.collapse();
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == Activity.RESULT_OK) {
if (requestCode == TAKE_PICTURE) {
File path = new File(getActivity().getFilesDir(), TEMPORARY_IMAGE_FOLDER);
if (!path.exists())
path.mkdirs();
File imageFile = new File(path, TEMPORARY_IMAGE_NAME);
Uri tempUri = Uri.fromFile(imageFile);
Uri destination = Uri.fromFile(new File(getActivity().getCacheDir(), imageFile.getName()));
Crop.of(tempUri, destination).asSquare().start(getActivity(), this);
} else if (requestCode == Crop.REQUEST_PICK) {
Uri pickedImage = data.getData();
File imageFile;
try {
imageFile = getImageFileFromUri(pickedImage);
Uri destination = Uri.fromFile(new File(getActivity().getCacheDir(), imageFile.getName()));
Crop.of(pickedImage, destination).asSquare().start(getActivity(), this);
} catch (IOException e) {
e.printStackTrace();
}
} else if (requestCode == Crop.REQUEST_CROP) {
Uri croppedImage = Crop.getOutput(data);
File imagePath = new File(croppedImage.getPath());
new GenerateObject(imagePath, getActivity()).execute();
textView.setVisibility(View.GONE);
}
}
}
private File getImageFileFromUri(Uri uri) throws IOException {
ParcelFileDescriptor parcelFileDescriptor =
getActivity().getContentResolver().openFileDescriptor(uri, "r");
FileDescriptor fileDescriptor;
Bitmap image = null;
if (parcelFileDescriptor != null) {
fileDescriptor = parcelFileDescriptor.getFileDescriptor();
image = BitmapFactory.decodeFileDescriptor(fileDescriptor);
parcelFileDescriptor.close();
}
String fileName = null;
String[] projection = {MediaStore.MediaColumns.DISPLAY_NAME};
Cursor metaCursor = getActivity().getContentResolver().query(uri, projection, null, null, null);
if (metaCursor != null) {
try {
if (metaCursor.moveToFirst()) {
fileName = metaCursor.getString(0);
}
} finally {
metaCursor.close();
}
}
File imageFile = new File(getActivity().getCacheDir(), fileName);
imageFile.createNewFile();
ByteArrayOutputStream bos = new ByteArrayOutputStream();
if (image != null) {
image.compress(Bitmap.CompressFormat.PNG, 0, bos);
}
byte[] bitmapdata = bos.toByteArray();
FileOutputStream fileOutputStream = new FileOutputStream(imageFile);
fileOutputStream.write(bitmapdata);
fileOutputStream.flush();
fileOutputStream.close();
return imageFile;
}
private class GenerateObject extends AsyncTask<Void, Void, Void> {
File file;
String filename;
File modelDirectory;
Bitmap bitmap;
Context context;
ShapeJS shapeJS = new ShapeJS();
boolean error = false;
GenerateObject(File file, Context context) {
this.file = file;
if (file.getName().contains("."))
filename = file.getName().substring(0, file.getName().indexOf('.'));
else
filename = file.getName();
this.context = context;
Log.d("FILENAMEDATE", filename);
String modelDirectoryName = (filename + new Date().toString()).replace(" ", "");
for (Character c : JavaUtilities.ILLEGAL_CHARACTERS)
modelDirectoryName = modelDirectoryName.replace(c.toString(), "");
modelDirectory = new File(modelsDirectory, modelDirectoryName);
modelDirectory.mkdirs();
Log.d("FILELOCATION", modelDirectory.getAbsolutePath());
Log.d("FILENAMEDATE", modelDirectory.getName());
bitmap = ImageHelper.decodeSampledBitmapFromResource(file.getAbsoluteFile());
}
/**
 * Uploads the image to ShapeJS, streams the resulting zip to disk, unzips
 * it, normalizes the generated file names, and saves the source bitmap as
 * "&lt;filename&gt;.jpg". Sets {@code error} on IO failure.
 */
@Override
protected Void doInBackground(Void... params) {
    InputStream inputStream = null;
    FileOutputStream outputStream = null;
    FileOutputStream bitmapOutputStream = null;
    File zipFile = new File(modelDirectory, filename + ".zip");
    try {
        makeSnackbar(textView,
                "Uploaded!\nGenerating model. Please wait.",
                Snackbar.LENGTH_LONG);
        inputStream = shapeJS.uploadImage(file);
        outputStream = new FileOutputStream(zipFile);
        // Copy in chunks rather than byte-at-a-time: one read()/write()
        // syscall pair per byte is extremely slow for multi-MB zips.
        byte[] buffer = new byte[8192];
        int numRead;
        while ((numRead = inputStream.read(buffer)) != -1) {
            outputStream.write(buffer, 0, numRead);
        }
        JavaUtilities.unzip(zipFile, modelDirectory);
        // Normalize names: the zip's jpg becomes preview.jpg, everything
        // else keeps its extension but takes the model's base name.
        for (File file : modelDirectory.listFiles()) {
            if (file.getName().endsWith(".jpg")) {
                file.renameTo(new File(modelDirectory, "preview.jpg"));
            } else {
                file.renameTo(new File(modelDirectory,
                        filename + file.getName().substring(file.getName().indexOf('.'))));
            }
        }
        bitmapOutputStream = new FileOutputStream(
                new File(modelDirectory, filename + ".jpg"));
        bitmap.compress(Bitmap.CompressFormat.PNG, 100, bitmapOutputStream);
    } catch (IOException e) {
        error = true; // surfaced to the user in onPostExecute
        e.printStackTrace();
    } finally {
        // The zip is only an intermediate artifact — delete it, then close
        // all streams (best-effort).
        try {
            if (!zipFile.delete())
                Log.e("GENERATEOBJECT", "Zip file wasn't deleted");
            if (inputStream != null)
                inputStream.close();
            if (outputStream != null)
                outputStream.close();
            if (bitmapOutputStream != null)
                bitmapOutputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
if (error) {
makeAlertDialog(context, "Error", "Sorry, something went wrong. Try again in a few minutes.");
} else {
final Model model = new Model(filename, modelDirectory);
models.add(0, model);
list.smoothScrollToPosition(0);
refresh();
}
}
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.homepage_toolbar, menu);
super.onCreateOptionsMenu(menu, inflater);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
switch (id) {
case R.id.cart:
String urlString="https://shapeways.com/shops/carbonizr";
Intent intent=new Intent(Intent.ACTION_VIEW, Uri.parse(urlString));
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.setPackage("com.android.chrome");
try {
this.startActivity(intent);
} catch (ActivityNotFoundException ex) {
// Chrome browser presumably not installed so allow user to choose instead
intent.setPackage(null);
this.startActivity(intent);
}
break;
case R.id.faq:
getFragmentManager()
.beginTransaction()
.replace(R.id.fragmentPlaceholder, new FAQFragment(), "FAQ")
.addToBackStack(null)
.commit();
MainActivity.drawerToggle.setDrawerIndicatorEnabled(false);
}
return super.onOptionsItemSelected(item);
}
}
| Buffer the zip file retrieval
| android/src/xyz/igorgee/imagecreator3d/HomePageFragment.java | Buffer the zip file retrieval |
|
Java | mit | 72c5191bb4fb0f512d5b920cd2d3f51c4b8adaa4 | 0 | zalando/nakadi,zalando/nakadi | package org.zalando.nakadi.webservice;
import org.apache.http.HttpStatus;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.zalando.nakadi.domain.Subscription;
import org.zalando.nakadi.utils.TestUtils;
import org.zalando.nakadi.webservice.utils.NakadiTestUtils;
import org.zalando.nakadi.webservice.utils.TestStreamingClient;
import java.util.List;
import java.util.Map;
import static com.jayway.restassured.RestAssured.given;
/**
 * Acceptance tests for Avro-encoded internal Nakadi event types: verifies
 * that access-log and subscription-log events can be consumed through a
 * subscription. Common subscribe/wait/read scaffolding is factored into
 * private helpers shared by both tests.
 */
public class BinaryEventPublisherAT extends BaseAT {

    private static final String NAKADI_ACCESS_LOG = "nakadi.access.log";
    private static final String NAKADI_SUBSCRIPTION_LOG = "nakadi.subscription.log";

    @Test
    public void testNakadiAccessLogInAvro() throws Exception {
        // lets read nakadi.access.log to validate if there is an event
        final TestStreamingClient client = subscribeAndStream(NAKADI_ACCESS_LOG);
        // let log any request to nakadi.access.log event type
        final String path = "/event-types/" + NAKADI_ACCESS_LOG;
        given()
                .get(path)
                .then()
                .statusCode(HttpStatus.SC_OK);
        final List<Map> events = waitForFirstBatchEvents(client);
        // when tests are run in parallel it is hard to get specific event,
        // that's why check that events are in the event type
        Assert.assertEquals(
                NAKADI_ACCESS_LOG,
                ((Map) events.get(0).get("metadata")).get("event_type"));
    }

    @Test
    public void testNakadiSubscriptionLogInAvro() throws Exception {
        final TestStreamingClient client = subscribeAndStream(NAKADI_SUBSCRIPTION_LOG);
        // creating another subscription emits a nakadi.subscription.log event
        NakadiTestUtils.createSubscriptionForEventType(NAKADI_ACCESS_LOG);
        final List<Map> events = waitForFirstBatchEvents(client);
        // when tests are run in parallel it is hard to get specific event,
        // that's why check that events are in the event type
        Assert.assertEquals(
                NAKADI_SUBSCRIPTION_LOG,
                ((Map) events.get(0).get("metadata")).get("event_type"));
        Assert.assertEquals("created", events.get(0).get("status"));
    }

    // Creates a subscription for the given event type and starts streaming it.
    private static TestStreamingClient subscribeAndStream(final String eventType) throws Exception {
        final Subscription subscription = NakadiTestUtils
                .createSubscriptionForEventType(eventType);
        return TestStreamingClient
                .create(subscription.getId())
                .start();
    }

    // Waits (up to 10s) for the first batch and returns its non-empty events.
    private static List<Map> waitForFirstBatchEvents(final TestStreamingClient client) throws Exception {
        TestUtils.waitFor(() -> MatcherAssert.assertThat(
                client.getBatches().size(), Matchers.greaterThanOrEqualTo(1)), 10000);
        final List<Map> events = client.getBatches().get(0).getEvents();
        Assert.assertFalse(events.isEmpty());
        return events;
    }
}
| acceptance-test/src/acceptance-test/java/org/zalando/nakadi/webservice/BinaryEventPublisherAT.java | package org.zalando.nakadi.webservice;
import org.apache.http.HttpStatus;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.zalando.nakadi.domain.Subscription;
import org.zalando.nakadi.utils.TestUtils;
import org.zalando.nakadi.webservice.utils.NakadiTestUtils;
import org.zalando.nakadi.webservice.utils.TestStreamingClient;
import java.util.List;
import java.util.Map;
import static com.jayway.restassured.RestAssured.given;
public class BinaryEventPublisherAT extends BaseAT {
private static final String NAKADI_ACCESS_LOG = "nakadi.access.log";
@Test
public void testNakadiAccessLogInAvro() throws Exception {
// lets read nakadi.access.log to validate if there is an event
final Subscription subscription = NakadiTestUtils
.createSubscriptionForEventType(NAKADI_ACCESS_LOG);
final TestStreamingClient client = TestStreamingClient
.create(subscription.getId())
.start();
// let log any request to nakadi.access.log event type
final String path = "/event-types/" + NAKADI_ACCESS_LOG;
given()
.get(path)
.then()
.statusCode(HttpStatus.SC_OK);
TestUtils.waitFor(() -> MatcherAssert.assertThat(
client.getBatches().size(), Matchers.greaterThanOrEqualTo(1)), 10000);
final List<Map> events = client.getBatches().get(0).getEvents();
Assert.assertFalse(events.isEmpty());
// when tests are run in parallel it is hard to get specific event,
// that's why check that events are in the event type
Assert.assertEquals(
NAKADI_ACCESS_LOG,
((Map) events.get(0).get("metadata")).get("event_type"));
}
}
| Added acceptance test
| acceptance-test/src/acceptance-test/java/org/zalando/nakadi/webservice/BinaryEventPublisherAT.java | Added acceptance test |
|
Java | epl-1.0 | 23a8aae29ea065da45552c96c995c38cf21194a7 | 0 | stzilli/kapua,stzilli/kapua,stzilli/kapua,stzilli/kapua,LeoNerdoG/kapua,LeoNerdoG/kapua,stzilli/kapua,LeoNerdoG/kapua,LeoNerdoG/kapua,LeoNerdoG/kapua | /*******************************************************************************
* Copyright (c) 2011, 2016 Eurotech and/or its affiliates and others
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eurotech - initial API and implementation
*******************************************************************************/
package org.eclipse.kapua.app.api;
import java.security.acl.Permission;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.JAXBContext;
import org.eclipse.kapua.app.api.v1.resources.model.CountResult;
import org.eclipse.kapua.app.api.v1.resources.model.ErrorBean;
import org.eclipse.kapua.model.config.metatype.KapuaTad;
import org.eclipse.kapua.model.config.metatype.KapuaTicon;
import org.eclipse.kapua.model.config.metatype.KapuaTocd;
import org.eclipse.kapua.model.config.metatype.KapuaToption;
import org.eclipse.kapua.service.account.Account;
import org.eclipse.kapua.service.account.AccountCreator;
import org.eclipse.kapua.service.account.AccountListResult;
import org.eclipse.kapua.service.account.AccountQuery;
import org.eclipse.kapua.service.account.AccountXmlRegistry;
import org.eclipse.kapua.service.authentication.ApiKeyCredentials;
import org.eclipse.kapua.service.authentication.AuthenticationCredentials;
import org.eclipse.kapua.service.authentication.AuthenticationXmlRegistry;
import org.eclipse.kapua.service.authentication.JwtCredentials;
import org.eclipse.kapua.service.authentication.UsernamePasswordCredentials;
import org.eclipse.kapua.service.authentication.credential.Credential;
import org.eclipse.kapua.service.authentication.credential.CredentialCreator;
import org.eclipse.kapua.service.authentication.credential.CredentialListResult;
import org.eclipse.kapua.service.authentication.credential.CredentialQuery;
import org.eclipse.kapua.service.authentication.credential.CredentialType;
import org.eclipse.kapua.service.authentication.credential.CredentialXmlRegistry;
import org.eclipse.kapua.service.authentication.token.shiro.AccessTokenImpl;
import org.eclipse.kapua.service.authorization.access.AccessInfo;
import org.eclipse.kapua.service.authorization.access.AccessInfoCreator;
import org.eclipse.kapua.service.authorization.access.AccessInfoListResult;
import org.eclipse.kapua.service.authorization.access.AccessInfoQuery;
import org.eclipse.kapua.service.authorization.access.AccessInfoXmlRegistry;
import org.eclipse.kapua.service.authorization.access.AccessPermission;
import org.eclipse.kapua.service.authorization.access.AccessPermissionCreator;
import org.eclipse.kapua.service.authorization.access.AccessPermissionListResult;
import org.eclipse.kapua.service.authorization.access.AccessPermissionQuery;
import org.eclipse.kapua.service.authorization.access.AccessPermissionXmlRegistry;
import org.eclipse.kapua.service.authorization.access.AccessRole;
import org.eclipse.kapua.service.authorization.access.AccessRoleCreator;
import org.eclipse.kapua.service.authorization.access.AccessRoleListResult;
import org.eclipse.kapua.service.authorization.access.AccessRoleQuery;
import org.eclipse.kapua.service.authorization.access.AccessRoleXmlRegistry;
import org.eclipse.kapua.service.authorization.domain.Domain;
import org.eclipse.kapua.service.authorization.domain.DomainListResult;
import org.eclipse.kapua.service.authorization.domain.DomainQuery;
import org.eclipse.kapua.service.authorization.domain.DomainXmlRegistry;
import org.eclipse.kapua.service.authorization.group.Group;
import org.eclipse.kapua.service.authorization.group.GroupCreator;
import org.eclipse.kapua.service.authorization.group.GroupListResult;
import org.eclipse.kapua.service.authorization.group.GroupQuery;
import org.eclipse.kapua.service.authorization.group.GroupXmlRegistry;
import org.eclipse.kapua.service.authorization.role.Role;
import org.eclipse.kapua.service.authorization.role.RoleCreator;
import org.eclipse.kapua.service.authorization.role.RoleListResult;
import org.eclipse.kapua.service.authorization.role.RolePermission;
import org.eclipse.kapua.service.authorization.role.RolePermissionCreator;
import org.eclipse.kapua.service.authorization.role.RolePermissionListResult;
import org.eclipse.kapua.service.authorization.role.RolePermissionQuery;
import org.eclipse.kapua.service.authorization.role.RolePermissionXmlRegistry;
import org.eclipse.kapua.service.authorization.role.RoleQuery;
import org.eclipse.kapua.service.authorization.role.RoleXmlRegistry;
import org.eclipse.kapua.service.device.call.kura.model.configuration.KuraDeviceConfiguration;
import org.eclipse.kapua.service.device.call.kura.model.deploy.KuraDeploymentPackage;
import org.eclipse.kapua.service.device.call.kura.model.deploy.KuraDeploymentPackages;
import org.eclipse.kapua.service.device.call.kura.model.snapshot.KuraSnapshotIds;
import org.eclipse.kapua.service.device.management.command.DeviceCommandInput;
import org.eclipse.kapua.service.device.management.command.DeviceCommandOutput;
import org.eclipse.kapua.service.device.management.command.DeviceCommandXmlRegistry;
import org.eclipse.kapua.service.device.management.configuration.DeviceComponentConfiguration;
import org.eclipse.kapua.service.device.management.configuration.DeviceConfiguration;
import org.eclipse.kapua.service.device.management.configuration.DeviceConfigurationXmlRegistry;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackage;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackageBundleInfo;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackageBundleInfos;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackageXmlRegistry;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackages;
import org.eclipse.kapua.service.device.management.packages.model.download.DevicePackageDownloadRequest;
import org.eclipse.kapua.service.device.management.packages.model.uninstall.DevicePackageUninstallRequest;
import org.eclipse.kapua.service.device.management.snapshot.DeviceSnapshot;
import org.eclipse.kapua.service.device.management.snapshot.DeviceSnapshotXmlRegistry;
import org.eclipse.kapua.service.device.management.snapshot.DeviceSnapshots;
import org.eclipse.kapua.service.device.registry.Device;
import org.eclipse.kapua.service.device.registry.DeviceCreator;
import org.eclipse.kapua.service.device.registry.DeviceListResult;
import org.eclipse.kapua.service.device.registry.DeviceQuery;
import org.eclipse.kapua.service.device.registry.DeviceXmlRegistry;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnection;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnectionListResult;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnectionQuery;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnectionSummary;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnectionXmlRegistry;
import org.eclipse.kapua.service.device.registry.event.DeviceEvent;
import org.eclipse.kapua.service.device.registry.event.DeviceEventListResult;
import org.eclipse.kapua.service.device.registry.event.DeviceEventQuery;
import org.eclipse.kapua.service.device.registry.event.DeviceEventXmlRegistry;
import org.eclipse.kapua.service.user.User;
import org.eclipse.kapua.service.user.UserCreator;
import org.eclipse.kapua.service.user.UserListResult;
import org.eclipse.kapua.service.user.UserQuery;
import org.eclipse.kapua.service.user.UserXmlRegistry;
import org.eclipse.persistence.jaxb.JAXBContextFactory;
/**
 * Provides a customized {@link JAXBContext} for the REST API so that the
 * concrete Kapua model implementations are known to JAXB and available for
 * marshalling/unmarshalling, both as XML and as JSON.
 */
@Provider
@Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
public class JaxbContextResolver implements ContextResolver<JAXBContext> {

    // Built once in the constructor and shared for all requests: JAXBContext
    // instances are thread-safe once created.
    private JAXBContext jaxbContext;

    /**
     * Eagerly builds the JAXB context with every bound model class, so the
     * provider fails fast at instantiation instead of at the first
     * marshalling attempt.
     *
     * @throws RuntimeException if the context cannot be created
     */
    public JaxbContextResolver() {
        try {
            jaxbContext = JAXBContextFactory.createContext(new Class[] {
                    // REST API stuff
                    ErrorBean.class,
                    CountResult.class,

                    // Account
                    Account.class,
                    AccountCreator.class,
                    AccountListResult.class,
                    AccountQuery.class,
                    AccountXmlRegistry.class,

                    // User
                    User.class,
                    UserCreator.class,
                    UserListResult.class,
                    UserQuery.class,
                    UserXmlRegistry.class,

                    // Device
                    Device.class,
                    DeviceCreator.class,
                    DeviceListResult.class,
                    DeviceQuery.class,
                    DeviceXmlRegistry.class,

                    // Device Connection
                    DeviceConnection.class,
                    DeviceConnectionListResult.class,
                    DeviceConnectionQuery.class,
                    DeviceConnectionXmlRegistry.class,

                    // Device Event
                    DeviceEvent.class,
                    DeviceEventListResult.class,
                    DeviceEventQuery.class,
                    DeviceEventXmlRegistry.class,

                    // Device Management - Command
                    DeviceCommandInput.class,
                    DeviceCommandXmlRegistry.class,
                    DeviceCommandOutput.class,

                    // Device Management - Configuration
                    DeviceConfiguration.class,
                    DeviceComponentConfiguration.class,
                    DeviceConfigurationXmlRegistry.class,

                    // Device Management - Snapshot
                    DeviceSnapshot.class,
                    DeviceSnapshots.class,
                    DeviceSnapshotXmlRegistry.class,

                    // Device Management - Packages
                    DevicePackage.class,
                    DevicePackages.class,
                    DevicePackageBundleInfo.class,
                    DevicePackageBundleInfos.class,
                    DevicePackageXmlRegistry.class,
                    DevicePackageDownloadRequest.class,
                    DevicePackageUninstallRequest.class,

                    // Kura device call payloads
                    KuraSnapshotIds.class,
                    KuraDeviceConfiguration.class,
                    KuraDeploymentPackages.class,
                    KuraDeploymentPackage.class,

                    // Configuration metatype
                    KapuaTocd.class,
                    KapuaTad.class,
                    KapuaTicon.class,
                    KapuaToption.class,

                    // Device connection summary.
                    // (A second DeviceConnectionXmlRegistry.class entry that used to
                    // follow here was removed: it is already registered above.)
                    DeviceConnectionSummary.class,

                    // Authentication
                    AuthenticationCredentials.class,
                    AuthenticationXmlRegistry.class,
                    AccessTokenImpl.class,
                    ApiKeyCredentials.class,
                    JwtCredentials.class,
                    UsernamePasswordCredentials.class,

                    // Credential
                    Credential.class,
                    CredentialListResult.class,
                    CredentialCreator.class,
                    CredentialType.class,
                    CredentialQuery.class,
                    CredentialXmlRegistry.class,

                    // Permission
                    // NOTE(review): this binds java.security.acl.Permission (see the
                    // imports), not a Kapua authorization Permission type — confirm
                    // this is intended.
                    Permission.class,

                    // Roles
                    Role.class,
                    RoleListResult.class,
                    RoleCreator.class,
                    RoleQuery.class,
                    RoleXmlRegistry.class,

                    // Role Permissions
                    RolePermission.class,
                    RolePermissionListResult.class,
                    RolePermissionCreator.class,
                    RolePermissionQuery.class,
                    RolePermissionXmlRegistry.class,

                    // Domains
                    Domain.class,
                    DomainListResult.class,
                    DomainQuery.class,
                    DomainXmlRegistry.class,

                    // Groups
                    Group.class,
                    GroupListResult.class,
                    GroupCreator.class,
                    GroupQuery.class,
                    GroupXmlRegistry.class,

                    // Access Info
                    AccessInfo.class,
                    AccessInfoListResult.class,
                    AccessInfoCreator.class,
                    AccessInfoQuery.class,
                    AccessInfoXmlRegistry.class,

                    // Access Permissions
                    AccessPermission.class,
                    AccessPermissionListResult.class,
                    AccessPermissionCreator.class,
                    AccessPermissionQuery.class,
                    AccessPermissionXmlRegistry.class,

                    // Access Roles
                    AccessRole.class,
                    AccessRoleListResult.class,
                    AccessRoleCreator.class,
                    AccessRoleQuery.class,
                    AccessRoleXmlRegistry.class
            }, null);
        } catch (Exception e) {
            // Preserve the cause so the real JAXB failure is visible in the log.
            throw new RuntimeException("Unable to initialize the REST API JAXB context", e);
        }
    }

    /**
     * Returns the pre-built shared context regardless of the requested type.
     *
     * @param type the class JAX-RS is trying to (un)marshal; ignored
     * @return the shared {@link JAXBContext}
     */
    @Override
    public JAXBContext getContext(Class<?> type) {
        return jaxbContext;
    }
}
| rest-api/src/main/java/org/eclipse/kapua/app/api/JaxbContextResolver.java | /*******************************************************************************
* Copyright (c) 2011, 2016 Eurotech and/or its affiliates and others
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eurotech - initial API and implementation
*******************************************************************************/
package org.eclipse.kapua.app.api;
import java.security.acl.Permission;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.JAXBContext;
import org.eclipse.kapua.app.api.v1.resources.model.CountResult;
import org.eclipse.kapua.app.api.v1.resources.model.ErrorBean;
import org.eclipse.kapua.model.config.metatype.KapuaTad;
import org.eclipse.kapua.model.config.metatype.KapuaTicon;
import org.eclipse.kapua.model.config.metatype.KapuaTocd;
import org.eclipse.kapua.model.config.metatype.KapuaToption;
import org.eclipse.kapua.service.account.Account;
import org.eclipse.kapua.service.account.AccountCreator;
import org.eclipse.kapua.service.account.AccountListResult;
import org.eclipse.kapua.service.account.AccountQuery;
import org.eclipse.kapua.service.account.AccountXmlRegistry;
import org.eclipse.kapua.service.authentication.ApiKeyCredentials;
import org.eclipse.kapua.service.authentication.AuthenticationCredentials;
import org.eclipse.kapua.service.authentication.AuthenticationXmlRegistry;
import org.eclipse.kapua.service.authentication.JwtCredentials;
import org.eclipse.kapua.service.authentication.UsernamePasswordCredentials;
import org.eclipse.kapua.service.authentication.credential.Credential;
import org.eclipse.kapua.service.authentication.credential.CredentialCreator;
import org.eclipse.kapua.service.authentication.credential.CredentialListResult;
import org.eclipse.kapua.service.authentication.credential.CredentialQuery;
import org.eclipse.kapua.service.authentication.credential.CredentialType;
import org.eclipse.kapua.service.authentication.credential.CredentialXmlRegistry;
import org.eclipse.kapua.service.authentication.token.shiro.AccessTokenImpl;
import org.eclipse.kapua.service.authorization.access.AccessInfo;
import org.eclipse.kapua.service.authorization.access.AccessInfoCreator;
import org.eclipse.kapua.service.authorization.access.AccessInfoListResult;
import org.eclipse.kapua.service.authorization.access.AccessInfoQuery;
import org.eclipse.kapua.service.authorization.access.AccessInfoXmlRegistry;
import org.eclipse.kapua.service.authorization.access.AccessPermission;
import org.eclipse.kapua.service.authorization.access.AccessPermissionCreator;
import org.eclipse.kapua.service.authorization.access.AccessPermissionListResult;
import org.eclipse.kapua.service.authorization.access.AccessPermissionQuery;
import org.eclipse.kapua.service.authorization.access.AccessPermissionXmlRegistry;
import org.eclipse.kapua.service.authorization.access.AccessRole;
import org.eclipse.kapua.service.authorization.access.AccessRoleCreator;
import org.eclipse.kapua.service.authorization.access.AccessRoleListResult;
import org.eclipse.kapua.service.authorization.access.AccessRoleQuery;
import org.eclipse.kapua.service.authorization.access.AccessRoleXmlRegistry;
import org.eclipse.kapua.service.authorization.group.Group;
import org.eclipse.kapua.service.authorization.group.GroupCreator;
import org.eclipse.kapua.service.authorization.group.GroupListResult;
import org.eclipse.kapua.service.authorization.group.GroupQuery;
import org.eclipse.kapua.service.authorization.group.GroupXmlRegistry;
import org.eclipse.kapua.service.authorization.role.Role;
import org.eclipse.kapua.service.authorization.role.RoleCreator;
import org.eclipse.kapua.service.authorization.role.RoleListResult;
import org.eclipse.kapua.service.authorization.role.RolePermission;
import org.eclipse.kapua.service.authorization.role.RolePermissionCreator;
import org.eclipse.kapua.service.authorization.role.RolePermissionListResult;
import org.eclipse.kapua.service.authorization.role.RolePermissionQuery;
import org.eclipse.kapua.service.authorization.role.RolePermissionXmlRegistry;
import org.eclipse.kapua.service.authorization.role.RoleQuery;
import org.eclipse.kapua.service.authorization.role.RoleXmlRegistry;
import org.eclipse.kapua.service.device.call.kura.model.configuration.KuraDeviceConfiguration;
import org.eclipse.kapua.service.device.call.kura.model.deploy.KuraDeploymentPackage;
import org.eclipse.kapua.service.device.call.kura.model.deploy.KuraDeploymentPackages;
import org.eclipse.kapua.service.device.call.kura.model.snapshot.KuraSnapshotIds;
import org.eclipse.kapua.service.device.management.command.DeviceCommandInput;
import org.eclipse.kapua.service.device.management.command.DeviceCommandOutput;
import org.eclipse.kapua.service.device.management.command.DeviceCommandXmlRegistry;
import org.eclipse.kapua.service.device.management.configuration.DeviceComponentConfiguration;
import org.eclipse.kapua.service.device.management.configuration.DeviceConfiguration;
import org.eclipse.kapua.service.device.management.configuration.DeviceConfigurationXmlRegistry;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackage;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackageBundleInfo;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackageBundleInfos;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackageXmlRegistry;
import org.eclipse.kapua.service.device.management.packages.model.DevicePackages;
import org.eclipse.kapua.service.device.management.packages.model.download.DevicePackageDownloadRequest;
import org.eclipse.kapua.service.device.management.packages.model.uninstall.DevicePackageUninstallRequest;
import org.eclipse.kapua.service.device.management.snapshot.DeviceSnapshot;
import org.eclipse.kapua.service.device.management.snapshot.DeviceSnapshotXmlRegistry;
import org.eclipse.kapua.service.device.management.snapshot.DeviceSnapshots;
import org.eclipse.kapua.service.device.registry.Device;
import org.eclipse.kapua.service.device.registry.DeviceCreator;
import org.eclipse.kapua.service.device.registry.DeviceListResult;
import org.eclipse.kapua.service.device.registry.DeviceQuery;
import org.eclipse.kapua.service.device.registry.DeviceXmlRegistry;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnection;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnectionListResult;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnectionQuery;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnectionSummary;
import org.eclipse.kapua.service.device.registry.connection.DeviceConnectionXmlRegistry;
import org.eclipse.kapua.service.device.registry.event.DeviceEvent;
import org.eclipse.kapua.service.device.registry.event.DeviceEventListResult;
import org.eclipse.kapua.service.device.registry.event.DeviceEventQuery;
import org.eclipse.kapua.service.device.registry.event.DeviceEventXmlRegistry;
import org.eclipse.kapua.service.user.User;
import org.eclipse.kapua.service.user.UserCreator;
import org.eclipse.kapua.service.user.UserListResult;
import org.eclipse.kapua.service.user.UserQuery;
import org.eclipse.kapua.service.user.UserXmlRegistry;
import org.eclipse.persistence.jaxb.JAXBContextFactory;
/**
 * Provide a customized JAXBContext that makes the concrete implementations
 * known and available for marshalling.
 * <p>
 * The context is built once, eagerly, in the constructor; {@link #getContext(Class)}
 * hands out that single shared instance for every requested type.
 */
@Provider
@Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
public class JaxbContextResolver implements ContextResolver<JAXBContext> {

    // Shared context; JAXBContext instances are thread-safe once created.
    private JAXBContext jaxbContext;

    // Builds the context with every bound model class. Any failure is wrapped
    // in a RuntimeException so the provider fails fast at instantiation.
    public JaxbContextResolver() {
        try {
            jaxbContext = JAXBContextFactory.createContext(new Class[] {
                    // REST API stuff
                    ErrorBean.class,
                    CountResult.class,
                    // Account
                    Account.class,
                    AccountCreator.class,
                    AccountListResult.class,
                    AccountQuery.class,
                    AccountXmlRegistry.class,
                    User.class,
                    UserCreator.class,
                    UserListResult.class,
                    UserQuery.class,
                    UserXmlRegistry.class,
                    // Device
                    Device.class,
                    DeviceCreator.class,
                    DeviceListResult.class,
                    DeviceQuery.class,
                    DeviceXmlRegistry.class,
                    // Device Connection
                    DeviceConnection.class,
                    DeviceConnectionListResult.class,
                    DeviceConnectionQuery.class,
                    DeviceConnectionXmlRegistry.class,
                    // Device Event
                    DeviceEvent.class,
                    DeviceEventListResult.class,
                    DeviceEventQuery.class,
                    DeviceEventXmlRegistry.class,
                    DeviceCommandInput.class,
                    DeviceCommandXmlRegistry.class,
                    DeviceCommandOutput.class,
                    DeviceConfiguration.class,
                    DeviceComponentConfiguration.class,
                    DeviceConfigurationXmlRegistry.class,
                    DeviceSnapshot.class,
                    DeviceSnapshots.class,
                    DeviceSnapshotXmlRegistry.class,
                    DevicePackage.class,
                    DevicePackages.class,
                    DevicePackageBundleInfo.class,
                    DevicePackageBundleInfos.class,
                    DevicePackageXmlRegistry.class,
                    DevicePackageDownloadRequest.class,
                    DevicePackageUninstallRequest.class,
                    KuraSnapshotIds.class,
                    KuraDeviceConfiguration.class,
                    KuraDeploymentPackages.class,
                    KuraDeploymentPackage.class,
                    KapuaTocd.class,
                    KapuaTad.class,
                    KapuaTicon.class,
                    KapuaToption.class,
                    DeviceConnectionSummary.class,
                    // NOTE(review): DeviceConnectionXmlRegistry is already registered
                    // above (Device Connection section); this second entry is
                    // redundant, though harmless to JAXB.
                    DeviceConnectionXmlRegistry.class,
                    AuthenticationCredentials.class,
                    AuthenticationXmlRegistry.class,
                    AccessTokenImpl.class,
                    ApiKeyCredentials.class,
                    JwtCredentials.class,
                    UsernamePasswordCredentials.class,
                    // Credential
                    Credential.class,
                    CredentialListResult.class,
                    CredentialCreator.class,
                    CredentialType.class,
                    CredentialQuery.class,
                    CredentialXmlRegistry.class,
                    // Permission
                    // NOTE(review): per the imports this is java.security.acl.Permission,
                    // not a Kapua authorization Permission type — confirm this is intended.
                    Permission.class,
                    // Roles
                    Role.class,
                    RoleListResult.class,
                    RoleCreator.class,
                    RoleQuery.class,
                    RoleXmlRegistry.class,
                    // Role Permissions
                    RolePermission.class,
                    RolePermissionListResult.class,
                    RolePermissionCreator.class,
                    RolePermissionQuery.class,
                    RolePermissionXmlRegistry.class,
                    // Groups
                    Group.class,
                    GroupListResult.class,
                    GroupCreator.class,
                    GroupQuery.class,
                    GroupXmlRegistry.class,
                    // Access Info
                    AccessInfo.class,
                    AccessInfoListResult.class,
                    AccessInfoCreator.class,
                    AccessInfoQuery.class,
                    AccessInfoXmlRegistry.class,
                    // Access Permissions
                    AccessPermission.class,
                    AccessPermissionListResult.class,
                    AccessPermissionCreator.class,
                    AccessPermissionQuery.class,
                    AccessPermissionXmlRegistry.class,
                    // Access Roles
                    AccessRole.class,
                    AccessRoleListResult.class,
                    AccessRoleCreator.class,
                    AccessRoleQuery.class,
                    AccessRoleXmlRegistry.class
            }, null);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Returns the pre-built shared context regardless of the requested type.
    @Override
    public JAXBContext getContext(Class<?> type) {
        return jaxbContext;
    }
}
| Updated JAXB context resolver
Signed-off-by: coduz <[email protected]>
| rest-api/src/main/java/org/eclipse/kapua/app/api/JaxbContextResolver.java | Updated JAXB context resolver |
|
Java | epl-1.0 | 14bf8b8e8a4f2014cdef244166d284c2e54a4b0a | 0 | JimMa0312/PongGame | package pong.view;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.ResourceBundle;
import javafx.animation.AnimationTimer;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.image.Image;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.BorderPane;
import javafx.scene.shape.Circle;
import javafx.scene.shape.Rectangle;
import pong.model.Ball;
import pong.model.Input;
import pong.model.Player;
import pong.model.Setting;
import pong.model.SpriteBase;
import pong.model.en.viewResources;
/**
 * Controller for the outer (main) game view: handles the menu actions,
 * pre-renders the paddle and ball sprite images, spawns the actors and
 * drives the per-frame game loop.
 */
public class OutlayerController implements Initializable, ControlledStage {

	// NOTE(review): not referenced anywhere in this controller — confirm it is
	// still needed (e.g. for future randomized spawning) before removing.
	Random random = new Random();

	@FXML
	AnchorPane gamePane;

	// Sprite images, rendered once in loadGame() and reused for every actor.
	Image playerImage;
	Image ballImage;

	// Live actors; mutated by the game loop each frame.
	List<Player> players = new ArrayList<>();
	List<Ball> balls = new ArrayList<>();

	private StageController myStageController;

	@FXML
	private BorderPane primaryPane;

	/** Called by the stage framework to hand this controller its stage manager. */
	@Override
	public void setStageController(StageController stageController) {
		this.myStageController = stageController;
	}

	@Override
	public void initialize(URL arg0, ResourceBundle arg1) {
	}

	/** Menu action: exits the whole application. */
	// NOTE(review): method name has a typo ("CLose"); kept because it must match
	// the onAction binding in the FXML file.
	@FXML
	private void handleCLoseAllStage() {
		System.exit(0);
	}

	/**
	 * Menu action: renders the sprite images, creates the player and ball,
	 * and starts the AnimationTimer that runs the game loop once per frame.
	 */
	@FXML
	private void handleStartGame() {
		System.out.println("Game Start");

		loadGame();
		createPlayers();
		spawnBalls();

		AnimationTimer gameloop = new AnimationTimer() {

			@Override
			public void handle(long now) {
				// Input / movement phase.
				players.forEach(Player::processInput);
				players.forEach(Player::checkRemovability);
				players.forEach(Player::move);
				balls.forEach(Ball::move);

				// Collision phase: may bounce balls and update scores.
				checkCollosons();

				// Render phase.
				players.forEach(Player::updateUI);
				balls.forEach(Ball::updateUI);

				// Cleanup phase: drop any ball flagged as removable.
				balls.forEach(Ball::checkRemovability);
				removeSprites(balls);
			}
		};
		gameloop.start();
	}

	/** Menu action: switches to the rank-list stage. */
	@FXML
	private void handleShowRankList() {
		myStageController.setStage(viewResources.rankList.getName());
	}

	/*
	 * Load the game assets: render the paddle rectangle and the ball circle
	 * off-screen and store the resulting pixels as reusable Images.
	 */
	private void loadGame() {
		WritableImage wImage;

		// Paddle image.
		double w = Setting.PADDLE_WIDTH;
		double h = Setting.PADDLE_HEIGHT;
		Rectangle rectangle = new Rectangle(w, h);
		wImage = new WritableImage((int) w, (int) h);
		rectangle.snapshot(null, wImage);
		playerImage = wImage;

		// Ball image (the snapshot canvas is the circle's bounding square).
		double r = Setting.BALL_RADIUS;
		Circle circle = new Circle(r);
		wImage = new WritableImage((int) r * 2, (int) r * 2);
		circle.snapshot(null, wImage);
		ballImage = wImage;
	}

	/** Creates the initial set of players (currently a single local player). */
	private void createPlayers() {
		players.add(createPlayer());
	}

	/**
	 * Builds the player paddle, centered horizontally near the bottom of the
	 * pane, and registers its keyboard listeners on the game scene.
	 *
	 * @return the newly created, input-wired player
	 */
	private Player createPlayer() {
		Input input = new Input(myStageController.getStage(viewResources.ouLayer.getName()).getScene());
		// Without this registration the Input never receives key events.
		input.addListeners();

		Image image = playerImage;
		double x = (Setting.PANE_WIDTH - image.getWidth()) * 0.5;
		double y = Setting.PANE_HEIGHT - Setting.PADDLE_OFFSET_y;

		return new Player(playerImage, gamePane, x, y, 0, 0, 0, 0, 1, 0, input, Setting.PADDLE_SPEED);
	}

	/** Ensures at least one ball is in play. */
	private void spawnBalls() {
		if (balls.isEmpty()) {
			createBall();
		}
	}

	/** Creates a ball centered in the free area above the paddle. */
	private void createBall() {
		Image image = ballImage;
		double x = (Setting.PANE_WIDTH - 2 * image.getWidth()) * 0.5;
		double y = (Setting.PANE_HEIGHT - playerImage.getHeight() - 2 * image.getHeight()) * 0.5;

		balls.add(new Ball(image, gamePane, x, y, 0, 0, Setting.BALL_SPEED, 0, 1, 1));
	}

	/**
	 * Bounces every ball off any paddle it touches and awards one point per hit.
	 * NOTE(review): the name is misspelled ("Collosons" vs "Collisions") but is
	 * kept for compatibility with potential subclasses overriding this
	 * protected method.
	 */
	protected void checkCollosons() {
		for (Player player : players) {
			for (Ball ball : balls) {
				if (player.collidesWith(ball)) {
					ball.bounceOff(player);
					player.addScore(1);
				}
			}
		}
	}

	/**
	 * Removes every sprite flagged as removable from the list, detaching it
	 * from its JavaFX layer first.
	 */
	private void removeSprites(List<? extends SpriteBase> spriteList) {
		Iterator<? extends SpriteBase> iter = spriteList.iterator();
		while (iter.hasNext()) {
			SpriteBase spriteBase = iter.next();
			if (spriteBase.isRemovable()) {
				spriteBase.removeFromLayer();
				iter.remove();
			}
		}
	}
}
| src/pong/view/OutlayerController.java | package pong.view;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.ResourceBundle;
import javafx.animation.AnimationTimer;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.image.Image;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.BorderPane;
import javafx.scene.shape.Circle;
import javafx.scene.shape.Rectangle;
import pong.model.Ball;
import pong.model.Input;
import pong.model.Player;
import pong.model.Setting;
import pong.model.SpriteBase;
import pong.model.en.viewResources;
/**
 * Controller for the outer (main) game view: handles the menu actions,
 * pre-renders the paddle and ball sprite images, spawns the actors and
 * drives the per-frame game loop.
 */
public class OutlayerController implements Initializable, ControlledStage {

	// NOTE(review): not referenced anywhere in this controller — confirm before removing.
	Random random = new Random();

	@FXML
	AnchorPane gamePane;

	// Sprite images, rendered once in loadGame() and reused for every actor.
	Image playerImage;
	Image ballImage;

	// Live actors; mutated by the game loop each frame.
	List<Player> players = new ArrayList<>();
	List<Ball> balls = new ArrayList<>();

	private StageController myStageController;

	@FXML
	private BorderPane primaryPane;

	// Called by the stage framework to hand this controller its stage manager.
	@Override
	public void setStageController(StageController stageController) {
		this.myStageController = stageController;
	}

	@Override
	public void initialize(URL arg0, ResourceBundle arg1) {
	}

	// Menu action: exits the whole application.
	@FXML
	private void handleCLoseAllStage() {
		System.exit(0);
	}

	// Menu action: renders the sprites, spawns the actors and starts the
	// AnimationTimer that runs the game loop once per frame.
	@FXML
	private void handleStartGame() {
		System.out.println("Game Start");
		loadGame();
		createPlayers();
		spawnBalls();
		AnimationTimer gameloop = new AnimationTimer() {
			@Override
			public void handle(long now) {
				// Input/movement phase, then collisions, then rendering and cleanup.
				players.forEach(sprite->sprite.processInput());
				players.forEach(sprite->sprite.checkRemovability());
				players.forEach(sprite->sprite.move());
				balls.forEach(sprite -> sprite.move());
				checkCollosons();
				players.forEach(sprite->sprite.updateUI());
				balls.forEach(sprite->sprite.updateUI());
				balls.forEach(sprite->sprite.checkRemovability());
				removeSprites(balls);
			}
		};
		gameloop.start();
	}

	// Menu action: switches to the rank-list stage.
	@FXML
	private void handleShowRankList() {
		myStageController.setStage(viewResources.rankList.getName());
	}

	/*
	 * Load the game assets: render the paddle rectangle and the ball circle
	 * off-screen and store the resulting pixels as reusable Images.
	 */
	private void loadGame() {
		WritableImage wImage;
		// Paddle image.
		double w = Setting.PADDLE_WIDTH;
		double h = Setting.PADDLE_HEIGHT;
		Rectangle rectangle = new Rectangle(w, h);
		wImage = new WritableImage((int) w, (int) h);
		rectangle.snapshot(null, wImage);
		playerImage = wImage;
		// Ball image (the snapshot canvas is the circle's bounding square).
		double r = Setting.BALL_RADIUS;
		Circle circle = new Circle(r);
		wImage = new WritableImage((int) r * 2, (int) r * 2);
		circle.snapshot(null, wImage);
		ballImage = wImage;
	}

	// Creates the initial set of players (currently a single local player).
	private void createPlayers(){
		players.add(createPlayer());
	}

	// Builds the player paddle, centered horizontally near the bottom of the pane.
	// NOTE(review): an Input is constructed here but addListeners() is never
	// called on it — if Input requires explicit listener registration, key
	// events will never reach this paddle. Confirm against the Input class.
	private Player createPlayer() {
		Input input = new Input(myStageController.getStage(viewResources.ouLayer.getName()).getScene());
		Image image = playerImage;
		double x = (Setting.PANE_WIDTH - image.getWidth()) * 0.5;
		double y = Setting.PANE_HEIGHT - Setting.PADDLE_OFFSET_y;
		Player player = new Player(playerImage, gamePane, x, y, 0, 0, 0, 0, 1, 0, input, Setting.PADDLE_SPEED);
		return player;
	}

	// Ensures at least one ball is in play.
	private void spawnBalls() {
		if (balls.size() == 0) {
			createBall();
		}
	}

	// Creates a ball centered in the free area above the paddle.
	private void createBall() {
		Image image = ballImage;
		double x = (Setting.PANE_WIDTH - 2 * image.getWidth()) * 0.5;
		double y = (Setting.PANE_HEIGHT - playerImage.getHeight() - 2 * image.getHeight()) * 0.5;
		Ball ball = new Ball(image, gamePane, x, y, 0, 0, Setting.BALL_SPEED, 0, 1, 1);
		balls.add(ball);
	}

	// Bounces every ball off any paddle it touches and awards one point per hit.
	// (Name misspelled — "Collosons" vs "Collisions" — kept as-is.)
	protected void checkCollosons() {
		for (Player player : players) {
			for (Ball ball : balls) {
				if (player.collidesWith(ball)) {
					ball.bounceOff(player);
					player.addScore(1);
				}
			}
		}
	}

	// Removes every sprite flagged as removable, detaching it from its layer first.
	private void removeSprites(List<? extends SpriteBase> spriteList) {
		Iterator<? extends SpriteBase> iter = spriteList.iterator();
		while (iter.hasNext()) {
			SpriteBase spriteBase = iter.next();
			if (spriteBase.isRemovable()) {
				spriteBase.removeFromLayer();
				iter.remove();
			}
		}
	}
}
| 针对Input添加Lisener,解决问题. | src/pong/view/OutlayerController.java | 针对Input添加Lisener,解决问题. |
|
Java | epl-1.0 | 9a3a42aef7562c61ed0da2ccb8fc2e7b9d9abb69 | 0 | ivannov/core,oscerd/core,forge/core,oscerd/core,jerr/jbossforge-core,agoncal/core,agoncal/core,oscerd/core,D9110/core,jerr/jbossforge-core,pplatek/core,oscerd/core,D9110/core,D9110/core,agoncal/core,forge/core,agoncal/core,pplatek/core,D9110/core,pplatek/core,oscerd/core,D9110/core,pplatek/core,forge/core,agoncal/core,pplatek/core,forge/core,jerr/jbossforge-core,pplatek/core,forge/core,agoncal/core,oscerd/core,D9110/core,pplatek/core,pplatek/core,D9110/core,ivannov/core,pplatek/core,ivannov/core,pplatek/core,jerr/jbossforge-core,forge/core,ivannov/core,jerr/jbossforge-core,ivannov/core,ivannov/core,oscerd/core,forge/core,forge/core,oscerd/core,forge/core,agoncal/core,ivannov/core,jerr/jbossforge-core,D9110/core,ivannov/core,agoncal/core,D9110/core,agoncal/core,jerr/jbossforge-core,jerr/jbossforge-core,jerr/jbossforge-core,jerr/jbossforge-core,ivannov/core,agoncal/core,D9110/core,oscerd/core,oscerd/core,forge/core,ivannov/core | package org.jboss.forge.addon.parser.java;
/*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Eclipse Public License version 1.0, available at
* http://www.eclipse.org/legal/epl-v10.html
*/
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import java.io.File;
import javax.inject.Inject;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.forge.addon.parser.java.resources.JavaResource;
import org.jboss.forge.addon.resource.Resource;
import org.jboss.forge.addon.resource.ResourceFactory;
import org.jboss.forge.arquillian.AddonDependency;
import org.jboss.forge.arquillian.Dependencies;
import org.jboss.forge.arquillian.archive.ForgeArchive;
import org.jboss.forge.furnace.repositories.AddonDependencyEntry;
import org.jboss.forge.roaster.Roaster;
import org.jboss.forge.roaster.model.source.JavaClassSource;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(Arquillian.class)
public class JavaParserResourcesTest
{
   /**
    * Builds the test deployment: a Forge archive wired against the CDI
    * container, the Java parser addon and the resources addon.
    */
   @Deployment
   @Dependencies({
            @AddonDependency(name = "org.jboss.forge.furnace.container:cdi"),
            @AddonDependency(name = "org.jboss.forge.addon:resources"),
            @AddonDependency(name = "org.jboss.forge.addon:parser-java")
   })
   public static ForgeArchive getDeployment()
   {
      return ShrinkWrap
               .create(ForgeArchive.class)
               .addBeansXML()
               .addAsAddonDependencies(
                        AddonDependencyEntry.create("org.jboss.forge.furnace.container:cdi"),
                        AddonDependencyEntry.create("org.jboss.forge.addon:parser-java"),
                        AddonDependencyEntry.create("org.jboss.forge.addon:resources")
               );
   }

   @Inject
   private ResourceFactory factory;

   /** Writing a JavaClassSource into a JavaResource exposes the type's name and package. */
   @Test
   public void testJavaResourceCreation() throws Exception
   {
      JavaResource target = factory.create(JavaResource.class, File.createTempFile("forge", ".java"));
      target.createNewFile();

      JavaClassSource source = Roaster.create(JavaClassSource.class)
               .setPackage("org.jboss.forge.test")
               .setName("Example");
      target.setContents(source);

      Assert.assertEquals("Example", target.getJavaType().getName());
      Assert.assertEquals("org.jboss.forge.test", target.getJavaType().getPackage());
   }

   /** Resolving the same underlying file again yields an equal, specialized JavaResource. */
   @Test
   public void testJavaResourceCreationSpecialized() throws Exception
   {
      JavaResource original = factory.create(JavaResource.class, File.createTempFile("forge", ".java"));
      original.createNewFile();
      original.setContents(Roaster.create(JavaClassSource.class)
               .setPackage("org.jboss.forge.test")
               .setName("Example"));

      Resource<File> reloaded = factory.create(original.getUnderlyingResourceObject());

      Assert.assertThat(reloaded, is(instanceOf(JavaResource.class)));
      Assert.assertEquals(original, reloaded);
   }
}
| parser-java/tests/src/test/java/org/jboss/forge/addon/parser/java/JavaParserResourcesTest.java | package org.jboss.forge.addon.parser.java;
/*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Eclipse Public License version 1.0, available at
* http://www.eclipse.org/legal/epl-v10.html
*/
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import java.io.File;
import javax.inject.Inject;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.forge.addon.parser.java.resources.JavaResource;
import org.jboss.forge.addon.resource.Resource;
import org.jboss.forge.addon.resource.ResourceFactory;
import org.jboss.forge.arquillian.AddonDependency;
import org.jboss.forge.arquillian.Dependencies;
import org.jboss.forge.arquillian.archive.ForgeArchive;
import org.jboss.forge.furnace.repositories.AddonDependencyEntry;
import org.jboss.forge.roaster.Roaster;
import org.jboss.forge.roaster.model.source.JavaClassSource;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(Arquillian.class)
public class JavaParserResourcesTest
{
   /**
    * Builds the Forge addon deployment archive with the CDI container,
    * projects, parser-java and resources addon dependencies.
    */
   @Deployment
   @Dependencies({
            @AddonDependency(name = "org.jboss.forge.furnace.container:cdi"),
            @AddonDependency(name = "org.jboss.forge.addon:resources"),
            @AddonDependency(name = "org.jboss.forge.addon:projects"),
            @AddonDependency(name = "org.jboss.forge.addon:parser-java")
   })
   public static ForgeArchive getDeployment()
   {
      ForgeArchive archive = ShrinkWrap
               .create(ForgeArchive.class)
               .addBeansXML()
               .addAsAddonDependencies(
                        AddonDependencyEntry.create("org.jboss.forge.furnace.container:cdi"),
                        AddonDependencyEntry.create("org.jboss.forge.addon:projects"),
                        AddonDependencyEntry.create("org.jboss.forge.addon:parser-java"),
                        AddonDependencyEntry.create("org.jboss.forge.addon:resources")
               );
      return archive;
   }

   // Container-injected factory used to create resource instances.
   @Inject
   private ResourceFactory factory;

   @Test
   public void testJavaResourceCreation() throws Exception
   {
      // Write an in-memory Java class into a JavaResource and verify the
      // parsed type reflects the same name and package.
      JavaClassSource javaClass = Roaster.create(JavaClassSource.class).setPackage("org.jboss.forge.test")
               .setName("Example");
      File tempFile = File.createTempFile("forge", ".java");
      tempFile.deleteOnExit(); // avoid leaking temp files across test runs
      JavaResource resource = factory.create(JavaResource.class, tempFile);
      resource.createNewFile();
      resource.setContents(javaClass);
      Assert.assertEquals("Example", resource.getJavaType().getName());
      Assert.assertEquals("org.jboss.forge.test", resource.getJavaType().getPackage());
   }

   @Test
   public void testJavaResourceCreationSpecialized() throws Exception
   {
      // Re-resolving the underlying File through the factory must yield the
      // specialized JavaResource type and an equal resource.
      JavaClassSource javaClass = Roaster.create(JavaClassSource.class).setPackage("org.jboss.forge.test")
               .setName("Example");
      File tempFile = File.createTempFile("forge", ".java");
      tempFile.deleteOnExit(); // avoid leaking temp files across test runs
      JavaResource resource = factory.create(JavaResource.class, tempFile);
      resource.createNewFile();
      resource.setContents(javaClass);
      Resource<File> newResource = factory.create(resource.getUnderlyingResourceObject());
      Assert.assertThat(newResource, is(instanceOf(JavaResource.class)));
      Assert.assertEquals(resource, newResource);
   }
}
| Revert "Fixing test JavaParserResourcesTest after the maven-optional cleanup"
This reverts commit 7f76c845525fce389cd20597b2df21516d41792f.
| parser-java/tests/src/test/java/org/jboss/forge/addon/parser/java/JavaParserResourcesTest.java | Revert "Fixing test JavaParserResourcesTest after the maven-optional cleanup" |
|
Java | mpl-2.0 | d547ad358a880ef01f778f5bba470672f2972540 | 0 | JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core | /*************************************************************************
*
* OpenOffice.org - a multi-platform office productivity suite
*
* $RCSfile: LinuxHelper.java,v $
*
* $Revision: 1.2 $
*
* last change: $Author: rt $ $Date: 2007-07-03 11:54:40 $
*
* The Contents of this file are made available subject to
* the terms of GNU Lesser General Public License Version 2.1.
*
*
* GNU Lesser General Public License Version 2.1
* =============================================
* Copyright 2005 by Sun Microsystems, Inc.
* 901 San Antonio Road, Palo Alto, CA 94303, USA
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License version 2.1, as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*
************************************************************************/
package org.openoffice.setup.InstallerHelper;
import org.openoffice.setup.InstallData;
import org.openoffice.setup.SetupData.PackageDescription;
import org.openoffice.setup.Util.Converter;
import org.openoffice.setup.Util.ExecuteProcess;
import org.openoffice.setup.Util.LogManager;
import org.openoffice.setup.Util.SystemManager;
import java.io.File;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Vector;public class LinuxHelper {
    /** Creates a stateless helper; all methods operate only on passed-in data. */
    public LinuxHelper() {
        super();
    }
private void getPackageNamesContent(PackageDescription packageData, Vector packageNames) {
if (( packageData.getPackageName() != null ) && ( ! packageData.getPackageName().equals(""))) {
packageNames.add(packageData.getPackageName() + "=" + packageData.getFullPackageName());
}
for (Enumeration e = packageData.children(); e.hasMoreElements(); ) {
PackageDescription child = (PackageDescription) e.nextElement();
getPackageNamesContent(child, packageNames);
}
}
private String getPackageNameFromRpm(PackageDescription packageData, InstallData installData) {
String fullPackageName = null;
String packagePath = installData.getPackagePath();
if (( packageData.getPkgSubdir() != null ) && ( ! packageData.getPkgSubdir().equals("") )) {
File completePackageFile = new File(packagePath, packageData.getPkgSubdir());
packagePath = completePackageFile.getPath();
}
String rpmFileName = packageData.getPackageName();
File rpmFile = new File(packagePath, rpmFileName);
if ( rpmFile.exists() ) {
String rpmCommand = "rpm -qp " + rpmFile.getPath();
String[] rpmCommandArray = new String[3];
rpmCommandArray[0] = "rpm";
rpmCommandArray[1] = "-qp";
rpmCommandArray[2] = rpmFile.getPath();
Vector returnVector = new Vector();
Vector returnErrorVector = new Vector();
int returnValue = ExecuteProcess.executeProcessReturnVector(rpmCommandArray, returnVector, returnErrorVector);
String returnString = (String) returnVector.get(0);
String log = rpmCommand + "<br><b>Returns: " + returnString + "</b><br>";
LogManager.addCommandsLogfileComment(log);
fullPackageName = returnString;
} else {
System.err.println("Error: Could not find file " + rpmFile.getPath());
}
return fullPackageName;
}
private boolean checkPackageExistence(PackageDescription packageData, InstallData installData) {
boolean fileExists = false;
String packagePath = installData.getPackagePath();
if (( packageData.getPkgSubdir() != null ) && ( ! packageData.getPkgSubdir().equals("") )) {
File completePackageFile = new File(packagePath, packageData.getPkgSubdir());
packagePath = completePackageFile.getPath();
}
String rpmFileName = packageData.getPackageName();
File rpmFile = new File(packagePath, rpmFileName);
if ( rpmFile.exists() ) {
fileExists = true;
}
return fileExists;
}
    /**
     * Splits an rpm version string into Integer components stored in a HashMap
     * under the keys "major", "minor", "micro" and "release".
     *
     * Handles "2.0.3" style versions as well as jre versions like "1.5.0_06",
     * where the part after "_" becomes the release. Versions with fewer than
     * three segments ("2.3", "2") are first padded with ".0" segments.
     *
     * On any parse error all four values are set to null, which makes
     * compareTwoRpms() always report "not older".
     */
    private HashMap analyzeVersionString(String versionString) {
        boolean errorOccured = false;
        Integer micro = null;
        Integer minor = null;
        Integer major = null;
        Integer release = null;
        String microString = null;
        String minorString = null;
        String majorString = null;
        String releaseString = null;
        int pos = versionString.lastIndexOf("_"); // this is a jre RPM (1.5.0_06)
        if ( pos > -1 ) {
            try {
                releaseString = versionString.substring(pos+1, versionString.length());
                versionString = versionString.substring(0, pos);
            } catch (IndexOutOfBoundsException ex) {
                System.err.println("Error: Could not get substring from " + versionString);
                errorOccured = true;
            }
            try {
                int releaseInt = Integer.parseInt(releaseString);
                release = new Integer(releaseInt);
            } catch (NumberFormatException ex) {
                System.err.println("Error: Could not convert " + releaseString + " to integer");
                errorOccured = true;
            }
        }
        // Problem: Some rpms have "2.3" instead of "2.3.0"
        // -> pad to exactly three dot-separated segments before the split below
        String compareString = versionString;
        pos = compareString.lastIndexOf("."); // returns "-1", if not found
        if ( pos > -1 ) {
            String substring = compareString.substring(0, pos);
            pos = substring.lastIndexOf("."); // returns "-1", if not found
            if ( pos == -1 ) {
                versionString = versionString + ".0";
                // System.err.println("Warning: Changing from " + compareString + " to " + versionString);
            }
        } else {
            versionString = versionString + ".0.0";
        }
        // the standard analyzing mechanism: strip micro first, then split major/minor
        pos = versionString.lastIndexOf("."); // returns "-1", if not found
        if ( pos > -1 )
        {
            try {
                microString = versionString.substring(pos+1, versionString.length());
                versionString = versionString.substring(0, pos);
            } catch (IndexOutOfBoundsException ex) {
                System.err.println("Error: Could not get substring from " + versionString);
                errorOccured = true;
            }
            pos = versionString.lastIndexOf(".");
            if ( pos > -1 ) {
                try {
                    minorString = versionString.substring(pos+1, versionString.length());
                    majorString = versionString.substring(0, pos);
                } catch (IndexOutOfBoundsException ex) {
                    System.err.println("Error: Could not get substring from " + versionString);
                    errorOccured = true;
                }
                try {
                    int microInt = Integer.parseInt(microString);
                    int minorInt = Integer.parseInt(minorString);
                    int majorInt = Integer.parseInt(majorString);
                    micro = new Integer(microInt);
                    minor = new Integer(minorInt);
                    major = new Integer(majorInt);
                } catch (NumberFormatException ex) {
                    System.err.println("Error: Could not convert " + microString + "," +
                            minorString + " or " + majorString + " to integer");
                    errorOccured = true;
                }
            }
        }
        // if ( microString == null ) { microString = ""; }
        // if ( majorString == null ) { majorString = ""; }
        // if ( releaseString == null ) { releaseString = ""; }
        // if ( minorString == null ) { minorString = ""; }
        // System.err.println("Major " + majorString + " Minor: " + minorString + " Micro: " + microString + " Release: " + releaseString);
        if ( errorOccured ) {
            // invalidate everything so callers cannot act on a half-parsed version
            micro = null;
            minor = null;
            major = null;
            release = null;
        }
        HashMap hashRpm = new HashMap();
        hashRpm.put("micro", micro);
        hashRpm.put("minor", minor);
        hashRpm.put("major", major);
        hashRpm.put("release", release);
        // If one of this values is "null", procedure "compareTwoRpms" always delivers false.
        // This means, that the installed package is not older.
        // System.err.println("Analyzed: " + "micro: " + hashRpm.get("micro").toString() + " minor: " + hashRpm.get("minor").toString() + " major: " + hashRpm.get("major").toString());
        return hashRpm;
    }
private HashMap analyzeReleaseString(HashMap hashRpm, String releaseString) {
int release;
try {
release = Integer.parseInt(releaseString);
Integer releaseObj = new Integer(release);
hashRpm.put("release", releaseObj);
}
catch (NumberFormatException ex) {
// JRE often contain a string like "FCS"
// System.err.println("Error: Could not convert " + releaseString + " to integer");
hashRpm.put("release", null);
}
return hashRpm;
}
private boolean compareTwoRpms(HashMap hash1, HashMap hash2) {
boolean hash1IsOlder = false;
if (( hash1.get("major") != null ) && ( hash2.get("major") != null )) {
if ( ((Integer)hash1.get("major")).intValue() < ((Integer)hash2.get("major")).intValue() ) {
hash1IsOlder = true;
} else {
if (( hash1.get("minor") != null ) && ( hash2.get("minor") != null )) {
if ( ((Integer)hash1.get("minor")).intValue() < ((Integer)hash2.get("minor")).intValue() ) {
hash1IsOlder = true;
} else {
if (( hash1.get("micro") != null ) && ( hash2.get("micro") != null )) {
if ( ((Integer)hash1.get("micro")).intValue() < ((Integer)hash2.get("micro")).intValue() ) {
hash1IsOlder = true;
} else {
if (( hash1.get("release") != null ) && ( hash2.get("release") != null )) {
if ( ((Integer)hash1.get("release")).intValue() < ((Integer)hash2.get("release")).intValue() ) {
hash1IsOlder = true;
}
}
}
}
}
}
}
}
return hash1IsOlder;
}
public boolean compareVersionAndRelease(String versionString, String releaseString, PackageDescription packageData, boolean checkIfInstalledIsOlder) {
// version and release are gotten from the rpm database. packageData contains
// the information about the rpm, that shall be installed. It has to be installed,
// if the installed product defined by version and release is older.
// version is something like "2.0.3", release something like "164".
// An exception is the jre package, where version is "1.5.0_06" and release "fcs".
HashMap installedRpm = analyzeVersionString(versionString);
if ( installedRpm.get("release") == null ) {
installedRpm = analyzeReleaseString(installedRpm, releaseString);
}
// System.err.println("Package: " + packageData.getPackageName());
// String outputString = "Installed RPM: ";
// if ( installedRpm.get("major") != null ) { outputString = outputString + " major: " + installedRpm.get("major").toString(); }
// else { outputString = outputString + " major is null"; }
// if ( installedRpm.get("minor") != null ) { outputString = outputString + " minor: " + installedRpm.get("minor").toString(); }
// else { outputString = outputString + " minor is null"; }
// if ( installedRpm.get("micro") != null ) { outputString = outputString + " micro: " + installedRpm.get("micro").toString(); }
// else { outputString = outputString + " micro is null"; }
// if ( installedRpm.get("release") != null ) { outputString = outputString + " release: " + installedRpm.get("release").toString(); }
// else { outputString = outputString + " release is null"; }
// System.err.println(outputString);
HashMap notInstalledRpm = analyzeVersionString(packageData.getPkgVersion());
if ( notInstalledRpm.get("release") == null ) {
notInstalledRpm = analyzeReleaseString(notInstalledRpm, packageData.getPkgRelease());
}
// outputString = "Not installed RPM: ";
// if ( notInstalledRpm.get("major") != null ) { outputString = outputString + " major: " + notInstalledRpm.get("major").toString(); }
// else { outputString = outputString + " major is null"; }
// if ( notInstalledRpm.get("minor") != null ) { outputString = outputString + " minor: " + notInstalledRpm.get("minor").toString(); }
// else { outputString = outputString + " minor is null"; }
// if ( notInstalledRpm.get("micro") != null ) { outputString = outputString + " micro: " + notInstalledRpm.get("micro").toString(); }
// else { outputString = outputString + " micro is null"; }
// if ( notInstalledRpm.get("release") != null ) { outputString = outputString + " release: " + notInstalledRpm.get("release").toString(); }
// else { outputString = outputString + " release is null"; }
// System.err.println(outputString);
boolean firstIsOlder = false;
if ( checkIfInstalledIsOlder ) {
firstIsOlder = compareTwoRpms(installedRpm, notInstalledRpm);
// System.err.println("Result: Installed RPM is older: " + firstIsOlder);
} else {
firstIsOlder = compareTwoRpms(notInstalledRpm, installedRpm);
// System.err.println("Result: Not installed RPM is older: " + firstIsOlder);
}
return firstIsOlder;
}
    /**
     * Recursively ensures every package in the tree knows its full rpm package
     * name. For each package whose rpm file exists, the full name is taken from
     * the xpd data or, when missing there, queried from the rpm file itself;
     * pkgExists is updated accordingly.
     */
    public void getLinuxPackageNamesFromRpmquery(PackageDescription packageData, InstallData installData) {
        if ((packageData.getPackageName() != null) && ( ! packageData.getPackageName().equals(""))) {
            boolean rpmExists = checkPackageExistence(packageData, installData);
            if ( rpmExists ) {
                // Full package name not defined in xpd file
                if (( packageData.getFullPackageName() == null ) || ( packageData.getFullPackageName().equals(""))) {
                    // Now it is possible to query the rpm database for the packageName, if it is not defined in xpd file!
                    String fullPackageName = getPackageNameFromRpm(packageData, installData);
                    if ( fullPackageName != null ) {
                        packageData.setFullPackageName(fullPackageName);
                    } else {
                        System.err.println("Error: Linux package name not defined in xpd file and could not be determined: "
                                + packageData.getPackageName());
                    }
                }
                packageData.setPkgExists(true);
            } else {
                packageData.setPkgExists(false);
            }
        }
        // recurse into all child packages
        for (Enumeration e = packageData.children(); e.hasMoreElements(); ) {
            PackageDescription child = (PackageDescription) e.nextElement();
            getLinuxPackageNamesFromRpmquery(child, installData);
        }
    }
public String getLinuxDatabasePath(InstallData data) {
String databasePath = null;
String installDir = data.getInstallDir();
String databaseDir = SystemManager.getParentDirectory(installDir);
String linuxDatabaseName = ".RPM_OFFICE_DATABASE";
File databaseFile = new File(databaseDir, linuxDatabaseName);
databasePath = databaseFile.getPath();
return databasePath;
}
    /**
     * Recursively splits each package's full package name (e.g.
     * "openoffice-core01-2.0.3-159" = "name-version-release") into real name,
     * version and release. Version and release are only stored during
     * installation, because rpms can be updated outside the GUI installer and
     * the saved values would then be stale at uninstall time.
     */
    public void getLinuxFileInfo(PackageDescription packageData) {
        // analyzing a string like "openoffice-core01-2.0.3-159" as "name-version-release"
        InstallData data = InstallData.getInstance();
        if ( packageData.pkgExists() ) {
            if (( packageData.getFullPackageName() != null ) && ( ! packageData.getFullPackageName().equals(""))) {
                String longName = packageData.getFullPackageName();
                int pos = longName.lastIndexOf("-");
                if (data.isInstallationMode()) {
                    // not saving at uninstallation, because it can be updated without GUI installer
                    packageData.setPkgRelease(longName.substring(pos+1, longName.length()));
                }
                longName = longName.substring(0, pos);
                pos = longName.lastIndexOf("-");
                if (data.isInstallationMode()) {
                    // not saving at uninstallation, because it can be updated without GUI installer
                    packageData.setPkgVersion(longName.substring(pos+1, longName.length()));
                }
                packageData.setPkgRealName(longName.substring(0, pos));
            }
        }
        // recurse into all child packages
        for (Enumeration e = packageData.children(); e.hasMoreElements(); ) {
            PackageDescription child = (PackageDescription) e.nextElement();
            getLinuxFileInfo(child);
        }
    }
public void setFullPackageNameAtUninstall(PackageDescription packageData, HashMap packageNames) {
if (( packageData.getPackageName() != null ) && ( ! packageData.getPackageName().equals(""))) {
if (( packageData.getFullPackageName() == null ) || ( packageData.getFullPackageName().equals(""))) {
String packageName = packageData.getPackageName();
// Does this always exist? Should not be required!
// But is there another way to get the packageNames, without this file?
// During installation the packageNames can be determined by querying the rpm file
// -> this is not possible during uninstallation
String fullPackageName = (String) packageNames.get(packageName);
packageData.setFullPackageName(fullPackageName);
}
}
for (Enumeration e = packageData.children(); e.hasMoreElements(); ) {
PackageDescription child = (PackageDescription) e.nextElement();
setFullPackageNameAtUninstall(child, packageNames);
}
}
public String getRelocationString(PackageDescription packageData, String packageName) {
String relocationString = null;
if ( packageData.isRelocatable() ) {
// String rpmQuery = "rpm -qp --qf %{PREFIXES}=" + " " + packageName;
String[] rpmQueryArray = new String[5];
rpmQueryArray[0] = "rpm";
rpmQueryArray[1] = "-qp";
rpmQueryArray[2] = "--qf";
rpmQueryArray[3] = "%{PREFIXES}=";
rpmQueryArray[4] = packageName;
Vector returnVector = new Vector();
Vector returnErrorVector = new Vector();
int returnValue = ExecuteProcess.executeProcessReturnVector(rpmQueryArray, returnVector, returnErrorVector);
relocationString = (String) returnVector.get(0);
}
return relocationString;
}
    /**
     * Writes the "packageNames" file (one "packageName=fullPackageName" line
     * per package in the tree) into the uninstall directory so that later
     * deinstallation can resolve full package names without rpm queries.
     */
    public void createPackageNameFileAtPostinstall(InstallData data, PackageDescription packageData) {
        // The file "packageNames" must not be an own database! It must be possible to install
        // and deinstall RPMs without this GUI installer. Therefore the file packageNames is
        // not always up to date. Nevertheless it makes the deinstallation faster, because of
        // all packages, whose "real" package name is not defined in xpd files (for example
        // "openoffice-core01-2.0.3-159.rpm" hat the "real" name "openoffice-core01" that is
        // used for deinstallation) this can be read in this file. Otherwise it would be
        // neccessary to determine the "real" name with a database question.
        // The version and release that are also stored in file "packageNames" must not be
        // used for deinstallation because they are probably not up to date.
        String destDirStr = data.getInstallDir();
        File uninstallDir = new File(destDirStr, data.getUninstallDirName());
        String fileName = "packageNames";
        File packageNamesFile = new File(uninstallDir, fileName);
        Vector packageNames = new Vector();
        getPackageNamesContent(packageData, packageNames);
        SystemManager.saveCharFileVector(packageNamesFile.getPath(), packageNames);
    }
public HashMap readPackageNamesFile() {
// package names are stored in file "packageNames" in data.getInfoRoot() directory
String fileName = "packageNames";
InstallData data = InstallData.getInstance();
File dir = data.getInfoRoot();
File file = new File(dir, fileName);
Vector fileContent = SystemManager.readCharFileVector(file.getPath());
HashMap map = Converter.convertVectorToHashmap(fileContent);
return map;
}
public void saveModulesLogFile(InstallData data) {
if ( data.logModuleStates() ) {
Vector logContent = LogManager.getModulesLogFile();
String destDirStr = data.getInstallDir();
File uninstallDir = new File(destDirStr, data.getUninstallDirName());
File modulesLogFile = new File(uninstallDir, "moduleSettingsLog.txt");
SystemManager.saveCharFileVector(modulesLogFile.getPath(), logContent);
}
}
public String fixInstallationDirectory(String installDir) {
// inject a second slash to the last path segment to avoid rpm 3 concatenation bug
int lastSlashPos = installDir.lastIndexOf('/');
String sub1 = installDir.substring(0,lastSlashPos);
String sub2 = installDir.substring(lastSlashPos);
String fixedInstallDir = sub1 + "/" + sub2;
// fixedInstallDir.replaceAll(" ", "%20");
return fixedInstallDir;
}
}
| javainstaller2/src/JavaSetup/org/openoffice/setup/InstallerHelper/LinuxHelper.java | package org.openoffice.setup.InstallerHelper;
import org.openoffice.setup.InstallData;
import org.openoffice.setup.SetupData.PackageDescription;
import org.openoffice.setup.Util.Converter;
import org.openoffice.setup.Util.ExecuteProcess;
import org.openoffice.setup.Util.LogManager;
import org.openoffice.setup.Util.SystemManager;
import java.io.File;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Vector;public class LinuxHelper {
    /** Creates a stateless helper; all methods operate only on passed-in data. */
    public LinuxHelper() {
        super();
    }
    /**
     * Recursively collects a "packageName=fullPackageName" entry for this
     * package and every descendant into the given vector (pre-order).
     */
    private void getPackageNamesContent(PackageDescription packageData, Vector packageNames) {
        if (( packageData.getPackageName() != null ) && ( ! packageData.getPackageName().equals(""))) {
            packageNames.add(packageData.getPackageName() + "=" + packageData.getFullPackageName());
        }
        // recurse into all child packages
        for (Enumeration e = packageData.children(); e.hasMoreElements(); ) {
            PackageDescription child = (PackageDescription) e.nextElement();
            getPackageNamesContent(child, packageNames);
        }
    }
private String getPackageNameFromRpm(String rpmFileName, InstallData installData) {
String fullPackageName = null;
String packagePath = installData.getPackagePath();
File rpmFile = new File(packagePath, rpmFileName);
if ( rpmFile.exists() ) {
String rpmCommand = "rpm -qp " + rpmFile.getPath();
String[] rpmCommandArray = new String[3];
rpmCommandArray[0] = "rpm";
rpmCommandArray[1] = "-qp";
rpmCommandArray[2] = rpmFile.getPath();
Vector returnVector = new Vector();
Vector returnErrorVector = new Vector();
int returnValue = ExecuteProcess.executeProcessReturnVector(rpmCommandArray, returnVector, returnErrorVector);
String returnString = (String) returnVector.get(0);
String log = rpmCommand + "<br><b>Returns: " + returnString + "</b><br>";
LogManager.addCommandsLogfileComment(log);
fullPackageName = returnString;
} else {
System.err.println("Error: Could not find file " + rpmFile.getPath());
}
return fullPackageName;
}
private boolean checkPackageExistence(String rpmFileName, InstallData installData) {
boolean fileExists = false;
String packagePath = installData.getPackagePath();
File rpmFile = new File(packagePath, rpmFileName);
if ( rpmFile.exists() ) {
fileExists = true;
}
return fileExists;
}
    /**
     * Splits an rpm version string into Integer components stored in a HashMap
     * under the keys "major", "minor", "micro" and "release".
     *
     * Handles "2.0.3" style versions as well as jre versions like "1.5.0_06",
     * where the part after "_" becomes the release. On any parse error all
     * four values are set to null, which makes compareTwoRpms() always report
     * "not older".
     */
    private HashMap analyzeVersionString(String versionString) {
        // System.err.println("Version String: " + versionString);
        boolean errorOccured = false;
        Integer micro = null;
        Integer minor = null;
        Integer major = null;
        Integer release = null;
        String microString = null;
        String minorString = null;
        String majorString = null;
        String releaseString = null;
        int pos = versionString.lastIndexOf("_"); // this is a jre RPM (1.5.0_06)
        if ( pos > -1 ) {
            try {
                releaseString = versionString.substring(pos+1, versionString.length());
                versionString = versionString.substring(0, pos);
            } catch (IndexOutOfBoundsException ex) {
                System.err.println("Error: Could not get substring from " + versionString);
                errorOccured = true;
            }
            try {
                int releaseInt = Integer.parseInt(releaseString);
                release = new Integer(releaseInt);
            } catch (NumberFormatException ex) {
                System.err.println("Error: Could not convert " + releaseString + " to integer");
                errorOccured = true;
            }
        }
        // the standard analyzing mechanism: strip micro first, then split major/minor
        pos = versionString.lastIndexOf("."); // returns "-1", if not found
        if ( pos > -1 )
        {
            try {
                microString = versionString.substring(pos+1, versionString.length());
                versionString = versionString.substring(0, pos);
            } catch (IndexOutOfBoundsException ex) {
                System.err.println("Error: Could not get substring from " + versionString);
                errorOccured = true;
            }
            pos = versionString.lastIndexOf(".");
            if ( pos > -1 ) {
                try {
                    minorString = versionString.substring(pos+1, versionString.length());
                    majorString = versionString.substring(0, pos);
                } catch (IndexOutOfBoundsException ex) {
                    System.err.println("Error: Could not get substring from " + versionString);
                    errorOccured = true;
                }
                try {
                    int microInt = Integer.parseInt(microString);
                    int minorInt = Integer.parseInt(minorString);
                    int majorInt = Integer.parseInt(majorString);
                    micro = new Integer(microInt);
                    minor = new Integer(minorInt);
                    major = new Integer(majorInt);
                } catch (NumberFormatException ex) {
                    System.err.println("Error: Could not convert " + microString + "," +
                            minorString + " or " + majorString + " to integer");
                    errorOccured = true;
                }
            }
        }
        if ( errorOccured ) {
            // invalidate everything so callers cannot act on a half-parsed version
            micro = null;
            minor = null;
            major = null;
            release = null;
        }
        HashMap hashRpm = new HashMap();
        hashRpm.put("micro", micro);
        hashRpm.put("minor", minor);
        hashRpm.put("major", major);
        hashRpm.put("release", release);
        // If one of this values is "null", procedure "compareTwoRpms" always delivers false.
        // This means, that the installed package is not older.
        // System.err.println("Analyzed: " + "micro: " + hashRpm.get("micro").toString() + " minor: " + hashRpm.get("minor").toString() + " major: " + hashRpm.get("major").toString());
        return hashRpm;
    }
private HashMap analyzeReleaseString(HashMap hashRpm, String releaseString) {
int release;
try {
release = Integer.parseInt(releaseString);
Integer releaseObj = new Integer(release);
hashRpm.put("release", releaseObj);
}
catch (NumberFormatException ex) {
System.err.println("Error: Could not convert " + releaseString + " to integer");
}
return hashRpm;
}
private boolean compareTwoRpms(HashMap hash1, HashMap hash2) {
boolean hash1IsOlder = false;
if ( (! hash1.containsValue(null) ) && (! hash2.containsValue(null) )) {
if ( ((Integer)hash1.get("major")).intValue() < ((Integer)hash2.get("major")).intValue() ) {
hash1IsOlder = true;
} else {
if ( ((Integer)hash1.get("minor")).intValue() < ((Integer)hash2.get("minor")).intValue() ) {
hash1IsOlder = true;
} else {
if ( ((Integer)hash1.get("micro")).intValue() < ((Integer)hash2.get("micro")).intValue() ) {
hash1IsOlder = true;
} else {
if ( ((Integer)hash1.get("release")).intValue() < ((Integer)hash2.get("release")).intValue() ) {
hash1IsOlder = true;
}
}
}
}
}
return hash1IsOlder;
}
public boolean compareVersionAndRelease(String versionString, String releaseString, PackageDescription packageData, boolean checkIfInstalledIsOlder) {
// version and release are gotten from the rpm database. packageData contains
// the information about the rpm, that shall be installed. It has to be installed,
// if the installed product defined by version and release is older.
// version is something like "2.0.3", release something like "164".
// An exception is the jre package, where version is "1.5.0_06" and release "fcs".
HashMap installedRpm = analyzeVersionString(versionString);
if ( installedRpm.get("release") == null ) {
installedRpm = analyzeReleaseString(installedRpm, releaseString);
}
// System.err.println("Package: " + packageData.getPackageName());
// System.err.println("Installed RPM: " + "major: " + installedRpm.get("major").toString() +
// " minor: " + installedRpm.get("minor").toString() +
// " micro: " + installedRpm.get("micro").toString() +
// " release: " + installedRpm.get("release").toString());
HashMap notInstalledRpm = analyzeVersionString(packageData.getPkgVersion());
if ( notInstalledRpm.get("release") == null ) {
notInstalledRpm = analyzeReleaseString(notInstalledRpm, packageData.getPkgRelease());
}
// System.err.println("Not installed RPM: " + "major: " + notInstalledRpm.get("major").toString() +
// " minor: " + notInstalledRpm.get("minor").toString() +
// " micro: " + notInstalledRpm.get("micro").toString() +
// " release: " + notInstalledRpm.get("release").toString());
boolean firstIsOlder = false;
if ( checkIfInstalledIsOlder ) {
firstIsOlder = compareTwoRpms(installedRpm, notInstalledRpm);
// System.err.println("Result: Installed RPM is older: " + firstIsOlder);
} else {
firstIsOlder = compareTwoRpms(notInstalledRpm, installedRpm);
// System.err.println("Result: Not installed RPM is older: " + firstIsOlder);
}
return firstIsOlder;
}
    /**
     * Recursively ensures every package in the tree knows its full rpm package
     * name. For each package whose rpm file exists, the full name is taken from
     * the xpd data or, when missing there, queried from the rpm file itself;
     * pkgExists is updated accordingly.
     */
    public void getLinuxPackageNamesFromRpmquery(PackageDescription packageData, InstallData installData) {
        if ((packageData.getPackageName() != null) && ( ! packageData.getPackageName().equals(""))) {
            boolean rpmExists = checkPackageExistence(packageData.getPackageName(), installData);
            if ( rpmExists ) {
                // Full package name not defined in xpd file
                if (( packageData.getFullPackageName() == null ) || ( packageData.getFullPackageName().equals(""))) {
                    // Now it is possible to query the rpm database for the packageName, if it is not defined in xpd file!
                    String fullPackageName = getPackageNameFromRpm(packageData.getPackageName(), installData);
                    if ( fullPackageName != null ) {
                        packageData.setFullPackageName(fullPackageName);
                    } else {
                        System.err.println("Error: Linux package name not defined in xpd file and could not be determined: "
                                + packageData.getPackageName());
                    }
                }
                packageData.setPkgExists(true);
            } else {
                packageData.setPkgExists(false);
            }
        }
        // recurse into all child packages
        for (Enumeration e = packageData.children(); e.hasMoreElements(); ) {
            PackageDescription child = (PackageDescription) e.nextElement();
            getLinuxPackageNamesFromRpmquery(child, installData);
        }
    }
public String getLinuxDatabasePath(InstallData data) {
String databasePath = null;
String installDir = data.getInstallDir();
String databaseDir = SystemManager.getParentDirectory(installDir);
String linuxDatabaseName = ".RPM_OFFICE_DATABASE";
File databaseFile = new File(databaseDir, linuxDatabaseName);
databasePath = databaseFile.getPath();
return databasePath;
}
    /**
     * Recursively splits each package's full package name (e.g.
     * "openoffice-core01-2.0.3-159" = "name-version-release") into real name,
     * version and release. Version and release are only stored during
     * installation, because rpms can be updated outside the GUI installer and
     * the saved values would then be stale at uninstall time.
     */
    public void getLinuxFileInfo(PackageDescription packageData) {
        // analyzing a string like "openoffice-core01-2.0.3-159" as "name-version-release"
        InstallData data = InstallData.getInstance();
        if ( packageData.pkgExists() ) {
            if (( packageData.getFullPackageName() != null ) && ( ! packageData.getFullPackageName().equals(""))) {
                String longName = packageData.getFullPackageName();
                int pos = longName.lastIndexOf("-");
                if (data.isInstallationMode()) {
                    // not saving at uninstallation, because it can be updated without GUI installer
                    packageData.setPkgRelease(longName.substring(pos+1, longName.length()));
                }
                longName = longName.substring(0, pos);
                pos = longName.lastIndexOf("-");
                if (data.isInstallationMode()) {
                    // not saving at uninstallation, because it can be updated without GUI installer
                    packageData.setPkgVersion(longName.substring(pos+1, longName.length()));
                }
                packageData.setPkgRealName(longName.substring(0, pos));
            }
        }
        // recurse into all child packages
        for (Enumeration e = packageData.children(); e.hasMoreElements(); ) {
            PackageDescription child = (PackageDescription) e.nextElement();
            getLinuxFileInfo(child);
        }
    }
public void setFullPackageNameAtUninstall(PackageDescription packageData, HashMap packageNames) {
if (( packageData.getPackageName() != null ) && ( ! packageData.getPackageName().equals(""))) {
if (( packageData.getFullPackageName() == null ) || ( packageData.getFullPackageName().equals(""))) {
String packageName = packageData.getPackageName();
// Does this always exist? Should not be required!
// But is there another way to get the packageNames, without this file?
// During installation the packageNames can be determined by querying the rpm file
// -> this is not possible during uninstallation
String fullPackageName = (String) packageNames.get(packageName);
packageData.setFullPackageName(fullPackageName);
}
}
for (Enumeration e = packageData.children(); e.hasMoreElements(); ) {
PackageDescription child = (PackageDescription) e.nextElement();
setFullPackageNameAtUninstall(child, packageNames);
}
}
public String getRelocationString(PackageDescription packageData, String packageName) {
String relocationString = null;
if ( packageData.isRelocatable() ) {
// String rpmQuery = "rpm -qp --qf %{PREFIXES}=" + " " + packageName;
String[] rpmQueryArray = new String[5];
rpmQueryArray[0] = "rpm";
rpmQueryArray[1] = "-qp";
rpmQueryArray[2] = "--qf";
rpmQueryArray[3] = "%{PREFIXES}=";
rpmQueryArray[4] = packageName;
Vector returnVector = new Vector();
Vector returnErrorVector = new Vector();
int returnValue = ExecuteProcess.executeProcessReturnVector(rpmQueryArray, returnVector, returnErrorVector);
relocationString = (String) returnVector.get(0);
}
return relocationString;
}
public void createPackageNameFileAtPostinstall(InstallData data, PackageDescription packageData) {
// The file "packageNames" must not be an own database! It must be possible to install
// and deinstall RPMs without this GUI installer. Therefore the file packageNames is
// not always up to date. Nevertheless it makes the deinstallation faster, because of
// all packages, whose "real" package name is not defined in xpd files (for example
// "openoffice-core01-2.0.3-159.rpm" hat the "real" name "openoffice-core01" that is
// used for deinstallation) this can be read in this file. Otherwise it would be
// neccessary to determine the "real" name with a database question.
// The version and release that are also stored in file "packageNames" must not be
// used for deinstallation because they are probably not up to date.
String destDirStr = data.getInstallDir();
File uninstallDir = new File(destDirStr, data.getUninstallDirName());
String fileName = "packageNames";
File packageNamesFile = new File(uninstallDir, fileName);
Vector packageNames = new Vector();
getPackageNamesContent(packageData, packageNames);
SystemManager.saveCharFileVector(packageNamesFile.getPath(), packageNames);
}
public HashMap readPackageNamesFile() {
// package names are stored in file "packageNames" in data.getInfoRoot() directory
String fileName = "packageNames";
InstallData data = InstallData.getInstance();
File dir = data.getInfoRoot();
File file = new File(dir, fileName);
Vector fileContent = SystemManager.readCharFileVector(file.getPath());
HashMap map = Converter.convertVectorToHashmap(fileContent);
return map;
}
public void saveModulesLogFile(InstallData data) {
if ( data.logModuleStates() ) {
Vector logContent = LogManager.getModulesLogFile();
String destDirStr = data.getInstallDir();
File uninstallDir = new File(destDirStr, data.getUninstallDirName());
File modulesLogFile = new File(uninstallDir, "moduleSettingsLog.txt");
SystemManager.saveCharFileVector(modulesLogFile.getPath(), logContent);
}
}
public String fixInstallationDirectory(String installDir) {
// inject a second slash to the last path segment to avoid rpm 3 concatenation bug
int lastSlashPos = installDir.lastIndexOf('/');
String sub1 = installDir.substring(0,lastSlashPos);
String sub2 = installDir.substring(lastSlashPos);
String fixedInstallDir = sub1 + "/" + sub2;
// fixedInstallDir.replaceAll(" ", "%20");
return fixedInstallDir;
}
}
| INTEGRATION: CWS native87 (1.1.10); FILE MERGED
2007/06/01 10:56:33 is 1.1.10.2: #i65425# new Java gui installer
2007/04/24 10:09:23 is 1.1.10.1: #i65425# adding header
| javainstaller2/src/JavaSetup/org/openoffice/setup/InstallerHelper/LinuxHelper.java | INTEGRATION: CWS native87 (1.1.10); FILE MERGED 2007/06/01 10:56:33 is 1.1.10.2: #i65425# new Java gui installer 2007/04/24 10:09:23 is 1.1.10.1: #i65425# adding header |
|
Java | agpl-3.0 | 407b4aa70440a789f2beb3a00fe56d4ca06059eb | 0 | hobrasoft-cz/PDFMU,hobrasoft-cz/PDFMU | package cz.hobrasoft.pdfmu.operation.version;
import com.itextpdf.text.pdf.PdfReader;
import cz.hobrasoft.pdfmu.jackson.VersionSet;
import cz.hobrasoft.pdfmu.operation.Operation;
import cz.hobrasoft.pdfmu.operation.OperationCommon;
import cz.hobrasoft.pdfmu.operation.OperationException;
import cz.hobrasoft.pdfmu.operation.args.InOutPdfArgs;
import cz.hobrasoft.pdfmu.operation.args.InPdfArgs;
import cz.hobrasoft.pdfmu.operation.args.OutPdfArgs;
import java.util.logging.Logger;
import net.sourceforge.argparse4j.impl.Arguments;
import net.sourceforge.argparse4j.inf.Namespace;
import net.sourceforge.argparse4j.inf.Subparser;
/**
* Sets the PDF version of a PDF document
*
* @author <a href="mailto:[email protected]">Filip Bartek</a>
*/
public class OperationVersionSet extends OperationCommon {
private static final Logger logger = Logger.getLogger(OperationVersionSet.class.getName());
private final InOutPdfArgs inout = new InOutPdfArgs(false);
@Override
public Subparser configureSubparser(Subparser subparser) {
String help = "Set PDF version of a PDF document";
// Configure the subparser
subparser.help(help)
.description(help)
.defaultHelp(true)
.setDefault("command", OperationVersionSet.class);
inout.addArguments(subparser);
String metavarVersion = "VERSION";
subparser.addArgument("-v", "--version")
.help(String.format("set PDF version to %s", metavarVersion))
.metavar(metavarVersion)
.type(PdfVersion.class)
.setDefault(new PdfVersion("1.6"));
subparser.addArgument("--only-if-lower")
.help(String.format("only set version if the current version is lower than %s", metavarVersion))
.type(boolean.class)
.action(Arguments.storeTrue());
return subparser;
}
@Override
public void execute(Namespace namespace) throws OperationException {
inout.setFromNamespace(namespace);
PdfVersion outVersion = namespace.get("version");
boolean onlyIfLower = namespace.get("only_if_lower");
VersionSet result = execute(inout, outVersion, onlyIfLower);
writeResult(result);
}
private static VersionSet execute(InOutPdfArgs inout, PdfVersion outVersion, boolean onlyIfLower) throws OperationException {
InPdfArgs in = inout.getIn();
OutPdfArgs out = inout.getOut();
return execute(in, out, outVersion, onlyIfLower);
}
private static VersionSet execute(InPdfArgs in, OutPdfArgs out, PdfVersion outVersion, boolean onlyIfLower) throws OperationException {
in.open();
PdfReader pdfReader = in.getPdfReader();
// Fetch the PDF version of the input PDF document
PdfVersion inVersion = new PdfVersion(pdfReader.getPdfVersion());
logger.info(String.format("Input PDF document version: %s", inVersion));
// Commence to set the PDF version of the output PDF document
// Determine the desired PDF version
assert outVersion != null; // The argument "version" has a default value
logger.info(String.format("Desired output PDF version: %s", outVersion));
boolean set = true;
if (outVersion.compareTo(inVersion) <= 0) {
// The desired version is lower than the current version.
if (onlyIfLower) {
set = false;
logger.info("The input PDF version is not lower than the desired version. No modification will be performed.");
} else {
logger.warning("Setting the PDF version to a lower value.");
}
}
if (set) {
out.open(pdfReader, false, outVersion.toChar());
out.close();
}
in.close();
return new VersionSet(inVersion.toString(), outVersion.toString(), set);
}
private static Operation instance = null;
public static Operation getInstance() {
if (instance == null) {
instance = new OperationVersionSet();
}
return instance;
}
private OperationVersionSet() {
// Singleton
}
}
| pdfmu/src/main/java/cz/hobrasoft/pdfmu/operation/version/OperationVersionSet.java | package cz.hobrasoft.pdfmu.operation.version;
import com.itextpdf.text.pdf.PdfReader;
import cz.hobrasoft.pdfmu.jackson.VersionSet;
import cz.hobrasoft.pdfmu.operation.Operation;
import cz.hobrasoft.pdfmu.operation.OperationCommon;
import cz.hobrasoft.pdfmu.operation.OperationException;
import cz.hobrasoft.pdfmu.operation.args.InOutPdfArgs;
import cz.hobrasoft.pdfmu.operation.args.InPdfArgs;
import cz.hobrasoft.pdfmu.operation.args.OutPdfArgs;
import java.util.logging.Logger;
import net.sourceforge.argparse4j.impl.Arguments;
import net.sourceforge.argparse4j.inf.Namespace;
import net.sourceforge.argparse4j.inf.Subparser;
/**
* Sets the PDF version of a PDF document
*
* @author <a href="mailto:[email protected]">Filip Bartek</a>
*/
public class OperationVersionSet extends OperationCommon {
private static final Logger logger = Logger.getLogger(OperationVersionSet.class.getName());
private final InOutPdfArgs inout = new InOutPdfArgs();
@Override
public Subparser configureSubparser(Subparser subparser) {
String help = "Set PDF version of a PDF document";
// Configure the subparser
subparser.help(help)
.description(help)
.defaultHelp(true)
.setDefault("command", OperationVersionSet.class);
inout.addArguments(subparser);
String metavarVersion = "VERSION";
subparser.addArgument("-v", "--version")
.help(String.format("set PDF version to %s", metavarVersion))
.metavar(metavarVersion)
.type(PdfVersion.class)
.setDefault(new PdfVersion("1.6"));
subparser.addArgument("--only-if-lower")
.help(String.format("only set version if the current version is lower than %s", metavarVersion))
.type(boolean.class)
.action(Arguments.storeTrue());
return subparser;
}
@Override
public void execute(Namespace namespace) throws OperationException {
inout.setFromNamespace(namespace);
PdfVersion outVersion = namespace.get("version");
boolean onlyIfLower = namespace.get("only_if_lower");
VersionSet result = execute(inout, outVersion, onlyIfLower);
writeResult(result);
}
private static VersionSet execute(InOutPdfArgs inout, PdfVersion outVersion, boolean onlyIfLower) throws OperationException {
InPdfArgs in = inout.getIn();
OutPdfArgs out = inout.getOut();
return execute(in, out, outVersion, onlyIfLower);
}
private static VersionSet execute(InPdfArgs in, OutPdfArgs out, PdfVersion outVersion, boolean onlyIfLower) throws OperationException {
in.open();
PdfReader pdfReader = in.getPdfReader();
// Fetch the PDF version of the input PDF document
PdfVersion inVersion = new PdfVersion(pdfReader.getPdfVersion());
logger.info(String.format("Input PDF document version: %s", inVersion));
// Commence to set the PDF version of the output PDF document
// Determine the desired PDF version
assert outVersion != null; // The argument "version" has a default value
logger.info(String.format("Desired output PDF version: %s", outVersion));
boolean set = true;
if (outVersion.compareTo(inVersion) <= 0) {
// The desired version is lower than the current version.
if (onlyIfLower) {
set = false;
logger.info("The input PDF version is not lower than the desired version. No modification will be performed.");
} else {
logger.warning("Setting the PDF version to a lower value.");
}
}
if (set) {
out.open(pdfReader, false, outVersion.toChar());
out.close();
}
in.close();
return new VersionSet(inVersion.toString(), outVersion.toString(), set);
}
private static Operation instance = null;
public static Operation getInstance() {
if (instance == null) {
instance = new OperationVersionSet();
}
return instance;
}
private OperationVersionSet() {
// Singleton
}
}
| OperationVersionSet: Disable --append option (enforce value false)
`version set` with `--append=true` does not work.
| pdfmu/src/main/java/cz/hobrasoft/pdfmu/operation/version/OperationVersionSet.java | OperationVersionSet: Disable --append option (enforce value false) |
|
Java | lgpl-2.1 | 876666c45db5ab6f66894750fbee80eb23215288 | 0 | xwiki/xwiki-enterprise,xwiki/xwiki-enterprise | /*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package com.xpn.xwiki.it.xhtml;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* Used to overwrite the the default entity resolver and get the entities from the resources.
*
* @version $Id$
*/
class ResourcesEntityResolver implements EntityResolver
{
private static final String ENTITIES_ROOT = "/entities";
/**
* {@inheritDoc}
*
* @see org.xml.sax.EntityResolver#resolveEntity(java.lang.String, java.lang.String)
*/
public InputSource resolveEntity(String publicId, String systemId) throws SAXException, IOException
{
int index = systemId.lastIndexOf("/");
String dtd = ENTITIES_ROOT + (index != -1 ? systemId.substring(index) : "/" + systemId);
InputStream stream = ResourcesEntityResolver.class.getResourceAsStream(dtd);
return new InputSource(new InputStreamReader(stream));
}
}
| distribution-test/misc-tests/src/test/it/com/xpn/xwiki/it/xhtml/ResourcesEntityResolver.java | /*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package com.xpn.xwiki.it.xhtml;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* Used to overwrite the the default entity resolver and get the entities from the resources.
*
* @version $Id$
*/
class ResourcesEntityResolver implements EntityResolver
{
private static final String ENTITIES_ROOT = "/entities";
/**
* {@inheritDoc}
*
* @see org.xml.sax.EntityResolver#resolveEntity(java.lang.String, java.lang.String)
*/
public InputSource resolveEntity(String publicId, String systemId) throws SAXException, IOException
{
int index = systemId.lastIndexOf("/");
String dtd = ENTITIES_ROOT + "/" + (index != -1 ? systemId.substring(index) : systemId);
InputStream stream = ResourcesEntityResolver.class.getResourceAsStream(dtd);
return new InputSource(new InputStreamReader(stream));
}
}
| [misc] Fix tests
git-svn-id: cf27bad30c6b7316185bdac65b014e8c16cd40b6@24531 f329d543-caf0-0310-9063-dda96c69346f
| distribution-test/misc-tests/src/test/it/com/xpn/xwiki/it/xhtml/ResourcesEntityResolver.java | [misc] Fix tests |
|
Java | lgpl-2.1 | bb6770930da72b4bb96f014ba2cb79dcdab7759f | 0 | shuiblue/JDimeForCpp,shuiblue/JDimeForCpp,shuiblue/JDimeForCpp | package de.fosd.jdime.gui;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleListProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.concurrent.Task;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Insets;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.CornerRadii;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.stage.Window;
import de.uni_passau.fim.seibt.kvconfig.Config;
import de.uni_passau.fim.seibt.kvconfig.PropFileConfigSource;
import de.uni_passau.fim.seibt.kvconfig.SysEnvConfigSource;
/**
* A simple JavaFX GUI for JDime.
*/
@SuppressWarnings("unused")
public final class GUI extends Application {
private static final Logger LOG = Logger.getLogger(GUI.class.getCanonicalName());
private static final String TITLE = "JDime";
private static final String JDIME_CONF_FILE = "JDime.properties";
private static final String JDIME_DEFAULT_ARGS_KEY = "DEFAULT_ARGS";
private static final String JDIME_DEFAULT_LEFT_KEY = "DEFAULT_LEFT";
private static final String JDIME_DEFAULT_BASE_KEY = "DEFAULT_BASE";
private static final String JDIME_DEFAULT_RIGHT_KEY = "DEFAULT_RIGHT";
private static final String JDIME_EXEC_KEY = "JDIME_EXEC";
private static final String JDIME_ALLOW_INVALID_KEY = "ALLOW_INVALID";
private static final String JDIME_BUFFERED_LINES = "BUFFERED_LINES";
private static final String JVM_DEBUG_PARAMS = "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005";
private static final String STARTSCRIPT_JVM_ENV_VAR = "JAVA_OPTS";
private static final Pattern DUMP_GRAPH = Pattern.compile(".*-mode\\s+dumpgraph.*");
@FXML
ListView<String> output;
@FXML
TextField left;
@FXML
TextField base;
@FXML
TextField right;
@FXML
TextField jDime;
@FXML
TextField cmdArgs;
@FXML
CheckBox debugMode;
@FXML
TabPane tabPane;
@FXML
Tab outputTab;
@FXML
private StackPane cancelPane;
@FXML
private GridPane controlsPane;
@FXML
private Button historyPrevious;
@FXML
private Button historyNext;
private Config config;
private int bufferedLines;
private boolean allowInvalid;
private File lastChooseDir;
private List<TextField> textFields;
private IntegerProperty historyIndex;
private ObservableList<State> history;
private State inProgress;
private Task<Void> jDimeExec;
private Process jDimeProcess;
/**
* Launches the GUI with the given <code>args</code>.
*
* @param args
* the command line arguments
*/
public static void main(String[] args) {
launch(args);
}
@Override
public void start(Stage primaryStage) throws Exception {
FXMLLoader loader = new FXMLLoader(getClass().getResource(getClass().getSimpleName() + ".fxml"));
loader.setController(this);
Parent root = loader.load();
Scene scene = new Scene(root);
textFields = Arrays.asList(left, base, right, jDime, cmdArgs);
historyIndex = new SimpleIntegerProperty(0);
history = FXCollections.observableArrayList();
config = new Config();
config.addSource(new SysEnvConfigSource());
loadConfigFile();
loadDefaults();
SimpleListProperty<State> historyListProp = new SimpleListProperty<>(history);
BooleanBinding noPrev = historyListProp.emptyProperty().or(historyIndex.isEqualTo(0));
BooleanBinding noNext = historyListProp.emptyProperty().or(historyIndex.greaterThanOrEqualTo(historyListProp.sizeProperty()));
historyNext.disableProperty().bind(noNext);
historyPrevious.disableProperty().bind(noPrev);
primaryStage.setTitle(TITLE);
primaryStage.setScene(scene);
primaryStage.show();
}
/**
* Loads default values for the <code>TextField</code>s from the config file.
*/
private void loadDefaults() {
config.get(JDIME_EXEC_KEY).ifPresent(s -> jDime.setText(s.trim()));
config.get(JDIME_DEFAULT_ARGS_KEY).ifPresent(s -> cmdArgs.setText(s.trim()));
config.get(JDIME_DEFAULT_LEFT_KEY).ifPresent(left::setText);
config.get(JDIME_DEFAULT_BASE_KEY).ifPresent(base::setText);
config.get(JDIME_DEFAULT_RIGHT_KEY).ifPresent(right::setText);
bufferedLines = config.getInteger(JDIME_BUFFERED_LINES).orElse(100);
allowInvalid = config.getBoolean(JDIME_ALLOW_INVALID_KEY).orElse(false);
}
/**
* Checks whether the current working directory contains a file called {@value #JDIME_CONF_FILE} and if so adds
* a <code>PropFileConfigSource</code> to <code>config</code>.
*/
private void loadConfigFile() {
File configFile = new File(JDIME_CONF_FILE);
if (configFile.exists()) {
try {
config.addSource(new PropFileConfigSource(configFile));
} catch (IOException e) {
LOG.log(Level.WARNING, e, () -> "Could not load " + configFile);
}
}
}
/**
* Shows a <code>FileChooser</code> and returns the chosen <code>File</code>. Sets <code>lastChooseDir</code>
* to the parent file of the returned <code>File</code>.
*
* @param event
* the <code>ActionEvent</code> that occurred in the action listener
*
* @return the chosen <code>File</code> or <code>null</code> if the dialog was closed
*/
private File getChosenFile(ActionEvent event) {
FileChooser chooser = new FileChooser();
Window window = ((Node) event.getTarget()).getScene().getWindow();
if (lastChooseDir != null && lastChooseDir.isDirectory()) {
chooser.setInitialDirectory(lastChooseDir);
}
return chooser.showOpenDialog(window);
}
/**
* Called when the 'Choose' button for the left file is clicked.
*
* @param event
* the <code>ActionEvent</code> that occurred
*/
public void chooseLeft(ActionEvent event) {
File leftArtifact = getChosenFile(event);
if (leftArtifact != null) {
lastChooseDir = leftArtifact.getParentFile();
left.setText(leftArtifact.getAbsolutePath());
}
}
/**
* Called when the 'Choose' button for the base file is clicked.
*
* @param event
* the <code>ActionEvent</code> that occurred
*/
public void chooseBase(ActionEvent event) {
File baseArtifact = getChosenFile(event);
if (baseArtifact != null) {
lastChooseDir = baseArtifact.getParentFile();
base.setText(baseArtifact.getAbsolutePath());
}
}
/**
* Called when the 'Choose' button for the right file is clicked.
*
* @param event
* the <code>ActionEvent</code> that occurred
*/
public void chooseRight(ActionEvent event) {
File rightArtifact = getChosenFile(event);
if (rightArtifact != null) {
lastChooseDir = rightArtifact.getParentFile();
right.setText(rightArtifact.getAbsolutePath());
}
}
/**
* Called when the 'Choose' button for the JDime executable is clicked.
*
* @param event
* the <code>ActionEvent</code> that occurred
*/
public void chooseJDime(ActionEvent event) {
File jDimeBinary = getChosenFile(event);
if (jDimeBinary != null) {
lastChooseDir = jDimeBinary.getParentFile();
jDime.setText(jDimeBinary.getAbsolutePath());
}
}
/**
* Called when the '{@literal >}' button for the history is clicked.
*/
public void historyNext() {
historyIndex.setValue(historyIndex.get() + 1);
if (historyIndex.get() == history.size()) {
inProgress.applyTo(this);
} else {
history.get(historyIndex.get()).applyTo(this);
}
}
/**
* Called when the '{@literal <}' button for the history is clicked.
*/
public void historyPrevious() {
if (historyIndex.get() == history.size()) {
inProgress = State.of(this);
}
historyIndex.setValue(historyIndex.get() - 1);
history.get(historyIndex.get()).applyTo(this);
}
/**
* Called when the 'Cancel' button is clicked.
*/
public void cancelClicked() throws InterruptedException {
jDimeProcess.destroyForcibly().waitFor();
jDimeExec.cancel(true);
}
/**
* Called when the 'Run' button is clicked.
*/
public void runClicked() {
boolean valid = textFields.stream().allMatch(tf -> {
if (tf == cmdArgs) {
return true;
}
if (tf == base) {
return tf.getText().trim().isEmpty() || new File(tf.getText()).exists();
}
return new File(tf.getText()).exists();
});
if (!valid && !config.getBoolean(JDIME_ALLOW_INVALID_KEY).orElse(false)) {
return;
}
jDimeExec = new Task<Void>() {
@Override
protected Void call() throws Exception {
ProcessBuilder builder = new ProcessBuilder();
List<String> command = new ArrayList<>();
String input;
input = jDime.getText().trim();
if (!input.isEmpty()) {
command.add(input);
}
List<String> args = Arrays.asList(cmdArgs.getText().trim().split("\\s+"));
if (!args.isEmpty()) {
command.addAll(args);
}
input = left.getText().trim();
if (!input.isEmpty()) {
command.add(input);
}
input = base.getText().trim();
if (!input.isEmpty()) {
command.add(input);
}
input = right.getText().trim();
if (!input.isEmpty()) {
command.add(input);
}
builder.command(command);
builder.redirectErrorStream(true);
File workingDir = new File(jDime.getText()).getParentFile();
if (workingDir != null && workingDir.exists()) {
builder.directory(workingDir);
}
if (debugMode.isSelected()) {
builder.environment().put(STARTSCRIPT_JVM_ENV_VAR, JVM_DEBUG_PARAMS);
}
jDimeProcess = builder.start();
Charset cs = StandardCharsets.UTF_8;
try (BufferedReader r = new BufferedReader(new InputStreamReader(jDimeProcess.getInputStream(), cs))) {
List<String> lines = new ArrayList<>(bufferedLines);
boolean stop = false;
String line;
while (!Thread.interrupted() && !stop) {
if (r.ready()) {
if ((line = r.readLine()) != null) {
if (lines.size() < bufferedLines) {
lines.add(line);
} else {
List<String> toAdd = new ArrayList<>(lines);
Platform.runLater(() -> output.getItems().addAll(toAdd));
lines.clear();
}
} else {
stop = true;
}
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
stop = true;
}
}
Platform.runLater(() -> output.getItems().addAll(lines));
}
try {
jDimeProcess.waitFor();
} catch (InterruptedException ignored) {
jDimeProcess.destroyForcibly();
}
return null;
}
};
jDimeExec.setOnRunning(event -> {
controlsPane.setDisable(true);
cancelPane.setVisible(true);
});
jDimeExec.setOnSucceeded(event -> {
boolean dumpGraph = DUMP_GRAPH.matcher(cmdArgs.getText()).matches();
tabPane.getTabs().retainAll(outputTab);
if (dumpGraph) {
GraphvizParser parser = new GraphvizParser(output.getItems());
parser.setOnSucceeded(roots -> {
addTabs(parser.getValue());
reactivate();
});
parser.setOnFailed(event1 -> {
LOG.log(Level.WARNING, event1.getSource().getException(), () -> "Graphviz parsing failed.");
reactivate();
});
new Thread(parser).start();
} else {
reactivate();
}
});
jDimeExec.setOnCancelled(event -> {
reactivate();
});
jDimeExec.setOnFailed(event -> {
LOG.log(Level.WARNING, event.getSource().getException(), () -> "JDime execution failed.");
reactivate();
});
output.setItems(FXCollections.observableArrayList());
Thread jDimeT = new Thread(jDimeExec);
jDimeT.setName("JDime Task Thread");
jDimeT.start();
}
/**
* Saves the current state of the GUI to the history and then reactivates the user controls.
*/
private void reactivate() {
State currentState = State.of(GUI.this);
if (history.isEmpty() || !history.get(history.size() - 1).equals(currentState)) {
history.add(currentState);
historyIndex.setValue(history.size());
}
cancelPane.setVisible(false);
controlsPane.setDisable(false);
}
/**
* Adds <code>Tab</code>s containing <code>TreeTableView</code>s for every <code>TreeDumpNode</code> root in the
* given <code>List</code>.
*
* @param roots
* the roots of the trees to display
*/
private void addTabs(List<TreeItem<TreeDumpNode>> roots) {
roots.forEach(root -> tabPane.getTabs().add(getTreeTableViewTab(root)));
}
/**
* Returns a <code>Tab</code> containing a <code>TreeTableView</code> displaying the with the given
* <code>root</code>.
*
* @param root
* the root of the tree to display
* @return a <code>Tab</code> containing the tree
*/
private Tab getTreeTableViewTab(TreeItem<TreeDumpNode> root) {
TreeTableView<TreeDumpNode> tableView = new TreeTableView<>(root);
TreeTableColumn<TreeDumpNode, String> id = new TreeTableColumn<>("ID");
TreeTableColumn<TreeDumpNode, String> label = new TreeTableColumn<>("AST Type");
tableView.setRowFactory(param -> {
TreeTableRow<TreeDumpNode> row = new TreeTableRow<>();
TreeDumpNode node = row.getItem();
if (node == null) {
return row;
}
String color = node.getFillColor();
if (color != null) {
try {
BackgroundFill fill = new BackgroundFill(Color.valueOf(color), CornerRadii.EMPTY, Insets.EMPTY);
row.setBackground(new Background(fill));
} catch (IllegalArgumentException e) {
LOG.fine(() -> String.format("Could not convert '%s' to a JavaFX Color.", color));
}
}
return row;
});
id.setCellValueFactory(param -> param.getValue().getValue().idProperty());
label.setCellValueFactory(param -> param.getValue().getValue().labelProperty());
tableView.getColumns().setAll(Arrays.asList(label, id));
return new Tab("Tree View", tableView);
}
}
| src/de/fosd/jdime/gui/GUI.java | package de.fosd.jdime.gui;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import de.uni_passau.fim.seibt.kvconfig.Config;
import de.uni_passau.fim.seibt.kvconfig.PropFileConfigSource;
import de.uni_passau.fim.seibt.kvconfig.SysEnvConfigSource;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleListProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.concurrent.Task;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Insets;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.CornerRadii;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.stage.Window;
/**
 * A simple JavaFX GUI for JDime.
 * <p>
 * Lets the user choose the left/base/right input files and the JDime executable,
 * runs JDime as an external process, streams its console output into a list view
 * and, for dump-graph runs, renders the parsed trees in additional tabs.
 * Previous runs can be navigated through a simple history.
 */
@SuppressWarnings("unused")
public final class GUI extends Application {

    private static final Logger LOG = Logger.getLogger(GUI.class.getCanonicalName());

    // Window title.
    private static final String TITLE = "JDime";
    // Optional properties file looked up in the current working directory.
    private static final String JDIME_CONF_FILE = "JDime.properties";
    // Config keys used to pre-fill the text fields on startup.
    private static final String JDIME_DEFAULT_ARGS_KEY = "DEFAULT_ARGS";
    private static final String JDIME_DEFAULT_LEFT_KEY = "DEFAULT_LEFT";
    private static final String JDIME_DEFAULT_BASE_KEY = "DEFAULT_BASE";
    private static final String JDIME_DEFAULT_RIGHT_KEY = "DEFAULT_RIGHT";
    private static final String JDIME_EXEC_KEY = "JDIME_EXEC";
    // When true, 'Run' proceeds even if the chosen files do not exist.
    private static final String JDIME_ALLOW_INVALID_KEY = "ALLOW_INVALID";
    // Number of output lines collected before they are pushed to the UI thread in one batch.
    private static final String JDIME_BUFFERED_LINES = "BUFFERED_LINES";
    // JVM options enabling remote debugging of the spawned JDime process.
    private static final String JVM_DEBUG_PARAMS = "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005";
    // Environment variable read by the JDime start script for extra JVM options.
    private static final String STARTSCRIPT_JVM_ENV_VAR = "JAVA_OPTS";
    // Matches command lines that request JDime's 'dumpgraph' mode.
    private static final Pattern DUMP_GRAPH = Pattern.compile(".*-mode\\s+dumpgraph.*");

    // Controls injected from GUI.fxml; package-private ones are also read by State.
    @FXML
    ListView<String> output;
    @FXML
    TextField left;
    @FXML
    TextField base;
    @FXML
    TextField right;
    @FXML
    TextField jDime;
    @FXML
    TextField cmdArgs;
    @FXML
    CheckBox debugMode;
    @FXML
    TabPane tabPane;
    @FXML
    Tab outputTab;
    @FXML
    private StackPane cancelPane;
    @FXML
    private GridPane controlsPane;
    @FXML
    private Button historyPrevious;
    @FXML
    private Button historyNext;

    // Layered configuration (system environment + optional JDime.properties).
    private Config config;
    // Cached value of BUFFERED_LINES (defaults to 100), see loadDefaults().
    private int bufferedLines;
    // Cached value of ALLOW_INVALID, see loadDefaults().
    private boolean allowInvalid;
    // Directory of the last file chosen via a FileChooser; reused as initial directory.
    private File lastChooseDir;
    // All input TextFields, in the order they are validated.
    private List<TextField> textFields;

    // Index into 'history'; equal to history.size() means "current, unfinished state".
    private IntegerProperty historyIndex;
    private ObservableList<State> history;
    // Snapshot of the GUI taken when the user navigates away from the newest state.
    private State inProgress;

    // Background task running the external JDime process, and the process itself.
    private Task<Void> jDimeExec;
    private Process jDimeProcess;

    /**
     * Launches the GUI with the given <code>args</code>.
     *
     * @param args
     *         the command line arguments
     */
    public static void main(String[] args) {
        launch(args);
    }

    @Override
    public void start(Stage primaryStage) throws Exception {
        FXMLLoader loader = new FXMLLoader(getClass().getResource(getClass().getSimpleName() + ".fxml"));

        loader.setController(this);

        Parent root = loader.load();
        Scene scene = new Scene(root);

        textFields = Arrays.asList(left, base, right, jDime, cmdArgs);
        historyIndex = new SimpleIntegerProperty(0);
        history = FXCollections.observableArrayList();

        config = new Config();
        config.addSource(new SysEnvConfigSource());
        loadConfigFile();
        loadDefaults();

        // Disable the history buttons when there is nothing to navigate to.
        SimpleListProperty<State> historyListProp = new SimpleListProperty<>(history);
        BooleanBinding noPrev = historyListProp.emptyProperty().or(historyIndex.isEqualTo(0));
        BooleanBinding noNext = historyListProp.emptyProperty().or(historyIndex.greaterThanOrEqualTo(historyListProp.sizeProperty()));

        historyNext.disableProperty().bind(noNext);
        historyPrevious.disableProperty().bind(noPrev);

        primaryStage.setTitle(TITLE);
        primaryStage.setScene(scene);
        primaryStage.show();
    }

    /**
     * Loads default values for the <code>TextField</code>s from the config file.
     */
    private void loadDefaults() {
        config.get(JDIME_EXEC_KEY).ifPresent(s -> jDime.setText(s.trim()));
        config.get(JDIME_DEFAULT_ARGS_KEY).ifPresent(s -> cmdArgs.setText(s.trim()));
        config.get(JDIME_DEFAULT_LEFT_KEY).ifPresent(left::setText);
        config.get(JDIME_DEFAULT_BASE_KEY).ifPresent(base::setText);
        config.get(JDIME_DEFAULT_RIGHT_KEY).ifPresent(right::setText);
        bufferedLines = config.getInteger(JDIME_BUFFERED_LINES).orElse(100);
        allowInvalid = config.getBoolean(JDIME_ALLOW_INVALID_KEY).orElse(false);
    }

    /**
     * Checks whether the current working directory contains a file called {@value #JDIME_CONF_FILE} and if so adds
     * a <code>PropFileConfigSource</code> to <code>config</code>.
     */
    private void loadConfigFile() {
        File configFile = new File(JDIME_CONF_FILE);

        if (configFile.exists()) {

            try {
                config.addSource(new PropFileConfigSource(configFile));
            } catch (IOException e) {
                LOG.log(Level.WARNING, e, () -> "Could not load " + configFile);
            }
        }
    }

    /**
     * Shows a <code>FileChooser</code> and returns the chosen <code>File</code>. Sets <code>lastChooseDir</code>
     * to the parent file of the returned <code>File</code>.
     *
     * @param event
     *         the <code>ActionEvent</code> that occurred in the action listener
     *
     * @return the chosen <code>File</code> or <code>null</code> if the dialog was closed
     */
    private File getChosenFile(ActionEvent event) {
        FileChooser chooser = new FileChooser();
        Window window = ((Node) event.getTarget()).getScene().getWindow();

        if (lastChooseDir != null && lastChooseDir.isDirectory()) {
            chooser.setInitialDirectory(lastChooseDir);
        }

        return chooser.showOpenDialog(window);
    }

    /**
     * Called when the 'Choose' button for the left file is clicked.
     *
     * @param event
     *         the <code>ActionEvent</code> that occurred
     */
    public void chooseLeft(ActionEvent event) {
        File leftArtifact = getChosenFile(event);

        if (leftArtifact != null) {
            lastChooseDir = leftArtifact.getParentFile();
            left.setText(leftArtifact.getAbsolutePath());
        }
    }

    /**
     * Called when the 'Choose' button for the base file is clicked.
     *
     * @param event
     *         the <code>ActionEvent</code> that occurred
     */
    public void chooseBase(ActionEvent event) {
        File baseArtifact = getChosenFile(event);

        if (baseArtifact != null) {
            lastChooseDir = baseArtifact.getParentFile();
            base.setText(baseArtifact.getAbsolutePath());
        }
    }

    /**
     * Called when the 'Choose' button for the right file is clicked.
     *
     * @param event
     *         the <code>ActionEvent</code> that occurred
     */
    public void chooseRight(ActionEvent event) {
        File rightArtifact = getChosenFile(event);

        if (rightArtifact != null) {
            lastChooseDir = rightArtifact.getParentFile();
            right.setText(rightArtifact.getAbsolutePath());
        }
    }

    /**
     * Called when the 'Choose' button for the JDime executable is clicked.
     *
     * @param event
     *         the <code>ActionEvent</code> that occurred
     */
    public void chooseJDime(ActionEvent event) {
        File jDimeBinary = getChosenFile(event);

        if (jDimeBinary != null) {
            lastChooseDir = jDimeBinary.getParentFile();
            jDime.setText(jDimeBinary.getAbsolutePath());
        }
    }

    /**
     * Called when the '{@literal >}' button for the history is clicked.
     */
    public void historyNext() {
        historyIndex.setValue(historyIndex.get() + 1);

        // Index == size means we are back at the (saved) in-progress state.
        if (historyIndex.get() == history.size()) {
            inProgress.applyTo(this);
        } else {
            history.get(historyIndex.get()).applyTo(this);
        }
    }

    /**
     * Called when the '{@literal <}' button for the history is clicked.
     */
    public void historyPrevious() {
        // Leaving the newest state: snapshot it so historyNext() can restore it.
        if (historyIndex.get() == history.size()) {
            inProgress = State.of(this);
        }

        historyIndex.setValue(historyIndex.get() - 1);
        history.get(historyIndex.get()).applyTo(this);
    }

    /**
     * Called when the 'Cancel' button is clicked.
     */
    public void cancelClicked() throws InterruptedException {
        // NOTE(review): jDimeProcess may still be null here if the task has not yet
        // reached builder.start() — looks like a possible NullPointerException; confirm.
        jDimeProcess.destroyForcibly().waitFor();
        jDimeExec.cancel(true);
    }

    /**
     * Called when the 'Run' button is clicked.
     */
    public void runClicked() {
        // Validate the inputs: cmdArgs is free-form, base is optional, all other
        // fields must point to existing files.
        boolean valid = textFields.stream().allMatch(tf -> {

            if (tf == cmdArgs) {
                return true;
            }

            if (tf == base) {
                return tf.getText().trim().isEmpty() || new File(tf.getText()).exists();
            }

            return new File(tf.getText()).exists();
        });

        // NOTE(review): re-reads the config instead of using the cached
        // 'allowInvalid' field set in loadDefaults() — confirm this is intended.
        if (!valid && !config.getBoolean(JDIME_ALLOW_INVALID_KEY).orElse(false)) {
            return;
        }

        jDimeExec = new Task<Void>() {

            @Override
            protected Void call() throws Exception {
                // Assemble the command line: executable, arguments, then the
                // left/base/right inputs (skipping empty fields).
                ProcessBuilder builder = new ProcessBuilder();
                List<String> command = new ArrayList<>();

                String input;

                input = jDime.getText().trim();
                if (!input.isEmpty()) {
                    command.add(input);
                }

                List<String> args = Arrays.asList(cmdArgs.getText().trim().split("\\s+"));
                if (!args.isEmpty()) {
                    command.addAll(args);
                }

                input = left.getText().trim();
                if (!input.isEmpty()) {
                    command.add(input);
                }

                input = base.getText().trim();
                if (!input.isEmpty()) {
                    command.add(input);
                }

                input = right.getText().trim();
                if (!input.isEmpty()) {
                    command.add(input);
                }

                builder.command(command);
                // Merge stderr into stdout so a single reader sees everything.
                builder.redirectErrorStream(true);

                // Run JDime from the directory containing the executable, if any.
                File workingDir = new File(jDime.getText()).getParentFile();
                if (workingDir != null && workingDir.exists()) {
                    builder.directory(workingDir);
                }

                if (debugMode.isSelected()) {
                    builder.environment().put(STARTSCRIPT_JVM_ENV_VAR, JVM_DEBUG_PARAMS);
                }

                jDimeProcess = builder.start();

                Charset cs = StandardCharsets.UTF_8;

                // Read the process output, batching up to 'bufferedLines' lines
                // per Platform.runLater() call to avoid flooding the UI thread.
                try (BufferedReader r = new BufferedReader(new InputStreamReader(jDimeProcess.getInputStream(), cs))) {
                    List<String> lines = new ArrayList<>(bufferedLines);
                    boolean stop = false;
                    String line;

                    while (!Thread.interrupted() && !stop) {

                        if (r.ready()) {

                            if ((line = r.readLine()) != null) {

                                if (lines.size() < bufferedLines) {
                                    lines.add(line);
                                } else {
                                    // NOTE(review): when the buffer is full, the freshly read
                                    // 'line' is neither added to 'toAdd' nor to the cleared
                                    // 'lines' list — it appears to be dropped; confirm.
                                    List<String> toAdd = new ArrayList<>(lines);

                                    Platform.runLater(() -> output.getItems().addAll(toAdd));
                                    lines.clear();
                                }
                            } else {
                                // End of stream: the process closed its stdout.
                                stop = true;
                            }
                        }

                        try {
                            Thread.sleep(100);
                        } catch (InterruptedException e) {
                            stop = true;
                        }
                    }

                    // Flush whatever is left in the buffer.
                    Platform.runLater(() -> output.getItems().addAll(lines));
                }

                try {
                    jDimeProcess.waitFor();
                } catch (InterruptedException ignored) {
                    jDimeProcess.destroyForcibly();
                }

                return null;
            }
        };

        jDimeExec.setOnRunning(event -> {
            // Lock the controls and expose the cancel button while JDime runs.
            controlsPane.setDisable(true);
            cancelPane.setVisible(true);
        });

        jDimeExec.setOnSucceeded(event -> {
            boolean dumpGraph = DUMP_GRAPH.matcher(cmdArgs.getText()).matches();

            // Drop tabs from a previous run, keeping only the output tab.
            tabPane.getTabs().retainAll(outputTab);

            if (dumpGraph) {
                GraphvizParser parser = new GraphvizParser(output.getItems());
                parser.setOnSucceeded(roots -> {
                    addTabs(parser.getValue());
                    reactivate();
                });
                parser.setOnFailed(event1 -> {
                    LOG.log(Level.WARNING, event1.getSource().getException(), () -> "Graphviz parsing failed.");
                    reactivate();
                });
                new Thread(parser).start();
            } else {
                reactivate();
            }
        });

        jDimeExec.setOnCancelled(event -> {
            reactivate();
        });

        jDimeExec.setOnFailed(event -> {
            LOG.log(Level.WARNING, event.getSource().getException(), () -> "JDime execution failed.");
            reactivate();
        });

        output.setItems(FXCollections.observableArrayList());

        Thread jDimeT = new Thread(jDimeExec);
        jDimeT.setName("JDime Task Thread");
        jDimeT.start();
    }

    /**
     * Saves the current state of the GUI to the history and then reactivates the user controls.
     */
    private void reactivate() {
        State currentState = State.of(GUI.this);

        // Only append if the state actually differs from the newest history entry.
        if (history.isEmpty() || !history.get(history.size() - 1).equals(currentState)) {
            history.add(currentState);
            historyIndex.setValue(history.size());
        }

        cancelPane.setVisible(false);
        controlsPane.setDisable(false);
    }

    /**
     * Adds <code>Tab</code>s containing <code>TreeTableView</code>s for every <code>TreeDumpNode</code> root in the
     * given <code>List</code>.
     *
     * @param roots
     *         the roots of the trees to display
     */
    private void addTabs(List<TreeItem<TreeDumpNode>> roots) {
        roots.forEach(root -> tabPane.getTabs().add(getTreeTableViewTab(root)));
    }

    /**
     * Returns a <code>Tab</code> containing a <code>TreeTableView</code> displaying the with the given
     * <code>root</code>.
     *
     * @param root
     *         the root of the tree to display
     * @return a <code>Tab</code> containing the tree
     */
    private Tab getTreeTableViewTab(TreeItem<TreeDumpNode> root) {
        TreeTableView<TreeDumpNode> tableView = new TreeTableView<>(root);
        TreeTableColumn<TreeDumpNode, String> id = new TreeTableColumn<>("ID");
        TreeTableColumn<TreeDumpNode, String> label = new TreeTableColumn<>("AST Type");

        tableView.setRowFactory(param -> {
            TreeTableRow<TreeDumpNode> row = new TreeTableRow<>();
            // NOTE(review): getItem() is always null at row-creation time in a row
            // factory (rows are created empty and filled later), so the colouring
            // code below appears to never run — confirm against JavaFX docs.
            TreeDumpNode node = row.getItem();

            if (node == null) {
                return row;
            }

            String color = node.getFillColor();

            if (color != null) {

                try {
                    BackgroundFill fill = new BackgroundFill(Color.valueOf(color), CornerRadii.EMPTY, Insets.EMPTY);
                    row.setBackground(new Background(fill));
                } catch (IllegalArgumentException e) {
                    LOG.fine(() -> String.format("Could not convert '%s' to a JavaFX Color.", color));
                }
            }

            return row;
        });

        id.setCellValueFactory(param -> param.getValue().getValue().idProperty());
        label.setCellValueFactory(param -> param.getValue().getValue().labelProperty());

        tableView.getColumns().setAll(Arrays.asList(label, id));

        return new Tab("Tree View", tableView);
    }
}
| optimize imports
| src/de/fosd/jdime/gui/GUI.java | optimize imports |
|
Java | apache-2.0 | 565dd22e258dfb120baef1748ad23f1db78cf9ef | 0 | DataReply/kafka-connect-jdbc,cotedm/kafka-connect-jdbc,cotedm/kafka-connect-jdbc,DataReply/kafka-connect-jdbc | /**
* Copyright 2015 Datamountaineer.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package com.datamountaineer.streamreactor.connect.jdbc.sink;
import com.datamountaineer.streamreactor.connect.jdbc.common.DatabaseMetadata;
import com.datamountaineer.streamreactor.connect.jdbc.common.DbTable;
import com.datamountaineer.streamreactor.connect.jdbc.common.ParameterValidator;
import com.datamountaineer.streamreactor.connect.jdbc.dialect.DbDialect;
import com.zaxxer.hikari.HikariDataSource;
import io.confluent.common.config.ConfigException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Controls the database changes - creating/amending tables.
 * <p>
 * All DDL is executed through the caller-supplied connection; failed statements
 * are retried up to {@code executionRetries} times.  The cached
 * {@link DatabaseMetadata} is updated after every successful change so later
 * calls observe the new structure.
 */
public class Database {
  private final static Logger logger = LoggerFactory.getLogger(Database.class);

  /** Pause between two retry attempts of a failed DDL statement. */
  private static final long RETRY_DELAY_MS = 1000;

  /** Number of attempts for a CREATE/ALTER before the error is propagated. */
  private final int executionRetries;
  /** Tables which may be created automatically when missing. */
  private final Set<String> tablesAllowingAutoCreate;
  /** Tables whose columns may be amended automatically. */
  private final Set<String> tablesAllowingSchemaEvolution;
  /** Cached structure of the target database. */
  private final DatabaseMetadata databaseMetadata;
  /** Renders the CREATE/ALTER statements for the target database flavour. */
  private final DbDialect dbDialect;
  private final HikariDataSource connectionPool;

  /**
   * Creates a new instance.
   *
   * @param connectionPool                pool the DDL connections are taken from; may not be null
   * @param tablesAllowingAutoCreate      tables which may be auto-created; may not be null
   * @param tablesAllowingSchemaEvolution tables which may be auto-amended; may not be null
   * @param databaseMetadata              cached database structure; may not be null
   * @param dbDialect                     SQL dialect in use; may not be null
   * @param executionRetries              how often a failed DDL statement is retried
   */
  public Database(final HikariDataSource connectionPool,
                  final Set<String> tablesAllowingAutoCreate,
                  final Set<String> tablesAllowingSchemaEvolution,
                  final DatabaseMetadata databaseMetadata,
                  final DbDialect dbDialect,
                  final int executionRetries) {
    this.executionRetries = executionRetries;
    ParameterValidator.notNull(connectionPool, "connectionPool");
    ParameterValidator.notNull(databaseMetadata, "databaseMetadata");
    ParameterValidator.notNull(tablesAllowingAutoCreate, "tablesAllowingAutoCreate");
    ParameterValidator.notNull(tablesAllowingSchemaEvolution, "tablesAllowingSchemaEvolution");
    ParameterValidator.notNull(dbDialect, "dbDialect");
    this.connectionPool = connectionPool;
    this.tablesAllowingAutoCreate = tablesAllowingAutoCreate;
    this.tablesAllowingSchemaEvolution = tablesAllowingSchemaEvolution;
    this.databaseMetadata = databaseMetadata;
    this.dbDialect = dbDialect;
  }

  /**
   * Creates missing tables and adds missing columns for the given mapping in a
   * single transaction; the transaction is rolled back when the changes fail.
   *
   * @param tablesToColumnsMap the fields each sink table has to accommodate
   * @throws SQLException if no connection can be obtained or commit/rollback fails
   */
  public void update(final Map<String, Collection<SinkRecordField>> tablesToColumnsMap) throws SQLException {
    DatabaseMetadata.Changes changes = databaseMetadata.getChanges(tablesToColumnsMap);
    final Map<String, Collection<SinkRecordField>> amendmentsMap = changes.getAmendmentMap();
    final Map<String, Collection<SinkRecordField>> createMap = changes.getCreatedMap();
    //short-circuit if there is nothing to change
    if ((createMap == null || createMap.isEmpty()) && (amendmentsMap == null || amendmentsMap.isEmpty())) {
      return;
    }
    Connection connection = null;
    try {
      connection = connectionPool.getConnection();
      connection.setAutoCommit(false);
      createTables(createMap, connection);
      evolveTables(amendmentsMap, connection);
      connection.commit();
    } catch (RuntimeException ex) {
      //the pool itself may have thrown before 'connection' was assigned; the
      //previous unconditional rollback() raised a NullPointerException then
      if (connection != null) {
        connection.rollback();
      }
      throw ex;
    } finally {
      if (connection != null) {
        connection.close();
      }
    }
  }

  /**
   * Creates every table of {@code tableMap} which does not exist yet.  Each such
   * table must be configured for auto-creation; creation is retried on failure.
   *
   * @param tableMap   table name to required fields mapping; may be null/empty
   * @param connection the connection used for the DDL
   */
  public void createTables(final Map<String, Collection<SinkRecordField>> tableMap,
                           final Connection connection) {
    if (tableMap == null || tableMap.size() == 0) {
      return;
    }
    for (final Map.Entry<String, Collection<SinkRecordField>> entry : tableMap.entrySet()) {
      final String tableName = entry.getKey();
      if (databaseMetadata.containsTable(tableName)) {
        continue;
      }
      if (!tablesAllowingAutoCreate.contains(tableName)) {
        throw new ConfigException(String.format("Table %s is not configured with auto-create", entry.getKey()));
      }
      boolean retry = true;
      int retryAttempts = executionRetries;
      while (retry) {
        try {
          final DbTable table = createTable(tableName, entry.getValue(), connection);
          databaseMetadata.update(table);
          retry = false;
        } catch (RuntimeException ex) {
          if (--retryAttempts <= 0) {
            //we want to stop the execution
            throw ex;
          }
          sleepBeforeRetry();
        }
      }
    }
  }

  /**
   * Runs the dialect's CREATE TABLE statement.  If the statement fails because a
   * concurrent task created the table first, the existing table is amended with
   * any still missing columns instead.
   *
   * @param tableName  the table to create
   * @param fields     the fields the table has to accommodate
   * @param connection the connection used for the DDL
   * @return the metadata of the created (or concurrently created) table
   */
  private DbTable createTable(final String tableName,
                              final Collection<SinkRecordField> fields,
                              final Connection connection) {
    final String sql = dbDialect.getCreateQuery(tableName, fields);
    logger.info(String.format("Changing database structure for database %s%s%s",
            databaseMetadata.getDatabaseName(),
            System.lineSeparator(),
            sql));
    Statement statement = null;
    try {
      statement = connection.createStatement();
      statement.execute(sql);
      return DatabaseMetadata.getTableMetadata(connection, tableName);
    } catch (SQLException e) {
      logger.error("Creating table failed,", e);
      //tricky part: work out if the table already exists (lost race with another task)
      try {
        if (DatabaseMetadata.tableExists(connection, tableName)) {
          final DbTable table = DatabaseMetadata.getTableMetadata(connection, tableName);
          //check which of the requested fields are still missing
          final List<SinkRecordField> notPresentFields = missingFields(table, fields);
          //we have some difference; run amend table
          if (notPresentFields != null) {
            return amendTable(tableName, notPresentFields, connection);
          }
          return table;
        } else {
          //table doesn't exist; throw - the layer above will pick up and retry
          throw new RuntimeException(e.getMessage(), e);
        }
      } catch (SQLException e1) {
        logger.error("There was an error on creating the table " + tableName + e1.getMessage(), e1);
        throw new RuntimeException(e1.getMessage(), e1);
      }
    } finally {
      //best-effort close; a close failure must not mask the actual outcome
      try {
        if (statement != null) {
          statement.close();
        }
      } catch (SQLException e) {
        logger.error(e.getMessage(), e);
      }
    }
  }

  /**
   * Adds the missing columns to every table of {@code tableMap}.  Each such table
   * must already exist and be configured for schema evolution; the amendment is
   * retried on failure.
   *
   * @param tableMap   table name to missing fields mapping; may be null/empty
   * @param connection the connection used for the DDL
   */
  private void evolveTables(final Map<String, Collection<SinkRecordField>> tableMap,
                            final Connection connection) {
    if (tableMap == null || tableMap.size() == 0) {
      return;
    }
    for (final Map.Entry<String, Collection<SinkRecordField>> entry : tableMap.entrySet()) {
      final String tableName = entry.getKey();
      if (!databaseMetadata.containsTable(tableName)) {
        throw new RuntimeException(String.format("%s is set for amendments but hasn't been created yet", entry.getKey()));
      }
      if (!tablesAllowingSchemaEvolution.contains(entry.getKey())) {
        logger.warn(String.format("Table %s is not configured with schema evolution", entry.getKey()));
        continue;
      }
      boolean retry = true;
      int retryAttempts = executionRetries;
      while (retry) {
        try {
          final DbTable table = amendTable(tableName, entry.getValue(), connection);
          databaseMetadata.update(table);
          retry = false;
        } catch (RuntimeException ex) {
          if (--retryAttempts <= 0) {
            //we want to stop the execution
            throw ex;
          }
          sleepBeforeRetry();
        }
      }
    }
  }

  /**
   * Runs the dialect's ALTER TABLE statements in one transaction.  If they fail
   * because a concurrent task added (some of) the columns first, the amendment is
   * re-attempted with only the columns which are still missing.
   *
   * @param tableName  the table to amend
   * @param fields     the fields to add
   * @param connection the connection used for the DDL
   * @return the metadata of the amended table
   */
  private DbTable amendTable(final String tableName,
                             final Collection<SinkRecordField> fields,
                             final Connection connection) {
    final List<String> amendTableQueries = dbDialect.getAlterTable(tableName, fields);
    Statement statement = null;
    try {
      connection.setAutoCommit(false);
      statement = connection.createStatement();
      for (String amendTableQuery : amendTableQueries) {
        logger.info(String.format("Changing database structure for database %s%s%s",
                databaseMetadata.getDatabaseName(),
                System.lineSeparator(),
                amendTableQuery));
        statement.execute(amendTableQuery);
      }
      //commit the transaction
      connection.commit();
      return DatabaseMetadata.getTableMetadata(connection, tableName);
    } catch (SQLException e) {
      //see if there was a race with other tasks to add the columns
      try {
        final DbTable table = DatabaseMetadata.getTableMetadata(connection, tableName);
        final List<SinkRecordField> notPresentFields = missingFields(table, fields);
        //we have some difference; run amend table
        if (notPresentFields != null) {
          return amendTable(tableName, notPresentFields, connection);
        }
        return table;
      } catch (SQLException e1) {
        //chain the latest failure as the cause (previously the outer exception
        //was attached, inconsistently with createTable)
        throw new RuntimeException(e1.getMessage(), e1);
      }
    } finally {
      //best-effort close; a close failure must not mask the actual outcome
      try {
        if (statement != null) {
          statement.close();
        }
      } catch (SQLException e) {
        logger.error(e.getMessage(), e);
      }
    }
  }

  /**
   * Returns the subset of {@code fields} not yet present as columns of
   * {@code table}, or {@code null} when the table already contains every field.
   * Shared by {@link #createTable} and {@link #amendTable}, which previously
   * duplicated this loop.
   */
  private static List<SinkRecordField> missingFields(final DbTable table,
                                                     final Collection<SinkRecordField> fields) {
    List<SinkRecordField> notPresentFields = null;
    for (final SinkRecordField f : fields) {
      if (!table.containsColumn(f.getName())) {
        if (notPresentFields == null) {
          notPresentFields = new ArrayList<>();
        }
        notPresentFields.add(f);
      }
    }
    return notPresentFields;
  }

  /**
   * Sleeps for {@link #RETRY_DELAY_MS} before the next retry attempt; restores
   * the thread's interrupt flag if the sleep is interrupted (it was previously
   * swallowed, losing the interruption).
   */
  private static void sleepBeforeRetry() {
    try {
      //should we exponentially wait?
      Thread.sleep(RETRY_DELAY_MS);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
}
| src/main/java/com/datamountaineer/streamreactor/connect/jdbc/sink/Database.java | /**
* Copyright 2015 Datamountaineer.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package com.datamountaineer.streamreactor.connect.jdbc.sink;
import com.datamountaineer.streamreactor.connect.jdbc.common.DatabaseMetadata;
import com.datamountaineer.streamreactor.connect.jdbc.common.DbTable;
import com.datamountaineer.streamreactor.connect.jdbc.common.ParameterValidator;
import com.datamountaineer.streamreactor.connect.jdbc.dialect.DbDialect;
import com.zaxxer.hikari.HikariDataSource;
import io.confluent.common.config.ConfigException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Controls the database changes - creating/amending tables.
 * <p>
 * All DDL is executed through the caller-supplied connection; failed statements
 * are retried up to {@code executionRetries} times.  The cached
 * {@link DatabaseMetadata} is updated after every successful change so later
 * calls observe the new structure.
 */
public class Database {
  private final static Logger logger = LoggerFactory.getLogger(Database.class);

  /** Pause between two retry attempts of a failed DDL statement. */
  private static final long RETRY_DELAY_MS = 1000;

  /** Number of attempts for a CREATE/ALTER before the error is propagated. */
  private final int executionRetries;
  /** Tables which may be created automatically when missing. */
  private final Set<String> tablesAllowingAutoCreate;
  /** Tables whose columns may be amended automatically. */
  private final Set<String> tablesAllowingSchemaEvolution;
  /** Cached structure of the target database. */
  private final DatabaseMetadata databaseMetadata;
  /** Renders the CREATE/ALTER statements for the target database flavour. */
  private final DbDialect dbDialect;
  private final HikariDataSource connectionPool;

  /**
   * Creates a new instance.
   *
   * @param connectionPool                pool the DDL connections are taken from; may not be null
   * @param tablesAllowingAutoCreate      tables which may be auto-created; may not be null
   * @param tablesAllowingSchemaEvolution tables which may be auto-amended; may not be null
   * @param databaseMetadata              cached database structure; may not be null
   * @param dbDialect                     SQL dialect in use; may not be null
   * @param executionRetries              how often a failed DDL statement is retried
   */
  public Database(final HikariDataSource connectionPool,
                  final Set<String> tablesAllowingAutoCreate,
                  final Set<String> tablesAllowingSchemaEvolution,
                  final DatabaseMetadata databaseMetadata,
                  final DbDialect dbDialect,
                  final int executionRetries) {
    this.executionRetries = executionRetries;
    ParameterValidator.notNull(connectionPool, "connectionPool");
    ParameterValidator.notNull(databaseMetadata, "databaseMetadata");
    ParameterValidator.notNull(tablesAllowingAutoCreate, "tablesAllowingAutoCreate");
    ParameterValidator.notNull(tablesAllowingSchemaEvolution, "tablesAllowingSchemaEvolution");
    ParameterValidator.notNull(dbDialect, "dbDialect");
    this.connectionPool = connectionPool;
    this.tablesAllowingAutoCreate = tablesAllowingAutoCreate;
    this.tablesAllowingSchemaEvolution = tablesAllowingSchemaEvolution;
    this.databaseMetadata = databaseMetadata;
    this.dbDialect = dbDialect;
  }

  /**
   * Creates missing tables and adds missing columns for the given mapping in a
   * single transaction; the transaction is rolled back when the changes fail.
   *
   * @param tablesToColumnsMap the fields each sink table has to accommodate
   * @throws SQLException if no connection can be obtained or commit/rollback fails
   */
  public void update(final Map<String, Collection<SinkRecordField>> tablesToColumnsMap) throws SQLException {
    DatabaseMetadata.Changes changes = databaseMetadata.getChanges(tablesToColumnsMap);
    final Map<String, Collection<SinkRecordField>> amendmentsMap = changes.getAmendmentMap();
    final Map<String, Collection<SinkRecordField>> createMap = changes.getCreatedMap();
    //short-circuit if there is nothing to change
    if ((createMap == null || createMap.isEmpty()) && (amendmentsMap == null || amendmentsMap.isEmpty())) {
      return;
    }
    Connection connection = null;
    try {
      connection = connectionPool.getConnection();
      connection.setAutoCommit(false);
      createTables(createMap, connection);
      evolveTables(amendmentsMap, connection);
      connection.commit();
    } catch (RuntimeException ex) {
      //the pool itself may have thrown before 'connection' was assigned; the
      //previous unconditional rollback() raised a NullPointerException then
      if (connection != null) {
        connection.rollback();
      }
      throw ex;
    } finally {
      if (connection != null) {
        connection.close();
      }
    }
  }

  /**
   * Creates every table of {@code tableMap} which does not exist yet.  Each such
   * table must be configured for auto-creation; creation is retried on failure.
   *
   * @param tableMap   table name to required fields mapping; may be null/empty
   * @param connection the connection used for the DDL
   */
  public void createTables(final Map<String, Collection<SinkRecordField>> tableMap,
                           final Connection connection) {
    if (tableMap == null || tableMap.size() == 0) {
      return;
    }
    for (final Map.Entry<String, Collection<SinkRecordField>> entry : tableMap.entrySet()) {
      final String tableName = entry.getKey();
      if (databaseMetadata.containsTable(tableName)) {
        continue;
      }
      if (!tablesAllowingAutoCreate.contains(tableName)) {
        throw new ConfigException(String.format("Table %s is not configured with auto-create", entry.getKey()));
      }
      boolean retry = true;
      int retryAttempts = executionRetries;
      while (retry) {
        try {
          final DbTable table = createTable(tableName, entry.getValue(), connection);
          databaseMetadata.update(table);
          retry = false;
        } catch (RuntimeException ex) {
          if (--retryAttempts <= 0) {
            //we want to stop the execution
            throw ex;
          }
          sleepBeforeRetry();
        }
      }
    }
  }

  /**
   * Runs the dialect's CREATE TABLE statement.  If the statement fails because a
   * concurrent task created the table first, the existing table is amended with
   * any still missing columns instead.
   *
   * @param tableName  the table to create
   * @param fields     the fields the table has to accommodate
   * @param connection the connection used for the DDL
   * @return the metadata of the created (or concurrently created) table
   */
  private DbTable createTable(final String tableName,
                              final Collection<SinkRecordField> fields,
                              final Connection connection) {
    final String sql = dbDialect.getCreateQuery(tableName, fields);
    logger.info(String.format("Changing database structure for database %s%s%s",
            databaseMetadata.getDatabaseName(),
            System.lineSeparator(),
            sql));
    Statement statement = null;
    try {
      statement = connection.createStatement();
      statement.execute(sql);
      return DatabaseMetadata.getTableMetadata(connection, tableName);
    } catch (SQLException e) {
      logger.error("Creating table failed,", e);
      //tricky part: work out if the table already exists (lost race with another task)
      try {
        if (DatabaseMetadata.tableExists(connection, tableName)) {
          final DbTable table = DatabaseMetadata.getTableMetadata(connection, tableName);
          //check which of the requested fields are still missing
          final List<SinkRecordField> notPresentFields = missingFields(table, fields);
          //we have some difference; run amend table
          if (notPresentFields != null) {
            return amendTable(tableName, notPresentFields, connection);
          }
          return table;
        } else {
          //table doesn't exist; throw - the layer above will pick up and retry
          throw new RuntimeException(e.getMessage(), e);
        }
      } catch (SQLException e1) {
        logger.error("There was an error on creating the table " + tableName + e1.getMessage(), e1);
        throw new RuntimeException(e1.getMessage(), e1);
      }
    } finally {
      //best-effort close; a close failure must not mask the actual outcome
      try {
        if (statement != null) {
          statement.close();
        }
      } catch (SQLException e) {
        logger.error(e.getMessage(), e);
      }
    }
  }

  /**
   * Adds the missing columns to every table of {@code tableMap}.  Each such table
   * must already exist and be configured for schema evolution; the amendment is
   * retried on failure.
   *
   * @param tableMap   table name to missing fields mapping; may be null/empty
   * @param connection the connection used for the DDL
   */
  private void evolveTables(final Map<String, Collection<SinkRecordField>> tableMap,
                            final Connection connection) {
    if (tableMap == null || tableMap.size() == 0) {
      return;
    }
    for (final Map.Entry<String, Collection<SinkRecordField>> entry : tableMap.entrySet()) {
      final String tableName = entry.getKey();
      if (!databaseMetadata.containsTable(tableName)) {
        throw new RuntimeException(String.format("%s is set for amendments but hasn't been created yet", entry.getKey()));
      }
      if (!tablesAllowingSchemaEvolution.contains(entry.getKey())) {
        logger.warn(String.format("Table %s is not configured with schema evolution", entry.getKey()));
        continue;
      }
      boolean retry = true;
      int retryAttempts = executionRetries;
      while (retry) {
        try {
          final DbTable table = amendTable(tableName, entry.getValue(), connection);
          databaseMetadata.update(table);
          retry = false;
        } catch (RuntimeException ex) {
          if (--retryAttempts <= 0) {
            //we want to stop the execution
            throw ex;
          }
          sleepBeforeRetry();
        }
      }
    }
  }

  /**
   * Runs the dialect's ALTER TABLE statements in one transaction.  If they fail
   * because a concurrent task added (some of) the columns first, the amendment is
   * re-attempted with only the columns which are still missing.
   *
   * @param tableName  the table to amend
   * @param fields     the fields to add
   * @param connection the connection used for the DDL
   * @return the metadata of the amended table
   */
  private DbTable amendTable(final String tableName,
                             final Collection<SinkRecordField> fields,
                             final Connection connection) {
    final List<String> amendTableQueries = dbDialect.getAlterTable(tableName, fields);
    Statement statement = null;
    try {
      connection.setAutoCommit(false);
      statement = connection.createStatement();
      for (String amendTableQuery : amendTableQueries) {
        logger.info(String.format("Changing database structure for database %s%s%s",
                databaseMetadata.getDatabaseName(),
                System.lineSeparator(),
                amendTableQuery));
        statement.execute(amendTableQuery);
      }
      //commit the transaction
      connection.commit();
      return DatabaseMetadata.getTableMetadata(connection, tableName);
    } catch (SQLException e) {
      //see if there was a race with other tasks to add the columns
      try {
        final DbTable table = DatabaseMetadata.getTableMetadata(connection, tableName);
        final List<SinkRecordField> notPresentFields = missingFields(table, fields);
        //we have some difference; run amend table
        if (notPresentFields != null) {
          return amendTable(tableName, notPresentFields, connection);
        }
        return table;
      } catch (SQLException e1) {
        //chain the latest failure as the cause (previously the outer exception
        //was attached, inconsistently with createTable)
        throw new RuntimeException(e1.getMessage(), e1);
      }
    } finally {
      //best-effort close; a close failure must not mask the actual outcome
      try {
        if (statement != null) {
          statement.close();
        }
      } catch (SQLException e) {
        logger.error(e.getMessage(), e);
      }
    }
  }

  /**
   * Returns the subset of {@code fields} not yet present as columns of
   * {@code table}, or {@code null} when the table already contains every field.
   * Shared by {@link #createTable} and {@link #amendTable}, which previously
   * duplicated this loop.
   */
  private static List<SinkRecordField> missingFields(final DbTable table,
                                                     final Collection<SinkRecordField> fields) {
    List<SinkRecordField> notPresentFields = null;
    for (final SinkRecordField f : fields) {
      if (!table.containsColumn(f.getName())) {
        if (notPresentFields == null) {
          notPresentFields = new ArrayList<>();
        }
        notPresentFields.add(f);
      }
    }
    return notPresentFields;
  }

  /**
   * Sleeps for {@link #RETRY_DELAY_MS} before the next retry attempt; restores
   * the thread's interrupt flag if the sleep is interrupted (it was previously
   * swallowed, losing the interruption).
   */
  private static void sleepBeforeRetry() {
    try {
      //should we exponentially wait?
      Thread.sleep(RETRY_DELAY_MS);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
}
| fixing the check style
| src/main/java/com/datamountaineer/streamreactor/connect/jdbc/sink/Database.java | fixing the check style |
|
Java | apache-2.0 | 782addf84745d2edd363c1850e8058c8666c4e84 | 0 | nikita36078/J2ME-Loader,nikita36078/J2ME-Loader,nikita36078/J2ME-Loader,nikita36078/J2ME-Loader | /*
* Copyright 2017 Nikita Shakarun
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.playsoftware.j2meloader.info;
import android.app.Dialog;
import android.os.Bundle;
import android.text.Html;
import android.text.method.ScrollingMovementMethod;
import android.text.util.Linkify;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
import ru.playsoftware.j2meloader.BuildConfig;
import ru.playsoftware.j2meloader.R;
/**
 * Dialog showing version, contact links and copyright information about the app.
 */
public class AboutDialogFragment extends DialogFragment {

    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        TextView messageView = createMessageView();
        return new AlertDialog.Builder(requireActivity())
                .setTitle(R.string.app_name)
                .setIcon(R.mipmap.ic_launcher)
                .setView(messageView)
                .setPositiveButton(R.string.licenses, (dialog, which) ->
                        new LicensesDialogFragment().show(getParentFragmentManager(), "licenses"))
                .setNeutralButton(R.string.more, (dialog, which) ->
                        new InfoDialogFragment().show(getParentFragmentManager(), "more"))
                .create();
    }

    /**
     * Builds the scrollable, link-enabled {@link TextView} holding the about text.
     */
    private TextView createMessageView() {
        // Assemble the HTML body from the individual string resources.
        StringBuilder aboutHtml = new StringBuilder()
                .append(getText(R.string.version))
                .append(BuildConfig.VERSION_NAME)
                .append(getText(R.string.about_email))
                .append(getText(R.string.about_github))
                .append(getText(R.string.about_4pda))
                .append(getText(R.string.about_xda))
                .append(getText(R.string.about_crowdin))
                .append(getText(R.string.about_copyright));

        TextView view = new TextView(getActivity());
        view.setText(Html.fromHtml(aboutHtml.toString()));
        view.setTextSize(16);
        view.setMovementMethod(new ScrollingMovementMethod());
        // Turn URLs, e-mail addresses etc. into tappable links.
        Linkify.addLinks(view, Linkify.ALL);

        // Convert dp paddings into pixels for the current screen density.
        float density = getResources().getDisplayMetrics().density;
        int horizontalPadding = (int) (density * 20);
        int verticalPadding = (int) (density * 14);
        view.setPadding(horizontalPadding, verticalPadding, horizontalPadding, 0);
        return view;
    }
}
| app/src/main/java/ru/playsoftware/j2meloader/info/AboutDialogFragment.java | /*
* Copyright 2017 Nikita Shakarun
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.playsoftware.j2meloader.info;
import android.app.Dialog;
import android.os.Bundle;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.text.method.ScrollingMovementMethod;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
import ru.playsoftware.j2meloader.BuildConfig;
import ru.playsoftware.j2meloader.R;
/**
 * "About" dialog showing the application name, version and contact links.
 *
 * <p>Bug fix: the previous code installed {@code LinkMovementMethod} (required
 * to make the HTML links clickable) and then immediately replaced it with a
 * plain {@code ScrollingMovementMethod}, which silently disabled link handling.
 * A {@code TextView} has exactly one movement method; since
 * {@code LinkMovementMethod} extends {@code ScrollingMovementMethod}, keeping
 * only the former gives both clickable links and scrolling.</p>
 */
public class AboutDialogFragment extends DialogFragment {

	@NonNull
	@Override
	public Dialog onCreateDialog(Bundle savedInstanceState) {
		// Assemble the about text from localized HTML fragments.
		StringBuilder message = new StringBuilder().append(getText(R.string.version))
				.append(BuildConfig.VERSION_NAME)
				.append(getText(R.string.about_email))
				.append(getText(R.string.about_github))
				.append(getText(R.string.about_4pda))
				.append(getText(R.string.about_xda))
				.append(getText(R.string.about_crowdin))
				.append(getText(R.string.about_copyright));
		TextView tv = new TextView(getActivity());
		// LinkMovementMethod both dispatches clicks on the URL spans produced by
		// Html.fromHtml() AND scrolls the view — do not overwrite it afterwards.
		tv.setMovementMethod(LinkMovementMethod.getInstance());
		tv.setText(Html.fromHtml(message.toString()));
		tv.setTextSize(16);
		// Convert dp padding to pixels for the current screen density.
		float density = getResources().getDisplayMetrics().density;
		int paddingHorizontal = (int) (density * 20);
		int paddingVertical = (int) (density * 14);
		tv.setPadding(paddingHorizontal, paddingVertical, paddingHorizontal, 0);
		AlertDialog.Builder builder = new AlertDialog.Builder(requireActivity());
		builder.setTitle(R.string.app_name)
				.setIcon(R.mipmap.ic_launcher)
				.setView(tv)
				.setPositiveButton(R.string.licenses, (dialog, which) -> {
					LicensesDialogFragment licensesDialogFragment = new LicensesDialogFragment();
					licensesDialogFragment.show(getParentFragmentManager(), "licenses");
				})
				.setNeutralButton(R.string.more, (dialog, which) -> {
					InfoDialogFragment infoDialogFragment = new InfoDialogFragment();
					infoDialogFragment.show(getParentFragmentManager(), "more");
				});
		return builder.create();
	}
}
| Fix links in about dialog
| app/src/main/java/ru/playsoftware/j2meloader/info/AboutDialogFragment.java | Fix links in about dialog |
|
Java | apache-2.0 | 4e373632b63851a42e1408b139edf0a95653df1a | 0 | skeptomai/nightvale | package com.skeptomai;
import com.google.common.util.concurrent.*;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import javax.annotation.ParametersAreNonnullByDefault;
/**
 * Entry point: scrapes a listing page for MP3 links and downloads every one
 * of them concurrently on a fixed-size thread pool.
 */
public class Main {

    private static final Logger log = Logger.getLogger(Main.class.getName());

    /**
     * Parses the command line (option "u" holds the listing URL), collects the
     * MP3 links found on that page and fetches them. An I/O failure while
     * reading the listing is logged and aborts the run.
     */
    public static void main(String[] args) {
        try {
            HashMap<String, String> options = new Cli(args).parse();
            List<String> downloadList = getMP3Filenames(options.get("u"));
            fetchMP3s(downloadList);
        } catch (IOException ioe) {
            log.severe(ioe.getMessage());
        }
    }

    /**
     * Submits one fetch task per URL (one pool thread per available processor)
     * and blocks until every task has finished, successfully or not.
     */
    private static void fetchMP3s(List<String> mp3sToFetch) {
        final ExecutorService pool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        ListeningExecutorService executor = MoreExecutors.listeningDecorator(pool);

        // Submit each URL and remember its future; the callback just reports
        // per-task success/failure on stdout.
        final List<ListenableFuture<String>> pending = new ArrayList<>(mp3sToFetch.size());
        for (String url : mp3sToFetch) {
            ListenableFuture<String> future = executor.submit(new MP3Fetcher(url));
            Futures.addCallback(future, new FutureCallback<String>() {
                public void onSuccess(String result) {
                    System.out.println("Gettin' called back: " + result);
                }

                @ParametersAreNonnullByDefault
                public void onFailure(Throwable thrown) {
                    System.out.println("Shit! Failed: " + thrown.getMessage());
                }
            });
            pending.add(future);
        }

        // successfulAsList tolerates individual failures; get() only waits for
        // all tasks to complete.
        ListenableFuture<List<String>> allDone = Futures.successfulAsList(pending);
        try {
            allDone.get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
        }
        log.info("Finished fetches. All done..");
        pool.shutdown();
    }

    /**
     * Fetches the HTML listing at {@code url} and returns, for each element
     * with class "postDetails", the href of its first external ("http...") link.
     *
     * @throws IOException if the listing page cannot be retrieved
     */
    private static List<String> getMP3Filenames(String url) throws IOException {
        Document page = Jsoup.parse(new URLListFetcher(url).call());
        List<String> links = new ArrayList<>();
        for (Element post : page.body().getElementsByClass("postDetails")) {
            Element anchor = post.getElementsByAttributeValueStarting("href", "http").first();
            if (anchor != null) {
                links.add(anchor.attr("href"));
            }
        }
        return links;
    }
}
| src/main/java/com/skeptomai/Main.java | package com.skeptomai;
import com.google.common.util.concurrent.*;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import javax.annotation.ParametersAreNonnullByDefault;
/**
 * Entry point: scrapes a listing page for MP3 links and downloads every one
 * of them concurrently on a fixed-size thread pool.
 */
public class Main {

    // JUL logger shared by the whole application.
    private static final Logger log = Logger.getLogger(Main.class.getName());

    /**
     * Parses the command line (option "u" holds the listing URL), collects the
     * MP3 links found on that page and fetches them. An I/O failure while
     * reading the listing is logged and aborts the run.
     */
    public static void main(String[] args) {
        try {
            HashMap<String,String> m = new Cli(args).parse();
            List<String> mp3sToFetch = getMP3Filenames(m.get("u"));
            fetchMP3s(mp3sToFetch);
        } catch (IOException ioe) {
            log.severe(ioe.getMessage());
        }
    }

    /**
     * Submits one fetch task per URL (one pool thread per available processor)
     * and blocks until every task has finished, successfully or not.
     */
    private static void fetchMP3s(List<String> mp3sToFetch) {
        final ExecutorService pool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        ListeningExecutorService executor = MoreExecutors
                .listeningDecorator(pool);

        // Submit each URL; the callback only reports per-task success or
        // failure on stdout.
        final List<ListenableFuture<String>> collect = mp3sToFetch.stream().map((s) -> {
            ListenableFuture<String> lf = executor.submit(new MP3Fetcher(s));
            Futures.addCallback(lf, new FutureCallback<String>() {
                public void onSuccess(String result) {
                    System.out.println("Gettin' called back: " + result);
                }

                @ParametersAreNonnullByDefault
                public void onFailure(Throwable thrown) {
                    System.out.println("Shit! Failed: " + thrown.getMessage());
                }
            });
            return lf;
        }).collect(Collectors.toList());

        // successfulAsList tolerates individual task failures; get() only
        // waits for all tasks to complete.
        ListenableFuture<List<String>> lf1 = Futures.successfulAsList(collect);
        try {
            lf1.get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
        }
        log.info("Finished fetches. All done..");
        pool.shutdown();
    }

    /**
     * Fetches the HTML listing at {@code url} and returns, for each element
     * with class "postDetails", the href of its first external ("http...")
     * link.
     *
     * @throws IOException if the listing page cannot be retrieved
     */
    private static List<String> getMP3Filenames(String url) throws IOException {
        List<String> mp3sToFetch = new ArrayList<>();
        URLListFetcher ulf = new URLListFetcher(url);
        Document html = Jsoup.parse(ulf.call());
        for (Element element : html.body().getElementsByClass("postDetails")) {
            Element mp3Link = element.getElementsByAttributeValueStarting("href","http").first();
            if (mp3Link != null) {
                mp3sToFetch.add(mp3Link.attr("href"));
            }
        }
        return mp3sToFetch;
    }
}
| tidy up formatting
| src/main/java/com/skeptomai/Main.java | tidy up formatting |
|
Java | apache-2.0 | b98701c972419f86da545f490ed448df3ecd6dc4 | 0 | dart-maven-plugin/dart-maven-plugin,dart-maven-plugin/dart-maven-plugin,dart-maven-plugin/dart-maven-plugin,dart-maven-plugin/dart-maven-plugin | package com.google.dart;
import java.io.File;
import java.io.OutputStreamWriter;
import java.util.Set;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.cli.CommandLineException;
import org.codehaus.plexus.util.cli.CommandLineUtils;
import org.codehaus.plexus.util.cli.Commandline;
import org.codehaus.plexus.util.cli.StreamConsumer;
import org.codehaus.plexus.util.cli.WriterStreamConsumer;
import com.google.dart.util.OsUtil;
/**
 * Goal to invoke the dart scripts.
 *
 * @author Daniel Zwicker
 */
@Mojo(name = "dart")
public class DartMojo extends PubMojo {

	/**
	 * Insert runtime type checks and enable assertions (checked mode).
	 *
	 * @since 2.0
	 */
	private final static String ARGUMENT_CECKED_MODE = "--checked";

	/**
	 * Where to find packages, that is, "package:..." imports.
	 *
	 * @since 2.0
	 */
	protected final static String ARGUMENT_PACKAGE_PATH = "--package-root=";

	/**
	 * enables debugging and listens on specified port for debugger connections
	 * (default port number is 5858)
	 *
	 * @since 2.0
	 */
	private final static String ARGUMENT_DEBUG = "--debug";

	/**
	 * sets a breakpoint at specified location where <location> is one of :
	 * url:<line_num> e.g. test.dart:10
	 * [<class_name>.]<function_name> e.g. B.foo
	 *
	 * @since 2.0
	 */
	private final static String ARGUMENT_BREAK_AT = "--break_at=";

	/**
	 * executes Dart script present in the specified snapshot file
	 *
	 * @since 2.0
	 */
	private final static String ARGUMENT_USE_SCRIPT_SNAPAHOT = "--use_script_snapshot=";

	/**
	 * The dart script file to execute. Required; validated in
	 * {@link #executeDart()}.
	 *
	 * @since 2.0
	 */
	@Parameter(property = "script")
	private File script;

	/**
	 * Insert runtime type checks and enable assertions (checked mode).
	 *
	 * @since 2.0
	 */
	@Parameter(defaultValue = "false", property = "dart.checkedMode")
	private boolean checkedMode;

	/**
	 * Where to find packages, that is, "package:..." imports.
	 *
	 * @since 2.0
	 */
	@Parameter(property = "dart.packagepath")
	private File packagePath;

	/**
	 * enables debugging and listens on specified port for debugger connections
	 * (default port number is 5858)
	 *
	 * @since 2.0
	 */
	@Parameter(defaultValue = "false", property = "dart.debug")
	private boolean debug;

	/**
	 * enables debugging and listens on specified port for debugger connections
	 * (default port number is 5858)
	 *
	 * @since 2.0
	 */
	@Parameter(property = "dart.debugPort")
	private String debugPort;

	/**
	 * sets a breakpoint at specified location where <location> is one of :
	 * url:<line_num> e.g. test.dart:10
	 * [<class_name>.]<function_name> e.g. B.foo
	 *
	 * @since 2.0
	 */
	@Parameter(property = "dart.breakAt")
	private String breakAt;

	/**
	 * executes Dart script present in the specified snapshot file
	 *
	 * @since 2.0
	 */
	@Parameter(property = "dart.useScriptSnapshot")
	private String useScriptSnapshot;

	/**
	 * Set this to 'true' to skip running dart's packagemanager pub.
	 *
	 * @since 2.0
	 */
	@Parameter(defaultValue = "false", property = "dart.pup.skip")
	private boolean skipPub;

	/**
	 * Resolves pub dependencies for every dart package root and then runs the
	 * configured script with the dart VM.
	 */
	@Override
	public void execute() throws MojoExecutionException, MojoFailureException {
		final Set<File> dartPackageRoots = findDartPackageRoots();
		processPubDependencies(dartPackageRoots);
		executeDart();
	}

	/**
	 * Validates the configured {@link #script} (must be an existing, readable
	 * file), appends it to the base command line and executes the dart VM,
	 * streaming its stdout/stderr to this process' streams. Fails the build if
	 * dart exits with a non-zero code.
	 */
	private void executeDart() throws MojoExecutionException {
		final Commandline cl = createBaseCommandline();
		if (script == null) {
			throw new NullPointerException("Script is required but is null.");
		}
		if (!script.exists() || !script.isFile()) {
			throw new IllegalArgumentException("Script must be a file. scripte=" + script.getAbsolutePath());
		}
		if (!script.canRead()) {
			throw new IllegalArgumentException("Script must be a readable file. scripte=" + script.getAbsolutePath());
		}
		cl.createArg(true).setValue(script.getAbsolutePath());

		final StreamConsumer output = new WriterStreamConsumer(new OutputStreamWriter(System.out));
		final StreamConsumer error = new WriterStreamConsumer(new OutputStreamWriter(System.err));

		getLog().info("Execute dart: " + cl.toString());

		System.out.println();
		System.out.println();

		try {
			final int returnValue = CommandLineUtils.executeCommandLine(cl, output, error);
			if (getLog().isDebugEnabled()) {
				getLog().debug("dart return code: " + returnValue);
			}
			if (returnValue != 0) {
				throw new MojoExecutionException("Dart returned error code " + returnValue);
			}
		} catch (final CommandLineException e) {
			getLog().debug("dart error: ", e);
		}

		System.out.println();
		System.out.println();
	}

	/**
	 * Builds the dart VM command line from the configured flags (checked mode,
	 * debug, break_at, script snapshot, package path). The script itself is
	 * appended later by {@link #executeDart()}.
	 */
	protected Commandline createBaseCommandline() throws MojoExecutionException {

		checkDart();
		String dartPath = getDartExecutable().getAbsolutePath();

		if (getLog().isDebugEnabled()) {
			getLog().debug("Using dart '" + dartPath + "'.");
		}

		final Commandline cl = new Commandline();
		cl.setExecutable(dartPath);

		if (isCheckedMode()) {
			cl.createArg().setValue(ARGUMENT_CECKED_MODE);
		}

		if (isDebug()) {
			cl.createArg().setValue(ARGUMENT_DEBUG + (isDebugPort() ? ":" + debugPort : ""));
		}

		if (isBreakAt()) {
			cl.createArg().setValue(ARGUMENT_BREAK_AT + breakAt);
		}

		if (isUseScriptSnapshot()) {
			cl.createArg().setValue(ARGUMENT_USE_SCRIPT_SNAPAHOT + useScriptSnapshot);
		}

		if (isPackagePath()) {
			cl.createArg().setValue(ARGUMENT_PACKAGE_PATH + packagePath.getAbsolutePath());
		}

		if (getLog().isDebugEnabled()) {
			getLog().debug("Base dart command: " + cl.toString());
		}

		return cl;
	}

	/**
	 * Verifies that the dart SDK is configured and its VM binary is executable.
	 */
	protected void checkDart() throws MojoExecutionException {
		checkDartSdk();
		if (!getDartExecutable().canExecute()) {
			throw new MojoExecutionException("Dart not executable! Configuration error for dartSdk? dartSdk="
					+ getDartSdk().getAbsolutePath());
		}
	}

	// Path of the dart VM inside the SDK; ".exe" suffix on Windows.
	protected File getDartExecutable() {
		return new File(getDartSdk(), "bin/dart" + (OsUtil.isWindows() ? ".exe" : ""));
	}

	protected File getPackagePath() {
		return packagePath;
	}

	@Override
	public boolean isPubSkipped() {
		return skipPub;
	}

	protected boolean isCheckedMode() {
		return checkedMode;
	}

	protected boolean isDebug() {
		return debug;
	}

	protected boolean isDebugPort() {
		return !StringUtils.isEmpty(debugPort);
	}

	protected boolean isPackagePath() {
		return packagePath != null;
	}

	protected boolean isBreakAt() {
		return !StringUtils.isEmpty(breakAt);
	}

	protected boolean isUseScriptSnapshot() {
		return !StringUtils.isEmpty(useScriptSnapshot);
	}
}
| src/main/java/com/google/dart/DartMojo.java | package com.google.dart;
import java.io.File;
import java.io.OutputStreamWriter;
import java.util.Set;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.cli.CommandLineException;
import org.codehaus.plexus.util.cli.CommandLineUtils;
import org.codehaus.plexus.util.cli.Commandline;
import org.codehaus.plexus.util.cli.StreamConsumer;
import org.codehaus.plexus.util.cli.WriterStreamConsumer;
import com.google.dart.util.OsUtil;
/**
 * Goal to invoke the dart scripts.
 *
 * @author Daniel Zwicker
 */
@Mojo(name = "dart")
public class DartMojo extends PubMojo {

	/**
	 * Insert runtime type checks and enable assertions (checked mode).
	 *
	 * @since 2.0
	 */
	private final static String ARGUMENT_CHECKED_MODE = "--checked";

	/**
	 * Where to find packages, that is, "package:..." imports.
	 *
	 * @since 2.0
	 */
	protected final static String ARGUMENT_PACKAGE_PATH = "--package-root=";

	/**
	 * enables debugging and listens on specified port for debugger connections
	 * (default port number is 5858)
	 *
	 * @since 2.0
	 */
	private final static String ARGUMENT_DEBUG = "--debug";

	/**
	 * sets a breakpoint at specified location where <location> is one of :
	 * url:<line_num> e.g. test.dart:10
	 * [<class_name>.]<function_name> e.g. B.foo
	 *
	 * @since 2.0
	 */
	private final static String ARGUMENT_BREAK_AT = "--break_at=";

	/**
	 * executes Dart script present in the specified snapshot file
	 *
	 * @since 2.0
	 */
	private final static String ARGUMENT_USE_SCRIPT_SNAPSHOT = "--use_script_snapshot=";

	/**
	 * The dart script file to execute. Required; validated in
	 * {@link #executeDart()}.
	 *
	 * @since 2.0
	 */
	@Parameter(property = "script")
	private File script;

	/**
	 * Insert runtime type checks and enable assertions (checked mode).
	 *
	 * @since 2.0
	 */
	@Parameter(defaultValue = "false", property = "dart.checkedMode")
	private boolean checkedMode;

	/**
	 * Where to find packages, that is, "package:..." imports.
	 *
	 * @since 2.0
	 */
	@Parameter(property = "dart.packagepath")
	private File packagePath;

	/**
	 * enables debugging and listens on specified port for debugger connections
	 * (default port number is 5858)
	 *
	 * @since 2.0
	 */
	@Parameter(defaultValue = "false", property = "dart.debug")
	private boolean debug;

	/**
	 * enables debugging and listens on specified port for debugger connections
	 * (default port number is 5858)
	 *
	 * @since 2.0
	 */
	@Parameter(property = "dart.debugPort")
	private String debugPort;

	/**
	 * sets a breakpoint at specified location where <location> is one of :
	 * url:<line_num> e.g. test.dart:10
	 * [<class_name>.]<function_name> e.g. B.foo
	 *
	 * @since 2.0
	 */
	@Parameter(property = "dart.breakAt")
	private String breakAt;

	/**
	 * executes Dart script present in the specified snapshot file
	 *
	 * @since 2.0
	 */
	@Parameter(property = "dart.useScriptSnapshot")
	private String useScriptSnapshot;

	/**
	 * Set this to 'true' to skip running dart's packagemanager pub.
	 *
	 * @since 2.0
	 */
	@Parameter(defaultValue = "false", property = "dart.pup.skip")
	private boolean skipPub;

	/**
	 * Resolves pub dependencies for every dart package root and then runs the
	 * configured script with the dart VM.
	 */
	@Override
	public void execute() throws MojoExecutionException, MojoFailureException {
		final Set<File> dartPackageRoots = findDartPackageRoots();
		processPubDependencies(dartPackageRoots);
		executeDart();
	}

	/**
	 * Validates the configured {@link #script} (must be an existing, readable
	 * file), appends it to the base command line and executes the dart VM,
	 * streaming its stdout/stderr to this process' streams. Fails the build if
	 * dart exits with a non-zero code.
	 */
	private void executeDart() throws MojoExecutionException {
		final Commandline cl = createBaseCommandline();
		if (script == null) {
			throw new NullPointerException("Script is required but is null.");
		}
		if (!script.exists() || !script.isFile()) {
			throw new IllegalArgumentException("Script must be a file. scripte=" + script.getAbsolutePath());
		}
		// BUG FIX: the condition was inverted ("if (script.canRead())"), which
		// rejected every readable script and accepted unreadable ones.
		if (!script.canRead()) {
			throw new IllegalArgumentException("Script must be a readable file. scripte=" + script.getAbsolutePath());
		}
		cl.createArg(true).setValue(script.getAbsolutePath());

		final StreamConsumer output = new WriterStreamConsumer(new OutputStreamWriter(System.out));
		final StreamConsumer error = new WriterStreamConsumer(new OutputStreamWriter(System.err));

		getLog().info("Execute dart: " + cl.toString());

		System.out.println();
		System.out.println();

		try {
			final int returnValue = CommandLineUtils.executeCommandLine(cl, output, error);
			if (getLog().isDebugEnabled()) {
				getLog().debug("dart return code: " + returnValue);
			}
			if (returnValue != 0) {
				throw new MojoExecutionException("Dart returned error code " + returnValue);
			}
		} catch (final CommandLineException e) {
			getLog().debug("dart error: ", e);
		}

		System.out.println();
		System.out.println();
	}

	/**
	 * Builds the dart VM command line from the configured flags (checked mode,
	 * debug, break_at, script snapshot, package path). The script itself is
	 * appended later by {@link #executeDart()}.
	 */
	protected Commandline createBaseCommandline() throws MojoExecutionException {

		checkDart();
		String dartPath = getDartExecutable().getAbsolutePath();

		if (getLog().isDebugEnabled()) {
			getLog().debug("Using dart '" + dartPath + "'.");
		}

		final Commandline cl = new Commandline();
		cl.setExecutable(dartPath);

		if (isCheckedMode()) {
			cl.createArg().setValue(ARGUMENT_CHECKED_MODE);
		}

		if (isDebug()) {
			cl.createArg().setValue(ARGUMENT_DEBUG + (isDebugPort() ? ":" + debugPort : ""));
		}

		if (isBreakAt()) {
			cl.createArg().setValue(ARGUMENT_BREAK_AT + breakAt);
		}

		if (isUseScriptSnapshot()) {
			cl.createArg().setValue(ARGUMENT_USE_SCRIPT_SNAPSHOT + useScriptSnapshot);
		}

		if (isPackagePath()) {
			cl.createArg().setValue(ARGUMENT_PACKAGE_PATH + packagePath.getAbsolutePath());
		}

		if (getLog().isDebugEnabled()) {
			getLog().debug("Base dart command: " + cl.toString());
		}

		return cl;
	}

	/**
	 * Verifies that the dart SDK is configured and its VM binary is executable.
	 */
	protected void checkDart() throws MojoExecutionException {
		checkDartSdk();
		if (!getDartExecutable().canExecute()) {
			throw new MojoExecutionException("Dart not executable! Configuration error for dartSdk? dartSdk="
					+ getDartSdk().getAbsolutePath());
		}
	}

	// Path of the dart VM inside the SDK; ".exe" suffix on Windows.
	protected File getDartExecutable() {
		return new File(getDartSdk(), "bin/dart" + (OsUtil.isWindows() ? ".exe" : ""));
	}

	protected File getPackagePath() {
		return packagePath;
	}

	@Override
	public boolean isPubSkipped() {
		return skipPub;
	}

	protected boolean isCheckedMode() {
		return checkedMode;
	}

	protected boolean isDebug() {
		return debug;
	}

	protected boolean isDebugPort() {
		return !StringUtils.isEmpty(debugPort);
	}

	protected boolean isPackagePath() {
		return packagePath != null;
	}

	protected boolean isBreakAt() {
		return !StringUtils.isEmpty(breakAt);
	}

	protected boolean isUseScriptSnapshot() {
		return !StringUtils.isEmpty(useScriptSnapshot);
	}
}
| fix script.canRead() check
| src/main/java/com/google/dart/DartMojo.java | fix script.canRead() check |
|
Java | apache-2.0 | 595b721b2226eac4ed240ff931441786018eb27a | 0 | sekikn/ambari,radicalbit/ambari,arenadata/ambari,arenadata/ambari,radicalbit/ambari,alexryndin/ambari,radicalbit/ambari,alexryndin/ambari,radicalbit/ambari,sekikn/ambari,radicalbit/ambari,alexryndin/ambari,radicalbit/ambari,sekikn/ambari,sekikn/ambari,arenadata/ambari,alexryndin/ambari,alexryndin/ambari,arenadata/ambari,arenadata/ambari,alexryndin/ambari,arenadata/ambari,arenadata/ambari,arenadata/ambari,alexryndin/ambari,sekikn/ambari,alexryndin/ambari,radicalbit/ambari,arenadata/ambari,radicalbit/ambari,alexryndin/ambari,sekikn/ambari,alexryndin/ambari,sekikn/ambari,sekikn/ambari,arenadata/ambari,alexryndin/ambari,sekikn/ambari,arenadata/ambari,radicalbit/ambari,radicalbit/ambari,radicalbit/ambari | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.agent;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.HostNotFoundException;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.ServiceComponentHostNotFoundException;
import org.apache.ambari.server.ServiceComponentNotFoundException;
import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.metadata.ActionMetadata;
import org.apache.ambari.server.state.AgentVersion;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostHealthStatus;
import org.apache.ambari.server.state.HostHealthStatus.HealthStatus;
import org.apache.ambari.server.state.HostState;
import org.apache.ambari.server.state.MaintenanceState;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.State;
import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
import org.apache.ambari.server.state.host.HostHealthyHeartbeatEvent;
import org.apache.ambari.server.state.host.HostRegistrationRequestEvent;
import org.apache.ambari.server.state.host.HostStatusUpdatesReceivedEvent;
import org.apache.ambari.server.state.host.HostUnhealthyHeartbeatEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpFailedEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpSucceededEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStoppedEvent;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.ambari.server.utils.VersionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.gson.Gson;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Singleton;
/**
* This class handles the heartbeats coming from the agent, passes on the information
* to other modules and processes the queue to send heartbeat response.
*/
@Singleton
public class HeartBeatHandler {
private static Log LOG = LogFactory.getLog(HeartBeatHandler.class);
private final Clusters clusterFsm;
private final ActionQueue actionQueue;
private final ActionManager actionManager;
@Inject
Injector injector;
@Inject
Configuration config;
@Inject
AmbariMetaInfo ambariMetaInfo;
@Inject
ActionMetadata actionMetadata;
private HeartbeatMonitor heartbeatMonitor;
@Inject
private Gson gson;
private Map<String, Long> hostResponseIds = new ConcurrentHashMap<String, Long>();
private Map<String, HeartBeatResponse> hostResponses = new ConcurrentHashMap<String, HeartBeatResponse>();
  /**
   * Wires the handler to the cluster state machine, the agent action queue and
   * the action manager, and creates (but does not start) the heartbeat
   * monitor; call {@link #start()} to start monitoring.
   */
  @Inject
  public HeartBeatHandler(Clusters fsm, ActionQueue aq, ActionManager am,
                          Injector injector) {
    this.clusterFsm = fsm;
    this.actionQueue = aq;
    this.actionManager = am;
    // 60000 ms — presumably the monitor's check interval/timeout;
    // TODO confirm against HeartbeatMonitor's constructor contract.
    this.heartbeatMonitor = new HeartbeatMonitor(fsm, aq, am, 60000, injector);
    injector.injectMembers(this);
  }
  /**
   * Starts the background heartbeat monitor.
   */
  public void start() {
    heartbeatMonitor.start();
  }
  /**
   * Replaces the heartbeat monitor. Package-private — presumably used by unit
   * tests to inject a mock monitor; confirm before widening visibility.
   */
  void setHeartbeatMonitor(HeartbeatMonitor heartbeatMonitor) {
    this.heartbeatMonitor = heartbeatMonitor;
  }
public HeartBeatResponse handleHeartBeat(HeartBeat heartbeat)
throws AmbariException {
long now = System.currentTimeMillis();
if(heartbeat.getAgentEnv() != null && heartbeat.getAgentEnv().getHostHealth() != null) {
heartbeat.getAgentEnv().getHostHealth().setServerTimeStampAtReporting(now);
}
String hostname = heartbeat.getHostname();
Long currentResponseId = hostResponseIds.get(hostname);
HeartBeatResponse response;
if (currentResponseId == null) {
//Server restarted, or unknown host.
LOG.error("CurrentResponseId unknown for " + hostname + " - send register command");
return createRegisterCommand();
}
LOG.debug("Received heartbeat from host"
+ ", hostname=" + hostname
+ ", currentResponseId=" + currentResponseId
+ ", receivedResponseId=" + heartbeat.getResponseId());
if (heartbeat.getResponseId() == currentResponseId - 1) {
LOG.warn("Old responseId received - response was lost - returning cached response");
return hostResponses.get(hostname);
} else if (heartbeat.getResponseId() != currentResponseId) {
LOG.error("Error in responseId sequence - sending agent restart command");
return createRestartCommand(currentResponseId);
}
response = new HeartBeatResponse();
response.setResponseId(++currentResponseId);
Host hostObject = clusterFsm.getHost(hostname);
if (hostObject.getState().equals(HostState.HEARTBEAT_LOST)) {
// After loosing heartbeat agent should reregister
LOG.warn("Host is in HEARTBEAT_LOST state - sending register command");
return createRegisterCommand();
}
hostResponseIds.put(hostname, currentResponseId);
hostResponses.put(hostname, response);
HostState hostState = hostObject.getState();
// If the host is waiting for component status updates, notify it
if (heartbeat.componentStatus.size() > 0
&& hostObject.getState().equals(HostState.WAITING_FOR_HOST_STATUS_UPDATES)) {
try {
LOG.debug("Got component status updates");
hostObject.handleEvent(new HostStatusUpdatesReceivedEvent(hostname, now));
} catch (InvalidStateTransitionException e) {
LOG.warn("Failed to notify the host about component status updates", e);
}
}
try {
if (heartbeat.getNodeStatus().getStatus().equals(HostStatus.Status.HEALTHY)) {
hostObject.handleEvent(new HostHealthyHeartbeatEvent(hostname, now,
heartbeat.getAgentEnv(), heartbeat.getMounts()));
} else {
hostObject.handleEvent(new HostUnhealthyHeartbeatEvent(hostname, now,
null));
}
} catch (InvalidStateTransitionException ex) {
LOG.warn("Asking agent to reregister due to " + ex.getMessage(), ex);
hostObject.setState(HostState.INIT);
return createRegisterCommand();
}
//Examine heartbeat for command reports
processCommandReports(heartbeat, hostname, clusterFsm, now);
// Examine heartbeart for component live status reports
processStatusReports(heartbeat, hostname, clusterFsm);
// Calculate host status
// NOTE: This step must be after processing command/status reports
processHostStatus(heartbeat, hostname);
// Send commands if node is active
if (hostObject.getState().equals(HostState.HEALTHY)) {
sendCommands(hostname, response);
annotateResponse(hostname, response);
}
return response;
}
  /**
   * Recomputes and persists the host's health status from the current state of
   * its service components.
   *
   * <p>The status is only recalculated when the agent reported component
   * statuses in this heartbeat, or when a command report indicates a completed
   * command that may have changed a component's state (custom actions are
   * skipped). Hosts whose status is UNKNOWN are left untouched. Hosts that
   * belong to no cluster are always marked HEALTHY.</p>
   *
   * @param heartbeat the heartbeat being processed
   * @param hostname  host the heartbeat came from
   * @throws AmbariException on cluster/component lookup failure
   */
  protected void processHostStatus(HeartBeat heartbeat, String hostname) throws AmbariException {

    Host host = clusterFsm.getHost(hostname);
    HealthStatus healthStatus = host.getHealthStatus().getHealthStatus();

    if (!healthStatus.equals(HostHealthStatus.HealthStatus.UNKNOWN)) {

      List<ComponentStatus> componentStatuses = heartbeat.getComponentStatus();
      //Host status info could be calculated only if agent returned statuses in heartbeat
      //Or, if a command is executed that can change component status
      boolean calculateHostStatus = false;
      String clusterName = null;
      if (componentStatuses.size() > 0) {
        calculateHostStatus = true;
        // All statuses are assumed to carry the same cluster name; take the first.
        for (ComponentStatus componentStatus : componentStatuses) {
          clusterName = componentStatus.getClusterName();
          break;
        }
      }

      if (!calculateHostStatus) {
        // No live statuses — look for a COMPLETED command report that could
        // have changed a component's state.
        List<CommandReport> reports = heartbeat.getReports();
        for (CommandReport report : reports) {
          if (RoleCommand.ACTIONEXECUTE.toString().equals(report.getRoleCommand())) {
            continue;
          }

          String service = report.getServiceName();
          if (actionMetadata.getActions(service.toLowerCase()).contains(report.getRole())) {
            // Custom actions do not affect component state.
            continue;
          }
          if (report.getStatus().equals("COMPLETED")) {
            calculateHostStatus = true;
            clusterName = report.getClusterName();
            break;
          }
        }
      }

      if (calculateHostStatus) {
        //Use actual component status to compute the host status
        int masterCount = 0;
        int mastersRunning = 0;
        int slaveCount = 0;
        int slavesRunning = 0;

        StackId stackId;
        Cluster cluster = clusterFsm.getCluster(clusterName);
        stackId = cluster.getDesiredStackVersion();

        MaintenanceStateHelper psh = injector.getInstance(MaintenanceStateHelper.class);

        // Count MASTER/SLAVE components on this host and how many of them are
        // STARTED; components in maintenance mode are excluded.
        List<ServiceComponentHost> scHosts = cluster.getServiceComponentHosts(heartbeat.getHostname());
        for (ServiceComponentHost scHost : scHosts) {
          ComponentInfo componentInfo =
              ambariMetaInfo.getComponent(stackId.getStackName(),
                  stackId.getStackVersion(), scHost.getServiceName(),
                  scHost.getServiceComponentName());

          String status = scHost.getState().name();

          String category = componentInfo.getCategory();

          if (MaintenanceState.OFF == psh.getEffectiveState(scHost)) {
            if (category.equals("MASTER")) {
              ++masterCount;
              if (status.equals("STARTED")) {
                ++mastersRunning;
              }
            } else if (category.equals("SLAVE")) {
              ++slaveCount;
              if (status.equals("STARTED")) {
                ++slavesRunning;
              }
            }
          }
        }

        // HEALTHY: everything running; UNHEALTHY: some master down;
        // ALERT: only slaves down.
        if (masterCount == mastersRunning && slaveCount == slavesRunning) {
          healthStatus = HostHealthStatus.HealthStatus.HEALTHY;
        } else if (masterCount > 0 && mastersRunning < masterCount) {
          healthStatus = HostHealthStatus.HealthStatus.UNHEALTHY;
        } else {
          healthStatus = HostHealthStatus.HealthStatus.ALERT;
        }

        host.setStatus(healthStatus.name());
        host.persist();
      }

      //If host doesn't belongs to any cluster
      if ((clusterFsm.getClustersForHost(host.getHostName())).size() == 0) {
        healthStatus = HostHealthStatus.HealthStatus.HEALTHY;
        host.setStatus(healthStatus.name());
        host.persist();
      }
    }
  }
protected void processCommandReports(
HeartBeat heartbeat, String hostname, Clusters clusterFsm, long now)
throws AmbariException {
List<CommandReport> reports = heartbeat.getReports();
for (CommandReport report : reports) {
LOG.debug("Received command report: " + report);
if (RoleCommand.ACTIONEXECUTE.toString().equals(report.getRoleCommand()) ||
RoleCommand.CUSTOM_COMMAND.toString().equals(report.getRoleCommand())) {
continue;
}
Cluster cl = clusterFsm.getCluster(report.getClusterName());
String service = report.getServiceName();
if (service == null || service.isEmpty()) {
throw new AmbariException("Invalid command report, service: " + service);
}
if (actionMetadata.getActions(service.toLowerCase()).contains(report.getRole())) {
LOG.debug(report.getRole() + " is an action - skip component lookup");
} else {
try {
Service svc = cl.getService(service);
ServiceComponent svcComp = svc.getServiceComponent(report.getRole());
ServiceComponentHost scHost = svcComp.getServiceComponentHost(hostname);
String schName = scHost.getServiceComponentName();
State state = scHost.getState();
if (report.getStatus().equals("COMPLETED")) {
// Updating stack version, if needed
if (scHost.getState().equals(State.UPGRADING)) {
scHost.setStackVersion(scHost.getDesiredStackVersion());
} else if (report.getRoleCommand().equals(RoleCommand.START.toString())
&& null != report.getConfigurationTags()
&& !report.getConfigurationTags().isEmpty()) {
LOG.info("Updating applied config on service " + scHost.getServiceName() +
", component " + scHost.getServiceComponentName() + ", host " + scHost.getHostName());
scHost.updateActualConfigs(report.getConfigurationTags());
}
if (RoleCommand.START.toString().equals(report.getRoleCommand())) {
scHost.handleEvent(new ServiceComponentHostStartedEvent(schName,
hostname, now));
} else if (RoleCommand.STOP.toString().equals(report.getRoleCommand())) {
scHost.handleEvent(new ServiceComponentHostStoppedEvent(schName,
hostname, now));
} else {
scHost.handleEvent(new ServiceComponentHostOpSucceededEvent(schName,
hostname, now));
}
} else if (report.getStatus().equals("FAILED")) {
LOG.warn("Operation failed - may be retried. Service component host: "
+ schName + ", host: " + hostname + " Action id" + report.getActionId());
if (actionManager.isInProgressCommand(report)) {
scHost.handleEvent(new ServiceComponentHostOpFailedEvent
(schName, hostname, now));
} else {
LOG.info("Received report for a command that is no longer active. " + report);
}
} else if (report.getStatus().equals("IN_PROGRESS")) {
scHost.handleEvent(new ServiceComponentHostOpInProgressEvent(schName,
hostname, now));
}
} catch (ServiceComponentNotFoundException scnex) {
LOG.warn("Service component not found ", scnex);
} catch (InvalidStateTransitionException ex) {
LOG.warn("State machine exception", ex);
}
}
}
//Update state machines from reports
actionManager.processTaskResponse(hostname, reports);
}
protected void processStatusReports(HeartBeat heartbeat,
String hostname,
Clusters clusterFsm)
throws AmbariException {
Set<Cluster> clusters = clusterFsm.getClustersForHost(hostname);
for (Cluster cl : clusters) {
for (ComponentStatus status : heartbeat.componentStatus) {
if (status.getClusterName().equals(cl.getClusterName())) {
try {
Service svc = cl.getService(status.getServiceName());
String componentName = status.getComponentName();
if (svc.getServiceComponents().containsKey(componentName)) {
ServiceComponent svcComp = svc.getServiceComponent(
componentName);
ServiceComponentHost scHost = svcComp.getServiceComponentHost(
hostname);
State prevState = scHost.getState();
State liveState = State.valueOf(State.class, status.getStatus());
if (prevState.equals(State.INSTALLED)
|| prevState.equals(State.STARTED)
|| prevState.equals(State.STARTING)
|| prevState.equals(State.STOPPING)
|| prevState.equals(State.UNKNOWN)) {
scHost.setState(liveState);
if (!prevState.equals(liveState)) {
LOG.info("State of service component " + componentName
+ " of service " + status.getServiceName()
+ " of cluster " + status.getClusterName()
+ " has changed from " + prevState + " to " + liveState
+ " at host " + hostname);
}
}
if (null != status.getStackVersion() && !status.getStackVersion().isEmpty()) {
scHost.setStackVersion(gson.fromJson(status.getStackVersion(), StackId.class));
}
if (null != status.getConfigTags()) {
scHost.updateActualConfigs(status.getConfigTags());
}
} else {
// TODO: What should be done otherwise?
}
} catch (ServiceNotFoundException e) {
LOG.warn("Received a live status update for a non-initialized"
+ " service"
+ ", clusterName=" + status.getClusterName()
+ ", serviceName=" + status.getServiceName());
// FIXME ignore invalid live update and continue for now?
continue;
} catch (ServiceComponentNotFoundException e) {
LOG.warn("Received a live status update for a non-initialized"
+ " servicecomponent"
+ ", clusterName=" + status.getClusterName()
+ ", serviceName=" + status.getServiceName()
+ ", componentName=" + status.getComponentName());
// FIXME ignore invalid live update and continue for now?
continue;
} catch (ServiceComponentHostNotFoundException e) {
LOG.warn("Received a live status update for a non-initialized"
+ " service"
+ ", clusterName=" + status.getClusterName()
+ ", serviceName=" + status.getServiceName()
+ ", componentName=" + status.getComponentName()
+ ", hostname=" + hostname);
// FIXME ignore invalid live update and continue for now?
continue;
} catch (RuntimeException e) {
LOG.warn("Received a live status with invalid payload"
+ " service"
+ ", clusterName=" + status.getClusterName()
+ ", serviceName=" + status.getServiceName()
+ ", componentName=" + status.getComponentName()
+ ", hostname=" + hostname
+ ", error=" + e.getMessage());
continue;
}
}
}
}
}
/**
* Adds commands from action queue to a heartbeat responce
*/
protected void sendCommands(String hostname, HeartBeatResponse response)
throws AmbariException {
List<AgentCommand> cmds = actionQueue.dequeueAll(hostname);
if (cmds != null && !cmds.isEmpty()) {
for (AgentCommand ac : cmds) {
try {
if (LOG.isDebugEnabled()) {
LOG.debug("Sending command string = " + StageUtils.jaxbToString(ac));
}
} catch (Exception e) {
throw new AmbariException("Could not get jaxb string for command", e);
}
switch (ac.getCommandType()) {
case EXECUTION_COMMAND: {
response.addExecutionCommand((ExecutionCommand) ac);
break;
}
case STATUS_COMMAND: {
response.addStatusCommand((StatusCommand) ac);
break;
}
default:
LOG.error("There is no action for agent command =" +
ac.getCommandType().name());
}
}
}
}
public String getOsType(String os, String osRelease) {
String osType = "";
if (os != null) {
osType = os;
}
if (osRelease != null) {
String[] release = osRelease.split("\\.");
if (release.length > 0) {
osType += release[0];
}
}
return osType.toLowerCase();
}
protected HeartBeatResponse createRegisterCommand() {
HeartBeatResponse response = new HeartBeatResponse();
RegistrationCommand regCmd = new RegistrationCommand();
response.setResponseId(0);
response.setRegistrationCommand(regCmd);
return response;
}
protected HeartBeatResponse createRestartCommand(Long currentResponseId) {
HeartBeatResponse response = new HeartBeatResponse();
response.setRestartAgent(true);
response.setResponseId(currentResponseId);
return response;
}
public RegistrationResponse handleRegistration(Register register)
throws InvalidStateTransitionException, AmbariException {
String hostname = register.getHostname();
int currentPingPort = register.getCurrentPingPort();
long now = System.currentTimeMillis();
String agentVersion = register.getAgentVersion();
String serverVersion = ambariMetaInfo.getServerVersion();
if (!VersionUtils.areVersionsEqual(serverVersion, agentVersion, true)) {
LOG.warn("Received registration request from host with non compatible"
+ " agent version"
+ ", hostname=" + hostname
+ ", agentVersion=" + agentVersion
+ ", serverVersion=" + serverVersion);
throw new AmbariException("Cannot register host with non compatible"
+ " agent version"
+ ", hostname=" + hostname
+ ", agentVersion=" + agentVersion
+ ", serverVersion=" + serverVersion);
}
String agentOsType = getOsType(register.getHardwareProfile().getOS(),
register.getHardwareProfile().getOSRelease());
if (!ambariMetaInfo.isOsSupported(agentOsType)) {
LOG.warn("Received registration request from host with not supported"
+ " os type"
+ ", hostname=" + hostname
+ ", serverOsType=" + config.getServerOsType()
+ ", agentOstype=" + agentOsType);
throw new AmbariException("Cannot register host with not supported"
+ " os type"
+ ", hostname=" + hostname
+ ", serverOsType=" + config.getServerOsType()
+ ", agentOstype=" + agentOsType);
}
Host hostObject;
try {
hostObject = clusterFsm.getHost(hostname);
} catch (HostNotFoundException ex) {
clusterFsm.addHost(hostname);
hostObject = clusterFsm.getHost(hostname);
}
// Resetting host state
hostObject.setState(HostState.INIT);
// Set ping port for agent
hostObject.setCurrentPingPort(currentPingPort);
// Get status of service components
List<StatusCommand> cmds = heartbeatMonitor.generateStatusCommands(hostname);
hostObject.handleEvent(new HostRegistrationRequestEvent(hostname,
null != register.getPublicHostname() ? register.getPublicHostname() : hostname,
new AgentVersion(register.getAgentVersion()), now, register.getHardwareProfile(),
register.getAgentEnv()));
RegistrationResponse response = new RegistrationResponse();
if (cmds.isEmpty()) {
//No status commands needed let the fsm know that status step is done
hostObject.handleEvent(new HostStatusUpdatesReceivedEvent(hostname,
now));
}
response.setStatusCommands(cmds);
response.setResponseStatus(RegistrationStatus.OK);
Long requestId = 0L;
hostResponseIds.put(hostname, requestId);
response.setResponseId(requestId);
return response;
}
/**
* Annotate the response with some housekeeping details.
* hasMappedComponents - indicates if any components are mapped to the host
* @param hostname
* @param response
* @throws AmbariException
*/
private void annotateResponse(String hostname, HeartBeatResponse response) throws AmbariException {
for (Cluster cl : this.clusterFsm.getClustersForHost(hostname)) {
List<ServiceComponentHost> scHosts = cl.getServiceComponentHosts(hostname);
if (scHosts != null && scHosts.size() > 0) {
response.setHasMappedComponents(true);
break;
}
}
}
}
| ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.agent;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.HostNotFoundException;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.ServiceComponentHostNotFoundException;
import org.apache.ambari.server.ServiceComponentNotFoundException;
import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.metadata.ActionMetadata;
import org.apache.ambari.server.state.AgentVersion;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostHealthStatus;
import org.apache.ambari.server.state.HostHealthStatus.HealthStatus;
import org.apache.ambari.server.state.HostState;
import org.apache.ambari.server.state.MaintenanceState;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.State;
import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
import org.apache.ambari.server.state.host.HostHealthyHeartbeatEvent;
import org.apache.ambari.server.state.host.HostRegistrationRequestEvent;
import org.apache.ambari.server.state.host.HostStatusUpdatesReceivedEvent;
import org.apache.ambari.server.state.host.HostUnhealthyHeartbeatEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpFailedEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpSucceededEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEvent;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStoppedEvent;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.ambari.server.utils.VersionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.gson.Gson;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Singleton;
/**
* This class handles the heartbeats coming from the agent, passes on the information
* to other modules and processes the queue to send heartbeat response.
*/
@Singleton
public class HeartBeatHandler {
private static Log LOG = LogFactory.getLog(HeartBeatHandler.class);
private final Clusters clusterFsm;
private final ActionQueue actionQueue;
private final ActionManager actionManager;
@Inject
Injector injector;
@Inject
Configuration config;
@Inject
AmbariMetaInfo ambariMetaInfo;
@Inject
ActionMetadata actionMetadata;
private HeartbeatMonitor heartbeatMonitor;
@Inject
private Gson gson;
private Map<String, Long> hostResponseIds = new ConcurrentHashMap<String, Long>();
private Map<String, HeartBeatResponse> hostResponses = new ConcurrentHashMap<String, HeartBeatResponse>();
@Inject
public HeartBeatHandler(Clusters fsm, ActionQueue aq, ActionManager am,
Injector injector) {
this.clusterFsm = fsm;
this.actionQueue = aq;
this.actionManager = am;
this.heartbeatMonitor = new HeartbeatMonitor(fsm, aq, am, 60000, injector);
injector.injectMembers(this);
}
public void start() {
heartbeatMonitor.start();
}
void setHeartbeatMonitor(HeartbeatMonitor heartbeatMonitor) {
this.heartbeatMonitor = heartbeatMonitor;
}
public HeartBeatResponse handleHeartBeat(HeartBeat heartbeat)
throws AmbariException {
long now = System.currentTimeMillis();
if(heartbeat.getAgentEnv() != null && heartbeat.getAgentEnv().getHostHealth() != null) {
heartbeat.getAgentEnv().getHostHealth().setServerTimeStampAtReporting(now);
}
String hostname = heartbeat.getHostname();
Long currentResponseId = hostResponseIds.get(hostname);
HeartBeatResponse response;
if (currentResponseId == null) {
//Server restarted, or unknown host.
LOG.error("CurrentResponseId unknown for " + hostname + " - send register command");
return createRegisterCommand();
}
LOG.debug("Received heartbeat from host"
+ ", hostname=" + hostname
+ ", currentResponseId=" + currentResponseId
+ ", receivedResponseId=" + heartbeat.getResponseId());
if (heartbeat.getResponseId() == currentResponseId - 1) {
LOG.warn("Old responseId received - response was lost - returning cached response");
return hostResponses.get(hostname);
} else if (heartbeat.getResponseId() != currentResponseId) {
LOG.error("Error in responseId sequence - sending agent restart command");
return createRestartCommand(currentResponseId);
}
response = new HeartBeatResponse();
response.setResponseId(++currentResponseId);
Host hostObject = clusterFsm.getHost(hostname);
if (hostObject.getState().equals(HostState.HEARTBEAT_LOST)) {
// After loosing heartbeat agent should reregister
LOG.warn("Host is in HEARTBEAT_LOST state - sending register command");
return createRegisterCommand();
}
hostResponseIds.put(hostname, currentResponseId);
hostResponses.put(hostname, response);
HostState hostState = hostObject.getState();
// If the host is waiting for component status updates, notify it
if (heartbeat.componentStatus.size() > 0
&& hostObject.getState().equals(HostState.WAITING_FOR_HOST_STATUS_UPDATES)) {
try {
LOG.debug("Got component status updates");
hostObject.handleEvent(new HostStatusUpdatesReceivedEvent(hostname, now));
} catch (InvalidStateTransitionException e) {
LOG.warn("Failed to notify the host about component status updates", e);
}
}
try {
if (heartbeat.getNodeStatus().getStatus().equals(HostStatus.Status.HEALTHY)) {
hostObject.handleEvent(new HostHealthyHeartbeatEvent(hostname, now,
heartbeat.getAgentEnv(), heartbeat.getMounts()));
} else {
hostObject.handleEvent(new HostUnhealthyHeartbeatEvent(hostname, now,
null));
}
} catch (InvalidStateTransitionException ex) {
LOG.warn("Asking agent to reregister due to " + ex.getMessage(), ex);
hostObject.setState(HostState.INIT);
return createRegisterCommand();
}
//Examine heartbeat for command reports
processCommandReports(heartbeat, hostname, clusterFsm, now);
// Examine heartbeart for component live status reports
processStatusReports(heartbeat, hostname, clusterFsm);
// Calculate host status
// NOTE: This step must be after processing command/status reports
processHostStatus(heartbeat, hostname);
// Send commands if node is active
if (hostObject.getState().equals(HostState.HEALTHY)) {
sendCommands(hostname, response);
annotateResponse(hostname, response);
}
return response;
}
protected void processHostStatus(HeartBeat heartbeat, String hostname) throws AmbariException {
Host host = clusterFsm.getHost(hostname);
HealthStatus healthStatus = host.getHealthStatus().getHealthStatus();
if (!healthStatus.equals(HostHealthStatus.HealthStatus.UNKNOWN)) {
List<ComponentStatus> componentStatuses = heartbeat.getComponentStatus();
//Host status info could be calculated only if agent returned statuses in heartbeat
//Or, if a command is executed that can change component status
boolean calculateHostStatus = false;
String clusterName = null;
if (componentStatuses.size() > 0) {
calculateHostStatus = true;
for (ComponentStatus componentStatus : componentStatuses) {
clusterName = componentStatus.getClusterName();
break;
}
}
if (!calculateHostStatus) {
List<CommandReport> reports = heartbeat.getReports();
for (CommandReport report : reports) {
if (RoleCommand.ACTIONEXECUTE.toString().equals(report.getRoleCommand())) {
continue;
}
String service = report.getServiceName();
if (actionMetadata.getActions(service.toLowerCase()).contains(report.getRole())) {
continue;
}
if (report.getStatus().equals("COMPLETED")) {
calculateHostStatus = true;
clusterName = report.getClusterName();
break;
}
}
}
if (calculateHostStatus) {
//Use actual component status to compute the host status
int masterCount = 0;
int mastersRunning = 0;
int slaveCount = 0;
int slavesRunning = 0;
StackId stackId;
Cluster cluster = clusterFsm.getCluster(clusterName);
stackId = cluster.getDesiredStackVersion();
MaintenanceStateHelper psh = injector.getInstance(MaintenanceStateHelper.class);
List<ServiceComponentHost> scHosts = cluster.getServiceComponentHosts(heartbeat.getHostname());
for (ServiceComponentHost scHost : scHosts) {
ComponentInfo componentInfo =
ambariMetaInfo.getComponent(stackId.getStackName(),
stackId.getStackVersion(), scHost.getServiceName(),
scHost.getServiceComponentName());
String status = scHost.getState().name();
String category = componentInfo.getCategory();
if (MaintenanceState.OFF == psh.getEffectiveState(scHost)) {
if (category.equals("MASTER")) {
++masterCount;
if (status.equals("STARTED")) {
++mastersRunning;
}
} else if (category.equals("SLAVE")) {
++slaveCount;
if (status.equals("STARTED")) {
++slavesRunning;
}
}
}
}
if (masterCount == mastersRunning && slaveCount == slavesRunning) {
healthStatus = HostHealthStatus.HealthStatus.HEALTHY;
} else if (masterCount > 0 && mastersRunning < masterCount) {
healthStatus = HostHealthStatus.HealthStatus.UNHEALTHY;
} else {
healthStatus = HostHealthStatus.HealthStatus.ALERT;
}
host.setStatus(healthStatus.name());
host.persist();
}
//If host doesn't belongs to any cluster
if ((clusterFsm.getClustersForHost(host.getHostName())).size() == 0) {
healthStatus = HostHealthStatus.HealthStatus.HEALTHY;
host.setStatus(healthStatus.name());
host.persist();
}
}
}
protected void processCommandReports(
HeartBeat heartbeat, String hostname, Clusters clusterFsm, long now)
throws AmbariException {
List<CommandReport> reports = heartbeat.getReports();
for (CommandReport report : reports) {
LOG.debug("Received command report: " + report);
if (RoleCommand.ACTIONEXECUTE.toString().equals(report.getRoleCommand()) ||
RoleCommand.CUSTOM_COMMAND.toString().equals(report.getRoleCommand())) {
continue;
}
Cluster cl = clusterFsm.getCluster(report.getClusterName());
String service = report.getServiceName();
if (service == null || service.isEmpty()) {
throw new AmbariException("Invalid command report, service: " + service);
}
if (actionMetadata.getActions(service.toLowerCase()).contains(report.getRole())) {
LOG.debug(report.getRole() + " is an action - skip component lookup");
} else {
try {
Service svc = cl.getService(service);
ServiceComponent svcComp = svc.getServiceComponent(report.getRole());
ServiceComponentHost scHost = svcComp.getServiceComponentHost(hostname);
String schName = scHost.getServiceComponentName();
State state = scHost.getState();
if (report.getStatus().equals("COMPLETED")) {
// Updating stack version, if needed
if (scHost.getState().equals(State.UPGRADING)) {
scHost.setStackVersion(scHost.getDesiredStackVersion());
} else if (report.getRoleCommand().equals(RoleCommand.START.toString())
&& null != report.getConfigurationTags()
&& !report.getConfigurationTags().isEmpty()) {
LOG.info("Updating applied config on service " + scHost.getServiceName() +
", component " + scHost.getServiceComponentName() + ", host " + scHost.getHostName());
scHost.updateActualConfigs(report.getConfigurationTags());
}
if (RoleCommand.START.toString().equals(report.getRoleCommand())) {
scHost.handleEvent(new ServiceComponentHostStartedEvent(schName,
hostname, now));
} else if (RoleCommand.STOP.toString().equals(report.getRoleCommand())) {
scHost.handleEvent(new ServiceComponentHostStoppedEvent(schName,
hostname, now));
} else {
scHost.handleEvent(new ServiceComponentHostOpSucceededEvent(schName,
hostname, now));
}
} else if (report.getStatus().equals("FAILED")) {
LOG.warn("Operation failed - may be retried. Service component host: "
+ schName + ", host: " + hostname + " Action id" + report.getActionId());
if (actionManager.isInProgressCommand(report)) {
scHost.handleEvent(new ServiceComponentHostOpFailedEvent
(schName, hostname, now));
} else {
LOG.info("Report arrived after command is no longer running. " +
"Ignoring report. " + report);
}
} else if (report.getStatus().equals("IN_PROGRESS")) {
scHost.handleEvent(new ServiceComponentHostOpInProgressEvent(schName,
hostname, now));
}
} catch (ServiceComponentNotFoundException scnex) {
LOG.warn("Service component not found ", scnex);
} catch (InvalidStateTransitionException ex) {
LOG.warn("State machine exception", ex);
}
}
}
//Update state machines from reports
actionManager.processTaskResponse(hostname, reports);
}
protected void processStatusReports(HeartBeat heartbeat,
String hostname,
Clusters clusterFsm)
throws AmbariException {
Set<Cluster> clusters = clusterFsm.getClustersForHost(hostname);
for (Cluster cl : clusters) {
for (ComponentStatus status : heartbeat.componentStatus) {
if (status.getClusterName().equals(cl.getClusterName())) {
try {
Service svc = cl.getService(status.getServiceName());
String componentName = status.getComponentName();
if (svc.getServiceComponents().containsKey(componentName)) {
ServiceComponent svcComp = svc.getServiceComponent(
componentName);
ServiceComponentHost scHost = svcComp.getServiceComponentHost(
hostname);
State prevState = scHost.getState();
State liveState = State.valueOf(State.class, status.getStatus());
if (prevState.equals(State.INSTALLED)
|| prevState.equals(State.STARTED)
|| prevState.equals(State.STARTING)
|| prevState.equals(State.STOPPING)
|| prevState.equals(State.UNKNOWN)) {
scHost.setState(liveState);
if (!prevState.equals(liveState)) {
LOG.info("State of service component " + componentName
+ " of service " + status.getServiceName()
+ " of cluster " + status.getClusterName()
+ " has changed from " + prevState + " to " + liveState
+ " at host " + hostname);
}
}
if (null != status.getStackVersion() && !status.getStackVersion().isEmpty()) {
scHost.setStackVersion(gson.fromJson(status.getStackVersion(), StackId.class));
}
if (null != status.getConfigTags()) {
scHost.updateActualConfigs(status.getConfigTags());
}
} else {
// TODO: What should be done otherwise?
}
} catch (ServiceNotFoundException e) {
LOG.warn("Received a live status update for a non-initialized"
+ " service"
+ ", clusterName=" + status.getClusterName()
+ ", serviceName=" + status.getServiceName());
// FIXME ignore invalid live update and continue for now?
continue;
} catch (ServiceComponentNotFoundException e) {
LOG.warn("Received a live status update for a non-initialized"
+ " servicecomponent"
+ ", clusterName=" + status.getClusterName()
+ ", serviceName=" + status.getServiceName()
+ ", componentName=" + status.getComponentName());
// FIXME ignore invalid live update and continue for now?
continue;
} catch (ServiceComponentHostNotFoundException e) {
LOG.warn("Received a live status update for a non-initialized"
+ " service"
+ ", clusterName=" + status.getClusterName()
+ ", serviceName=" + status.getServiceName()
+ ", componentName=" + status.getComponentName()
+ ", hostname=" + hostname);
// FIXME ignore invalid live update and continue for now?
continue;
} catch (RuntimeException e) {
LOG.warn("Received a live status with invalid payload"
+ " service"
+ ", clusterName=" + status.getClusterName()
+ ", serviceName=" + status.getServiceName()
+ ", componentName=" + status.getComponentName()
+ ", hostname=" + hostname
+ ", error=" + e.getMessage());
continue;
}
}
}
}
}
/**
* Adds commands from action queue to a heartbeat responce
*/
protected void sendCommands(String hostname, HeartBeatResponse response)
throws AmbariException {
List<AgentCommand> cmds = actionQueue.dequeueAll(hostname);
if (cmds != null && !cmds.isEmpty()) {
for (AgentCommand ac : cmds) {
try {
if (LOG.isDebugEnabled()) {
LOG.debug("Sending command string = " + StageUtils.jaxbToString(ac));
}
} catch (Exception e) {
throw new AmbariException("Could not get jaxb string for command", e);
}
switch (ac.getCommandType()) {
case EXECUTION_COMMAND: {
response.addExecutionCommand((ExecutionCommand) ac);
break;
}
case STATUS_COMMAND: {
response.addStatusCommand((StatusCommand) ac);
break;
}
default:
LOG.error("There is no action for agent command =" +
ac.getCommandType().name());
}
}
}
}
public String getOsType(String os, String osRelease) {
String osType = "";
if (os != null) {
osType = os;
}
if (osRelease != null) {
String[] release = osRelease.split("\\.");
if (release.length > 0) {
osType += release[0];
}
}
return osType.toLowerCase();
}
protected HeartBeatResponse createRegisterCommand() {
HeartBeatResponse response = new HeartBeatResponse();
RegistrationCommand regCmd = new RegistrationCommand();
response.setResponseId(0);
response.setRegistrationCommand(regCmd);
return response;
}
protected HeartBeatResponse createRestartCommand(Long currentResponseId) {
HeartBeatResponse response = new HeartBeatResponse();
response.setRestartAgent(true);
response.setResponseId(currentResponseId);
return response;
}
public RegistrationResponse handleRegistration(Register register)
throws InvalidStateTransitionException, AmbariException {
String hostname = register.getHostname();
int currentPingPort = register.getCurrentPingPort();
long now = System.currentTimeMillis();
String agentVersion = register.getAgentVersion();
String serverVersion = ambariMetaInfo.getServerVersion();
if (!VersionUtils.areVersionsEqual(serverVersion, agentVersion, true)) {
LOG.warn("Received registration request from host with non compatible"
+ " agent version"
+ ", hostname=" + hostname
+ ", agentVersion=" + agentVersion
+ ", serverVersion=" + serverVersion);
throw new AmbariException("Cannot register host with non compatible"
+ " agent version"
+ ", hostname=" + hostname
+ ", agentVersion=" + agentVersion
+ ", serverVersion=" + serverVersion);
}
String agentOsType = getOsType(register.getHardwareProfile().getOS(),
register.getHardwareProfile().getOSRelease());
if (!ambariMetaInfo.isOsSupported(agentOsType)) {
LOG.warn("Received registration request from host with not supported"
+ " os type"
+ ", hostname=" + hostname
+ ", serverOsType=" + config.getServerOsType()
+ ", agentOstype=" + agentOsType);
throw new AmbariException("Cannot register host with not supported"
+ " os type"
+ ", hostname=" + hostname
+ ", serverOsType=" + config.getServerOsType()
+ ", agentOstype=" + agentOsType);
}
Host hostObject;
try {
hostObject = clusterFsm.getHost(hostname);
} catch (HostNotFoundException ex) {
clusterFsm.addHost(hostname);
hostObject = clusterFsm.getHost(hostname);
}
// Resetting host state
hostObject.setState(HostState.INIT);
// Set ping port for agent
hostObject.setCurrentPingPort(currentPingPort);
// Get status of service components
List<StatusCommand> cmds = heartbeatMonitor.generateStatusCommands(hostname);
hostObject.handleEvent(new HostRegistrationRequestEvent(hostname,
null != register.getPublicHostname() ? register.getPublicHostname() : hostname,
new AgentVersion(register.getAgentVersion()), now, register.getHardwareProfile(),
register.getAgentEnv()));
RegistrationResponse response = new RegistrationResponse();
if (cmds.isEmpty()) {
//No status commands needed let the fsm know that status step is done
hostObject.handleEvent(new HostStatusUpdatesReceivedEvent(hostname,
now));
}
response.setStatusCommands(cmds);
response.setResponseStatus(RegistrationStatus.OK);
Long requestId = 0L;
hostResponseIds.put(hostname, requestId);
response.setResponseId(requestId);
return response;
}
/**
* Annotate the response with some housekeeping details.
* hasMappedComponents - indicates if any components are mapped to the host
* @param hostname
* @param response
* @throws AmbariException
*/
private void annotateResponse(String hostname, HeartBeatResponse response) throws AmbariException {
for (Cluster cl : this.clusterFsm.getClustersForHost(hostname)) {
List<ServiceComponentHost> scHosts = cl.getServiceComponentHosts(hostname);
if (scHosts != null && scHosts.size() > 0) {
response.setHasMappedComponents(true);
break;
}
}
}
}
| AMBARI-4878. On a retry all the install commands succeed but the status of one of the components in INSTALL_FAILED which causes the next START command to fail on UI. Fixed log. (swagle)
| ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java | AMBARI-4878. On a retry all the install commands succeed but the status of one of the components in INSTALL_FAILED which causes the next START command to fail on UI. Fixed log. (swagle) |
|
Java | apache-2.0 | 7397fb6be813704436e916eddea11d1b86687706 | 0 | kuujo/copycat,atomix/atomix,kuujo/copycat,atomix/atomix | /*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.cluster.messaging.impl;
import io.atomix.utils.net.Address;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetAddress;
import java.net.ProtocolException;
import java.nio.charset.Charset;
import java.util.List;
import static com.google.common.base.Preconditions.checkState;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
* Decoder for inbound messages.
*/
public class MessageDecoder extends ByteToMessageDecoder {
private static final int VERSION = 1;
private final Logger log = LoggerFactory.getLogger(getClass());
private static final byte[] EMPTY_PAYLOAD = new byte[0];
private static final int BYTE_SIZE = 1;
private static final int SHORT_SIZE = 2;
private static final int INT_SIZE = 4;
private static final int LONG_SIZE = 8;
private DecoderState currentState = DecoderState.READ_SENDER_VERSION;
private InetAddress senderIp;
private int senderPort;
private Address address;
private int version;
private InternalMessage.Type type;
private int preamble;
private long messageId;
private int contentLength;
private byte[] content;
private int subjectLength;
  /**
   * Incrementally consumes bytes from {@code buffer}, emitting a decoded
   * {@link InternalRequest} or {@link InternalReply} into {@code out} once a
   * complete message is available.
   *
   * <p>This is a resumable state machine: whenever fewer bytes are available
   * than the current field needs, the method returns and Netty re-invokes it
   * when more data arrives. The first {@code switch} deliberately falls
   * through from case to case (hence the squid:S128 suppression) so a single
   * invocation can advance through several states in one pass.
   *
   * @param context the Netty channel context (unused by the decode logic)
   * @param buffer  the accumulated inbound bytes
   * @param out     receives each fully decoded message
   * @throws Exception on an unsupported protocol version or malformed input
   */
  @Override
  @SuppressWarnings("squid:S128") // suppress switch fall through warning
  protected void decode(
      ChannelHandlerContext context,
      ByteBuf buffer,
      List<Object> out) throws Exception {
    switch (currentState) {
      case READ_SENDER_VERSION:
        if (buffer.readableBytes() < SHORT_SIZE) {
          return;
        }
        version = buffer.readShort();
        // Fail fast on an unknown version: the exception propagates to
        // exceptionCaught, which closes the connection.
        if (version != VERSION) {
          throw new ProtocolException("Unsupported protocol version: " + version);
        }
        currentState = DecoderState.READ_SENDER_IP;
        // falls through
      case READ_SENDER_IP:
        if (buffer.readableBytes() < BYTE_SIZE) {
          return;
        }
        // Mark so we can rewind if the address octets have not fully arrived.
        buffer.markReaderIndex();
        int octetsLength = buffer.readByte();
        if (buffer.readableBytes() < octetsLength) {
          buffer.resetReaderIndex();
          return;
        }
        byte[] octets = new byte[octetsLength];
        buffer.readBytes(octets);
        senderIp = InetAddress.getByAddress(octets);
        currentState = DecoderState.READ_SENDER_PORT;
        // falls through
      case READ_SENDER_PORT:
        if (buffer.readableBytes() < INT_SIZE) {
          return;
        }
        senderPort = buffer.readInt();
        address = new Address(senderIp.getHostName(), senderPort, senderIp);
        currentState = DecoderState.READ_TYPE;
        // falls through
      case READ_TYPE:
        if (buffer.readableBytes() < BYTE_SIZE) {
          return;
        }
        type = InternalMessage.Type.forId(buffer.readByte());
        currentState = DecoderState.READ_PREAMBLE;
        // falls through
      case READ_PREAMBLE:
        if (buffer.readableBytes() < INT_SIZE) {
          return;
        }
        preamble = buffer.readInt();
        currentState = DecoderState.READ_MESSAGE_ID;
        // falls through
      case READ_MESSAGE_ID:
        if (buffer.readableBytes() < LONG_SIZE) {
          return;
        }
        messageId = buffer.readLong();
        currentState = DecoderState.READ_CONTENT_LENGTH;
        // falls through
      case READ_CONTENT_LENGTH:
        if (buffer.readableBytes() < INT_SIZE) {
          return;
        }
        contentLength = buffer.readInt();
        currentState = DecoderState.READ_CONTENT;
        // falls through
      case READ_CONTENT:
        if (buffer.readableBytes() < contentLength) {
          return;
        }
        if (contentLength > 0) {
          // TODO: Perform a sanity check on the size before allocating
          content = new byte[contentLength];
          buffer.readBytes(content);
        } else {
          content = EMPTY_PAYLOAD;
        }
        // Requests carry a trailing subject; replies carry a trailing status.
        switch (type) {
          case REQUEST:
            currentState = DecoderState.READ_SUBJECT_LENGTH;
            break;
          case REPLY:
            currentState = DecoderState.READ_STATUS;
            break;
          default:
            checkState(false, "Must not be here");
        }
        break;
      default:
        break;
    }
    switch (type) {
      case REQUEST:
        switch (currentState) {
          case READ_SUBJECT_LENGTH:
            if (buffer.readableBytes() < SHORT_SIZE) {
              return;
            }
            subjectLength = buffer.readShort();
            currentState = DecoderState.READ_SUBJECT;
            // falls through
          case READ_SUBJECT:
            if (buffer.readableBytes() < subjectLength) {
              return;
            }
            final String subject = readString(buffer, subjectLength, UTF_8);
            InternalRequest message = new InternalRequest(
                preamble,
                messageId,
                address,
                subject,
                content);
            out.add(message);
            // Message complete; reset to decode the next one on this channel.
            currentState = DecoderState.READ_TYPE;
            break;
          default:
            break;
        }
        break;
      case REPLY:
        switch (currentState) {
          case READ_STATUS:
            if (buffer.readableBytes() < BYTE_SIZE) {
              return;
            }
            InternalReply.Status status = InternalReply.Status.forId(buffer.readByte());
            InternalReply message = new InternalReply(preamble,
                messageId,
                content,
                status);
            out.add(message);
            // Message complete; reset to decode the next one on this channel.
            currentState = DecoderState.READ_TYPE;
            break;
          default:
            break;
        }
        break;
      default:
        checkState(false, "Must not be here");
    }
  }
static String readString(ByteBuf buffer, int length, Charset charset) {
if (buffer.isDirect()) {
final String result = buffer.toString(buffer.readerIndex(), length, charset);
buffer.skipBytes(length);
return result;
} else if (buffer.hasArray()) {
final String result = new String(buffer.array(), buffer.arrayOffset() + buffer.readerIndex(), length, charset);
buffer.skipBytes(length);
return result;
} else {
final byte[] array = new byte[length];
buffer.readBytes(array);
return new String(array, charset);
}
}
  /**
   * Logs any exception raised in the channel pipeline and closes the channel.
   * The close happens in a finally block so the connection is torn down even
   * if logging itself throws.
   */
  @Override
  public void exceptionCaught(ChannelHandlerContext context, Throwable cause) {
    try {
      log.error("Exception inside channel handling pipeline.", cause);
    } finally {
      context.close();
    }
  }
} | cluster/src/main/java/io/atomix/cluster/messaging/impl/MessageDecoder.java | /*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.cluster.messaging.impl;
import io.atomix.utils.net.Address;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.InetAddress;
import java.net.ProtocolException;
import java.nio.charset.Charset;
import java.util.List;

import static com.google.common.base.Preconditions.checkState;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
* Decoder for inbound messages.
*/
public class MessageDecoder extends ByteToMessageDecoder {
private final Logger log = LoggerFactory.getLogger(getClass());
private static final byte[] EMPTY_PAYLOAD = new byte[0];
private static final int BYTE_SIZE = 1;
private static final int SHORT_SIZE = 2;
private static final int INT_SIZE = 4;
private static final int LONG_SIZE = 8;
private DecoderState currentState = DecoderState.READ_SENDER_VERSION;
private InetAddress senderIp;
private int senderPort;
private Address address;
private int version;
private InternalMessage.Type type;
private int preamble;
private long messageId;
private int contentLength;
private byte[] content;
private int subjectLength;
@Override
@SuppressWarnings("squid:S128") // suppress switch fall through warning
protected void decode(
ChannelHandlerContext context,
ByteBuf buffer,
List<Object> out) throws Exception {
switch (currentState) {
case READ_SENDER_VERSION:
if (buffer.readableBytes() < SHORT_SIZE) {
return;
}
version = buffer.readShort();
currentState = DecoderState.READ_SENDER_IP;
case READ_SENDER_IP:
if (buffer.readableBytes() < BYTE_SIZE) {
return;
}
buffer.markReaderIndex();
int octetsLength = buffer.readByte();
if (buffer.readableBytes() < octetsLength) {
buffer.resetReaderIndex();
return;
}
byte[] octets = new byte[octetsLength];
buffer.readBytes(octets);
senderIp = InetAddress.getByAddress(octets);
currentState = DecoderState.READ_SENDER_PORT;
case READ_SENDER_PORT:
if (buffer.readableBytes() < INT_SIZE) {
return;
}
senderPort = buffer.readInt();
address = new Address(senderIp.getHostName(), senderPort, senderIp);
currentState = DecoderState.READ_TYPE;
case READ_TYPE:
if (buffer.readableBytes() < BYTE_SIZE) {
return;
}
type = InternalMessage.Type.forId(buffer.readByte());
currentState = DecoderState.READ_PREAMBLE;
case READ_PREAMBLE:
if (buffer.readableBytes() < INT_SIZE) {
return;
}
preamble = buffer.readInt();
currentState = DecoderState.READ_MESSAGE_ID;
case READ_MESSAGE_ID:
if (buffer.readableBytes() < LONG_SIZE) {
return;
}
messageId = buffer.readLong();
currentState = DecoderState.READ_CONTENT_LENGTH;
case READ_CONTENT_LENGTH:
if (buffer.readableBytes() < INT_SIZE) {
return;
}
contentLength = buffer.readInt();
currentState = DecoderState.READ_CONTENT;
case READ_CONTENT:
if (buffer.readableBytes() < contentLength) {
return;
}
if (contentLength > 0) {
// TODO: Perform a sanity check on the size before allocating
content = new byte[contentLength];
buffer.readBytes(content);
} else {
content = EMPTY_PAYLOAD;
}
switch (type) {
case REQUEST:
currentState = DecoderState.READ_SUBJECT_LENGTH;
break;
case REPLY:
currentState = DecoderState.READ_STATUS;
break;
default:
checkState(false, "Must not be here");
}
break;
default:
break;
}
switch (type) {
case REQUEST:
switch (currentState) {
case READ_SUBJECT_LENGTH:
if (buffer.readableBytes() < SHORT_SIZE) {
return;
}
subjectLength = buffer.readShort();
currentState = DecoderState.READ_SUBJECT;
case READ_SUBJECT:
if (buffer.readableBytes() < subjectLength) {
return;
}
final String subject = readString(buffer, subjectLength, UTF_8);
InternalRequest message = new InternalRequest(
preamble,
messageId,
address,
subject,
content);
out.add(message);
currentState = DecoderState.READ_TYPE;
break;
default:
break;
}
break;
case REPLY:
switch (currentState) {
case READ_STATUS:
if (buffer.readableBytes() < BYTE_SIZE) {
return;
}
InternalReply.Status status = InternalReply.Status.forId(buffer.readByte());
InternalReply message = new InternalReply(preamble,
messageId,
content,
status);
out.add(message);
currentState = DecoderState.READ_TYPE;
break;
default:
break;
}
break;
default:
checkState(false, "Must not be here");
}
}
static String readString(ByteBuf buffer, int length, Charset charset) {
if (buffer.isDirect()) {
final String result = buffer.toString(buffer.readerIndex(), length, charset);
buffer.skipBytes(length);
return result;
} else if (buffer.hasArray()) {
final String result = new String(buffer.array(), buffer.arrayOffset() + buffer.readerIndex(), length, charset);
buffer.skipBytes(length);
return result;
} else {
final byte[] array = new byte[length];
buffer.readBytes(array);
return new String(array, charset);
}
}
  /**
   * Logs any exception raised in the channel pipeline and closes the channel.
   * The close happens in a finally block so the connection is torn down even
   * if logging itself throws.
   */
  @Override
  public void exceptionCaught(ChannelHandlerContext context, Throwable cause) {
    try {
      log.error("Exception inside channel handling pipeline.", cause);
    } finally {
      context.close();
    }
  }
} | Throw protocol exception to close connection on invalid version.
| cluster/src/main/java/io/atomix/cluster/messaging/impl/MessageDecoder.java | Throw protocol exception to close connection on invalid version. |
|
Java | apache-2.0 | 557135dd9b73749889bedf407ae957535915d763 | 0 | apache/solr,apache/solr,apache/solr,apache/solr,apache/solr | package org.apache.lucene.util;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.annotation.Documented;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.*;
import org.apache.lucene.index.codecs.Codec;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.codecs.mockintblock.MockFixedIntBlockCodec;
import org.apache.lucene.index.codecs.mockintblock.MockVariableIntBlockCodec;
import org.apache.lucene.index.codecs.mocksep.MockSepCodec;
import org.apache.lucene.index.codecs.mockrandom.MockRandomCodec;
import org.apache.lucene.index.codecs.preflex.PreFlexCodec;
import org.apache.lucene.index.codecs.preflexrw.PreFlexRWCodec;
import org.apache.lucene.index.codecs.pulsing.PulsingCodec;
import org.apache.lucene.index.codecs.simpletext.SimpleTextCodec;
import org.apache.lucene.index.codecs.standard.StandardCodec;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldCache.CacheEntry;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
import org.junit.*;
import org.junit.rules.TestWatchman;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.junit.runner.manipulation.Filter;
import org.junit.runner.manipulation.NoTestsRemainException;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
/**
* Base class for all Lucene unit tests, Junit3 or Junit4 variant.
* <p>
* </p>
* <p>
* If you
* override either <code>setUp()</code> or
* <code>tearDown()</code> in your unit test, make sure you
* call <code>super.setUp()</code> and
* <code>super.tearDown()</code>
* </p>
*
* <code>@After</code> - replaces setup
* <code>@Before</code> - replaces teardown
* <code>@Test</code> - any public method with this annotation is a test case, regardless
* of its name
* <p>
* <p>
* See Junit4 <a href="http://junit.org/junit/javadoc/4.7/">documentation</a> for a complete list of features.
* <p>
* Import from org.junit rather than junit.framework.
* <p>
* You should be able to use this class anywhere you used LuceneTestCase
* if you annotate your derived class correctly with the annotations above
* @see #assertSaneFieldCaches(String)
*/
@RunWith(LuceneTestCase.LuceneTestCaseRunner.class)
public abstract class LuceneTestCase extends Assert {
/**
* true iff tests are run in verbose mode. Note: if it is false, tests are not
* expected to print any messages.
*/
public static final boolean VERBOSE = Boolean.getBoolean("tests.verbose");
/** Use this constant when creating Analyzers and any other version-dependent stuff.
* <p><b>NOTE:</b> Change this when development starts for new Lucene version:
*/
public static final Version TEST_VERSION_CURRENT = Version.LUCENE_40;
/**
* If this is set, it is the only method that should run.
*/
static final String TEST_METHOD;
/** Create indexes in this directory, optimally use a subdir, named after the test */
public static final File TEMP_DIR;
  static {
    // An empty or missing "testmethod" property means "run all methods".
    String method = System.getProperty("testmethod", "").trim();
    TEST_METHOD = method.length() == 0 ? null : method;
    // Prefer the build's "tempDir"; fall back to the JVM temp dir.
    String s = System.getProperty("tempDir", System.getProperty("java.io.tmpdir"));
    if (s == null)
      throw new RuntimeException("To run tests, you need to define system property 'tempDir' or 'java.io.tmpdir'.");
    TEMP_DIR = new File(s);
    TEMP_DIR.mkdirs();
  }
/** set of directories we created, in afterclass we try to clean these up */
private static final Map<File, StackTraceElement[]> tempDirs = Collections.synchronizedMap(new HashMap<File, StackTraceElement[]>());
// by default we randomly pick a different codec for
// each test case (non-J4 tests) and each test class (J4
// tests)
/** Gets the codec to run tests with. */
public static final String TEST_CODEC = System.getProperty("tests.codec", "randomPerField");
/** Gets the codecprovider to run tests with */
public static final String TEST_CODECPROVIDER = System.getProperty("tests.codecprovider", "random");
/** Gets the locale to run tests with */
public static final String TEST_LOCALE = System.getProperty("tests.locale", "random");
/** Gets the timezone to run tests with */
public static final String TEST_TIMEZONE = System.getProperty("tests.timezone", "random");
/** Gets the directory to run tests with */
public static final String TEST_DIRECTORY = System.getProperty("tests.directory", "random");
/** Get the number of times to run tests */
public static final int TEST_ITER = Integer.parseInt(System.getProperty("tests.iter", "1"));
/** Get the minimum number of times to run tests until a failure happens */
public static final int TEST_ITER_MIN = Integer.parseInt(System.getProperty("tests.iter.min", Integer.toString(TEST_ITER)));
/** Get the random seed for tests */
public static final String TEST_SEED = System.getProperty("tests.seed", "random");
/** whether or not nightly tests should run */
public static final boolean TEST_NIGHTLY = Boolean.parseBoolean(System.getProperty("tests.nightly", "false"));
/** the line file used by LineFileDocs */
public static final String TEST_LINE_DOCS_FILE = System.getProperty("tests.linedocsfile", "europarl.lines.txt.gz");
/** whether or not to clean threads between test invocations: "false", "perMethod", "perClass" */
public static final String TEST_CLEAN_THREADS = System.getProperty("tests.cleanthreads", "perClass");
/** whether or not to clean threads between test invocations: "false", "perMethod", "perClass" */
public static final Throttling TEST_THROTTLING = TEST_NIGHTLY ? Throttling.SOMETIMES : Throttling.NEVER;
private static final Pattern codecWithParam = Pattern.compile("(.*)\\(\\s*(\\d+)\\s*\\)");
/**
* A random multiplier which you should use when writing random tests:
* multiply it by the number of iterations
*/
public static final int RANDOM_MULTIPLIER = Integer.parseInt(System.getProperty("tests.multiplier", "1"));
private int savedBoolMaxClauseCount;
private volatile Thread.UncaughtExceptionHandler savedUncaughtExceptionHandler = null;
/** Used to track if setUp and tearDown are called correctly from subclasses */
private static State state = State.INITIAL;
  /**
   * Lifecycle phases of a single test, used to verify that subclasses which
   * override setUp()/tearDown() remembered to call super.
   */
  private static enum State {
    INITIAL, // no tests ran yet
    SETUP, // test has called setUp()
    RANTEST, // test is running
    TEARDOWN // test has called tearDown()
  }
/**
* Some tests expect the directory to contain a single segment, and want to do tests on that segment's reader.
* This is an utility method to help them.
*/
public static SegmentReader getOnlySegmentReader(IndexReader reader) {
if (reader instanceof SegmentReader)
return (SegmentReader) reader;
IndexReader[] subReaders = reader.getSequentialSubReaders();
if (subReaders.length != 1)
throw new IllegalArgumentException(reader + " has " + subReaders.length + " segments instead of exactly one");
return (SegmentReader) subReaders[0];
}
  /**
   * Pairs a thread with the uncaught exception it raised, so tearDown can
   * report every background failure and fail the test.
   */
  private static class UncaughtExceptionEntry {
    public final Thread thread;
    public final Throwable exception;

    public UncaughtExceptionEntry(Thread thread, Throwable exception) {
      this.thread = thread;
      this.exception = exception;
    }
  }
private List<UncaughtExceptionEntry> uncaughtExceptions = Collections.synchronizedList(new ArrayList<UncaughtExceptionEntry>());
// saves default codec: we do this statically as many build indexes in @beforeClass
private static String savedDefaultCodec;
// default codec: not set when we use a per-field provider.
private static Codec codec;
// default codec provider
private static CodecProvider savedCodecProvider;
private static Locale locale;
private static Locale savedLocale;
private static TimeZone timeZone;
private static TimeZone savedTimeZone;
protected static Map<MockDirectoryWrapper,StackTraceElement[]> stores;
private static final String[] TEST_CODECS = new String[] {"MockSep", "MockFixedIntBlock", "MockVariableIntBlock", "MockRandom"};
  /**
   * Registers codec {@code c} with provider {@code cp}, first unregistering
   * any codec already bound under the same name so the replacement wins.
   */
  private static void swapCodec(Codec c, CodecProvider cp) {
    Codec prior = null;
    try {
      prior = cp.lookup(c.name);
    } catch (IllegalArgumentException iae) {
      // lookup throws when no codec is registered under this name; that just
      // means there is nothing to unregister, so the exception is ignored.
    }
    if (prior != null) {
      cp.unregister(prior);
    }
    cp.register(c);
  }
  /**
   * Installs the test-only codecs into {@code cp} and selects the default
   * field codec named by {@code codec} (possibly with a "Name(param)" suffix,
   * or "random"/"randomPerField"). Returns the resulting default codec.
   * Must be undone with {@link #removeTestCodecs}.
   */
  static Codec installTestCodecs(String codec, CodecProvider cp) {
    savedDefaultCodec = cp.getDefaultFieldCodec();
    final boolean codecHasParam;
    int codecParam = 0;
    if (codec.equals("randomPerField")) {
      // lie: per-field randomization is handled by RandomCodecProvider, so the
      // nominal default here is just Standard.
      codec = "Standard";
      codecHasParam = false;
    } else if (codec.equals("random")) {
      codec = pickRandomCodec(random);
      codecHasParam = false;
    } else {
      Matcher m = codecWithParam.matcher(codec);
      if (m.matches()) {
        // codec has a fixed param, e.g. "Pulsing(4)"
        codecHasParam = true;
        codec = m.group(1);
        codecParam = Integer.parseInt(m.group(2));
      } else {
        codecHasParam = false;
      }
    }

    cp.setDefaultFieldCodec(codec);

    if (codec.equals("PreFlex")) {
      // If we're running w/ PreFlex codec we must swap in the
      // test-only PreFlexRW codec (since core PreFlex can
      // only read segments):
      swapCodec(new PreFlexRWCodec(), cp);
    }

    // Register the mock codecs, using the explicit param when the chosen codec
    // provided one, otherwise a random value per codec.
    swapCodec(new MockSepCodec(), cp);
    swapCodec(new PulsingCodec(codecHasParam && "Pulsing".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 20)), cp);
    swapCodec(new MockFixedIntBlockCodec(codecHasParam && "MockFixedIntBlock".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 2000)), cp);
    // baseBlockSize cannot be over 127:
    swapCodec(new MockVariableIntBlockCodec(codecHasParam && "MockVariableIntBlock".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 127)), cp);
    swapCodec(new MockRandomCodec(random), cp);

    return cp.lookup(codec);
  }
  /**
   * Undoes {@link #installTestCodecs}: unregisters the test-only codecs,
   * restores the core PreFlex codec if needed, and restores the saved default
   * field codec. (The old "returns current PreFlex codec" comment was wrong —
   * this method returns nothing.)
   */
  static void removeTestCodecs(Codec codec, CodecProvider cp) {
    if (codec.name.equals("PreFlex")) {
      // Swap the read-only core PreFlex codec back in place of PreFlexRW.
      final Codec preFlex = cp.lookup("PreFlex");
      if (preFlex != null) {
        cp.unregister(preFlex);
      }
      cp.register(new PreFlexCodec());
    }
    cp.unregister(cp.lookup("MockSep"));
    cp.unregister(cp.lookup("MockFixedIntBlock"));
    cp.unregister(cp.lookup("MockVariableIntBlock"));
    cp.unregister(cp.lookup("MockRandom"));
    // Re-register Pulsing with its default parameter of 1.
    swapCodec(new PulsingCodec(1), cp);
    cp.setDefaultFieldCodec(savedDefaultCodec);
  }
// randomly picks from core and test codecs
static String pickRandomCodec(Random rnd) {
int idx = rnd.nextInt(CodecProvider.CORE_CODECS.length +
TEST_CODECS.length);
if (idx < CodecProvider.CORE_CODECS.length) {
return CodecProvider.CORE_CODECS[idx];
} else {
return TEST_CODECS[idx - CodecProvider.CORE_CODECS.length];
}
}
private static class TwoLongs {
public final long l1, l2;
public TwoLongs(long l1, long l2) {
this.l1 = l1;
this.l2 = l2;
}
@Override
public String toString() {
return l1 + ":" + l2;
}
public static TwoLongs fromString(String s) {
final int i = s.indexOf(':');
assert i != -1;
return new TwoLongs(Long.parseLong(s.substring(0, i)),
Long.parseLong(s.substring(1+i)));
}
}
/** @deprecated (4.0) until we fix no-fork problems in solr tests */
@Deprecated
private static List<String> testClassesRun = new ArrayList<String>();
  /**
   * Class-level setup: seeds the shared Random, installs the (possibly random)
   * codec provider/codec, and randomizes the default Locale and TimeZone.
   * All mutated global state is saved first and restored in
   * {@link #afterClassLuceneTestCaseJ4}.
   */
  @BeforeClass
  public static void beforeClassLuceneTestCaseJ4() {
    state = State.INITIAL;
    // l1 of an explicit tests.seed drives class-level randomness.
    staticSeed = "random".equals(TEST_SEED) ? seedRand.nextLong() : TwoLongs.fromString(TEST_SEED).l1;
    random.setSeed(staticSeed);
    tempDirs.clear();
    stores = Collections.synchronizedMap(new IdentityHashMap<MockDirectoryWrapper,StackTraceElement[]>());
    // enable this by default, for IDE consistency with ant tests (as its the default from ant)
    // TODO: really should be in solr base classes, but some extend LTC directly.
    // we do this in beforeClass, because some tests currently disable it
    if (System.getProperty("solr.directoryFactory") == null) {
      System.setProperty("solr.directoryFactory", "org.apache.solr.core.MockDirectoryFactory");
    }
    savedCodecProvider = CodecProvider.getDefault();
    if ("random".equals(TEST_CODECPROVIDER)) {
      if ("randomPerField".equals(TEST_CODEC)) {
        if (random.nextInt(4) == 0) { // preflex-only setup
          codec = installTestCodecs("PreFlex", CodecProvider.getDefault());
        } else { // per-field setup
          CodecProvider.setDefault(new RandomCodecProvider(random));
          codec = installTestCodecs(TEST_CODEC, CodecProvider.getDefault());
        }
      } else { // ordinary setup
        codec = installTestCodecs(TEST_CODEC, CodecProvider.getDefault());
      }
    } else {
      // someone specified their own codecprovider by class
      try {
        Class<? extends CodecProvider> cpClazz = Class.forName(TEST_CODECPROVIDER).asSubclass(CodecProvider.class);
        CodecProvider cp = cpClazz.newInstance();
        String codecName;
        if (TEST_CODEC.startsWith("random")) { // TODO: somehow do random per-field?!
          Set<String> codecSet = cp.listAll();
          String availableCodecs[] = codecSet.toArray(new String[codecSet.size()]);
          codecName = availableCodecs[random.nextInt(availableCodecs.length)];
        } else {
          codecName = TEST_CODEC;
        }
        codec = cp.lookup(codecName);
        cp.setDefaultFieldCodec(codecName);
        CodecProvider.setDefault(cp);
      } catch (Exception e) {
        System.err.println("Could not instantiate CodecProvider: " + TEST_CODECPROVIDER);
        throw new RuntimeException(e);
      }
    }
    // Randomize (or pin, via system property) the default Locale and TimeZone
    // so locale-sensitive code gets exercised across many configurations.
    savedLocale = Locale.getDefault();
    locale = TEST_LOCALE.equals("random") ? randomLocale(random) : localeForName(TEST_LOCALE);
    Locale.setDefault(locale);
    savedTimeZone = TimeZone.getDefault();
    timeZone = TEST_TIMEZONE.equals("random") ? randomTimeZone(random) : TimeZone.getTimeZone(TEST_TIMEZONE);
    TimeZone.setDefault(timeZone);
    testsFailed = false;
  }
  /**
   * Class-level teardown: restores every global mutated by
   * {@link #beforeClassLuceneTestCaseJ4} (codecs, Locale, TimeZone, system
   * properties), checks for leaked threads, unclosed directories and leftover
   * temp dirs, and prints reproduction info if any test failed.
   */
  @AfterClass
  public static void afterClassLuceneTestCaseJ4() {
    if (!testsFailed) {
      assertTrue("ensure your setUp() calls super.setUp() and your tearDown() calls super.tearDown()!!!",
          state == State.INITIAL || state == State.TEARDOWN);
    }
    state = State.INITIAL;
    if (! "false".equals(TEST_CLEAN_THREADS)) {
      int rogueThreads = threadCleanup("test class");
      if (rogueThreads > 0) {
        // TODO: fail here once the leaks are fixed.
        System.err.println("RESOURCE LEAK: test class left " + rogueThreads + " thread(s) running");
      }
    }
    String codecDescription;
    CodecProvider cp = CodecProvider.getDefault();

    if ("randomPerField".equals(TEST_CODEC) && cp instanceof RandomCodecProvider) {
      codecDescription = cp.toString();
    } else {
      codecDescription = codec.toString();
    }

    // Only remove the test codecs if nobody swapped in their own provider.
    if ("random".equals(TEST_CODECPROVIDER) && CodecProvider.getDefault() == savedCodecProvider)
      removeTestCodecs(codec, CodecProvider.getDefault());
    CodecProvider.setDefault(savedCodecProvider);
    Locale.setDefault(savedLocale);
    TimeZone.setDefault(savedTimeZone);
    System.clearProperty("solr.solr.home");
    System.clearProperty("solr.data.dir");
    // now look for unclosed resources
    if (!testsFailed)
      for (MockDirectoryWrapper d : stores.keySet()) {
        if (d.isOpen()) {
          StackTraceElement elements[] = stores.get(d);
          // Look for the first class that is not LuceneTestCase that requested
          // a Directory. The first two items are of Thread's, so skipping over
          // them.
          StackTraceElement element = null;
          for (int i = 2; i < elements.length; i++) {
            StackTraceElement ste = elements[i];
            if (ste.getClassName().indexOf("LuceneTestCase") == -1) {
              element = ste;
              break;
            }
          }
          fail("directory of test was not closed, opened from: " + element);
        }
      }
    stores = null;
    // if verbose or tests failed, report some information back
    if (VERBOSE || testsFailed)
      System.err.println("NOTE: test params are: codec=" + codecDescription +
          ", locale=" + locale +
          ", timezone=" + (timeZone == null ? "(null)" : timeZone.getID()));
    if (testsFailed) {
      System.err.println("NOTE: all tests run in this JVM:");
      System.err.println(Arrays.toString(testClassesRun.toArray()));
      System.err.println("NOTE: " + System.getProperty("os.name") + " "
          + System.getProperty("os.version") + " "
          + System.getProperty("os.arch") + "/"
          + System.getProperty("java.vendor") + " "
          + System.getProperty("java.version") + " "
          + (Constants.JRE_IS_64BIT ? "(64-bit)" : "(32-bit)") + "/"
          + "cpus=" + Runtime.getRuntime().availableProcessors() + ","
          + "threads=" + Thread.activeCount() + ","
          + "free=" + Runtime.getRuntime().freeMemory() + ","
          + "total=" + Runtime.getRuntime().totalMemory());
    }
    // clear out any temp directories if we can
    if (!testsFailed) {
      for (Entry<File, StackTraceElement[]> entry : tempDirs.entrySet()) {
        try {
          _TestUtil.rmDir(entry.getKey());
        } catch (IOException e) {
          e.printStackTrace();
          System.err.println("path " + entry.getKey() + " allocated from");
          // first two STE's are Java's
          StackTraceElement[] elements = entry.getValue();
          for (int i = 2; i < elements.length; i++) {
            StackTraceElement ste = elements[i];
            // print only our code's stack information
            if (ste.getClassName().indexOf("org.apache.lucene") == -1) break;
            System.err.println("\t" + ste);
          }
          fail("could not remove temp dir: " + entry.getKey());
        }
      }
    }
  }
private static boolean testsFailed; /* true if any tests failed */
// This is how we get control when errors occur.
// Think of this as start/end/success/failed
// events.
  /**
   * JUnit rule that observes each test method: records the running method
   * name, verifies the setUp/tearDown state machine, marks real failures (as
   * opposed to skipped assumptions), and prints repro info on failure.
   */
  @Rule
  public final TestWatchman intercept = new TestWatchman() {

    @Override
    public void failed(Throwable e, FrameworkMethod method) {
      // org.junit.internal.AssumptionViolatedException in older releases
      // org.junit.Assume.AssumptionViolatedException in recent ones
      if (e.getClass().getName().endsWith("AssumptionViolatedException")) {
        // A violated assumption is a skip, not a failure; just report it.
        if (e.getCause() instanceof TestIgnoredException)
          e = e.getCause();
        System.err.print("NOTE: Assume failed in '" + method.getName() + "' (ignored):");
        if (VERBOSE) {
          System.err.println();
          e.printStackTrace(System.err);
        } else {
          System.err.print(" ");
          System.err.println(e.getMessage());
        }
      } else {
        testsFailed = true;
        reportAdditionalFailureInfo();
      }
      super.failed(e, method);
    }

    @Override
    public void starting(FrameworkMethod method) {
      // set current method name for logging
      LuceneTestCase.this.name = method.getName();
      if (!testsFailed) {
        assertTrue("ensure your setUp() calls super.setUp()!!!", state == State.SETUP);
      }
      state = State.RANTEST;
      super.starting(method);
    }
  };
  /**
   * Per-test setup: re-seeds the shared Random, verifies the previous test
   * tore down correctly, installs an uncaught-exception handler that records
   * background-thread failures, and saves BooleanQuery's max clause count for
   * restoration in tearDown. Subclasses overriding this MUST call super.setUp().
   */
  @Before
  public void setUp() throws Exception {
    // l2 of an explicit tests.seed drives per-method randomness.
    seed = "random".equals(TEST_SEED) ? seedRand.nextLong() : TwoLongs.fromString(TEST_SEED).l2;
    random.setSeed(seed);
    if (!testsFailed) {
      assertTrue("ensure your tearDown() calls super.tearDown()!!!", (state == State.INITIAL || state == State.TEARDOWN));
    }
    state = State.SETUP;
    savedUncaughtExceptionHandler = Thread.getDefaultUncaughtExceptionHandler();
    Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
      public void uncaughtException(Thread t, Throwable e) {
        // Remember the failure so tearDown can fail the test, then delegate
        // to whatever handler was installed before us.
        testsFailed = true;
        uncaughtExceptions.add(new UncaughtExceptionEntry(t, e));
        if (savedUncaughtExceptionHandler != null)
          savedUncaughtExceptionHandler.uncaughtException(t, e);
      }
    });

    savedBoolMaxClauseCount = BooleanQuery.getMaxClauseCount();
  }
  /**
   * Forcible purges all cache entries from the FieldCache.
   * <p>
   * This method will be called by tearDown to clean up FieldCache.DEFAULT.
   * If a (poorly written) test has some expectation that the FieldCache
   * will persist across test methods (ie: a static IndexReader) this
   * method can be overridden to do nothing.
   * </p>
   *
   * @param fc the cache to purge (typically {@link FieldCache#DEFAULT})
   * @see FieldCache#purgeAllCaches()
   */
  protected void purgeFieldCache(final FieldCache fc) {
    fc.purgeAllCaches();
  }
protected String getTestLabel() {
return getClass().getName() + "." + getName();
}
  /**
   * Per-test teardown: restores global state saved in setUp, optionally reaps
   * leaked threads, fails the test if any background thread threw, and checks
   * FieldCache sanity before purging it. Subclasses overriding this MUST call
   * super.tearDown().
   */
  @After
  public void tearDown() throws Exception {
    if (!testsFailed) {
      // Note: we allow a test to go straight from SETUP -> TEARDOWN (without ever entering the RANTEST state)
      // because if you assume() inside setUp(), it skips the test and the TestWatchman has no way to know...
      assertTrue("ensure your setUp() calls super.setUp()!!!", state == State.RANTEST || state == State.SETUP);
    }
    state = State.TEARDOWN;
    BooleanQuery.setMaxClauseCount(savedBoolMaxClauseCount);
    if ("perMethod".equals(TEST_CLEAN_THREADS)) {
      int rogueThreads = threadCleanup("test method: '" + getName() + "'");
      if (rogueThreads > 0) {
        System.err.println("RESOURCE LEAK: test method: '" + getName()
            + "' left " + rogueThreads + " thread(s) running");
        // TODO: fail, but print seed for now.
        if (!testsFailed && uncaughtExceptions.isEmpty()) {
          reportAdditionalFailureInfo();
        }
      }
    }
    Thread.setDefaultUncaughtExceptionHandler(savedUncaughtExceptionHandler);
    try {
      // Fail the test if any background thread threw during the run.
      if (!uncaughtExceptions.isEmpty()) {
        testsFailed = true;
        reportAdditionalFailureInfo();
        System.err.println("The following exceptions were thrown by threads:");
        for (UncaughtExceptionEntry entry : uncaughtExceptions) {
          System.err.println("*** Thread: " + entry.thread.getName() + " ***");
          entry.exception.printStackTrace(System.err);
        }
        fail("Some threads threw uncaught exceptions!");
      }

      // calling assertSaneFieldCaches here isn't as useful as having test
      // classes call it directly from the scope where the index readers
      // are used, because they could be gc'ed just before this tearDown
      // method is called.
      //
      // But it's better than nothing.
      //
      // If you are testing functionality that you know for a fact
      // "violates" FieldCache sanity, then you should either explicitly
      // call purgeFieldCache at the end of your test method, or refactor
      // your Test class so that the inconsistent FieldCache usages are
      // isolated in distinct test methods
      assertSaneFieldCaches(getTestLabel());
    } finally {
      purgeFieldCache(FieldCache.DEFAULT);
    }
  }
  // how long to wait for a leaked thread to die of natural causes before interrupting it
  private final static int THREAD_STOP_GRACE_MSEC = 50;
  // jvm-wide list of 'rogue threads' we found, so they only get reported once.
  private final static IdentityHashMap<Thread,Boolean> rogueThreads = new IdentityHashMap<Thread,Boolean>();
  static {
    // just a hack for things like eclipse test-runner threads
    // (anything alive at class-load time is pre-excused and never reported)
    for (Thread t : Thread.getAllStackTraces().keySet()) {
      rogueThreads.put(t, true);
    }
  }
  /**
   * Looks for leftover running threads, trying to kill them off,
   * so they don't fail future tests.
   * returns the number of rogue threads that it found.
   */
  private static int threadCleanup(String context) {
    // educated guess
    // (+1 leaves headroom; Thread.enumerate truncates silently if the array is too small)
    Thread[] stillRunning = new Thread[Thread.activeCount()+1];
    int threadCount = 0;
    int rogueCount = 0;
    if ((threadCount = Thread.enumerate(stillRunning)) > 1) {
      while (threadCount == stillRunning.length) {
        // truncated response
        // (array was completely filled, so there may be more threads: double and retry)
        stillRunning = new Thread[stillRunning.length*2];
        threadCount = Thread.enumerate(stillRunning);
      }
      for (int i = 0; i < threadCount; i++) {
        Thread t = stillRunning[i];
        // a thread is rogue when alive, not pre-excused, not us, and not a
        // searcher thread surviving only across methods of the same test class
        if (t.isAlive() &&
            !rogueThreads.containsKey(t) &&
            t != Thread.currentThread() &&
            /* its ok to keep your searcher across test cases */
            (t.getName().startsWith("LuceneTestCase") && context.startsWith("test method")) == false) {
          System.err.println("WARNING: " + context + " left thread running: " + t);
          rogueThreads.put(t, true);
          rogueCount++;
          if (t.getName().startsWith("LuceneTestCase")) {
            System.err.println("PLEASE CLOSE YOUR INDEXSEARCHERS IN YOUR TEST!!!!");
            continue;
          } else {
            // wait on the thread to die of natural causes
            try {
              t.join(THREAD_STOP_GRACE_MSEC);
            } catch (InterruptedException e) { e.printStackTrace(); }
          }
          // try to stop the thread:
          // clear handlers first so the interrupt doesn't get reported as a new failure
          t.setUncaughtExceptionHandler(null);
          Thread.setDefaultUncaughtExceptionHandler(null);
          if (!t.getName().startsWith("SyncThread")) // avoid zookeeper jre crash
            t.interrupt();
        }
      }
    }
    return rogueCount;
  }
  /**
   * Asserts that FieldCacheSanityChecker does not detect any
   * problems with FieldCache.DEFAULT.
   * <p>
   * If any problems are found, they are logged to System.err
   * (along with the msg) when the Assertion is thrown.
   * </p>
   * <p>
   * This method is called by tearDown after every test method,
   * however IndexReaders scoped inside test methods may be garbage
   * collected prior to this method being called, causing errors to
   * be overlooked. Tests are encouraged to keep their IndexReaders
   * scoped at the class level, or to explicitly call this method
   * directly in the same scope as the IndexReader.
   * </p>
   *
   * @see org.apache.lucene.util.FieldCacheSanityChecker
   */
  protected void assertSaneFieldCaches(final String msg) {
    final CacheEntry[] entries = FieldCache.DEFAULT.getCacheEntries();
    Insanity[] insanity = null;
    try {
      try {
        insanity = FieldCacheSanityChecker.checkSanity(entries);
      } catch (RuntimeException e) {
        // the checker itself blew up: dump the raw entries for diagnosis, then rethrow
        dumpArray(msg + ": FieldCache", entries, System.err);
        throw e;
      }
      assertEquals(msg + ": Insane FieldCache usage(s) found",
                   0, insanity.length);
      // null out so the finally-block knows the assertion passed
      insanity = null;
    } finally {
      // report this in the event of any exception/failure
      // if no failure, then insanity will be null anyway
      if (null != insanity) {
        dumpArray(msg + ": Insane FieldCache usage(s)", insanity, System.err);
      }
    }
  }
  /**
   * Returns a number of at least <code>i</code>
   * <p>
   * The actual number returned will be influenced by whether {@link #TEST_NIGHTLY}
   * is active and {@link #RANDOM_MULTIPLIER}, but also with some random fudge.
   */
  public static int atLeast(Random random, int i) {
    // nightly runs scale the floor 5x; the result lands in [min, min*1.5]
    int min = (TEST_NIGHTLY ? 5*i : i) * RANDOM_MULTIPLIER;
    int max = min+(min/2);
    return _TestUtil.nextInt(random, min, max);
  }
  /** Same as {@link #atLeast(Random, int)} using the class-wide shared random. */
  public static int atLeast(int i) {
    return atLeast(random, i);
  }
  /**
   * Returns true if something should happen rarely,
   * <p>
   * The actual number returned will be influenced by whether {@link #TEST_NIGHTLY}
   * is active and {@link #RANDOM_MULTIPLIER}.
   */
  public static boolean rarely(Random random) {
    // base probability: 25% nightly, 5% otherwise; boosted logarithmically by the multiplier
    int p = TEST_NIGHTLY ? 25 : 5;
    p += (p * Math.log(RANDOM_MULTIPLIER));
    int min = 100 - Math.min(p, 90); // never more than 90
    return random.nextInt(100) >= min;
  }
  /** Same as {@link #rarely(Random)} using the class-wide shared random. */
  public static boolean rarely() {
    return rarely(random);
  }
  /** Inverse of {@link #rarely(Random)}. */
  public static boolean usually(Random random) {
    return !rarely(random);
  }
  /** Same as {@link #usually(Random)} using the class-wide shared random. */
  public static boolean usually() {
    return usually(random);
  }
  // @deprecated (4.0) These deprecated methods should be removed soon, when all tests using no Epsilon are fixed:
  // They compare floating-point values exactly (via boxed equality) with no tolerance.
  @Deprecated
  static public void assertEquals(double expected, double actual) {
    assertEquals(null, expected, actual);
  }
  @Deprecated
  static public void assertEquals(String message, double expected, double actual) {
    // boxing routes to Assert.assertEquals(Object, Object), i.e. exact comparison
    assertEquals(message, Double.valueOf(expected), Double.valueOf(actual));
  }
  @Deprecated
  static public void assertEquals(float expected, float actual) {
    assertEquals(null, expected, actual);
  }
  @Deprecated
  static public void assertEquals(String message, float expected, float actual) {
    assertEquals(message, Float.valueOf(expected), Float.valueOf(actual));
  }
// Replacement for Assume jUnit class, so we can add a message with explanation:
private static final class TestIgnoredException extends RuntimeException {
TestIgnoredException(String msg) {
super(msg);
}
TestIgnoredException(String msg, Throwable t) {
super(msg, t);
}
@Override
public String getMessage() {
StringBuilder sb = new StringBuilder(super.getMessage());
if (getCause() != null)
sb.append(" - ").append(getCause());
return sb.toString();
}
// only this one is called by our code, exception is not used outside this class:
@Override
public void printStackTrace(PrintStream s) {
if (getCause() != null) {
s.println(super.toString() + " - Caused by:");
getCause().printStackTrace(s);
} else {
super.printStackTrace(s);
}
}
}
  /** Skips the current test (via JUnit's Assume) with {@code msg} when {@code b} is false. */
  public static void assumeTrue(String msg, boolean b) {
    // a non-null exception makes Assume abort the test; null is a no-op
    Assume.assumeNoException(b ? null : new TestIgnoredException(msg));
  }
  /** Skips the current test with {@code msg} when {@code b} is true. */
  public static void assumeFalse(String msg, boolean b) {
    assumeTrue(msg, !b);
  }
  /** Skips the current test with {@code msg} when {@code e} is non-null, preserving {@code e} as cause. */
  public static void assumeNoException(String msg, Exception e) {
    Assume.assumeNoException(e == null ? null : new TestIgnoredException(msg, e));
  }
public static <T> Set<T> asSet(T... args) {
return new HashSet<T>(Arrays.asList(args));
}
/**
* Convenience method for logging an iterator.
*
* @param label String logged before/after the items in the iterator
* @param iter Each next() is toString()ed and logged on it's own line. If iter is null this is logged differnetly then an empty iterator.
* @param stream Stream to log messages to.
*/
public static void dumpIterator(String label, Iterator<?> iter,
PrintStream stream) {
stream.println("*** BEGIN " + label + " ***");
if (null == iter) {
stream.println(" ... NULL ...");
} else {
while (iter.hasNext()) {
stream.println(iter.next().toString());
}
}
stream.println("*** END " + label + " ***");
}
/**
* Convenience method for logging an array. Wraps the array in an iterator and delegates
*
* @see #dumpIterator(String,Iterator,PrintStream)
*/
public static void dumpArray(String label, Object[] objs,
PrintStream stream) {
Iterator<?> iter = (null == objs) ? null : Arrays.asList(objs).iterator();
dumpIterator(label, iter, stream);
}
  /** create a new index writer config with random defaults */
  public static IndexWriterConfig newIndexWriterConfig(Version v, Analyzer a) {
    // delegates to the Random-taking overload using the class-wide shared random
    return newIndexWriterConfig(random, v, a);
  }
/** create a new index writer config with random defaults using the specified random */
public static IndexWriterConfig newIndexWriterConfig(Random r, Version v, Analyzer a) {
IndexWriterConfig c = new IndexWriterConfig(v, a);
if (r.nextBoolean()) {
c.setMergeScheduler(new SerialMergeScheduler());
}
if (r.nextBoolean()) {
if (rarely(r)) {
// crazy value
c.setMaxBufferedDocs(_TestUtil.nextInt(r, 2, 7));
} else {
// reasonable value
c.setMaxBufferedDocs(_TestUtil.nextInt(r, 8, 1000));
}
}
if (r.nextBoolean()) {
if (rarely(r)) {
// crazy value
c.setTermIndexInterval(random.nextBoolean() ? _TestUtil.nextInt(r, 1, 31) : _TestUtil.nextInt(r, 129, 1000));
} else {
// reasonable value
c.setTermIndexInterval(_TestUtil.nextInt(r, 32, 128));
}
}
if (r.nextBoolean()) {
c.setIndexerThreadPool(new ThreadAffinityDocumentsWriterThreadPool(_TestUtil.nextInt(r, 1, 20)));
}
if (r.nextBoolean()) {
c.setMergePolicy(newTieredMergePolicy());
} else if (r.nextBoolean()) {
c.setMergePolicy(newLogMergePolicy());
} else {
c.setMergePolicy(new MockRandomMergePolicy(r));
}
c.setReaderPooling(r.nextBoolean());
c.setReaderTermsIndexDivisor(_TestUtil.nextInt(r, 1, 4));
return c;
}
  /** Creates a randomized LogMergePolicy using the class-wide shared random. */
  public static LogMergePolicy newLogMergePolicy() {
    return newLogMergePolicy(random);
  }
  /** Creates a randomized TieredMergePolicy using the class-wide shared random. */
  public static TieredMergePolicy newTieredMergePolicy() {
    return newTieredMergePolicy(random);
  }
  /** Creates a LogMergePolicy with randomized type, CFS setting, and merge factor. */
  public static LogMergePolicy newLogMergePolicy(Random r) {
    LogMergePolicy logmp = r.nextBoolean() ? new LogDocMergePolicy() : new LogByteSizeMergePolicy();
    logmp.setUseCompoundFile(r.nextBoolean());
    logmp.setCalibrateSizeByDeletes(r.nextBoolean());
    if (rarely(r)) {
      // unusually small merge factor to exercise merge-heavy paths
      logmp.setMergeFactor(_TestUtil.nextInt(r, 2, 4));
    } else {
      logmp.setMergeFactor(_TestUtil.nextInt(r, 5, 50));
    }
    return logmp;
  }
  /** Creates a TieredMergePolicy with all tunables randomized. */
  public static TieredMergePolicy newTieredMergePolicy(Random r) {
    TieredMergePolicy tmp = new TieredMergePolicy();
    if (rarely(r)) {
      // unusually small merge widths to exercise merge-heavy paths
      tmp.setMaxMergeAtOnce(_TestUtil.nextInt(r, 2, 4));
      tmp.setMaxMergeAtOnceExplicit(_TestUtil.nextInt(r, 2, 4));
    } else {
      tmp.setMaxMergeAtOnce(_TestUtil.nextInt(r, 5, 50));
      tmp.setMaxMergeAtOnceExplicit(_TestUtil.nextInt(r, 5, 50));
    }
    tmp.setMaxMergedSegmentMB(0.2 + r.nextDouble() * 2.0);
    tmp.setFloorSegmentMB(0.2 + r.nextDouble() * 2.0);
    tmp.setExpungeDeletesPctAllowed(0.0 + r.nextDouble() * 30.0);
    tmp.setSegmentsPerTier(_TestUtil.nextInt(r, 2, 20));
    tmp.setUseCompoundFile(r.nextBoolean());
    tmp.setNoCFSRatio(0.1 + r.nextDouble()*0.8);
    return tmp;
  }
  /** Randomized LogMergePolicy with an explicit compound-file setting. */
  public static LogMergePolicy newLogMergePolicy(boolean useCFS) {
    LogMergePolicy logmp = newLogMergePolicy();
    logmp.setUseCompoundFile(useCFS);
    return logmp;
  }
  /** Randomized LogMergePolicy with explicit compound-file setting and merge factor. */
  public static LogMergePolicy newLogMergePolicy(boolean useCFS, int mergeFactor) {
    LogMergePolicy logmp = newLogMergePolicy();
    logmp.setUseCompoundFile(useCFS);
    logmp.setMergeFactor(mergeFactor);
    return logmp;
  }
  /** Randomized LogMergePolicy with an explicit merge factor. */
  public static LogMergePolicy newLogMergePolicy(int mergeFactor) {
    LogMergePolicy logmp = newLogMergePolicy();
    logmp.setMergeFactor(mergeFactor);
    return logmp;
  }
  /**
   * Returns a new Directory instance. Use this when the test does not
   * care about the specific Directory implementation (most tests).
   * <p>
   * The Directory is wrapped with {@link MockDirectoryWrapper}.
   * By default this means it will be picky, such as ensuring that you
   * properly close it and all open files in your test. It will emulate
   * some features of Windows, such as not allowing open files to be
   * overwritten.
   */
  public static MockDirectoryWrapper newDirectory() throws IOException {
    return newDirectory(random);
  }
  /**
   * Returns a new Directory instance, using the specified random.
   * See {@link #newDirectory()} for more information.
   */
  public static MockDirectoryWrapper newDirectory(Random r) throws IOException {
    Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
    MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
    // record the allocation stack so leaked (unclosed) dirs can be traced back
    stores.put(dir, Thread.currentThread().getStackTrace());
    dir.setThrottling(TEST_THROTTLING);
    return dir;
  }
  /**
   * Returns a new Directory instance, with contents copied from the
   * provided directory. See {@link #newDirectory()} for more
   * information.
   */
  public static MockDirectoryWrapper newDirectory(Directory d) throws IOException {
    return newDirectory(random, d);
  }
  /** Returns a new FSDirectory instance over the given file, which must be a folder. */
  public static MockDirectoryWrapper newFSDirectory(File f) throws IOException {
    return newFSDirectory(f, null);
  }
  /** Returns a new FSDirectory instance over the given file, which must be a folder. */
  public static MockDirectoryWrapper newFSDirectory(File f, LockFactory lf) throws IOException {
    String fsdirClass = TEST_DIRECTORY;
    if (fsdirClass.equals("random")) {
      fsdirClass = FS_DIRECTORIES[random.nextInt(FS_DIRECTORIES.length)];
    }
    if (fsdirClass.indexOf(".") == -1) {// if not fully qualified, assume .store
      fsdirClass = "org.apache.lucene.store." + fsdirClass;
    }
    Class<? extends FSDirectory> clazz;
    try {
      try {
        clazz = Class.forName(fsdirClass).asSubclass(FSDirectory.class);
      } catch (ClassCastException e) {
        // TEST_DIRECTORY is not a sub-class of FSDirectory, so draw one at random
        fsdirClass = FS_DIRECTORIES[random.nextInt(FS_DIRECTORIES.length)];
        if (fsdirClass.indexOf(".") == -1) {// if not fully qualified, assume .store
          fsdirClass = "org.apache.lucene.store." + fsdirClass;
        }
        clazz = Class.forName(fsdirClass).asSubclass(FSDirectory.class);
      }
      MockDirectoryWrapper dir = new MockDirectoryWrapper(random, newFSDirectoryImpl(clazz, f));
      if (lf != null) {
        dir.setLockFactory(lf);
      }
      // record allocation stack for leak reporting, same as newDirectory(Random)
      stores.put(dir, Thread.currentThread().getStackTrace());
      dir.setThrottling(TEST_THROTTLING);
      return dir;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  /**
   * Returns a new Directory instance, using the specified random
   * with contents copied from the provided directory. See
   * {@link #newDirectory()} for more information.
   */
  public static MockDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
    Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
    // copy every file from the source directory into the fresh impl
    for (String file : d.listAll()) {
      d.copy(impl, file, file);
    }
    MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
    stores.put(dir, Thread.currentThread().getStackTrace());
    dir.setThrottling(TEST_THROTTLING);
    return dir;
  }
  /** Returns a new field instance.
   * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
  public static Field newField(String name, String value, Index index) {
    return newField(random, name, value, index);
  }
  /** Returns a new field instance.
   * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
  public static Field newField(String name, String value, Store store, Index index) {
    return newField(random, name, value, store, index);
  }
  /**
   * Returns a new Field instance. Use this when the test does not
   * care about some specific field settings (most tests)
   * <ul>
   * <li>If the store value is set to Store.NO, sometimes the field will be randomly stored.
   * <li>More term vector data than you ask for might be indexed, for example if you choose YES
   * it might index term vectors with offsets too.
   * </ul>
   */
  public static Field newField(String name, String value, Store store, Index index, TermVector tv) {
    return newField(random, name, value, store, index, tv);
  }
  /** Returns a new field instance, using the specified random.
   * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
  public static Field newField(Random random, String name, String value, Index index) {
    return newField(random, name, value, Store.NO, index);
  }
  /** Returns a new field instance, using the specified random.
   * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
  public static Field newField(Random random, String name, String value, Store store, Index index) {
    return newField(random, name, value, store, index, TermVector.NO);
  }
  /** Returns a new field instance, using the specified random.
   * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
  public static Field newField(Random random, String name, String value, Store store, Index index, TermVector tv) {
    if (usually(random)) {
      // most of the time, don't modify the params
      return new Field(name, value, store, index, tv);
    }
    // unindexed fields can't have their storage/vectors upgraded meaningfully
    if (!index.isIndexed())
      return new Field(name, value, store, index, tv);
    if (!store.isStored() && random.nextBoolean())
      store = Store.YES; // randomly store it
    // possibly upgrade the term-vector setting (never downgrade below the requested minimum)
    tv = randomTVSetting(random, tv);
    return new Field(name, value, store, index, tv);
  }
  // all TermVector settings, ordered from least to most information recorded
  static final TermVector tvSettings[] = {
    TermVector.NO, TermVector.YES, TermVector.WITH_OFFSETS,
    TermVector.WITH_POSITIONS, TermVector.WITH_POSITIONS_OFFSETS
  };
  /** Picks a random TermVector setting that records at least as much as {@code minimum}. */
  private static TermVector randomTVSetting(Random random, TermVector minimum) {
    switch(minimum) {
      case NO: return tvSettings[_TestUtil.nextInt(random, 0, tvSettings.length-1)];
      case YES: return tvSettings[_TestUtil.nextInt(random, 1, tvSettings.length-1)];
      case WITH_OFFSETS: return random.nextBoolean() ? TermVector.WITH_OFFSETS
          : TermVector.WITH_POSITIONS_OFFSETS;
      case WITH_POSITIONS: return random.nextBoolean() ? TermVector.WITH_POSITIONS
          : TermVector.WITH_POSITIONS_OFFSETS;
      default: return TermVector.WITH_POSITIONS_OFFSETS;
    }
  }
/** return a random Locale from the available locales on the system */
public static Locale randomLocale(Random random) {
Locale locales[] = Locale.getAvailableLocales();
return locales[random.nextInt(locales.length)];
}
/** return a random TimeZone from the available timezones on the system */
public static TimeZone randomTimeZone(Random random) {
String tzIds[] = TimeZone.getAvailableIDs();
return TimeZone.getTimeZone(tzIds[random.nextInt(tzIds.length)]);
}
/** return a Locale object equivalent to its programmatic name */
public static Locale localeForName(String localeName) {
String elements[] = localeName.split("\\_");
switch(elements.length) {
case 3: return new Locale(elements[0], elements[1], elements[2]);
case 2: return new Locale(elements[0], elements[1]);
case 1: return new Locale(elements[0]);
default: throw new IllegalArgumentException("Invalid Locale: " + localeName);
}
}
  // filesystem-backed Directory implementations eligible for random selection
  private static final String FS_DIRECTORIES[] = {
    "SimpleFSDirectory",
    "NIOFSDirectory",
    "MMapDirectory"
  };
  // all core Directory implementations: RAM-based plus the FS-backed ones above
  private static final String CORE_DIRECTORIES[] = {
    "RAMDirectory",
    FS_DIRECTORIES[0], FS_DIRECTORIES[1], FS_DIRECTORIES[2]
  };
  /** Picks a Directory class name: usually RAMDirectory, rarely any core implementation. */
  public static String randomDirectory(Random random) {
    if (rarely(random)) {
      return CORE_DIRECTORIES[random.nextInt(CORE_DIRECTORIES.length)];
    } else {
      return "RAMDirectory";
    }
  }
  /** Instantiates the given FSDirectory class over {@code file}, falling back to
   * {@link FSDirectory#open} when reflective construction fails. */
  private static Directory newFSDirectoryImpl(
      Class<? extends FSDirectory> clazz, File file)
      throws IOException {
    FSDirectory d = null;
    try {
      // Assuming every FSDirectory has a ctor(File), but not all may take a
      // LockFactory too, so setting it afterwards.
      Constructor<? extends FSDirectory> ctor = clazz.getConstructor(File.class);
      d = ctor.newInstance(file);
    } catch (Exception e) {
      // best-effort fallback: let FSDirectory pick a platform-appropriate impl
      d = FSDirectory.open(file);
    }
    return d;
  }
  /** Registers a temp file that will be deleted when tests are done. */
  public static void registerTempFile(File tmpFile) {
    // keep the allocation stack so stale temp files can be traced to their creator
    tempDirs.put(tmpFile.getAbsoluteFile(), Thread.currentThread().getStackTrace());
  }
  /** Resolves {@code clazzName} ("random", simple, or fully qualified) to a Directory instance. */
  static Directory newDirectoryImpl(Random random, String clazzName) {
    if (clazzName.equals("random"))
      clazzName = randomDirectory(random);
    if (clazzName.indexOf(".") == -1) // if not fully qualified, assume .store
      clazzName = "org.apache.lucene.store." + clazzName;
    try {
      final Class<? extends Directory> clazz = Class.forName(clazzName).asSubclass(Directory.class);
      // If it is a FSDirectory type, try its ctor(File)
      if (FSDirectory.class.isAssignableFrom(clazz)) {
        final File tmpFile = _TestUtil.createTempFile("test", "tmp", TEMP_DIR);
        // createTempFile made a file; we need a directory with the same name
        tmpFile.delete();
        tmpFile.mkdir();
        registerTempFile(tmpFile);
        return newFSDirectoryImpl(clazz.asSubclass(FSDirectory.class), tmpFile);
      }
      // try empty ctor
      return clazz.newInstance();
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  /** create a new searcher over the reader.
   * This searcher might randomly use threads. */
  public static IndexSearcher newSearcher(IndexReader r) throws IOException {
    return newSearcher(r, true);
  }
  /** create a new searcher over the reader.
   * This searcher might randomly use threads.
   * if <code>maybeWrap</code> is true, this searcher might wrap the reader
   * with one that returns null for getSequentialSubReaders.
   */
  public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) throws IOException {
    if (random.nextBoolean()) {
      // single-threaded searcher, possibly over a slow composite-reader wrapper
      if (maybeWrap && rarely()) {
        return new IndexSearcher(new SlowMultiReaderWrapper(r));
      } else {
        return new IndexSearcher(r);
      }
    } else {
      // multi-threaded searcher: sometimes with an executor (1-8 threads), sometimes without
      int threads = 0;
      final ExecutorService ex = (random.nextBoolean()) ? null
          : Executors.newFixedThreadPool(threads = _TestUtil.nextInt(random, 1, 8),
              new NamedThreadFactory("LuceneTestCase"));
      if (ex != null && VERBOSE) {
        System.out.println("NOTE: newSearcher using ExecutorService with " + threads + " threads");
      }
      // anonymous subclass ties the executor's lifetime to the searcher's close()
      return new IndexSearcher(r.getTopReaderContext(), ex) {
        @Override
        public void close() throws IOException {
          super.close();
          if (ex != null) {
            ex.shutdown();
            try {
              ex.awaitTermination(1000, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
              e.printStackTrace();
            }
          }
        }
      };
    }
  }
  /** Returns the name of the currently running test method (set by the runner). */
  public String getName() {
    return this.name;
  }
/** Gets a resource from the classpath as {@link File}. This method should only be used,
* if a real file is needed. To get a stream, code should prefer
* {@link Class#getResourceAsStream} using {@code this.getClass()}.
*/
protected File getDataFile(String name) throws IOException {
try {
return new File(this.getClass().getResource(name).toURI());
} catch (Exception e) {
throw new IOException("Cannot find resource: " + name);
}
}
  // We get here from InterceptTestCaseEvents on the 'failed' event....
  /** Prints the exact ant command line (including seed) needed to reproduce this failure. */
  public void reportAdditionalFailureInfo() {
    System.err.println("NOTE: reproduce with: ant test -Dtestcase=" + getClass().getSimpleName()
        + " -Dtestmethod=" + getName() + " -Dtests.seed=" + new TwoLongs(staticSeed, seed)
        + reproduceWithExtraParams());
  }
  // extra params that were overridden needed to reproduce the command
  private String reproduceWithExtraParams() {
    StringBuilder sb = new StringBuilder();
    // only emit flags whose value differs from the default, to keep the line short
    if (!TEST_CODEC.equals("randomPerField")) sb.append(" -Dtests.codec=").append(TEST_CODEC);
    if (!TEST_LOCALE.equals("random")) sb.append(" -Dtests.locale=").append(TEST_LOCALE);
    if (!TEST_TIMEZONE.equals("random")) sb.append(" -Dtests.timezone=").append(TEST_TIMEZONE);
    if (!TEST_DIRECTORY.equals("random")) sb.append(" -Dtests.directory=").append(TEST_DIRECTORY);
    if (RANDOM_MULTIPLIER > 1) sb.append(" -Dtests.multiplier=").append(RANDOM_MULTIPLIER);
    if (TEST_NIGHTLY) sb.append(" -Dtests.nightly=true");
    return sb.toString();
  }
  // recorded seed: for beforeClass
  private static long staticSeed;
  // seed for individual test methods, changed in @before
  private long seed;
  // source of fresh seeds when no fixed seed is supplied
  private static final Random seedRand = new Random();
  // shared Random; re-seeded in setUp() for every test method
  protected static final Random random = new Random(0);
  // current test method name; "<unknown>" until the runner sets it
  private String name = "<unknown>";
  /**
   * Annotation for tests that should only be run during nightly builds.
   */
  @Documented
  @Inherited
  @Retention(RetentionPolicy.RUNTIME)
  public @interface Nightly {}
  /** optionally filters the tests to be run by TEST_METHOD */
  public static class LuceneTestCaseRunner extends BlockJUnit4ClassRunner {
    // cached so the (expensive) reflective scan happens only once per class
    private List<FrameworkMethod> testMethods;
    @Override
    protected List<FrameworkMethod> computeTestMethods() {
      if (testMethods != null)
        return testMethods;
      testClassesRun.add(getTestClass().getJavaClass().getSimpleName());
      testMethods = new ArrayList<FrameworkMethod>();
      for (Method m : getTestClass().getJavaClass().getMethods()) {
        // check if the current test's class has methods annotated with @Ignore
        final Ignore ignored = m.getAnnotation(Ignore.class);
        if (ignored != null && !m.getName().equals("alwaysIgnoredTestMethod")) {
          System.err.println("NOTE: Ignoring test method '" + m.getName() + "': " + ignored.value());
        }
        // add methods starting with "test"
        // (accepts either @Test-annotated methods or JUnit3-style no-arg void test* methods)
        final int mod = m.getModifiers();
        if (m.getAnnotation(Test.class) != null ||
            (m.getName().startsWith("test") &&
                !Modifier.isAbstract(mod) &&
                m.getParameterTypes().length == 0 &&
                m.getReturnType() == Void.TYPE))
        {
          if (Modifier.isStatic(mod))
            throw new RuntimeException("Test methods must not be static.");
          testMethods.add(new FrameworkMethod(m));
        }
      }
      if (testMethods.isEmpty()) {
        throw new RuntimeException("No runnable methods!");
      }
      if (TEST_NIGHTLY == false) {
        if (getTestClass().getJavaClass().isAnnotationPresent(Nightly.class)) {
          /* the test class is annotated with nightly, remove all methods */
          String className = getTestClass().getJavaClass().getSimpleName();
          System.err.println("NOTE: Ignoring nightly-only test class '" + className + "'");
          testMethods.clear();
        } else {
          /* remove all nightly-only methods */
          for (int i = 0; i < testMethods.size(); i++) {
            final FrameworkMethod m = testMethods.get(i);
            if (m.getAnnotation(Nightly.class) != null) {
              System.err.println("NOTE: Ignoring nightly-only test method '" + m.getName() + "'");
              // decrement i to compensate for the removal shifting later elements left
              testMethods.remove(i--);
            }
          }
        }
        /* dodge a possible "no-runnable methods" exception by adding a fake ignored test */
        if (testMethods.isEmpty()) {
          try {
            testMethods.add(new FrameworkMethod(LuceneTestCase.class.getMethod("alwaysIgnoredTestMethod")));
          } catch (Exception e) { throw new RuntimeException(e); }
        }
      }
      return testMethods;
    }
    @Override
    protected void runChild(FrameworkMethod arg0, RunNotifier arg1) {
      if (VERBOSE) {
        System.out.println("\nNOTE: running test " + arg0.getName());
      }
      // only print iteration info if the user requested more than one iterations
      final boolean verbose = VERBOSE && TEST_ITER > 1;
      // single-element array so the listener's inner class can read the current iteration
      final int currentIter[] = new int[1];
      arg1.addListener(new RunListener() {
        @Override
        public void testFailure(Failure failure) throws Exception {
          if (verbose) {
            System.out.println("\nNOTE: iteration " + currentIter[0] + " failed! ");
          }
        }
      });
      for (int i = 0; i < TEST_ITER; i++) {
        currentIter[0] = i;
        if (verbose) {
          System.out.println("\nNOTE: running iter=" + (1+i) + " of " + TEST_ITER);
        }
        super.runChild(arg0, arg1);
        if (testsFailed) {
          // stop early on failure, but only after the minimum iteration count
          if (i >= TEST_ITER_MIN - 1) { // XXX is this still off-by-one?
            break;
          }
        }
      }
    }
    public LuceneTestCaseRunner(Class<?> clazz) throws InitializationError {
      super(clazz);
      // when -Dtestmethod is given, run only that single method
      Filter f = new Filter() {
        @Override
        public String describe() { return "filters according to TEST_METHOD"; }
        @Override
        public boolean shouldRun(Description d) {
          return TEST_METHOD == null || d.getMethodName().equals(TEST_METHOD);
        }
      };
      try {
        f.apply(this);
      } catch (NoTestsRemainException e) {
        throw new RuntimeException(e);
      }
    }
  }
  /** CodecProvider that assigns each field a pseudo-randomly chosen codec,
   * stable per field name for the lifetime of this provider. */
  private static class RandomCodecProvider extends CodecProvider {
    private List<Codec> knownCodecs = new ArrayList<Codec>();
    // field name -> codec, so repeated lookups for the same field stay consistent
    private Map<String,Codec> previousMappings = new HashMap<String,Codec>();
    private final int perFieldSeed;
    RandomCodecProvider(Random random) {
      this.perFieldSeed = random.nextInt();
      register(new StandardCodec());
      register(new PreFlexCodec());
      register(new PulsingCodec(1));
      register(new SimpleTextCodec());
      Collections.shuffle(knownCodecs, random);
    }
    @Override
    public synchronized void register(Codec codec) {
      // PreFlex is read-only legacy; register it but never pick it for new fields
      if (!codec.name.equals("PreFlex"))
        knownCodecs.add(codec);
      super.register(codec);
    }
    @Override
    public synchronized void unregister(Codec codec) {
      knownCodecs.remove(codec);
      super.unregister(codec);
    }
    @Override
    public synchronized String getFieldCodec(String name) {
      Codec codec = previousMappings.get(name);
      if (codec == null) {
        // deterministic hash of (seed, field name) picks the codec
        codec = knownCodecs.get(Math.abs(perFieldSeed ^ name.hashCode()) % knownCodecs.size());
        if (codec instanceof SimpleTextCodec && perFieldSeed % 5 != 0) {
          // make simpletext rarer, choose again
          codec = knownCodecs.get(Math.abs(perFieldSeed ^ name.toUpperCase(Locale.ENGLISH).hashCode()) % knownCodecs.size());
        }
        previousMappings.put(name, codec);
      }
      return codec.name;
    }
    @Override
    public synchronized boolean hasFieldCodec(String name) {
      return true; // we have a codec for every field
    }
    @Override
    public synchronized String toString() {
      return "RandomCodecProvider: " + previousMappings.toString();
    }
  }
  // placeholder added by the runner when nightly filtering leaves a class with no
  // runnable methods, to dodge JUnit's "no runnable methods" error
  @Ignore("just a hack")
  public final void alwaysIgnoredTestMethod() {}
}
package org.apache.lucene.util;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.annotation.Documented;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.*;
import org.apache.lucene.index.codecs.Codec;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.codecs.mockintblock.MockFixedIntBlockCodec;
import org.apache.lucene.index.codecs.mockintblock.MockVariableIntBlockCodec;
import org.apache.lucene.index.codecs.mocksep.MockSepCodec;
import org.apache.lucene.index.codecs.mockrandom.MockRandomCodec;
import org.apache.lucene.index.codecs.preflex.PreFlexCodec;
import org.apache.lucene.index.codecs.preflexrw.PreFlexRWCodec;
import org.apache.lucene.index.codecs.pulsing.PulsingCodec;
import org.apache.lucene.index.codecs.simpletext.SimpleTextCodec;
import org.apache.lucene.index.codecs.standard.StandardCodec;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldCache.CacheEntry;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
import org.junit.*;
import org.junit.rules.TestWatchman;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.junit.runner.manipulation.Filter;
import org.junit.runner.manipulation.NoTestsRemainException;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
/**
* Base class for all Lucene unit tests, Junit3 or Junit4 variant.
* <p>
* </p>
* <p>
* If you
* override either <code>setUp()</code> or
* <code>tearDown()</code> in your unit test, make sure you
* call <code>super.setUp()</code> and
* <code>super.tearDown()</code>
* </p>
*
* <code>@Before</code> - replaces setUp
* <code>@After</code> - replaces tearDown
* <code>@Test</code> - any public method with this annotation is a test case, regardless
* of its name
* <p>
* <p>
* See Junit4 <a href="http://junit.org/junit/javadoc/4.7/">documentation</a> for a complete list of features.
* <p>
* Import from org.junit rather than junit.framework.
* <p>
* You should be able to use this class anywhere you used LuceneTestCase
* if you annotate your derived class correctly with the annotations above
* @see #assertSaneFieldCaches(String)
*/
@RunWith(LuceneTestCase.LuceneTestCaseRunner.class)
public abstract class LuceneTestCase extends Assert {
/**
* true iff tests are run in verbose mode. Note: if it is false, tests are not
* expected to print any messages.
*/
public static final boolean VERBOSE = Boolean.getBoolean("tests.verbose");
/** Use this constant when creating Analyzers and any other version-dependent stuff.
* <p><b>NOTE:</b> Change this when development starts for new Lucene version:
*/
public static final Version TEST_VERSION_CURRENT = Version.LUCENE_40;
/**
* If this is set, it is the only method that should run.
*/
static final String TEST_METHOD;
/** Create indexes in this directory, optimally use a subdir, named after the test */
public static final File TEMP_DIR;
static {
// an empty "testmethod" property means "run all test methods"
String method = System.getProperty("testmethod", "").trim();
TEST_METHOD = method.length() == 0 ? null : method;
// prefer the explicit "tempDir" property, fall back to the JVM temp dir
String s = System.getProperty("tempDir", System.getProperty("java.io.tmpdir"));
if (s == null)
throw new RuntimeException("To run tests, you need to define system property 'tempDir' or 'java.io.tmpdir'.");
TEMP_DIR = new File(s);
TEMP_DIR.mkdirs();
}
/** set of directories we created, in afterclass we try to clean these up */
private static final Map<File, StackTraceElement[]> tempDirs = Collections.synchronizedMap(new HashMap<File, StackTraceElement[]>());
// by default we randomly pick a different codec for
// each test case (non-J4 tests) and each test class (J4
// tests)
/** Gets the codec to run tests with. */
public static final String TEST_CODEC = System.getProperty("tests.codec", "randomPerField");
/** Gets the codecprovider to run tests with */
public static final String TEST_CODECPROVIDER = System.getProperty("tests.codecprovider", "random");
/** Gets the locale to run tests with */
public static final String TEST_LOCALE = System.getProperty("tests.locale", "random");
/** Gets the timezone to run tests with */
public static final String TEST_TIMEZONE = System.getProperty("tests.timezone", "random");
/** Gets the directory to run tests with */
public static final String TEST_DIRECTORY = System.getProperty("tests.directory", "random");
/** Get the number of times to run tests */
public static final int TEST_ITER = Integer.parseInt(System.getProperty("tests.iter", "1"));
/** Get the minimum number of times to run tests until a failure happens */
public static final int TEST_ITER_MIN = Integer.parseInt(System.getProperty("tests.iter.min", Integer.toString(TEST_ITER)));
/** Get the random seed for tests */
public static final String TEST_SEED = System.getProperty("tests.seed", "random");
/** whether or not nightly tests should run */
public static final boolean TEST_NIGHTLY = Boolean.parseBoolean(System.getProperty("tests.nightly", "false"));
/** the line file used by LineFileDocs */
public static final String TEST_LINE_DOCS_FILE = System.getProperty("tests.linedocsfile", "europarl.lines.txt.gz");
/** whether or not to clean threads between test invocations: "false", "perMethod", "perClass" */
public static final String TEST_CLEAN_THREADS = System.getProperty("tests.cleanthreads", "perClass");
/** MockDirectoryWrapper throttling mode for test directories: heavier throttling on nightly runs */
public static final Throttling TEST_THROTTLING = TEST_NIGHTLY ? Throttling.SOMETIMES : Throttling.NEVER;
// matches codec names of the form "Name(N)", capturing the name and its numeric parameter
private static final Pattern codecWithParam = Pattern.compile("(.*)\\(\\s*(\\d+)\\s*\\)");
/**
* A random multiplier which you should use when writing random tests:
* multiply it by the number of iterations
*/
public static final int RANDOM_MULTIPLIER = Integer.parseInt(System.getProperty("tests.multiplier", "1"));
private int savedBoolMaxClauseCount;
private volatile Thread.UncaughtExceptionHandler savedUncaughtExceptionHandler = null;
/** Used to track if setUp and tearDown are called correctly from subclasses */
private static State state = State.INITIAL;
// simple per-test lifecycle state machine: INITIAL -> SETUP -> RANTEST -> TEARDOWN
private static enum State {
INITIAL, // no tests ran yet
SETUP, // test has called setUp()
RANTEST, // test is running
TEARDOWN // test has called tearDown()
}
/**
 * Some tests expect the directory to contain a single segment, and want to do
 * tests on that segment's reader. This is a utility method to help them.
 */
public static SegmentReader getOnlySegmentReader(IndexReader reader) {
  // Already an atomic single-segment reader? Use it as-is.
  if (reader instanceof SegmentReader) {
    return (SegmentReader) reader;
  }
  // Otherwise the reader must wrap exactly one segment.
  final IndexReader[] subs = reader.getSequentialSubReaders();
  if (subs.length == 1) {
    return (SegmentReader) subs[0];
  }
  throw new IllegalArgumentException(reader + " has " + subs.length + " segments instead of exactly one");
}
/** Immutable pair of a thread and the uncaught exception that killed it. */
private static class UncaughtExceptionEntry {
public final Thread thread;
public final Throwable exception;
public UncaughtExceptionEntry(Thread thread, Throwable exception) {
this.thread = thread;
this.exception = exception;
}
}
// exceptions caught by the default uncaught-exception handler during a test; reported in tearDown
private List<UncaughtExceptionEntry> uncaughtExceptions = Collections.synchronizedList(new ArrayList<UncaughtExceptionEntry>());
// saves default codec: we do this statically as many build indexes in @beforeClass
private static String savedDefaultCodec;
// default codec: not set when we use a per-field provider.
private static Codec codec;
// default codec provider
private static CodecProvider savedCodecProvider;
// locale/timezone under test, plus the JVM defaults saved for restoration in afterClass
private static Locale locale;
private static Locale savedLocale;
private static TimeZone timeZone;
private static TimeZone savedTimeZone;
// directories handed out by newDirectory/newFSDirectory, mapped to their allocation stack traces
protected static Map<MockDirectoryWrapper,StackTraceElement[]> stores;
// names of the test-only codecs that installTestCodecs registers
private static final String[] TEST_CODECS = new String[] {"MockSep", "MockFixedIntBlock", "MockVariableIntBlock", "MockRandom"};
/** Registers codec c with cp, first unregistering any codec already bound to the same name. */
private static void swapCodec(Codec c, CodecProvider cp) {
  Codec previous;
  try {
    previous = cp.lookup(c.name);
  } catch (IllegalArgumentException notRegistered) {
    // nothing registered under that name yet - nothing to remove
    previous = null;
  }
  if (previous != null) {
    cp.unregister(previous);
  }
  cp.register(c);
}
// Installs the mock/test codecs into cp, sets the requested codec as the
// default field codec, and returns the Codec instance tests will run with.
static Codec installTestCodecs(String codec, CodecProvider cp) {
savedDefaultCodec = cp.getDefaultFieldCodec();
final boolean codecHasParam;
int codecParam = 0;
if (codec.equals("randomPerField")) {
// lie
codec = "Standard";
codecHasParam = false;
} else if (codec.equals("random")) {
codec = pickRandomCodec(random);
codecHasParam = false;
} else {
// "Name(N)" syntax: extract the codec name and its fixed numeric parameter
Matcher m = codecWithParam.matcher(codec);
if (m.matches()) {
// codec has a fixed param
codecHasParam = true;
codec = m.group(1);
codecParam = Integer.parseInt(m.group(2));
} else {
codecHasParam = false;
}
}
cp.setDefaultFieldCodec(codec);
if (codec.equals("PreFlex")) {
// If we're running w/ PreFlex codec we must swap in the
// test-only PreFlexRW codec (since core PreFlex can
// only read segments):
swapCodec(new PreFlexRWCodec(), cp);
}
swapCodec(new MockSepCodec(), cp);
// for parameterized codecs honor the requested param, otherwise pick a random one
swapCodec(new PulsingCodec(codecHasParam && "Pulsing".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 20)), cp);
swapCodec(new MockFixedIntBlockCodec(codecHasParam && "MockFixedIntBlock".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 2000)), cp);
// baseBlockSize cannot be over 127:
swapCodec(new MockVariableIntBlockCodec(codecHasParam && "MockVariableIntBlock".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 127)), cp);
swapCodec(new MockRandomCodec(random), cp);
return cp.lookup(codec);
}
// undoes installTestCodecs: removes the mock codecs and restores the saved defaults
// Removes the mock codecs registered by installTestCodecs and restores the
// saved default field codec; swaps the read-only core PreFlex back in if needed.
static void removeTestCodecs(Codec codec, CodecProvider cp) {
if (codec.name.equals("PreFlex")) {
// replace the test-only read/write PreFlexRW with the read-only core PreFlex
final Codec preFlex = cp.lookup("PreFlex");
if (preFlex != null) {
cp.unregister(preFlex);
}
cp.register(new PreFlexCodec());
}
cp.unregister(cp.lookup("MockSep"));
cp.unregister(cp.lookup("MockFixedIntBlock"));
cp.unregister(cp.lookup("MockVariableIntBlock"));
cp.unregister(cp.lookup("MockRandom"));
// re-register Pulsing with its normal (non-randomized) cutoff of 1
swapCodec(new PulsingCodec(1), cp);
cp.setDefaultFieldCodec(savedDefaultCodec);
}
/** Picks a codec name uniformly at random from the core codecs plus the test-only codecs. */
static String pickRandomCodec(Random rnd) {
  final int coreCount = CodecProvider.CORE_CODECS.length;
  final int idx = rnd.nextInt(coreCount + TEST_CODECS.length);
  return idx < coreCount
      ? CodecProvider.CORE_CODECS[idx]
      : TEST_CODECS[idx - coreCount];
}
/**
 * Immutable pair of longs with an "l1:l2" string round-trip; used to carry
 * the static and per-test random seeds through the tests.seed property.
 */
private static class TwoLongs {
  public final long l1, l2;
  public TwoLongs(long l1, long l2) {
    this.l1 = l1;
    this.l2 = l2;
  }
  @Override
  public String toString() {
    return l1 + ":" + l2;
  }
  /** Inverse of {@link #toString}: parses an "l1:l2" string. */
  public static TwoLongs fromString(String s) {
    final int colon = s.indexOf(':');
    assert colon != -1;
    final long first = Long.parseLong(s.substring(0, colon));
    final long second = Long.parseLong(s.substring(colon + 1));
    return new TwoLongs(first, second);
  }
}
/** @deprecated (4.0) until we fix no-fork problems in solr tests */
@Deprecated
// names of every test class run in this JVM; dumped in afterClass when tests fail
private static List<String> testClassesRun = new ArrayList<String>();
/**
 * Junit4 class setup: seeds the static random, installs the codec/codecprovider,
 * locale and timezone under test (saving the JVM defaults for afterClass to restore).
 */
@BeforeClass
public static void beforeClassLuceneTestCaseJ4() {
state = State.INITIAL;
// "random" seed means draw a fresh one; otherwise reuse the l1 half of tests.seed
staticSeed = "random".equals(TEST_SEED) ? seedRand.nextLong() : TwoLongs.fromString(TEST_SEED).l1;
random.setSeed(staticSeed);
tempDirs.clear();
stores = Collections.synchronizedMap(new IdentityHashMap<MockDirectoryWrapper,StackTraceElement[]>());
// enable this by default, for IDE consistency with ant tests (as its the default from ant)
// TODO: really should be in solr base classes, but some extend LTC directly.
// we do this in beforeClass, because some tests currently disable it
if (System.getProperty("solr.directoryFactory") == null) {
System.setProperty("solr.directoryFactory", "org.apache.solr.core.MockDirectoryFactory");
}
savedCodecProvider = CodecProvider.getDefault();
if ("random".equals(TEST_CODECPROVIDER)) {
if ("randomPerField".equals(TEST_CODEC)) {
if (random.nextInt(4) == 0) { // preflex-only setup
codec = installTestCodecs("PreFlex", CodecProvider.getDefault());
} else { // per-field setup
CodecProvider.setDefault(new RandomCodecProvider(random));
codec = installTestCodecs(TEST_CODEC, CodecProvider.getDefault());
}
} else { // ordinary setup
codec = installTestCodecs(TEST_CODEC, CodecProvider.getDefault());
}
} else {
// someone specified their own codecprovider by class
try {
Class<? extends CodecProvider> cpClazz = Class.forName(TEST_CODECPROVIDER).asSubclass(CodecProvider.class);
CodecProvider cp = cpClazz.newInstance();
String codecName;
if (TEST_CODEC.startsWith("random")) { // TODO: somehow do random per-field?!
// pick any codec the custom provider knows about
Set<String> codecSet = cp.listAll();
String availableCodecs[] = codecSet.toArray(new String[codecSet.size()]);
codecName = availableCodecs[random.nextInt(availableCodecs.length)];
} else {
codecName = TEST_CODEC;
}
codec = cp.lookup(codecName);
cp.setDefaultFieldCodec(codecName);
CodecProvider.setDefault(cp);
} catch (Exception e) {
System.err.println("Could not instantiate CodecProvider: " + TEST_CODECPROVIDER);
throw new RuntimeException(e);
}
}
// swap in the locale/timezone under test; afterClass restores the saved defaults
savedLocale = Locale.getDefault();
locale = TEST_LOCALE.equals("random") ? randomLocale(random) : localeForName(TEST_LOCALE);
Locale.setDefault(locale);
savedTimeZone = TimeZone.getDefault();
timeZone = TEST_TIMEZONE.equals("random") ? randomTimeZone(random) : TimeZone.getTimeZone(TEST_TIMEZONE);
TimeZone.setDefault(timeZone);
testsFailed = false;
}
/**
 * Junit4 class teardown: checks lifecycle discipline, cleans up rogue threads,
 * restores codec/locale/timezone defaults, reports unclosed directories and
 * (on failure) enough environment detail to reproduce; finally removes temp dirs.
 */
@AfterClass
public static void afterClassLuceneTestCaseJ4() {
if (!testsFailed) {
assertTrue("ensure your setUp() calls super.setUp() and your tearDown() calls super.tearDown()!!!",
state == State.INITIAL || state == State.TEARDOWN);
}
state = State.INITIAL;
if (! "false".equals(TEST_CLEAN_THREADS)) {
int rogueThreads = threadCleanup("test class");
if (rogueThreads > 0) {
// TODO: fail here once the leaks are fixed.
System.err.println("RESOURCE LEAK: test class left " + rogueThreads + " thread(s) running");
}
}
// capture the codec description before restoring the saved provider below
String codecDescription;
CodecProvider cp = CodecProvider.getDefault();
if ("randomPerField".equals(TEST_CODEC) && cp instanceof RandomCodecProvider) {
codecDescription = cp.toString();
} else {
codecDescription = codec.toString();
}
// only undo our codec installs if the default provider is still the one we saved
if ("random".equals(TEST_CODECPROVIDER) && CodecProvider.getDefault() == savedCodecProvider)
removeTestCodecs(codec, CodecProvider.getDefault());
CodecProvider.setDefault(savedCodecProvider);
Locale.setDefault(savedLocale);
TimeZone.setDefault(savedTimeZone);
System.clearProperty("solr.solr.home");
System.clearProperty("solr.data.dir");
// now look for unclosed resources
if (!testsFailed)
for (MockDirectoryWrapper d : stores.keySet()) {
if (d.isOpen()) {
StackTraceElement elements[] = stores.get(d);
// Look for the first class that is not LuceneTestCase that requested
// a Directory. The first two items are of Thread's, so skipping over
// them.
StackTraceElement element = null;
for (int i = 2; i < elements.length; i++) {
StackTraceElement ste = elements[i];
if (ste.getClassName().indexOf("LuceneTestCase") == -1) {
element = ste;
break;
}
}
fail("directory of test was not closed, opened from: " + element);
}
}
stores = null;
// if verbose or tests failed, report some information back
if (VERBOSE || testsFailed)
System.err.println("NOTE: test params are: codec=" + codecDescription +
", locale=" + locale +
", timezone=" + (timeZone == null ? "(null)" : timeZone.getID()));
if (testsFailed) {
// dump the JVM/OS environment so failures can be reproduced
System.err.println("NOTE: all tests run in this JVM:");
System.err.println(Arrays.toString(testClassesRun.toArray()));
System.err.println("NOTE: " + System.getProperty("os.name") + " "
+ System.getProperty("os.version") + " "
+ System.getProperty("os.arch") + "/"
+ System.getProperty("java.vendor") + " "
+ System.getProperty("java.version") + " "
+ (Constants.JRE_IS_64BIT ? "(64-bit)" : "(32-bit)") + "/"
+ "cpus=" + Runtime.getRuntime().availableProcessors() + ","
+ "threads=" + Thread.activeCount() + ","
+ "free=" + Runtime.getRuntime().freeMemory() + ","
+ "total=" + Runtime.getRuntime().totalMemory());
}
// clear out any temp directories if we can
if (!testsFailed) {
for (Entry<File, StackTraceElement[]> entry : tempDirs.entrySet()) {
try {
_TestUtil.rmDir(entry.getKey());
} catch (IOException e) {
// removal failed (e.g. file still open): report who allocated the dir
e.printStackTrace();
System.err.println("path " + entry.getKey() + " allocated from");
// first two STE's are Java's
StackTraceElement[] elements = entry.getValue();
for (int i = 2; i < elements.length; i++) {
StackTraceElement ste = elements[i];
// print only our code's stack information
if (ste.getClassName().indexOf("org.apache.lucene") == -1) break;
System.err.println("\t" + ste);
}
fail("could not remove temp dir: " + entry.getKey());
}
}
}
}
private static boolean testsFailed; /* true if any tests failed */
// This is how we get control when errors occur.
// Think of this as start/end/success/failed
// events.
@Rule
public final TestWatchman intercept = new TestWatchman() {
@Override
public void failed(Throwable e, FrameworkMethod method) {
// org.junit.internal.AssumptionViolatedException in older releases
// org.junit.Assume.AssumptionViolatedException in recent ones
if (e.getClass().getName().endsWith("AssumptionViolatedException")) {
// unwrap our TestIgnoredException so its explanation message is printed
if (e.getCause() instanceof TestIgnoredException)
e = e.getCause();
System.err.print("NOTE: Assume failed in '" + method.getName() + "' (ignored):");
if (VERBOSE) {
System.err.println();
e.printStackTrace(System.err);
} else {
System.err.print(" ");
System.err.println(e.getMessage());
}
} else {
// real failure (not a skipped assumption): remember it and print repro info
testsFailed = true;
reportAdditionalFailureInfo();
}
super.failed(e, method);
}
@Override
public void starting(FrameworkMethod method) {
// set current method name for logging
LuceneTestCase.this.name = method.getName();
if (!testsFailed) {
assertTrue("ensure your setUp() calls super.setUp()!!!", state == State.SETUP);
}
state = State.RANTEST;
super.starting(method);
}
};
/**
 * Per-test setup: seeds the per-test random, checks setUp/tearDown discipline,
 * installs an uncaught-exception handler that records thread deaths, and saves
 * the BooleanQuery max clause count for restoration in tearDown.
 */
@Before
public void setUp() throws Exception {
// "random" seed means draw a fresh one; otherwise reuse the l2 half of tests.seed
seed = "random".equals(TEST_SEED) ? seedRand.nextLong() : TwoLongs.fromString(TEST_SEED).l2;
random.setSeed(seed);
if (!testsFailed) {
assertTrue("ensure your tearDown() calls super.tearDown()!!!", (state == State.INITIAL || state == State.TEARDOWN));
}
state = State.SETUP;
// chain to any previously-installed handler so we don't hide other listeners
savedUncaughtExceptionHandler = Thread.getDefaultUncaughtExceptionHandler();
Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread t, Throwable e) {
testsFailed = true;
uncaughtExceptions.add(new UncaughtExceptionEntry(t, e));
if (savedUncaughtExceptionHandler != null)
savedUncaughtExceptionHandler.uncaughtException(t, e);
}
});
savedBoolMaxClauseCount = BooleanQuery.getMaxClauseCount();
}
/**
* Forcible purges all cache entries from the FieldCache.
* <p>
* This method will be called by tearDown to clean up FieldCache.DEFAULT.
* If a (poorly written) test has some expectation that the FieldCache
* will persist across test methods (ie: a static IndexReader) this
* method can be overridden to do nothing.
* </p>
*
* @see FieldCache#purgeAllCaches()
*/
protected void purgeFieldCache(final FieldCache fc) {
// drop every cached entry; override as a no-op to keep caches across methods
fc.purgeAllCaches();
}
/** Returns "ClassName.testMethodName", for use in log and assertion messages. */
protected String getTestLabel() {
return getClass().getName() + "." + getName();
}
/**
 * Per-test teardown: checks lifecycle discipline, restores BooleanQuery limits
 * and the uncaught-exception handler, fails on exceptions thrown by background
 * threads, checks FieldCache sanity, and finally purges the FieldCache.
 */
@After
public void tearDown() throws Exception {
if (!testsFailed) {
// Note: we allow a test to go straight from SETUP -> TEARDOWN (without ever entering the RANTEST state)
// because if you assume() inside setUp(), it skips the test and the TestWatchman has no way to know...
assertTrue("ensure your setUp() calls super.setUp()!!!", state == State.RANTEST || state == State.SETUP);
}
state = State.TEARDOWN;
BooleanQuery.setMaxClauseCount(savedBoolMaxClauseCount);
if ("perMethod".equals(TEST_CLEAN_THREADS)) {
int rogueThreads = threadCleanup("test method: '" + getName() + "'");
if (rogueThreads > 0) {
System.err.println("RESOURCE LEAK: test method: '" + getName()
+ "' left " + rogueThreads + " thread(s) running");
// TODO: fail, but print seed for now.
if (!testsFailed && uncaughtExceptions.isEmpty()) {
reportAdditionalFailureInfo();
}
}
}
Thread.setDefaultUncaughtExceptionHandler(savedUncaughtExceptionHandler);
try {
if (!uncaughtExceptions.isEmpty()) {
// a background thread died during the test: surface every exception and fail
testsFailed = true;
reportAdditionalFailureInfo();
System.err.println("The following exceptions were thrown by threads:");
for (UncaughtExceptionEntry entry : uncaughtExceptions) {
System.err.println("*** Thread: " + entry.thread.getName() + " ***");
entry.exception.printStackTrace(System.err);
}
fail("Some threads threw uncaught exceptions!");
}
// calling assertSaneFieldCaches here isn't as useful as having test
// classes call it directly from the scope where the index readers
// are used, because they could be gc'ed just before this tearDown
// method is called.
//
// But it's better then nothing.
//
// If you are testing functionality that you know for a fact
// "violates" FieldCache sanity, then you should either explicitly
// call purgeFieldCache at the end of your test method, or refactor
// your Test class so that the inconsistent FieldCache usages are
// isolated in distinct test methods
assertSaneFieldCaches(getTestLabel());
} finally {
purgeFieldCache(FieldCache.DEFAULT);
}
}
// how long threadCleanup waits for a leftover thread to die on its own
private final static int THREAD_STOP_GRACE_MSEC = 50;
// jvm-wide list of 'rogue threads' we found, so they only get reported once.
private final static IdentityHashMap<Thread,Boolean> rogueThreads = new IdentityHashMap<Thread,Boolean>();
static {
// just a hack for things like eclipse test-runner threads
// (threads alive before any test runs are whitelisted, never reported as leaks)
for (Thread t : Thread.getAllStackTraces().keySet()) {
rogueThreads.put(t, true);
}
}
/**
* Looks for leftover running threads, trying to kill them off,
* so they don't fail future tests.
* returns the number of rogue threads that it found.
*/
private static int threadCleanup(String context) {
// educated guess
Thread[] stillRunning = new Thread[Thread.activeCount()+1];
int threadCount = 0;
int rogueCount = 0;
if ((threadCount = Thread.enumerate(stillRunning)) > 1) {
// a completely filled array may have been truncated by enumerate(): grow and retry
while (threadCount == stillRunning.length) {
// truncated response
stillRunning = new Thread[stillRunning.length*2];
threadCount = Thread.enumerate(stillRunning);
}
for (int i = 0; i < threadCount; i++) {
Thread t = stillRunning[i];
// report each live thread once, skipping ourselves and whitelisted threads
if (t.isAlive() &&
!rogueThreads.containsKey(t) &&
t != Thread.currentThread() &&
/* its ok to keep your searcher across test cases */
(t.getName().startsWith("LuceneTestCase") && context.startsWith("test method")) == false) {
System.err.println("WARNING: " + context + " left thread running: " + t);
rogueThreads.put(t, true);
rogueCount++;
if (t.getName().startsWith("LuceneTestCase")) {
System.err.println("PLEASE CLOSE YOUR INDEXSEARCHERS IN YOUR TEST!!!!");
continue;
} else {
// wait on the thread to die of natural causes
try {
t.join(THREAD_STOP_GRACE_MSEC);
} catch (InterruptedException e) { e.printStackTrace(); }
}
// try to stop the thread:
t.setUncaughtExceptionHandler(null);
Thread.setDefaultUncaughtExceptionHandler(null);
if (!t.getName().startsWith("SyncThread")) // avoid zookeeper jre crash
t.interrupt();
}
}
}
return rogueCount;
}
/**
* Asserts that FieldCacheSanityChecker does not detect any
* problems with FieldCache.DEFAULT.
* <p>
* If any problems are found, they are logged to System.err
* (along with the msg) when the Assertion is thrown.
* </p>
* <p>
* This method is called by tearDown after every test method,
* however IndexReaders scoped inside test methods may be garbage
* collected prior to this method being called, causing errors to
* be overlooked. Tests are encouraged to keep their IndexReaders
* scoped at the class level, or to explicitly call this method
* directly in the same scope as the IndexReader.
* </p>
*
* @see org.apache.lucene.util.FieldCacheSanityChecker
*/
protected void assertSaneFieldCaches(final String msg) {
final CacheEntry[] entries = FieldCache.DEFAULT.getCacheEntries();
Insanity[] insanity = null;
try {
try {
insanity = FieldCacheSanityChecker.checkSanity(entries);
} catch (RuntimeException e) {
// the sanity checker itself blew up: dump the raw cache entries for diagnosis
dumpArray(msg + ": FieldCache", entries, System.err);
throw e;
}
assertEquals(msg + ": Insane FieldCache usage(s) found",
0, insanity.length);
// success: null it out so the finally block stays quiet
insanity = null;
} finally {
// report this in the event of any exception/failure
// if no failure, then insanity will be null anyway
if (null != insanity) {
dumpArray(msg + ": Insane FieldCache usage(s)", insanity, System.err);
}
}
}
/**
 * Returns a number of at least <code>i</code>.
 * <p>
 * The actual number returned will be influenced by whether {@link #TEST_NIGHTLY}
 * is active and {@link #RANDOM_MULTIPLIER}, but also with some random fudge.
 */
public static int atLeast(Random random, int i) {
  final int base = TEST_NIGHTLY ? 5 * i : i;
  final int floor = base * RANDOM_MULTIPLIER;
  // allow up to 50% random fudge on top of the scaled minimum
  final int ceiling = floor + floor / 2;
  return _TestUtil.nextInt(random, floor, ceiling);
}
/** Same as {@link #atLeast(Random, int)} using the class-level random. */
public static int atLeast(int i) {
return atLeast(random, i);
}
/**
 * Returns true if something should happen rarely.
 * <p>
 * The actual probability is influenced by whether {@link #TEST_NIGHTLY}
 * is active and {@link #RANDOM_MULTIPLIER}.
 */
public static boolean rarely(Random random) {
  int chance = TEST_NIGHTLY ? 25 : 5;
  // scale the chance up logarithmically with the multiplier; the explicit cast
  // reproduces the truncation the original += compound assignment performed
  chance = (int) (chance + chance * Math.log(RANDOM_MULTIPLIER));
  final int threshold = 100 - Math.min(chance, 90); // chance is capped, never more than 90
  return random.nextInt(100) >= threshold;
}
/** Same as {@link #rarely(Random)} using the class-level random. */
public static boolean rarely() {
return rarely(random);
}
/** Returns true usually: the complement of {@link #rarely(Random)}. */
public static boolean usually(Random random) {
return !rarely(random);
}
/** Same as {@link #usually(Random)} using the class-level random. */
public static boolean usually() {
return usually(random);
}
// @deprecated (4.0) These deprecated methods should be removed soon, when all tests using no Epsilon are fixed:
// They deliberately compare via boxed equals (exact bitwise equality), not with an epsilon.
@Deprecated
static public void assertEquals(double expected, double actual) {
assertEquals(null, expected, actual);
}
@Deprecated
static public void assertEquals(String message, double expected, double actual) {
assertEquals(message, Double.valueOf(expected), Double.valueOf(actual));
}
@Deprecated
static public void assertEquals(float expected, float actual) {
assertEquals(null, expected, actual);
}
@Deprecated
static public void assertEquals(String message, float expected, float actual) {
assertEquals(message, Float.valueOf(expected), Float.valueOf(actual));
}
/**
 * Replacement for the jUnit Assume mechanism, so we can attach a message
 * explaining why a test was skipped. The cause (if any) is appended to the
 * message and chained into the printed stack trace.
 */
private static final class TestIgnoredException extends RuntimeException {
  TestIgnoredException(String msg) {
    super(msg);
  }
  TestIgnoredException(String msg, Throwable t) {
    super(msg, t);
  }
  @Override
  public String getMessage() {
    final StringBuilder message = new StringBuilder(super.getMessage());
    final Throwable cause = getCause();
    if (cause != null) {
      message.append(" - ").append(cause);
    }
    return message.toString();
  }
  // only this one is called by our code, exception is not used outside this class:
  @Override
  public void printStackTrace(PrintStream s) {
    final Throwable cause = getCause();
    if (cause == null) {
      super.printStackTrace(s);
    } else {
      s.println(super.toString() + " - Caused by:");
      cause.printStackTrace(s);
    }
  }
}
/** Like Assume.assumeTrue, but with a message explaining why the test is skipped. */
public static void assumeTrue(String msg, boolean b) {
Assume.assumeNoException(b ? null : new TestIgnoredException(msg));
}
/** Like Assume.assumeFalse, but with a message explaining why the test is skipped. */
public static void assumeFalse(String msg, boolean b) {
assumeTrue(msg, !b);
}
/** Skips the test if e is non-null, attaching msg and e as the explanation. */
public static void assumeNoException(String msg, Exception e) {
Assume.assumeNoException(e == null ? null : new TestIgnoredException(msg, e));
}
/** Convenience: returns a mutable HashSet containing the given elements. */
public static <T> Set<T> asSet(T... args) {
  final Set<T> set = new HashSet<T>();
  set.addAll(Arrays.asList(args));
  return set;
}
/**
 * Convenience method for logging an iterator.
 *
 * @param label String logged before/after the items in the iterator
 * @param iter Each next() is toString()ed and logged on its own line. If iter is null this is logged differently than an empty iterator.
 * @param stream Stream to log messages to.
 */
public static void dumpIterator(String label, Iterator<?> iter,
                                PrintStream stream) {
  stream.println("*** BEGIN " + label + " ***");
  if (iter == null) {
    stream.println(" ... NULL ...");
  } else {
    while (iter.hasNext()) {
      stream.println(iter.next().toString());
    }
  }
  stream.println("*** END " + label + " ***");
}
/**
 * Convenience method for logging an array. Wraps the array in an iterator and delegates.
 *
 * @see #dumpIterator(String,Iterator,PrintStream)
 */
public static void dumpArray(String label, Object[] objs,
                             PrintStream stream) {
  final Iterator<?> iter;
  if (objs == null) {
    iter = null; // dumpIterator logs null specially
  } else {
    iter = Arrays.asList(objs).iterator();
  }
  dumpIterator(label, iter, stream);
}
/** create a new index writer config with random defaults */
public static IndexWriterConfig newIndexWriterConfig(Version v, Analyzer a) {
// delegate to the seeded class-level random
return newIndexWriterConfig(random, v, a);
}
/**
 * Creates a new IndexWriterConfig with randomized defaults (merge scheduler,
 * buffered docs, term index interval, indexer thread pool, merge policy,
 * reader pooling), all derived from the passed-in random so that runs are
 * reproducible per seed.
 */
public static IndexWriterConfig newIndexWriterConfig(Random r, Version v, Analyzer a) {
IndexWriterConfig c = new IndexWriterConfig(v, a);
if (r.nextBoolean()) {
c.setMergeScheduler(new SerialMergeScheduler());
}
if (r.nextBoolean()) {
if (rarely(r)) {
// crazy value
c.setMaxBufferedDocs(_TestUtil.nextInt(r, 2, 7));
} else {
// reasonable value
c.setMaxBufferedDocs(_TestUtil.nextInt(r, 8, 1000));
}
}
if (r.nextBoolean()) {
if (rarely(r)) {
// crazy value
// bugfix: this previously drew from the static class-level random instead of
// the caller-supplied r, breaking reproducibility for callers with their own Random
c.setTermIndexInterval(r.nextBoolean() ? _TestUtil.nextInt(r, 1, 31) : _TestUtil.nextInt(r, 129, 1000));
} else {
// reasonable value
c.setTermIndexInterval(_TestUtil.nextInt(r, 32, 128));
}
}
if (r.nextBoolean()) {
c.setIndexerThreadPool(new ThreadAffinityDocumentsWriterThreadPool(_TestUtil.nextInt(r, 1, 20)));
}
if (r.nextBoolean()) {
c.setMergePolicy(newTieredMergePolicy());
} else if (r.nextBoolean()) {
c.setMergePolicy(newLogMergePolicy());
} else {
c.setMergePolicy(new MockRandomMergePolicy(r));
}
c.setReaderPooling(r.nextBoolean());
c.setReaderTermsIndexDivisor(_TestUtil.nextInt(r, 1, 4));
return c;
}
/** Creates a randomized LogMergePolicy using the class-level random. */
public static LogMergePolicy newLogMergePolicy() {
return newLogMergePolicy(random);
}
/** Creates a randomized TieredMergePolicy using the class-level random. */
public static TieredMergePolicy newTieredMergePolicy() {
return newTieredMergePolicy(random);
}
/** Creates a LogMergePolicy (doc-count- or byte-size-based, at random) with randomized settings. */
public static LogMergePolicy newLogMergePolicy(Random r) {
  final LogMergePolicy policy;
  if (r.nextBoolean()) {
    policy = new LogDocMergePolicy();
  } else {
    policy = new LogByteSizeMergePolicy();
  }
  policy.setUseCompoundFile(r.nextBoolean());
  policy.setCalibrateSizeByDeletes(r.nextBoolean());
  // usually a sane merge factor, occasionally a tiny crazy one
  final int mergeFactor = rarely(r) ? _TestUtil.nextInt(r, 2, 4) : _TestUtil.nextInt(r, 5, 50);
  policy.setMergeFactor(mergeFactor);
  return policy;
}
/** Creates a TieredMergePolicy with randomized settings. */
public static TieredMergePolicy newTieredMergePolicy(Random r) {
TieredMergePolicy tmp = new TieredMergePolicy();
if (rarely(r)) {
// occasionally use crazy-small merge widths
tmp.setMaxMergeAtOnce(_TestUtil.nextInt(r, 2, 4));
tmp.setMaxMergeAtOnceExplicit(_TestUtil.nextInt(r, 2, 4));
} else {
// reasonable merge widths
tmp.setMaxMergeAtOnce(_TestUtil.nextInt(r, 5, 50));
tmp.setMaxMergeAtOnceExplicit(_TestUtil.nextInt(r, 5, 50));
}
// randomize the size thresholds and compound-file behavior
tmp.setMaxMergedSegmentMB(0.2 + r.nextDouble() * 2.0);
tmp.setFloorSegmentMB(0.2 + r.nextDouble() * 2.0);
tmp.setExpungeDeletesPctAllowed(0.0 + r.nextDouble() * 30.0);
tmp.setSegmentsPerTier(_TestUtil.nextInt(r, 2, 20));
tmp.setUseCompoundFile(r.nextBoolean());
tmp.setNoCFSRatio(0.1 + r.nextDouble()*0.8);
return tmp;
}
/** Randomized LogMergePolicy with the compound-file setting pinned to useCFS. */
public static LogMergePolicy newLogMergePolicy(boolean useCFS) {
LogMergePolicy logmp = newLogMergePolicy();
logmp.setUseCompoundFile(useCFS);
return logmp;
}
/** Randomized LogMergePolicy with compound-file setting and merge factor pinned. */
public static LogMergePolicy newLogMergePolicy(boolean useCFS, int mergeFactor) {
LogMergePolicy logmp = newLogMergePolicy();
logmp.setUseCompoundFile(useCFS);
logmp.setMergeFactor(mergeFactor);
return logmp;
}
/** Randomized LogMergePolicy with the merge factor pinned. */
public static LogMergePolicy newLogMergePolicy(int mergeFactor) {
LogMergePolicy logmp = newLogMergePolicy();
logmp.setMergeFactor(mergeFactor);
return logmp;
}
/**
* Returns a new Directory instance. Use this when the test does not
* care about the specific Directory implementation (most tests).
* <p>
* The Directory is wrapped with {@link MockDirectoryWrapper}.
* By default this means it will be picky, such as ensuring that you
* properly close it and all open files in your test. It will emulate
* some features of Windows, such as not allowing open files to be
* overwritten.
*/
public static MockDirectoryWrapper newDirectory() throws IOException {
// delegate to the seeded class-level random
return newDirectory(random);
}
/**
* Returns a new Directory instance, using the specified random.
* See {@link #newDirectory()} for more information.
*/
public static MockDirectoryWrapper newDirectory(Random r) throws IOException {
Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
// record the allocation stack so afterClass can report who leaked an unclosed dir
stores.put(dir, Thread.currentThread().getStackTrace());
dir.setThrottling(TEST_THROTTLING);
return dir;
}
/**
* Returns a new Directory instance, with contents copied from the
* provided directory. See {@link #newDirectory()} for more
* information.
*/
public static MockDirectoryWrapper newDirectory(Directory d) throws IOException {
// delegate to the seeded class-level random
return newDirectory(random, d);
}
/** Returns a new FSDirectory instance over the given file, which must be a folder. */
public static MockDirectoryWrapper newFSDirectory(File f) throws IOException {
// null lock factory means "keep the directory's default"
return newFSDirectory(f, null);
}
/** Returns a new FSDirectory instance over the given file, which must be a folder. */
public static MockDirectoryWrapper newFSDirectory(File f, LockFactory lf) throws IOException {
// resolve the FSDirectory implementation class from the tests.directory property
String fsdirClass = TEST_DIRECTORY;
if (fsdirClass.equals("random")) {
fsdirClass = FS_DIRECTORIES[random.nextInt(FS_DIRECTORIES.length)];
}
if (fsdirClass.indexOf(".") == -1) {// if not fully qualified, assume .store
fsdirClass = "org.apache.lucene.store." + fsdirClass;
}
Class<? extends FSDirectory> clazz;
try {
try {
clazz = Class.forName(fsdirClass).asSubclass(FSDirectory.class);
} catch (ClassCastException e) {
// TEST_DIRECTORY is not a sub-class of FSDirectory, so draw one at random
fsdirClass = FS_DIRECTORIES[random.nextInt(FS_DIRECTORIES.length)];
if (fsdirClass.indexOf(".") == -1) {// if not fully qualified, assume .store
fsdirClass = "org.apache.lucene.store." + fsdirClass;
}
clazz = Class.forName(fsdirClass).asSubclass(FSDirectory.class);
}
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, newFSDirectoryImpl(clazz, f));
if (lf != null) {
dir.setLockFactory(lf);
}
// record the allocation stack so afterClass can report who leaked an unclosed dir
stores.put(dir, Thread.currentThread().getStackTrace());
dir.setThrottling(TEST_THROTTLING);
return dir;
} catch (Exception e) {
// reflection failures (missing class, bad ctor) are test-setup errors: fail loudly
throw new RuntimeException(e);
}
}
/**
 * Returns a new Directory instance, using the specified random
 * with contents copied from the provided directory. See
 * {@link #newDirectory()} for more information.
 */
public static MockDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
  Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
  // clone every file of the source directory into the fresh implementation
  for (String file : d.listAll()) {
    d.copy(impl, file, file);
  }
  MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
  // record the creation stack trace so leaked (unclosed) directories can be attributed
  stores.put(dir, Thread.currentThread().getStackTrace());
  dir.setThrottling(TEST_THROTTLING);
  return dir;
}
/** Returns a new field instance; the class-level random is used and storage defaults to Store.NO.
 * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
public static Field newField(String name, String value, Index index) {
  return newField(random, name, value, index);
}

/** Returns a new field instance; the class-level random is used and term vectors default to TermVector.NO.
 * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
public static Field newField(String name, String value, Store store, Index index) {
  return newField(random, name, value, store, index);
}

/**
 * Returns a new Field instance. Use this when the test does not
 * care about some specific field settings (most tests)
 * <ul>
 * <li>If the store value is set to Store.NO, sometimes the field will be randomly stored.
 * <li>More term vector data than you ask for might be indexed, for example if you choose YES
 * it might index term vectors with offsets too.
 * </ul>
 */
public static Field newField(String name, String value, Store store, Index index, TermVector tv) {
  return newField(random, name, value, store, index, tv);
}

/** Returns a new field instance, using the specified random; storage defaults to Store.NO.
 * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
public static Field newField(Random random, String name, String value, Index index) {
  return newField(random, name, value, Store.NO, index);
}

/** Returns a new field instance, using the specified random; term vectors default to TermVector.NO.
 * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
public static Field newField(Random random, String name, String value, Store store, Index index) {
  return newField(random, name, value, store, index, TermVector.NO);
}
/**
 * Returns a new field instance, using the specified random. Most of the time the
 * requested settings are used unchanged; occasionally an unstored indexed field is
 * randomly stored, and the term vector setting is upgraded to one retaining at
 * least the requested information.
 * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information
 */
public static Field newField(Random random, String name, String value, Store store, Index index, TermVector tv) {
  // most of the time, or whenever the field is not indexed, keep the params as given
  if (usually(random) || !index.isIndexed()) {
    return new Field(name, value, store, index, tv);
  }
  if (!store.isStored() && random.nextBoolean()) {
    store = Store.YES; // randomly store it
  }
  return new Field(name, value, store, index, randomTVSetting(random, tv));
}
// all TermVector settings, ordered from least to most information retained
static final TermVector tvSettings[] = {
  TermVector.NO, TermVector.YES, TermVector.WITH_OFFSETS,
  TermVector.WITH_POSITIONS, TermVector.WITH_POSITIONS_OFFSETS
};

/**
 * Picks a TermVector setting that retains at least the information requested by
 * {@code minimum} — this is why newField may index more term vector data than
 * was asked for.
 */
private static TermVector randomTVSetting(Random random, TermVector minimum) {
  switch(minimum) {
    // NO/YES: any entry at or above the minimum's index in tvSettings qualifies
    case NO: return tvSettings[_TestUtil.nextInt(random, 0, tvSettings.length-1)];
    case YES: return tvSettings[_TestUtil.nextInt(random, 1, tvSettings.length-1)];
    // offsets/positions: either the exact setting or the one combining both
    case WITH_OFFSETS: return random.nextBoolean() ? TermVector.WITH_OFFSETS
        : TermVector.WITH_POSITIONS_OFFSETS;
    case WITH_POSITIONS: return random.nextBoolean() ? TermVector.WITH_POSITIONS
        : TermVector.WITH_POSITIONS_OFFSETS;
    default: return TermVector.WITH_POSITIONS_OFFSETS;
  }
}
/** Picks a Locale at random from the locales installed on this JVM. */
public static Locale randomLocale(Random random) {
  final Locale[] available = Locale.getAvailableLocales();
  final int choice = random.nextInt(available.length);
  return available[choice];
}
/** Picks a TimeZone at random from the zone IDs known to this JVM. */
public static TimeZone randomTimeZone(Random random) {
  final String[] ids = TimeZone.getAvailableIDs();
  final String chosen = ids[random.nextInt(ids.length)];
  return TimeZone.getTimeZone(chosen);
}
/**
 * Builds a Locale from its programmatic name, e.g. "en", "en_US" or "no_NO_NY".
 *
 * @throws IllegalArgumentException if the name does not split into 1–3 parts
 */
public static Locale localeForName(String localeName) {
  final String[] parts = localeName.split("\\_");
  if (parts.length == 1) {
    return new Locale(parts[0]);
  } else if (parts.length == 2) {
    return new Locale(parts[0], parts[1]);
  } else if (parts.length == 3) {
    return new Locale(parts[0], parts[1], parts[2]);
  }
  throw new IllegalArgumentException("Invalid Locale: " + localeName);
}
// FSDirectory implementations from which one may be drawn at random
private static final String FS_DIRECTORIES[] = {
  "SimpleFSDirectory",
  "NIOFSDirectory",
  "MMapDirectory"
};

// core Directory implementations: RAMDirectory plus every FS_DIRECTORIES entry
private static final String CORE_DIRECTORIES[] = {
  "RAMDirectory",
  FS_DIRECTORIES[0], FS_DIRECTORIES[1], FS_DIRECTORIES[2]
};
/**
 * Picks the name of a Directory implementation to test against: usually
 * "RAMDirectory", but rarely one of {@link #CORE_DIRECTORIES} at random.
 */
public static String randomDirectory(Random random) {
  if (!rarely(random)) {
    return "RAMDirectory";
  }
  return CORE_DIRECTORIES[random.nextInt(CORE_DIRECTORIES.length)];
}
/**
 * Instantiates the given FSDirectory class over {@code file}, falling back to
 * {@link FSDirectory#open} if reflective construction fails.
 */
private static Directory newFSDirectoryImpl(
    Class<? extends FSDirectory> clazz, File file)
    throws IOException {
  FSDirectory d = null;
  try {
    // Assuming every FSDirectory has a ctor(File), but not all may take a
    // LockFactory too, so setting it afterwards.
    Constructor<? extends FSDirectory> ctor = clazz.getConstructor(File.class);
    d = ctor.newInstance(file);
  } catch (Exception e) {
    // reflection failed; let FSDirectory choose a suitable default implementation
    d = FSDirectory.open(file);
  }
  return d;
}
/** Registers a temp file that will be deleted when tests are done. */
public static void registerTempFile(File tmpFile) {
  // keep the registration stack trace so undeleted files can be attributed to their creator
  tempDirs.put(tmpFile.getAbsoluteFile(), Thread.currentThread().getStackTrace());
}
/**
 * Resolves {@code clazzName} ("random" draws one via randomDirectory, bare names
 * are assumed to live in org.apache.lucene.store) and instantiates it.
 * FSDirectory subclasses are created over a freshly registered temp folder;
 * everything else is created through its no-arg constructor.
 */
static Directory newDirectoryImpl(Random random, String clazzName) {
  if (clazzName.equals("random"))
    clazzName = randomDirectory(random);
  if (clazzName.indexOf(".") == -1) // if not fully qualified, assume .store
    clazzName = "org.apache.lucene.store." + clazzName;
  try {
    final Class<? extends Directory> clazz = Class.forName(clazzName).asSubclass(Directory.class);
    // If it is a FSDirectory type, try its ctor(File)
    if (FSDirectory.class.isAssignableFrom(clazz)) {
      final File tmpFile = _TestUtil.createTempFile("test", "tmp", TEMP_DIR);
      // replace the empty temp file with a folder of the same name
      tmpFile.delete();
      tmpFile.mkdir();
      registerTempFile(tmpFile);
      return newFSDirectoryImpl(clazz.asSubclass(FSDirectory.class), tmpFile);
    }
    // try empty ctor
    return clazz.newInstance();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/** create a new searcher over the reader.
 * This searcher might randomly use threads. */
public static IndexSearcher newSearcher(IndexReader r) throws IOException {
  return newSearcher(r, true);
}

/** create a new searcher over the reader.
 * This searcher might randomly use threads.
 * if <code>maybeWrap</code> is true, this searcher might wrap the reader
 * with one that returns null for getSequentialSubReaders.
 */
public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) throws IOException {
  if (random.nextBoolean()) {
    // single-threaded searcher, occasionally over a wrapped "slow" reader
    if (maybeWrap && rarely()) {
      return new IndexSearcher(new SlowMultiReaderWrapper(r));
    } else {
      return new IndexSearcher(r);
    }
  } else {
    int threads = 0;
    // half the time use no executor, otherwise a small fixed-size pool (1..8 threads)
    final ExecutorService ex = (random.nextBoolean()) ? null
        : Executors.newFixedThreadPool(threads = _TestUtil.nextInt(random, 1, 8),
            new NamedThreadFactory("LuceneTestCase"));
    if (ex != null && VERBOSE) {
      System.out.println("NOTE: newSearcher using ExecutorService with " + threads + " threads");
    }
    return new IndexSearcher(r.getTopReaderContext(), ex) {
      @Override
      public void close() throws IOException {
        super.close();
        // the searcher owns its executor, so shut it down when the searcher is closed
        if (ex != null) {
          ex.shutdown();
          try {
            ex.awaitTermination(1000, TimeUnit.MILLISECONDS);
          } catch (InterruptedException e) {
            e.printStackTrace();
          }
        }
      }
    };
  }
}
/** Returns this test's name; {@code "<unknown>"} until it has been assigned. */
public String getName() {
  return this.name;
}
/** Gets a resource from the classpath as {@link File}. This method should only be used,
 * if a real file is needed. To get a stream, code should prefer
 * {@link Class#getResourceAsStream} using {@code this.getClass()}.
 *
 * @throws IOException if the resource does not exist or cannot be mapped to a file
 */
protected File getDataFile(String name) throws IOException {
  try {
    return new File(this.getClass().getResource(name).toURI());
  } catch (Exception e) {
    // preserve the underlying failure (NPE for a missing resource,
    // URISyntaxException, ...) instead of silently discarding it
    IOException ioe = new IOException("Cannot find resource: " + name);
    ioe.initCause(e);
    throw ioe;
  }
}
// We get here from InterceptTestCaseEvents on the 'failed' event....
/** Prints the full "ant test ..." command line needed to reproduce this failure. */
public void reportAdditionalFailureInfo() {
  System.err.println("NOTE: reproduce with: ant test -Dtestcase=" + getClass().getSimpleName()
      + " -Dtestmethod=" + getName() + " -Dtests.seed=" + new TwoLongs(staticSeed, seed)
      + reproduceWithExtraParams());
}

// extra params that were overridden needed to reproduce the command
private String reproduceWithExtraParams() {
  StringBuilder sb = new StringBuilder();
  // only emit settings that differ from their defaults
  if (!TEST_CODEC.equals("randomPerField")) sb.append(" -Dtests.codec=").append(TEST_CODEC);
  if (!TEST_LOCALE.equals("random")) sb.append(" -Dtests.locale=").append(TEST_LOCALE);
  if (!TEST_TIMEZONE.equals("random")) sb.append(" -Dtests.timezone=").append(TEST_TIMEZONE);
  if (!TEST_DIRECTORY.equals("random")) sb.append(" -Dtests.directory=").append(TEST_DIRECTORY);
  if (RANDOM_MULTIPLIER > 1) sb.append(" -Dtests.multiplier=").append(RANDOM_MULTIPLIER);
  if (TEST_NIGHTLY) sb.append(" -Dtests.nightly=true");
  return sb.toString();
}
// recorded seed: for beforeClass
private static long staticSeed;
// seed for individual test methods, changed in @before
private long seed;
// NOTE(review): assumed to supply fresh per-run seeds; its use is outside this excerpt
private static final Random seedRand = new Random();
// shared randomness for tests; fixed seed (0) so the sequence is reproducible
protected static final Random random = new Random(0);
// current test name; "<unknown>" until assigned
private String name = "<unknown>";

/**
 * Annotation for tests that should only be run during nightly builds.
 */
@Documented
@Inherited
@Retention(RetentionPolicy.RUNTIME)
public @interface Nightly {}
/** optionally filters the tests to be run by TEST_METHOD */
public static class LuceneTestCaseRunner extends BlockJUnit4ClassRunner {
  // computed once and cached; holds the methods that will actually run
  private List<FrameworkMethod> testMethods;

  /**
   * Collects runnable test methods: anything annotated with @Test, plus
   * zero-arg, void, non-abstract public methods whose name starts with "test".
   * Nightly-only classes/methods are dropped unless TEST_NIGHTLY is set.
   */
  @Override
  protected List<FrameworkMethod> computeTestMethods() {
    if (testMethods != null)
      return testMethods;
    testClassesRun.add(getTestClass().getJavaClass().getSimpleName());
    testMethods = new ArrayList<FrameworkMethod>();
    for (Method m : getTestClass().getJavaClass().getMethods()) {
      // check if the current test's class has methods annotated with @Ignore
      final Ignore ignored = m.getAnnotation(Ignore.class);
      if (ignored != null && !m.getName().equals("alwaysIgnoredTestMethod")) {
        System.err.println("NOTE: Ignoring test method '" + m.getName() + "': " + ignored.value());
      }
      // add methods starting with "test"
      final int mod = m.getModifiers();
      if (m.getAnnotation(Test.class) != null ||
          (m.getName().startsWith("test") &&
              !Modifier.isAbstract(mod) &&
              m.getParameterTypes().length == 0 &&
              m.getReturnType() == Void.TYPE))
      {
        if (Modifier.isStatic(mod))
          throw new RuntimeException("Test methods must not be static.");
        testMethods.add(new FrameworkMethod(m));
      }
    }
    if (testMethods.isEmpty()) {
      throw new RuntimeException("No runnable methods!");
    }
    if (TEST_NIGHTLY == false) {
      if (getTestClass().getJavaClass().isAnnotationPresent(Nightly.class)) {
        /* the test class is annotated with nightly, remove all methods */
        String className = getTestClass().getJavaClass().getSimpleName();
        System.err.println("NOTE: Ignoring nightly-only test class '" + className + "'");
        testMethods.clear();
      } else {
        /* remove all nightly-only methods */
        for (int i = 0; i < testMethods.size(); i++) {
          final FrameworkMethod m = testMethods.get(i);
          if (m.getAnnotation(Nightly.class) != null) {
            System.err.println("NOTE: Ignoring nightly-only test method '" + m.getName() + "'");
            // removal shifts elements left, so step the index back one
            testMethods.remove(i--);
          }
        }
      }
      /* dodge a possible "no-runnable methods" exception by adding a fake ignored test */
      if (testMethods.isEmpty()) {
        try {
          testMethods.add(new FrameworkMethod(LuceneTestCase.class.getMethod("alwaysIgnoredTestMethod")));
        } catch (Exception e) { throw new RuntimeException(e); }
      }
    }
    return testMethods;
  }

  /**
   * Runs a single test method TEST_ITER times, breaking out after a failure
   * once at least TEST_ITER_MIN iterations have been run.
   */
  @Override
  protected void runChild(FrameworkMethod arg0, RunNotifier arg1) {
    if (VERBOSE) {
      System.out.println("\nNOTE: running test " + arg0.getName());
    }
    // only print iteration info if the user requested more than one iterations
    final boolean verbose = VERBOSE && TEST_ITER > 1;
    // single-element array lets the listener below observe the loop variable
    final int currentIter[] = new int[1];
    arg1.addListener(new RunListener() {
      @Override
      public void testFailure(Failure failure) throws Exception {
        if (verbose) {
          System.out.println("\nNOTE: iteration " + currentIter[0] + " failed! ");
        }
      }
    });
    for (int i = 0; i < TEST_ITER; i++) {
      currentIter[0] = i;
      if (verbose) {
        System.out.println("\nNOTE: running iter=" + (1+i) + " of " + TEST_ITER);
      }
      super.runChild(arg0, arg1);
      if (testsFailed) {
        if (i >= TEST_ITER_MIN - 1) { // XXX is this still off-by-one?
          break;
        }
      }
    }
  }

  /** Restricts the run to TEST_METHOD when that setting is present. */
  public LuceneTestCaseRunner(Class<?> clazz) throws InitializationError {
    super(clazz);
    Filter f = new Filter() {
      @Override
      public String describe() { return "filters according to TEST_METHOD"; }
      @Override
      public boolean shouldRun(Description d) {
        return TEST_METHOD == null || d.getMethodName().equals(TEST_METHOD);
      }
    };
    try {
      f.apply(this);
    } catch (NoTestsRemainException e) {
      throw new RuntimeException(e);
    }
  }
}
/** Assigns a random — but stable within a run — codec to every field name. */
private static class RandomCodecProvider extends CodecProvider {
  // codecs eligible for per-field assignment (PreFlex is excluded in register)
  private List<Codec> knownCodecs = new ArrayList<Codec>();
  // field name -> chosen codec, so a field keeps its codec across lookups
  private Map<String,Codec> previousMappings = new HashMap<String,Codec>();
  // per-run salt mixed into the field-name hash in getFieldCodec
  private final int perFieldSeed;

  RandomCodecProvider(Random random) {
    this.perFieldSeed = random.nextInt();
    register(new StandardCodec());
    register(new PreFlexCodec());
    register(new PulsingCodec(1));
    register(new SimpleTextCodec());
    Collections.shuffle(knownCodecs, random);
  }

  @Override
  public synchronized void register(Codec codec) {
    // PreFlex is registered with the provider but never handed out per-field
    if (!codec.name.equals("PreFlex"))
      knownCodecs.add(codec);
    super.register(codec);
  }

  @Override
  public synchronized void unregister(Codec codec) {
    knownCodecs.remove(codec);
    super.unregister(codec);
  }

  /** Returns the codec name for this field, choosing and caching one on first use. */
  @Override
  public synchronized String getFieldCodec(String name) {
    Codec codec = previousMappings.get(name);
    if (codec == null) {
      codec = knownCodecs.get(Math.abs(perFieldSeed ^ name.hashCode()) % knownCodecs.size());
      if (codec instanceof SimpleTextCodec && perFieldSeed % 5 != 0) {
        // make simpletext rarer, choose again
        codec = knownCodecs.get(Math.abs(perFieldSeed ^ name.toUpperCase(Locale.ENGLISH).hashCode()) % knownCodecs.size());
      }
      previousMappings.put(name, codec);
    }
    return codec.name;
  }

  @Override
  public synchronized boolean hasFieldCodec(String name) {
    return true; // we have a codec for every field
  }

  @Override
  public synchronized String toString() {
    return "RandomCodecProvider: " + previousMappings.toString();
  }
}
// placeholder so that classes whose real tests were all filtered out still
// expose one (ignored) runnable method; see LuceneTestCaseRunner.computeTestMethods
@Ignore("just a hack")
public final void alwaysIgnoredTestMethod() {}
}
| fix javadocs
git-svn-id: 308d55f399f3bd9aa0560a10e81a003040006c48@1133631 13f79535-47bb-0310-9956-ffa450edef68
| lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java | fix javadocs |
|
Java | apache-2.0 | 4a7fb40d434afadaae7e47fb7f83acad839263fa | 0 | EBIvariation/variation-commons,EBIvariation/variation-commons | /*
* European Variation Archive (EVA) - Open-access database of all types of genetic
* variation data from all species
*
* Copyright 2017 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.eva.commons.mongodb.filter;
import uk.ac.ebi.eva.commons.core.models.VariantType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Fluent builder assembling the list of {@link VariantRepositoryFilter}s used
 * when querying through the VariantRepository. Each {@code with*} method
 * appends a filter only when its argument is meaningful (non-null, and for
 * strings/collections also non-empty), so absent criteria are simply skipped.
 */
public class FilterBuilder {

    // filters accumulated by the with* methods, in call order; returned by build()
    private List<VariantRepositoryFilter> filters = new ArrayList<>();

    /** Convenience entry point building the standard variant-query filters. */
    public List<VariantRepositoryFilter> getVariantEntityRepositoryFilters(String maf,
                                                                           String polyphenScore,
                                                                           String siftScore,
                                                                           List<String> studies,
                                                                           List<String> consequenceType) {
        return this.withMaf(maf)
                .withPolyphenScore(polyphenScore)
                .withSiftScore(siftScore)
                .withStudies(studies)
                .withConsequenceType(consequenceType)
                .build();
    }

    /** Convenience entry point building the filters for a Beacon query. */
    public List<VariantRepositoryFilter> getBeaconFilters(Long start,
                                                          Long startMin,
                                                          Long startMax,
                                                          Long end,
                                                          Long endMin,
                                                          Long endMax,
                                                          String referenceBases,
                                                          String alternateBases,
                                                          VariantType variantType,
                                                          List<String> studies){
        return this.withStart(start)
                .withStartMin(startMin)
                .withStartMax(startMax)
                .withEnd(end)
                .withEndMin(endMin)
                .withEndMax(endMax)
                .withReferenceBases(referenceBases)
                .withAlternates(alternateBases)
                .withVariantTypes(variantType)
                .withStudies(studies).build();
    }

    /** Returns the filters accumulated so far. */
    public List<VariantRepositoryFilter> build() {
        return filters;
    }

    /** Adds a minor allele frequency filter when {@code maf} is non-null and non-empty. */
    public FilterBuilder withMaf(String maf) {
        if (maf != null && !maf.isEmpty()) {
            filters.add(new VariantRepositoryMafFilter(maf));
        }
        return this;
    }

    /** Adds a PolyPhen score filter when {@code polyphenScore} is non-null and non-empty. */
    public FilterBuilder withPolyphenScore(String polyphenScore) {
        if (polyphenScore != null && !polyphenScore.isEmpty()) {
            filters.add(new VariantRepositoryPolyphenFilter(polyphenScore));
        }
        return this;
    }

    /** Adds a SIFT score filter when {@code siftScore} is non-null and non-empty. */
    public FilterBuilder withSiftScore(String siftScore) {
        if (siftScore != null && !siftScore.isEmpty()) {
            filters.add(new VariantRepositorySiftFilter(siftScore));
        }
        return this;
    }

    /** Adds a study filter when {@code studies} is non-null and non-empty. */
    public FilterBuilder withStudies(List<String> studies) {
        if (studies != null && !studies.isEmpty()) {
            filters.add(new VariantRepositoryStudyFilter(studies));
        }
        return this;
    }

    /** Adds a consequence-type filter when {@code consequenceType} is non-null and non-empty. */
    public FilterBuilder withConsequenceType(List<String> consequenceType) {
        if (consequenceType != null && !consequenceType.isEmpty()) {
            filters.add(new VariantRepositoryConsequenceTypeFilter(consequenceType));
        }
        return this;
    }

    /** Adds a file filter when {@code files} is non-null and non-empty. */
    public FilterBuilder withFiles(List<String> files) {
        if (files != null && !files.isEmpty()) {
            filters.add(new VariantRepositoryFileFilter(files));
        }
        return this;
    }

    /** Adds a variant-type filter when {@code types} is non-null and non-empty. */
    public FilterBuilder withVariantTypes(List<VariantType> types) {
        if (types != null && !types.isEmpty()) {
            filters.add(new VariantRepositoryTypeFilter(types));
        }
        return this;
    }

    /** Adds a single-type variant filter when {@code variantType} is non-null. */
    public FilterBuilder withVariantTypes(VariantType variantType) {
        if (variantType != null) {
            filters.add(new VariantRepositoryTypeFilter(new ArrayList<>(Arrays.asList(variantType))));
        }
        return this;
    }

    /** Adds an alternate-allele filter when {@code alternates} is non-null and non-empty. */
    public FilterBuilder withAlternates(List<String> alternates) {
        if (alternates != null && !alternates.isEmpty()) {
            filters.add(new VariantRepositoryAlternateFilter(alternates));
        }
        return this;
    }

    /** Adds a single alternate-allele filter when {@code alternate} is non-null. */
    public FilterBuilder withAlternates(String alternate) {
        if (alternate != null) {
            filters.add(new VariantRepositoryAlternateFilter(new ArrayList<>(Arrays.asList(alternate))));
        }
        return this;
    }

    /** Adds a reference-bases filter when {@code referenceBases} is non-null. */
    public FilterBuilder withReferenceBases(String referenceBases) {
        if (referenceBases != null) {
            filters.add(new VariantRepositoryReferenceBasesFilter(new ArrayList<>(Arrays.asList(referenceBases))));
        }
        return this;
    }

    /** Adds an exact start-position filter when {@code start} is non-null. */
    public FilterBuilder withStart(Long start) {
        if (start != null) {
            filters.add(new VariantRepositoryStartFilter(start, RelationalOperator.EQ));
        }
        return this;
    }

    /** Adds a greater-or-equal start-position filter when {@code startMin} is non-null. */
    public FilterBuilder withStartMin(Long startMin) {
        if (startMin != null) {
            filters.add(new VariantRepositoryStartFilter(startMin, RelationalOperator.GTE));
        }
        return this;
    }

    /** Adds a less-or-equal start-position filter when {@code startMax} is non-null. */
    public FilterBuilder withStartMax(Long startMax) {
        if (startMax != null) {
            filters.add(new VariantRepositoryStartFilter(startMax, RelationalOperator.LTE));
        }
        return this;
    }

    /** Adds an exact end-position filter when {@code end} is non-null. */
    public FilterBuilder withEnd(Long end) {
        if (end != null) {
            filters.add(new VariantRepositoryEndFilter(end, RelationalOperator.EQ));
        }
        return this;
    }

    /** Adds a greater-or-equal end-position filter when {@code endMin} is non-null. */
    public FilterBuilder withEndMin(Long endMin) {
        if (endMin != null) {
            filters.add(new VariantRepositoryEndFilter(endMin, RelationalOperator.GTE));
        }
        return this;
    }

    /** Adds a less-or-equal end-position filter when {@code endMax} is non-null. */
    public FilterBuilder withEndMax(Long endMax) {
        if (endMax != null) {
            filters.add(new VariantRepositoryEndFilter(endMax, RelationalOperator.LTE));
        }
        return this;
    }
}
| variation-commons-mongodb/src/main/java/uk/ac/ebi/eva/commons/mongodb/filter/FilterBuilder.java | /*
* European Variation Archive (EVA) - Open-access database of all types of genetic
* variation data from all species
*
* Copyright 2017 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.eva.commons.mongodb.filter;
import uk.ac.ebi.eva.commons.core.models.VariantType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Fluent builder assembling the list of {@link VariantRepositoryFilter}s used
 * when querying through the VariantRepository. Each {@code with*} method
 * appends a filter only when its argument is meaningful (non-null, and for
 * strings/collections also non-empty), so absent criteria are simply skipped.
 */
public class FilterBuilder {

    // filters accumulated by the with* methods, in call order; returned by build()
    private List<VariantRepositoryFilter> filters = new ArrayList<>();

    /** Convenience entry point building the standard variant-query filters. */
    public List<VariantRepositoryFilter> getVariantEntityRepositoryFilters(String maf,
                                                                           String polyphenScore,
                                                                           String siftScore,
                                                                           List<String> studies,
                                                                           List<String> consequenceType) {
        return this.withMaf(maf)
                .withPolyphenScore(polyphenScore)
                .withSiftScore(siftScore)
                .withStudies(studies)
                .withConsequenceType(consequenceType)
                .build();
    }

    /** Convenience entry point building the filters for a Beacon query. */
    public List<VariantRepositoryFilter> getBeaconFilters(Long start,
                                                          Long startMin,
                                                          Long startMax,
                                                          Long end,
                                                          Long endMin,
                                                          Long endMax,
                                                          String referenceBases,
                                                          String alternateBases,
                                                          VariantType variantType,
                                                          List<String> studies){
        return this.withStart(start)
                .withStartMin(startMin)
                .withStartMax(startMax)
                .withEnd(end)
                .withEndMin(endMin)
                .withEndMax(endMax)
                .withReferenceBases(referenceBases)
                .withAlternates(alternateBases)
                .withVariantTypes(variantType)
                .withStudies(studies).build();
    }

    /** Returns the filters accumulated so far. */
    public List<VariantRepositoryFilter> build() {
        return filters;
    }

    /** Adds a minor allele frequency filter when {@code maf} is non-null and non-empty. */
    public FilterBuilder withMaf(String maf) {
        if (maf != null && !maf.isEmpty()) {
            filters.add(new VariantRepositoryMafFilter(maf));
        }
        return this;
    }

    /** Adds a PolyPhen score filter when {@code polyphenScore} is non-null and non-empty. */
    public FilterBuilder withPolyphenScore(String polyphenScore) {
        if (polyphenScore != null && !polyphenScore.isEmpty()) {
            filters.add(new VariantRepositoryPolyphenFilter(polyphenScore));
        }
        return this;
    }

    /** Adds a SIFT score filter when {@code siftScore} is non-null and non-empty. */
    public FilterBuilder withSiftScore(String siftScore) {
        if (siftScore != null && !siftScore.isEmpty()) {
            filters.add(new VariantRepositorySiftFilter(siftScore));
        }
        return this;
    }

    /** Adds a study filter when {@code studies} is non-null and non-empty. */
    public FilterBuilder withStudies(List<String> studies) {
        if (studies != null && !studies.isEmpty()) {
            filters.add(new VariantRepositoryStudyFilter(studies));
        }
        return this;
    }

    /** Adds a consequence-type filter when {@code consequenceType} is non-null and non-empty. */
    public FilterBuilder withConsequenceType(List<String> consequenceType) {
        if (consequenceType != null && !consequenceType.isEmpty()) {
            filters.add(new VariantRepositoryConsequenceTypeFilter(consequenceType));
        }
        return this;
    }

    /** Adds a file filter when {@code files} is non-null and non-empty. */
    public FilterBuilder withFiles(List<String> files) {
        if (files != null && !files.isEmpty()) {
            filters.add(new VariantRepositoryFileFilter(files));
        }
        return this;
    }

    /** Adds a variant-type filter when {@code types} is non-null and non-empty. */
    public FilterBuilder withVariantTypes(List<VariantType> types) {
        if (types != null && !types.isEmpty()) {
            filters.add(new VariantRepositoryTypeFilter(types));
        }
        return this;
    }

    /** Adds a single-type variant filter when {@code variantType} is non-null. */
    public FilterBuilder withVariantTypes(VariantType variantType) {
        if (variantType != null) {
            filters.add(new VariantRepositoryTypeFilter(new ArrayList<>(Arrays.asList(variantType))));
        }
        return this;
    }

    /** Adds an alternate-allele filter when {@code alternates} is non-null and non-empty. */
    public FilterBuilder withAlternates(List<String> alternates) {
        if (alternates != null && !alternates.isEmpty()) {
            filters.add(new VariantRepositoryAlternateFilter(alternates));
        }
        return this;
    }

    /** Adds a single alternate-allele filter when {@code alternate} is non-null. */
    public FilterBuilder withAlternates(String alternate) {
        if (alternate != null) {
            filters.add(new VariantRepositoryAlternateFilter(new ArrayList<>(Arrays.asList(alternate))));
        }
        return this;
    }

    /** Adds a reference-bases filter when {@code referenceBases} is non-null. */
    public FilterBuilder withReferenceBases(String referenceBases) {
        if (referenceBases != null) {
            filters.add(new VariantRepositoryReferenceBasesFilter(new ArrayList<>(Arrays.asList(referenceBases))));
        }
        return this;
    }

    /** Adds an exact start-position filter when {@code start} is non-null. */
    public FilterBuilder withStart(Long start) {
        if (start != null) {
            filters.add(new VariantRepositoryStartFilter(start, RelationalOperator.EQ));
        }
        return this;
    }

    /** Adds a greater-or-equal start-position filter when {@code startMin} is non-null. */
    public FilterBuilder withStartMin(Long startMin) {
        if (startMin != null) {
            filters.add(new VariantRepositoryStartFilter(startMin, RelationalOperator.GTE));
        }
        return this;
    }

    /** Adds a less-or-equal start-position filter when {@code startMax} is non-null. */
    public FilterBuilder withStartMax(Long startMax) {
        if (startMax != null) {
            filters.add(new VariantRepositoryStartFilter(startMax, RelationalOperator.LTE));
        }
        return this;
    }

    /** Adds an exact end-position filter when {@code end} is non-null. */
    public FilterBuilder withEnd(Long end) {
        if (end != null) {
            filters.add(new VariantRepositoryEndFilter(end, RelationalOperator.EQ));
        }
        return this;
    }

    /** Adds a greater-or-equal end-position filter when {@code endMin} is non-null. */
    public FilterBuilder withEndMin(Long endMin) {
        if (endMin != null) {
            filters.add(new VariantRepositoryEndFilter(endMin, RelationalOperator.GTE));
        }
        return this;
    }

    /** Adds a less-or-equal end-position filter when {@code endMax} is non-null. */
    public FilterBuilder withEndMax(Long endMax) {
        if (endMax != null) {
            filters.add(new VariantRepositoryEndFilter(endMax, RelationalOperator.LTE));
        }
        return this;
    }
}
| Made changes for coding style
| variation-commons-mongodb/src/main/java/uk/ac/ebi/eva/commons/mongodb/filter/FilterBuilder.java | Made changes for coding style |
|
Java | apache-2.0 | e61681d65903b3d8aa33b4e57af318123b418f34 | 0 | xufeifandj/download-manager,novoda/download-manager | /*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.novoda.downloadmanager.lib;
import android.content.ContentProvider;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.UriMatcher;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.net.Uri;
import android.os.Binder;
import android.os.ParcelFileDescriptor;
import android.os.Process;
import android.provider.OpenableColumns;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import com.novoda.notils.logger.simple.Log;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Allows application to interact with the download manager.
*/
public final class DownloadProvider extends ContentProvider {
/**
* Added so we can use our own ContentProvider
*/
public static final String AUTHORITY = Reflector.reflectAuthority();
/**
* Database filename
*/
private static final String DB_NAME = "downloads.db";
/**
* MIME type for the entire download list
*/
private static final String DOWNLOAD_LIST_TYPE = "vnd.android.cursor.dir/download";
/**
* MIME type for an individual download
*/
private static final String DOWNLOAD_TYPE = "vnd.android.cursor.item/download";
/**
* MIME type for the entire batch list
*/
private static final String BATCH_LIST_TYPE = "vnd.android.cursor.dir/batch";
/**
* MIME type for an individual batch
*/
private static final String BATCH_TYPE = "vnd.android.cursor.item/batch";
/**
* MIME type for the list of download by batch
*/
private static final String DOWNLOADS_BY_BATCH_TYPE = "vnd.android.cursor.dir/download_by_batch";
/**
* URI matcher used to recognize URIs sent by applications
*/
private static final UriMatcher URI_MATCHER = new UriMatcher(UriMatcher.NO_MATCH);
/**
* URI matcher constant for the URI of all downloads belonging to the calling UID
*/
private static final int MY_DOWNLOADS = 1;
/**
* URI matcher constant for the URI of an individual download belonging to the calling UID
*/
private static final int MY_DOWNLOADS_ID = 2;
/**
* URI matcher constant for the URI of all downloads in the system
*/
private static final int ALL_DOWNLOADS = 3;
/**
* URI matcher constant for the URI of an individual download
*/
private static final int ALL_DOWNLOADS_ID = 4;
/**
* URI matcher constant for the URI of a download's request headers
*/
private static final int REQUEST_HEADERS_URI = 5;
/**
* URI matcher constant for the public URI returned by
* {@link DownloadManager#getUriForDownloadedFile(long)} if the given downloaded file
* is publicly accessible.
*/
private static final int PUBLIC_DOWNLOAD_ID = 6;
/**
* URI matcher constant for the URI of a download's request headers
*/
private static final int BATCHES = 7;
/**
* URI matcher constant for the URI of a download's request headers
*/
private static final int BATCHES_ID = 8;
/**
* URI matcher constant for the URI of downloads with their batch data
*/
private static final int DOWNLOADS_BY_BATCH = 9;
static {
    // per-caller views of the download table
    URI_MATCHER.addURI(AUTHORITY, "my_downloads", MY_DOWNLOADS);
    URI_MATCHER.addURI(AUTHORITY, "my_downloads/#", MY_DOWNLOADS_ID);
    // system-wide views of the download table
    URI_MATCHER.addURI(AUTHORITY, "all_downloads", ALL_DOWNLOADS);
    URI_MATCHER.addURI(AUTHORITY, "all_downloads/#", ALL_DOWNLOADS_ID);
    // batches and the joined download/batch view
    URI_MATCHER.addURI(AUTHORITY, "batches", BATCHES);
    URI_MATCHER.addURI(AUTHORITY, "batches/#", BATCHES_ID);
    URI_MATCHER.addURI(AUTHORITY, "downloads_by_batch", DOWNLOADS_BY_BATCH);
    // request headers of an individual download, reachable from both views
    URI_MATCHER.addURI(AUTHORITY, "my_downloads/#/" + DownloadsColumnsRequestHeaders.URI_SEGMENT, REQUEST_HEADERS_URI);
    URI_MATCHER.addURI(AUTHORITY, "all_downloads/#/" + DownloadsColumnsRequestHeaders.URI_SEGMENT, REQUEST_HEADERS_URI);
    // temporary, for backwards compatibility
    URI_MATCHER.addURI(AUTHORITY, "download", MY_DOWNLOADS);
    URI_MATCHER.addURI(AUTHORITY, "download/#", MY_DOWNLOADS_ID);
    URI_MATCHER.addURI(AUTHORITY, "download/#/" + DownloadsColumnsRequestHeaders.URI_SEGMENT, REQUEST_HEADERS_URI);
    URI_MATCHER.addURI(AUTHORITY, DownloadsDestination.PUBLICLY_ACCESSIBLE_DOWNLOADS_URI_SEGMENT + "/#", PUBLIC_DOWNLOAD_ID);
}
// Whitelist of columns that external applications are allowed to read through
// the public query API; anything outside this list is rejected in queryDownloads().
private static final String[] APP_READABLE_COLUMNS_ARRAY = new String[]{
        DownloadsColumns._ID,
        DownloadsColumns.COLUMN_APP_DATA,
        DownloadsColumns.COLUMN_DATA,
        DownloadsColumns.COLUMN_MIME_TYPE,
        DownloadsColumns.COLUMN_DESTINATION,
        DownloadsColumns.COLUMN_CONTROL,
        DownloadsColumns.COLUMN_STATUS,
        DownloadsColumns.COLUMN_LAST_MODIFICATION,
        DownloadsColumns.COLUMN_NOTIFICATION_CLASS,
        DownloadsColumns.COLUMN_TOTAL_BYTES,
        DownloadsColumns.COLUMN_CURRENT_BYTES,
        DownloadsColumns.COLUMN_URI,
        DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI,
        DownloadsColumns.COLUMN_FILE_NAME_HINT,
        DownloadsColumns.COLUMN_MEDIAPROVIDER_URI,
        DownloadsColumns.COLUMN_DELETED,
        DownloadsColumns.COLUMN_NOTIFICATION_EXTRAS,
        DownloadsColumns.COLUMN_BATCH_ID,
        DownloadsColumnsBatches._ID,
        DownloadsColumnsBatches.COLUMN_STATUS,
        DownloadsColumnsBatches.COLUMN_TITLE,
        DownloadsColumnsBatches.COLUMN_DESCRIPTION,
        DownloadsColumnsBatches.COLUMN_BIG_PICTURE,
        DownloadsColumnsBatches.COLUMN_VISIBILITY,
        DownloadsColumnsBatches.COLUMN_TOTAL_BYTES,
        DownloadsColumnsBatches.COLUMN_CURRENT_BYTES,
        OpenableColumns.DISPLAY_NAME,
        OpenableColumns.SIZE,
};
// Set view of the whitelist above, for O(1) membership checks in query()/update()/delete().
private static final HashSet<String> APP_READABLE_COLUMNS_SET;
// Maps OpenableColumns names onto "<real column> AS <alias>" SQL expressions.
private static final HashMap<String, String> COLUMNS_MAP;
static {
    APP_READABLE_COLUMNS_SET = new HashSet<>();
    Collections.addAll(APP_READABLE_COLUMNS_SET, APP_READABLE_COLUMNS_ARRAY);
    COLUMNS_MAP = new HashMap<>();
    COLUMNS_MAP.put(OpenableColumns.DISPLAY_NAME, DownloadsColumnsBatches.COLUMN_TITLE + " AS " + OpenableColumns.DISPLAY_NAME);
    COLUMNS_MAP.put(OpenableColumns.SIZE, DownloadsColumns.COLUMN_TOTAL_BYTES + " AS " + OpenableColumns.SIZE);
}
// Columns selected by the DownloadManager facade itself; also accepted in app projections.
private static final List<String> DOWNLOAD_MANAGER_COLUMNS_LIST = Arrays.asList(DownloadManager.UNDERLYING_COLUMNS);
/**
 * Different base URIs that could be used to access an individual download
 */
private final Uri[] baseUris;
/**
 * The database that lies underneath this content provider
 */
private SQLiteOpenHelper openHelper = null;
/**
 * List of uids that can access the downloads
 */
private int systemUid = -1;
// UID of the "com.android.defcontainer" package, resolved in onCreate(); -1 if unknown.
private int defcontaineruid = -1;
// Directory used for filename validation in openFile(); set to the cache dir in onCreate().
private File downloadsDataDir;
// @VisibleForTesting
SystemFacade systemFacade;
// Source of the content/all_downloads/batches base URIs; cached in the constructor.
private DownloadsUriProvider downloadsUriProvider;
/**
 * Creates the provider and caches the three base URIs (my_downloads,
 * all_downloads, batches) used when broadcasting change notifications.
 */
public DownloadProvider() {
    downloadsUriProvider = DownloadsUriProvider.getInstance();
    baseUris = new Uri[]{
            downloadsUriProvider.getContentUri(),
            downloadsUriProvider.getAllDownloadsUri(),
            downloadsUriProvider.getBatchesUri()
    };  // removed stray empty statement (";;") left after the initializer
}
/**
 * Accumulates a SQL WHERE clause together with its bound parameters.
 * Shared helpers (like {@link DownloadProvider#getWhereClause(Uri, String, String[], int)})
 * use it to hand both pieces of information back to callers, appending
 * sub-clauses one at a time; each appended clause is parenthesised and
 * AND-ed onto whatever came before.
 */
private static class SqlSelection {
    public final StringBuilder whereClause = new StringBuilder();
    public final List<String> parameters = new ArrayList<>();

    /**
     * Appends {@code newClause} (AND-ed, in parentheses) and records its
     * bind parameters. A null or empty clause is ignored entirely.
     */
    public void appendClause(String newClause, final String... parameters) {
        boolean noClause = newClause == null || newClause.isEmpty();
        if (noClause) {
            return;
        }
        if (whereClause.length() > 0) {
            whereClause.append(" AND ");
        }
        whereClause.append("(").append(newClause).append(")");
        if (parameters != null) {
            Collections.addAll(this.parameters, parameters);
        }
    }

    /** @return the assembled WHERE clause text (may be empty). */
    public String getSelection() {
        return whereClause.toString();
    }

    /** @return the bind parameters in the order their clauses were appended. */
    public String[] getParameters() {
        return parameters.toArray(new String[parameters.size()]);
    }
}
/**
 * Initializes the content provider when it is created: builds the database
 * helper, resolves the privileged UIDs used by {@link #shouldRestrictVisibility()},
 * starts {@link DownloadService} eagerly, and picks the data directory used
 * for filename validation in {@link #openFile(Uri, String)}.
 *
 * @return always {@code true}
 */
@Override
public boolean onCreate() {
    if (systemFacade == null) {
        systemFacade = new RealSystemFacade(getContext());
    }
    Context context = getContext();
    PackageManager packageManager = context.getPackageManager();
    String packageName = context.getApplicationContext().getPackageName();
    DatabaseFilenameProvider databaseFilenameProvider = new DatabaseFilenameProvider(packageManager, packageName, DB_NAME);
    String databaseFilename = databaseFilenameProvider.getDatabaseFilename();
    openHelper = new DatabaseHelper(context, databaseFilename);
    // Initialize the system uid
    systemUid = Process.SYSTEM_UID;
    // Initialize the default container uid. Package name hardcoded for now.
    ApplicationInfo appInfo = null;
    try {
        // Reuse the PackageManager fetched above instead of re-querying the context.
        appInfo = packageManager.getApplicationInfo("com.android.defcontainer", 0);
    } catch (NameNotFoundException e) {
        // Fixed the logged package name (was misspelled "defconatiner").
        Log.wtf("Could not get ApplicationInfo for com.android.defcontainer", e);
    }
    if (appInfo != null) {
        defcontaineruid = appInfo.uid;
    }
    // start the DownloadService class. don't wait for the 1st download to be issued.
    // saves us by getting some initialization code in DownloadService out of the way.
    context.startService(new Intent(context, DownloadService.class));
    // downloadsDataDir = StorageManager.getDownloadDataDirectory(getContext());
    downloadsDataDir = context.getCacheDir();
    // try {
    //     android.os.SELinux.restorecon(downloadsDataDir.getCanonicalPath());
    // } catch (IOException e) {
    //     Log.wtf("Could not get canonical path for download directory", e);
    // }
    return true;
}
/**
 * Returns the content-provider-style MIME types of the various
 * types accessible through this content provider.
 *
 * @throws IllegalArgumentException for URIs this provider does not recognise
 */
@NonNull
@Override
public String getType(@NonNull Uri uri) {
    int match = URI_MATCHER.match(uri);
    switch (match) {
        case MY_DOWNLOADS:
        case ALL_DOWNLOADS:
            return DOWNLOAD_LIST_TYPE;
        case MY_DOWNLOADS_ID:
        case ALL_DOWNLOADS_ID:
        case PUBLIC_DOWNLOAD_ID:
            return mimeTypeForSingleDownload(uri);
        case BATCHES:
            return BATCH_LIST_TYPE;
        case BATCHES_ID:
            return BATCH_TYPE;
        case DOWNLOADS_BY_BATCH:
            return DOWNLOADS_BY_BATCH_TYPE;
        default:
            Log.v("calling getType on an unknown URI: " + uri);
            throw new IllegalArgumentException("Unknown URI: " + uri);
    }
}

/**
 * Looks up the stored MIME type of one download row; falls back to the
 * generic download MIME type when the row has none recorded.
 */
private String mimeTypeForSingleDownload(Uri uri) {
    String id = getDownloadIdFromUri(uri);
    SQLiteDatabase db = openHelper.getReadableDatabase();
    String sql = "SELECT " + DownloadsColumns.COLUMN_MIME_TYPE
            + " FROM " + DownloadsTables.DOWNLOADS_TABLE_NAME
            + " WHERE " + DownloadsColumns._ID + " = ?";
    String mimeType = DatabaseUtils.stringForQuery(db, sql, new String[]{id});
    return TextUtils.isEmpty(mimeType) ? DOWNLOAD_TYPE : mimeType;
}
/**
 * Inserts a row in the database
 *
 * Only my_downloads and batches URIs accept inserts; note in particular
 * that inserting into ALL_DOWNLOADS is disallowed.
 *
 * @throws IllegalArgumentException for any other URI
 */
@Override
public Uri insert(@NonNull Uri uri, ContentValues values) {
    SQLiteDatabase db = openHelper.getWritableDatabase();
    int match = URI_MATCHER.match(uri);
    switch (match) {
        case MY_DOWNLOADS:
            checkDownloadInsertPermissions(values);
            return insertDownload(uri, values, db, match);
        case BATCHES:
            long rowId = db.insert(DownloadsColumnsBatches.BATCHES_TABLE_NAME, null, values);
            return ContentUris.withAppendedId(downloadsUriProvider.getBatchesUri(), rowId);
        default:
            Log.d("calling insert on an unknown/invalid URI: " + uri);
            throw new IllegalArgumentException("Unknown/Invalid URI " + uri);
    }
}
/**
 * Inserts a single download row (plus any request headers carried in
 * {@code values}) into the downloads table, then starts
 * {@link DownloadService} and notifies observers.
 *
 * @param uri    the my_downloads URI the caller inserted through
 * @param values caller-supplied values; only whitelisted columns are copied
 * @param db     writable downloads database
 * @param match  URI matcher code, forwarded to notifyContentChanged()
 * @return the content:// URI of the new row, or null if the insert failed
 */
@Nullable
private Uri insertDownload(Uri uri, ContentValues values, SQLiteDatabase db, int match) {
    // copy some of the input values as is
    ContentValues filteredValues = new ContentValues();
    copyString(DownloadsColumns.COLUMN_URI, values, filteredValues);
    copyString(DownloadsColumns.COLUMN_APP_DATA, values, filteredValues);
    copyBoolean(DownloadsColumns.COLUMN_NO_INTEGRITY, values, filteredValues);
    copyString(DownloadsColumns.COLUMN_FILE_NAME_HINT, values, filteredValues);
    copyString(DownloadsColumns.COLUMN_MIME_TYPE, values, filteredValues);
    // validate the destination column
    Integer dest = values.getAsInteger(DownloadsColumns.COLUMN_DESTINATION);
    if (dest != null) {
        // Cache-partition destinations require the advanced-access permission.
        if (getContext().checkCallingPermission(DownloadsPermission.PERMISSION_ACCESS_ADVANCED)
                != PackageManager.PERMISSION_GRANTED
                && (dest == DownloadsDestination.DESTINATION_CACHE_PARTITION
                || dest == DownloadsDestination.DESTINATION_CACHE_PARTITION_NOROAMING
                || dest == DownloadsDestination.DESTINATION_SYSTEMCACHE_PARTITION)) {
            throw new SecurityException(
                    "setting destination to : " + dest +
                            " not allowed, unless PERMISSION_ACCESS_ADVANCED is granted");
        }
        // for public API behavior, if an app has CACHE_NON_PURGEABLE permission, automatically
        // switch to non-purgeable download
        boolean hasNonPurgeablePermission =
                getContext().checkCallingPermission(DownloadsPermission.PERMISSION_CACHE_NON_PURGEABLE) == PackageManager.PERMISSION_GRANTED;
        if (dest == DownloadsDestination.DESTINATION_CACHE_PARTITION_PURGEABLE && hasNonPurgeablePermission) {
            dest = DownloadsDestination.DESTINATION_CACHE_PARTITION;
        }
        if (dest == DownloadsDestination.DESTINATION_FILE_URI) {
            getContext().enforcePermission(android.Manifest.permission.WRITE_EXTERNAL_STORAGE, Binder.getCallingPid(), Binder.getCallingUid(),
                    "need WRITE_EXTERNAL_STORAGE permission to use DESTINATION_FILE_URI");
            checkFileUriDestination(values);
        } else if (dest == DownloadsDestination.DESTINATION_SYSTEMCACHE_PARTITION) {
            getContext().enforcePermission("android.permission.ACCESS_CACHE_FILESYSTEM", Binder.getCallingPid(), Binder.getCallingUid(),
                    "need ACCESS_CACHE_FILESYSTEM permission to use system cache");
        }
        filteredValues.put(DownloadsColumns.COLUMN_DESTINATION, dest);
    }
    // copy the control column as is
    copyInteger(DownloadsColumns.COLUMN_CONTROL, values, filteredValues);
    /*
     * requests coming from
     * DownloadManager.addCompletedDownload(String, String, String,
     * boolean, String, String, long) need special treatment
     *
     * FIX: read the destination through a null-checked local instead of
     * unboxing getAsInteger() directly — the original threw a
     * NullPointerException whenever the caller supplied no destination.
     * (We re-read from values on purpose: 'dest' may have been rewritten
     * from PURGEABLE to CACHE_PARTITION above.)
     */
    Integer requestedDestination = values.getAsInteger(DownloadsColumns.COLUMN_DESTINATION);
    if (requestedDestination != null
            && requestedDestination == DownloadsDestination.DESTINATION_NON_DOWNLOADMANAGER_DOWNLOAD) {
        // these requests always are marked as 'completed'
        filteredValues.put(DownloadsColumns.COLUMN_STATUS, DownloadsStatus.STATUS_SUCCESS);
        filteredValues.put(DownloadsColumns.COLUMN_TOTAL_BYTES, values.getAsLong(DownloadsColumns.COLUMN_TOTAL_BYTES));
        filteredValues.put(DownloadsColumns.COLUMN_CURRENT_BYTES, 0);
        copyInteger(DownloadsColumns.COLUMN_MEDIA_SCANNED, values, filteredValues);
        copyString(DownloadsColumns.COLUMN_DATA, values, filteredValues);
    } else {
        filteredValues.put(DownloadsColumns.COLUMN_STATUS, DownloadsStatus.STATUS_PENDING);
        filteredValues.put(DownloadsColumns.COLUMN_TOTAL_BYTES, -1);
        filteredValues.put(DownloadsColumns.COLUMN_CURRENT_BYTES, 0);
    }
    // set lastupdate to current time
    long lastMod = systemFacade.currentTimeMillis();
    filteredValues.put(DownloadsColumns.COLUMN_LAST_MODIFICATION, lastMod);
    // use packagename of the caller to set the notification columns
    String clazz = values.getAsString(DownloadsColumns.COLUMN_NOTIFICATION_CLASS);
    if (clazz != null) {
        int uid = Binder.getCallingUid();
        try {
            // Only root or the owning package may set the notification class.
            if ((uid == 0) || systemFacade.userOwnsPackage(uid, getContext().getPackageName())) {
                filteredValues.put(DownloadsColumns.COLUMN_NOTIFICATION_CLASS, clazz);
            }
        } catch (NameNotFoundException ex) {
            /* ignored for now */
        }
    }
    // copy some more columns as is
    copyString(DownloadsColumns.COLUMN_NOTIFICATION_EXTRAS, values, filteredValues);
    copyString(DownloadsColumns.COLUMN_COOKIE_DATA, values, filteredValues);
    copyString(DownloadsColumns.COLUMN_USER_AGENT, values, filteredValues);
    copyString(DownloadsColumns.COLUMN_REFERER, values, filteredValues);
    // UID, PID columns
    if (getContext().checkCallingPermission(DownloadsPermission.PERMISSION_ACCESS_ADVANCED) == PackageManager.PERMISSION_GRANTED) {
        copyInteger(DownloadsColumns.COLUMN_OTHER_UID, values, filteredValues);
    }
    filteredValues.put(Constants.UID, Binder.getCallingUid());
    if (Binder.getCallingUid() == 0) {
        // Root callers may impersonate another UID.
        copyInteger(Constants.UID, values, filteredValues);
    }
    // is_visible_in_downloads_ui column
    if (values.containsKey(DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI)) {
        copyBoolean(DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI, values, filteredValues);
    } else {
        // by default, make external downloads visible in the UI
        boolean isExternal = (dest == null || dest == DownloadsDestination.DESTINATION_EXTERNAL);
        filteredValues.put(DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI, isExternal);
    }
    // public api requests and networktypes/roaming columns
    copyInteger(DownloadsColumns.COLUMN_ALLOWED_NETWORK_TYPES, values, filteredValues);
    copyBoolean(DownloadsColumns.COLUMN_ALLOW_ROAMING, values, filteredValues);
    copyBoolean(DownloadsColumns.COLUMN_ALLOW_METERED, values, filteredValues);
    copyInteger(DownloadsColumns.COLUMN_BATCH_ID, values, filteredValues);
    Log.v("initiating download with UID " + filteredValues.getAsInteger(Constants.UID));
    if (filteredValues.containsKey(DownloadsColumns.COLUMN_OTHER_UID)) {
        Log.v("other UID " + filteredValues.getAsInteger(DownloadsColumns.COLUMN_OTHER_UID));
    }
    long rowID = db.insert(DownloadsTables.DOWNLOADS_TABLE_NAME, null, filteredValues);
    if (rowID == -1) {
        Log.d("couldn't insert into downloads database");
        return null;
    }
    insertRequestHeaders(db, rowID, values);
    // Kick the service so the new download is picked up, then notify observers.
    Context context = getContext();
    context.startService(new Intent(context, DownloadService.class));
    notifyContentChanged(uri, match);
    return ContentUris.withAppendedId(downloadsUriProvider.getContentUri(), rowID);
}
/**
 * Check that the file URI provided for DESTINATION_FILE_URI is valid:
 * it must be present, use the "file" scheme, and carry a path.
 *
 * @throws IllegalArgumentException when any of those conditions fails
 */
private void checkFileUriDestination(ContentValues values) {
    String fileUri = values.getAsString(DownloadsColumns.COLUMN_FILE_NAME_HINT);
    if (fileUri == null) {
        throw new IllegalArgumentException(
                "DESTINATION_FILE_URI must include a file URI under COLUMN_FILE_NAME_HINT");
    }
    Uri uri = Uri.parse(fileUri);
    // "file".equals(null) is false, so a missing scheme is rejected too.
    if (!"file".equals(uri.getScheme())) {
        throw new IllegalArgumentException("Not a file URI: " + uri);
    }
    if (uri.getPath() == null) {
        throw new IllegalArgumentException("Invalid file URI: " + uri);
    }
    // NOTE: a containment check restricting the destination to external
    // storage was previously here but is disabled:
    // try {
    //     final String canonicalPath = new File(uri.getPath()).getCanonicalPath();
    //     final String externalPath = Environment.getExternalStorageDirectory().getAbsolutePath();
    //     if (!canonicalPath.startsWith(externalPath)) {
    //         throw new SecurityException("Destination must be on external storage: " + uri);
    //     }
    // } catch (IOException e) {
    //     throw new SecurityException("Problem resolving path: " + uri);
    // }
}
/**
 * Apps with the ACCESS_DOWNLOAD_MANAGER permission can access this provider freely, subject to
 * constraints in the rest of the code. Apps without that may still access this provider through
 * the public API, but additional restrictions are imposed. We check those restrictions here.
 *
 * @param values ContentValues provided to insert()
 * @throws SecurityException if the caller has insufficient permissions
 */
private void checkDownloadInsertPermissions(ContentValues values) {
    if (getContext().checkCallingOrSelfPermission(DownloadsPermission.PERMISSION_ACCESS) == PackageManager.PERMISSION_GRANTED) {
        return;
    }
    getContext().enforceCallingOrSelfPermission(android.Manifest.permission.INTERNET, "INTERNET permission is required to use the download manager");
    // ensure the request fits within the bounds of a public API request
    // first copy so we can remove values
    values = new ContentValues(values);
    // validate the destination column
    // FIX: null-check before unboxing — the original compared
    // getAsInteger(...) directly against an int and threw a
    // NullPointerException when no destination was supplied.
    Integer dest = values.getAsInteger(DownloadsColumns.COLUMN_DESTINATION);
    if (dest != null && dest == DownloadsDestination.DESTINATION_NON_DOWNLOADMANAGER_DOWNLOAD) {
        /* this row is inserted by
         * DownloadManager.addCompletedDownload(String, String, String, boolean, String, String, long)
         */
        values.remove(DownloadsColumns.COLUMN_TOTAL_BYTES);
        values.remove(DownloadsColumns.COLUMN_DATA);
        values.remove(DownloadsColumns.COLUMN_STATUS);
    }
    enforceAllowedValues(
            values, DownloadsColumns.COLUMN_DESTINATION,
            DownloadsDestination.DESTINATION_CACHE_PARTITION_PURGEABLE,
            DownloadsDestination.DESTINATION_FILE_URI,
            DownloadsDestination.DESTINATION_NON_DOWNLOADMANAGER_DOWNLOAD);
    // remove the rest of the columns that are allowed (with any value)
    values.remove(DownloadsColumns.COLUMN_URI);
    values.remove(DownloadsColumns.COLUMN_NOTIFICATION_EXTRAS);
    values.remove(DownloadsColumns.COLUMN_BATCH_ID);
    values.remove(DownloadsColumns.COLUMN_MIME_TYPE);
    values.remove(DownloadsColumns.COLUMN_FILE_NAME_HINT); // checked later in insert()
    values.remove(DownloadsColumns.COLUMN_ALLOWED_NETWORK_TYPES);
    values.remove(DownloadsColumns.COLUMN_ALLOW_ROAMING);
    values.remove(DownloadsColumns.COLUMN_ALLOW_METERED);
    values.remove(DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI);
    values.remove(DownloadsColumns.COLUMN_MEDIA_SCANNED);
    // request-header entries are allowed too; strip them all
    Iterator<Map.Entry<String, Object>> iterator = values.valueSet().iterator();
    while (iterator.hasNext()) {
        String key = iterator.next().getKey();
        if (key.startsWith(DownloadsColumnsRequestHeaders.INSERT_KEY_PREFIX)) {
            iterator.remove();
        }
    }
    // any extra columns are extraneous and disallowed
    if (values.size() > 0) {
        StringBuilder error = new StringBuilder("Invalid columns in request: ");
        boolean first = true;
        for (Map.Entry<String, Object> entry : values.valueSet()) {
            if (!first) {
                error.append(", ");
            }
            // FIX: the flag was never cleared, so column names were
            // previously concatenated without any separator.
            first = false;
            error.append(entry.getKey());
        }
        throw new SecurityException(error.toString());
    }
}
/**
 * Remove column from values, and throw a SecurityException if the value isn't within the
 * specified allowedValues (null is matched only against an explicit null entry).
 */
private void enforceAllowedValues(ContentValues values, String column, Object... allowedValues) {
    Object value = values.get(column);
    values.remove(column);
    for (Object allowed : allowedValues) {
        boolean bothNull = value == null && allowed == null;
        boolean equal = value != null && value.equals(allowed);
        if (bothNull || equal) {
            return;
        }
    }
    throw new SecurityException("Invalid value for " + column + ": " + value);
}
/**
 * Starts a database query.
 *
 * Dispatches on the URI: download URIs go through {@link #queryDownloads}
 * (which applies per-app column restrictions), batch URIs hit the batches
 * table, downloads-by-batch uses the joined view, and request-header URIs
 * use a fixed projection.
 *
 * @throws IllegalArgumentException for URIs this provider does not recognise
 */
@NonNull
@Override
public Cursor query(@NonNull Uri uri, String[] projection, String selection, String[] selectionArgs, String sort) {
    // Reject selections that reference columns apps may not read.
    Helpers.validateSelection(selection, APP_READABLE_COLUMNS_SET);
    SQLiteDatabase db = openHelper.getReadableDatabase();
    int match = URI_MATCHER.match(uri);
    switch (match) {
        case ALL_DOWNLOADS:
        case ALL_DOWNLOADS_ID:
        case MY_DOWNLOADS:
        case MY_DOWNLOADS_ID:
            return queryDownloads(uri, projection, selection, selectionArgs, sort, db, match);
        case BATCHES:
        case BATCHES_ID:
            // getWhereClause() appends the batch-id restriction for BATCHES_ID.
            SqlSelection batchSelection = getWhereClause(uri, selection, selectionArgs, match);
            return db.query(
                    DownloadsColumnsBatches.BATCHES_TABLE_NAME, projection, batchSelection.getSelection(),
                    batchSelection.getParameters(), null, null, sort);
        case DOWNLOADS_BY_BATCH:
            return db.query(DownloadsTables.VIEW_NAME_DOWNLOADS_BY_BATCH, projection, selection, selectionArgs, null, null, sort);
        case REQUEST_HEADERS_URI:
            // Header queries have a fixed shape; refuse any customisation.
            if (projection != null || selection != null || sort != null) {
                throw new UnsupportedOperationException(
                        "Request header queries do not support "
                                + "projections, selections or sorting");
            }
            return queryRequestHeaders(db, uri);
        default:
            Log.v("querying unknown URI: " + uri);
            throw new IllegalArgumentException("Unknown URI: " + uri);
    }
}
/**
 * Queries the downloads table, restricting both the selection (via
 * getWhereClause) and the projection when the caller is an external app.
 *
 * @return the cursor, or null if the underlying query failed
 */
@Nullable
private Cursor queryDownloads(Uri uri, String[] projection, String selection, String[] selectionArgs, String sort, SQLiteDatabase db, int match) {
    SqlSelection fullSelection = getWhereClause(uri, selection, selectionArgs, match);
    if (shouldRestrictVisibility()) {
        if (projection == null) {
            // No projection given: default to the app-readable whitelist.
            projection = APP_READABLE_COLUMNS_ARRAY.clone();
        } else {
            // check the validity of the columns in projection
            for (int i = 0; i < projection.length; ++i) {
                if (!APP_READABLE_COLUMNS_SET.contains(projection[i]) &&
                        !DOWNLOAD_MANAGER_COLUMNS_LIST.contains(projection[i])) {
                    throw new IllegalArgumentException(
                            "column " + projection[i] + " is not allowed in queries");
                }
            }
        }
        // Rewrite OpenableColumns names into "<real column> AS <alias>" form.
        // NOTE(review): when the caller supplied its own projection array this
        // mutates it in place — confirm callers don't reuse it afterwards.
        for (int i = 0; i < projection.length; i++) {
            final String newColumn = COLUMNS_MAP.get(projection[i]);
            if (newColumn != null) {
                projection[i] = newColumn;
            }
        }
    }
    if (GlobalState.hasVerboseLogging()) {
        logVerboseQueryInfo(projection, selection, selectionArgs, sort, db);
    }
    Cursor ret = db.query(
            DownloadsTables.DOWNLOADS_TABLE_NAME, projection, fullSelection.getSelection(),
            fullSelection.getParameters(), null, null, sort);
    if (ret == null) {
        Log.v("query failed in downloads database");
    } else {
        // Register the queried URI for change notifications.
        ret.setNotificationUri(getContext().getContentResolver(), uri);
        Log.v("created cursor " + ret + " on behalf of " + Binder.getCallingPid());
    }
    return ret;
}
/**
 * Emits a single verbose log line describing the query about to run:
 * database nullness, every projection entry, the selection and its
 * arguments, and the sort order. Output text is unchanged from before.
 */
private void logVerboseQueryInfo(String[] projection,
                                 final String selection,
                                 final String[] selectionArgs,
                                 final String sort,
                                 SQLiteDatabase db) {
    StringBuilder message = new StringBuilder("starting query, database is ");
    if (db != null) {
        message.append("not ");
    }
    message.append("null; ");
    describeArray(message, "projection", projection);
    message.append("selection is ").append(selection).append("; ");
    describeArray(message, "selectionArgs", selectionArgs);
    message.append("sort is ").append(sort).append(".");
    Log.v(message.toString());
}

/**
 * Appends "<label> is null; ", "<label> is empty; ", or one
 * "<label>[i] is <value>; " entry per element.
 */
private static void describeArray(StringBuilder message, String label, String[] values) {
    if (values == null) {
        message.append(label).append(" is null; ");
    } else if (values.length == 0) {
        message.append(label).append(" is empty; ");
    } else {
        for (int i = 0; i < values.length; ++i) {
            message.append(label).append("[").append(i).append("] is ").append(values[i]).append("; ");
        }
    }
}
/** Extracts the download id, which is the second URI path segment (e.g. "my_downloads/<id>"). */
private String getDownloadIdFromUri(final Uri uri) {
    List<String> segments = uri.getPathSegments();
    return segments.get(1);
}
/**
 * Insert request headers for a download into the DB.
 *
 * Scans {@code values} for entries whose key carries the request-header
 * prefix; each value is a "Header: value" line that is split on the first
 * colon and stored as one row keyed to {@code downloadId}.
 *
 * @throws IllegalArgumentException if a header line contains no colon
 */
private void insertRequestHeaders(SQLiteDatabase db, long downloadId, ContentValues values) {
    ContentValues rowValues = new ContentValues();
    rowValues.put(DownloadsColumnsRequestHeaders.COLUMN_DOWNLOAD_ID, downloadId);
    for (Map.Entry<String, Object> entry : values.valueSet()) {
        String key = entry.getKey();
        if (key.startsWith(DownloadsColumnsRequestHeaders.INSERT_KEY_PREFIX)) {
            String headerLine = entry.getValue().toString();
            if (!headerLine.contains(":")) {
                throw new IllegalArgumentException("Invalid HTTP header line: " + headerLine);
            }
            // Split on the first colon only — header values may contain colons.
            String[] parts = headerLine.split(":", 2);
            // rowValues is reused across iterations; header/value are overwritten each time.
            rowValues.put(DownloadsColumnsRequestHeaders.COLUMN_HEADER, parts[0].trim());
            rowValues.put(DownloadsColumnsRequestHeaders.COLUMN_VALUE, parts[1].trim());
            db.insert(DownloadsColumnsRequestHeaders.HEADERS_DB_TABLE, null, rowValues);
        }
    }
}
/**
 * Handle a query for the custom request headers registered for a download.
 *
 * The download id is bound as a query parameter rather than concatenated
 * into the SQL text, so a malformed path segment cannot alter the query.
 */
private Cursor queryRequestHeaders(SQLiteDatabase db, Uri uri) {
    String where = DownloadsColumnsRequestHeaders.COLUMN_DOWNLOAD_ID + " = ?";
    String[] whereArgs = {getDownloadIdFromUri(uri)};
    String[] projection = new String[]{DownloadsColumnsRequestHeaders.COLUMN_HEADER,
            DownloadsColumnsRequestHeaders.COLUMN_VALUE};
    return db.query(
            DownloadsColumnsRequestHeaders.HEADERS_DB_TABLE, projection, where,
            whereArgs, null, null, null);
}
/**
 * Delete request headers for downloads matching the given query.
 *
 * First collects the ids of all matching download rows, then deletes every
 * header row keyed to each id. The id is bound as a parameter instead of
 * being concatenated into the SQL text.
 */
private void deleteRequestHeaders(SQLiteDatabase db, String where, String[] whereArgs) {
    String[] projection = new String[]{DownloadsColumns._ID};
    // Use the 7-argument overload; the original passed an extra null limit argument.
    Cursor cursor = db.query(DownloadsTables.DOWNLOADS_TABLE_NAME, projection, where, whereArgs, null, null, null);
    try {
        String idWhere = DownloadsColumnsRequestHeaders.COLUMN_DOWNLOAD_ID + " = ?";
        for (cursor.moveToFirst(); !cursor.isAfterLast(); cursor.moveToNext()) {
            long id = cursor.getLong(0);
            db.delete(DownloadsColumnsRequestHeaders.HEADERS_DB_TABLE, idWhere, new String[]{Long.toString(id)});
        }
    } finally {
        cursor.close();
    }
}
/**
 * @return true if we should restrict the columns readable by this caller —
 * i.e. the caller is not this process, not the system UID, and not the
 * default-container UID.
 */
private boolean shouldRestrictVisibility() {
    int callingUid = Binder.getCallingUid();
    boolean isSelf = Binder.getCallingPid() == Process.myPid();
    boolean isSystem = callingUid == systemUid;
    boolean isDefaultContainer = callingUid == defcontaineruid;
    return !(isSelf || isSystem || isDefaultContainer);
}
/**
 * Updates a row in the database.
 *
 * Out-of-process callers may only change a small whitelist of columns
 * (app data, control, mediaprovider URI, deleted flag); in-process callers
 * may update anything. Marks rows 'deleted', control changes, restarts and
 * size-limit bypasses all wake {@link DownloadService}.
 */
@Override
public int update(final Uri uri, final ContentValues values, final String where, final String[] whereArgs) {
    // Reject selections that reference columns apps may not read.
    Helpers.validateSelection(where, APP_READABLE_COLUMNS_SET);
    SQLiteDatabase db = openHelper.getWritableDatabase();
    int count;
    boolean startService = false;
    if (values.containsKey(DownloadsColumns.COLUMN_DELETED)) {
        // NOTE(review): getAsInteger can still return null when the key is
        // present with a non-integer value — this would NPE on unboxing; confirm callers.
        if (values.getAsInteger(DownloadsColumns.COLUMN_DELETED) == 1) {
            // some rows are to be 'deleted'. need to start DownloadService.
            startService = true;
        }
    }
    ContentValues filteredValues;
    if (Binder.getCallingPid() != Process.myPid()) {
        // External caller: copy only the columns apps are allowed to change.
        filteredValues = new ContentValues();
        copyString(DownloadsColumns.COLUMN_APP_DATA, values, filteredValues);
        Integer i = values.getAsInteger(DownloadsColumns.COLUMN_CONTROL);
        if (i != null) {
            filteredValues.put(DownloadsColumns.COLUMN_CONTROL, i);
            startService = true;
        }
        // NOTE(review): this repeats the COLUMN_CONTROL copy above — harmless but redundant.
        copyInteger(DownloadsColumns.COLUMN_CONTROL, values, filteredValues);
        copyString(DownloadsColumns.COLUMN_MEDIAPROVIDER_URI, values, filteredValues);
        copyInteger(DownloadsColumns.COLUMN_DELETED, values, filteredValues);
    } else {
        // In-process caller: trust the values as-is.
        filteredValues = values;
        Integer status = values.getAsInteger(DownloadsColumns.COLUMN_STATUS);
        boolean isRestart = status != null && status == DownloadsStatus.STATUS_PENDING;
        boolean isUserBypassingSizeLimit =
                values.containsKey(DownloadsColumns.COLUMN_BYPASS_RECOMMENDED_SIZE_LIMIT);
        if (isRestart || isUserBypassingSizeLimit) {
            startService = true;
        }
    }
    int match = URI_MATCHER.match(uri);
    switch (match) {
        case MY_DOWNLOADS:
        case MY_DOWNLOADS_ID:
        case ALL_DOWNLOADS:
        case ALL_DOWNLOADS_ID:
            SqlSelection selection = getWhereClause(uri, where, whereArgs, match);
            if (filteredValues.size() > 0) {
                count = db.update(
                        DownloadsTables.DOWNLOADS_TABLE_NAME, filteredValues, selection.getSelection(),
                        selection.getParameters());
            } else {
                count = 0;
            }
            break;
        case BATCHES:
        case BATCHES_ID:
            // Batch updates are not column-filtered; they apply the raw values.
            SqlSelection batchSelection = getWhereClause(uri, where, whereArgs, match);
            count = db.update(
                    DownloadsColumnsBatches.BATCHES_TABLE_NAME, values, batchSelection.getSelection(),
                    batchSelection.getParameters());
            break;
        default:
            Log.d("updating unknown/invalid URI: " + uri);
            throw new UnsupportedOperationException("Cannot update URI: " + uri);
    }
    // Notify through both my_downloads and all_downloads URIs.
    notifyContentChanged(uri, match);
    if (startService) {
        Context context = getContext();
        context.startService(new Intent(context, DownloadService.class));
    }
    return count;
}
/**
 * Notify of a change through both URIs (/my_downloads and /all_downloads)
 *
 * @param uri either URI for the changed download(s)
 * @param uriMatch the match ID from {@link #URI_MATCHER}
 */
private void notifyContentChanged(final Uri uri, int uriMatch) {
    boolean singleDownload = uriMatch == MY_DOWNLOADS_ID || uriMatch == ALL_DOWNLOADS_ID;
    Long downloadId = singleDownload ? Long.parseLong(getDownloadIdFromUri(uri)) : null;
    for (Uri baseUri : baseUris) {
        // For single-download changes, notify on the id-specific child URI.
        Uri uriToNotify = (downloadId == null) ? baseUri : ContentUris.withAppendedId(baseUri, downloadId);
        getContext().getContentResolver().notifyChange(uriToNotify, null);
    }
}
/**
 * Builds the combined WHERE clause for a downloads/batches operation:
 * the caller's selection, plus an id restriction for single-item URIs,
 * plus a UID restriction for my_downloads callers lacking ACCESS_ALL.
 */
private SqlSelection getWhereClause(final Uri uri, final String where, final String[] whereArgs,
                                    int uriMatch) {
    SqlSelection selection = new SqlSelection();
    selection.appendClause(where, whereArgs);
    boolean singleDownload = uriMatch == MY_DOWNLOADS_ID
            || uriMatch == ALL_DOWNLOADS_ID
            || uriMatch == PUBLIC_DOWNLOAD_ID;
    if (singleDownload) {
        selection.appendClause(DownloadsColumns._ID + " = ?", getDownloadIdFromUri(uri));
    }
    if (uriMatch == BATCHES_ID) {
        selection.appendClause(DownloadsColumnsBatches._ID + " = ?", uri.getLastPathSegment());
    }
    boolean isMyDownloads = uriMatch == MY_DOWNLOADS || uriMatch == MY_DOWNLOADS_ID;
    boolean lacksAccessAll = getContext().checkCallingPermission(DownloadsPermission.PERMISSION_ACCESS_ALL)
            != PackageManager.PERMISSION_GRANTED;
    if (isMyDownloads && lacksAccessAll) {
        // Restrict to rows owned by (or shared with) the calling UID.
        String callingUid = String.valueOf(Binder.getCallingUid());
        selection.appendClause(
                Constants.UID + "= ? OR " + DownloadsColumns.COLUMN_OTHER_UID + "= ?",
                callingUid, callingUid);
    }
    return selection;
}
/**
 * Deletes a row in the database.
 *
 * For download URIs the associated request-header rows are removed first,
 * then the download rows themselves; batch URIs delete from the batches
 * table. Observers of both base URIs are notified afterwards.
 */
@Override
public int delete(@NonNull Uri uri, String where, String[] whereArgs) {
    // Reject selections that reference columns apps may not read.
    Helpers.validateSelection(where, APP_READABLE_COLUMNS_SET);
    SQLiteDatabase db = openHelper.getWritableDatabase();
    int count;
    int match = URI_MATCHER.match(uri);
    switch (match) {
        case MY_DOWNLOADS:
        case MY_DOWNLOADS_ID:
        case ALL_DOWNLOADS:
        case ALL_DOWNLOADS_ID:
            SqlSelection selection = getWhereClause(uri, where, whereArgs, match);
            // Headers must go first: they are keyed by the download ids we are about to delete.
            deleteRequestHeaders(db, selection.getSelection(), selection.getParameters());
            count = db.delete(DownloadsTables.DOWNLOADS_TABLE_NAME, selection.getSelection(), selection.getParameters());
            break;
        case BATCHES:
        case BATCHES_ID:
            SqlSelection batchSelection = getWhereClause(uri, where, whereArgs, match);
            count = db.delete(DownloadsColumnsBatches.BATCHES_TABLE_NAME, batchSelection.getSelection(), batchSelection.getParameters());
            break;
        default:
            Log.d("deleting unknown/invalid URI: " + uri);
            throw new UnsupportedOperationException("Cannot delete URI: " + uri);
    }
    notifyContentChanged(uri, match);
    return count;
}
/**
 * Remotely opens a file.
 *
 * Resolves the download's on-disk path from the _data column and returns a
 * read-only descriptor. Only mode "r" is accepted.
 *
 * @throws FileNotFoundException if the URI matches zero or multiple rows,
 *         the row has no filename, the mode is not "r", or the file cannot be opened
 */
@Override
public ParcelFileDescriptor openFile(@NonNull Uri uri, String mode) throws FileNotFoundException {
    if (GlobalState.hasVerboseLogging()) {
        logVerboseOpenFileInfo(uri, mode);
    }
    // Resolve the on-disk path for this download row.
    Cursor cursor = query(uri, new String[]{"_data"}, null, null, null);
    String path;
    try {
        int count = (cursor != null) ? cursor.getCount() : 0;
        if (count != 1) {
            // If there is not exactly one result, throw an appropriate exception.
            if (count == 0) {
                throw new FileNotFoundException("No entry for " + uri);
            }
            throw new FileNotFoundException("Multiple items at " + uri);
        }
        cursor.moveToFirst();
        path = cursor.getString(0);
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
    if (path == null) {
        throw new FileNotFoundException("No filename found.");
    }
    // SECURITY NOTE(review): an invalid filename is only logged here — the
    // exception below is commented out, so the file is still served.
    // Confirm whether this is intentional before shipping.
    if (!Helpers.isFilenameValid(path, downloadsDataDir)) {
        Log.d("INTERNAL FILE DOWNLOAD LOL COMMENTED EXCEPTION");
        // throw new FileNotFoundException("Invalid filename: " + path);
    }
    if (!"r".equals(mode)) {
        throw new FileNotFoundException("Bad mode for " + uri + ": " + mode);
    }
    ParcelFileDescriptor ret = ParcelFileDescriptor.open(new File(path),
            ParcelFileDescriptor.MODE_READ_ONLY);
    if (ret == null) {
        Log.v("couldn't open file");
        throw new FileNotFoundException("couldn't open file");
    }
    return ret;
}
/** Dump hook; no internal state is exported yet, so only a marker is logged. */
@Override
public void dump(FileDescriptor fd, @NonNull PrintWriter writer, String[] args) {
    Log.e("I want dump, but nothing to dump into");
}
/**
 * Verbose diagnostics for openFile(): logs the call parameters, dumps the
 * ids of every download row, then logs the filename behind the requested
 * URI (and whether that file exists). Log text is unchanged from before.
 */
private void logVerboseOpenFileInfo(Uri uri, String mode) {
    Log.v("openFile uri: " + uri + ", mode: " + mode
            + ", uid: " + Binder.getCallingUid());
    Cursor idCursor = query(
            downloadsUriProvider.getContentUri(),
            new String[]{"_id"}, null, null, "_id");
    if (idCursor == null) {
        Log.v("null cursor in openFile");
    } else {
        if (idCursor.moveToFirst()) {
            do {
                Log.v("row " + idCursor.getInt(0) + " available");
            } while (idCursor.moveToNext());
        } else {
            Log.v("empty cursor in openFile");
        }
        idCursor.close();
    }
    Cursor dataCursor = query(uri, new String[]{"_data"}, null, null, null);
    if (dataCursor == null) {
        Log.v("null cursor in openFile");
    } else {
        if (dataCursor.moveToFirst()) {
            String filename = dataCursor.getString(0);
            Log.v("filename in openFile: " + filename);
            if (new java.io.File(filename).isFile()) {
                Log.v("file exists in openFile");
            }
        } else {
            Log.v("empty cursor in openFile");
        }
        dataCursor.close();
    }
}
/** Copies {@code key} from {@code from} to {@code to} when it holds a non-null Integer. */
private static void copyInteger(String key, ContentValues from, ContentValues to) {
    Integer value = from.getAsInteger(key);
    if (value == null) {
        return;
    }
    to.put(key, value);
}
/** Copies {@code key} from {@code from} to {@code to} when it holds a non-null Boolean. */
private static void copyBoolean(String key, ContentValues from, ContentValues to) {
    Boolean value = from.getAsBoolean(key);
    if (value == null) {
        return;
    }
    to.put(key, value);
}
/** Copies {@code key} from {@code from} to {@code to} when it holds a non-null String. */
private static void copyString(String key, ContentValues from, ContentValues to) {
    String value = from.getAsString(key);
    if (value == null) {
        return;
    }
    to.put(key, value);
}
}
| library/src/main/java/com/novoda/downloadmanager/lib/DownloadProvider.java | /*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.novoda.downloadmanager.lib;
import android.content.ContentProvider;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.UriMatcher;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.net.Uri;
import android.os.Binder;
import android.os.ParcelFileDescriptor;
import android.os.Process;
import android.provider.OpenableColumns;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import com.novoda.notils.logger.simple.Log;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Allows application to interact with the download manager.
*/
public final class DownloadProvider extends ContentProvider {
/**
* Added so we can use our own ContentProvider
*/
public static final String AUTHORITY = Reflector.reflectAuthority();
/**
* Database filename
*/
private static final String DB_NAME = "downloads.db";
/**
* MIME type for the entire download list
*/
private static final String DOWNLOAD_LIST_TYPE = "vnd.android.cursor.dir/download";
/**
* MIME type for an individual download
*/
private static final String DOWNLOAD_TYPE = "vnd.android.cursor.item/download";
/**
* MIME type for the entire batch list
*/
private static final String BATCH_LIST_TYPE = "vnd.android.cursor.dir/batch";
/**
* MIME type for an individual batch
*/
private static final String BATCH_TYPE = "vnd.android.cursor.item/batch";
/**
* MIME type for the list of download by batch
*/
private static final String DOWNLOADS_BY_BATCH_TYPE = "vnd.android.cursor.dir/download_by_batch";
/**
* URI matcher used to recognize URIs sent by applications
*/
private static final UriMatcher URI_MATCHER = new UriMatcher(UriMatcher.NO_MATCH);
/**
* URI matcher constant for the URI of all downloads belonging to the calling UID
*/
private static final int MY_DOWNLOADS = 1;
/**
* URI matcher constant for the URI of an individual download belonging to the calling UID
*/
private static final int MY_DOWNLOADS_ID = 2;
/**
* URI matcher constant for the URI of all downloads in the system
*/
private static final int ALL_DOWNLOADS = 3;
/**
* URI matcher constant for the URI of an individual download
*/
private static final int ALL_DOWNLOADS_ID = 4;
/**
* URI matcher constant for the URI of a download's request headers
*/
private static final int REQUEST_HEADERS_URI = 5;
/**
* URI matcher constant for the public URI returned by
* {@link DownloadManager#getUriForDownloadedFile(long)} if the given downloaded file
* is publicly accessible.
*/
private static final int PUBLIC_DOWNLOAD_ID = 6;
/**
* URI matcher constant for the URI of a download's request headers
*/
private static final int BATCHES = 7;
/**
* URI matcher constant for the URI of a download's request headers
*/
private static final int BATCHES_ID = 8;
/**
* URI matcher constant for the URI of downloads with their batch data
*/
private static final int DOWNLOADS_BY_BATCH = 9;
static {
URI_MATCHER.addURI(AUTHORITY, "my_downloads", MY_DOWNLOADS);
URI_MATCHER.addURI(AUTHORITY, "my_downloads/#", MY_DOWNLOADS_ID);
URI_MATCHER.addURI(AUTHORITY, "all_downloads", ALL_DOWNLOADS);
URI_MATCHER.addURI(AUTHORITY, "all_downloads/#", ALL_DOWNLOADS_ID);
URI_MATCHER.addURI(AUTHORITY, "batches", BATCHES);
URI_MATCHER.addURI(AUTHORITY, "batches/#", BATCHES_ID);
URI_MATCHER.addURI(AUTHORITY, "downloads_by_batch", DOWNLOADS_BY_BATCH);
URI_MATCHER.addURI(AUTHORITY, "my_downloads/#/" + DownloadsColumnsRequestHeaders.URI_SEGMENT, REQUEST_HEADERS_URI);
URI_MATCHER.addURI(AUTHORITY, "all_downloads/#/" + DownloadsColumnsRequestHeaders.URI_SEGMENT, REQUEST_HEADERS_URI);
// temporary, for backwards compatibility
URI_MATCHER.addURI(AUTHORITY, "download", MY_DOWNLOADS);
URI_MATCHER.addURI(AUTHORITY, "download/#", MY_DOWNLOADS_ID);
URI_MATCHER.addURI(AUTHORITY, "download/#/" + DownloadsColumnsRequestHeaders.URI_SEGMENT, REQUEST_HEADERS_URI);
URI_MATCHER.addURI(AUTHORITY, DownloadsDestination.PUBLICLY_ACCESSIBLE_DOWNLOADS_URI_SEGMENT + "/#", PUBLIC_DOWNLOAD_ID);
}
/**
* Different base URIs that could be used to access an individual download
*/
private final Uri[] baseUris;
private static final String[] APP_READABLE_COLUMNS_ARRAY = new String[]{
DownloadsColumns._ID,
DownloadsColumns.COLUMN_APP_DATA,
DownloadsColumns.COLUMN_DATA,
DownloadsColumns.COLUMN_MIME_TYPE,
DownloadsColumns.COLUMN_DESTINATION,
DownloadsColumns.COLUMN_CONTROL,
DownloadsColumns.COLUMN_STATUS,
DownloadsColumns.COLUMN_LAST_MODIFICATION,
DownloadsColumns.COLUMN_NOTIFICATION_CLASS,
DownloadsColumns.COLUMN_TOTAL_BYTES,
DownloadsColumns.COLUMN_CURRENT_BYTES,
DownloadsColumns.COLUMN_URI,
DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI,
DownloadsColumns.COLUMN_FILE_NAME_HINT,
DownloadsColumns.COLUMN_MEDIAPROVIDER_URI,
DownloadsColumns.COLUMN_DELETED,
DownloadsColumns.COLUMN_NOTIFICATION_EXTRAS,
DownloadsColumns.COLUMN_BATCH_ID,
DownloadsColumnsBatches._ID,
DownloadsColumnsBatches.COLUMN_STATUS,
DownloadsColumnsBatches.COLUMN_TITLE,
DownloadsColumnsBatches.COLUMN_DESCRIPTION,
DownloadsColumnsBatches.COLUMN_BIG_PICTURE,
DownloadsColumnsBatches.COLUMN_VISIBILITY,
DownloadsColumnsBatches.COLUMN_TOTAL_BYTES,
DownloadsColumnsBatches.COLUMN_CURRENT_BYTES,
OpenableColumns.DISPLAY_NAME,
OpenableColumns.SIZE,
};
private static final HashSet<String> APP_READABLE_COLUMNS_SET;
private static final HashMap<String, String> COLUMNS_MAP;
static {
APP_READABLE_COLUMNS_SET = new HashSet<>();
Collections.addAll(APP_READABLE_COLUMNS_SET, APP_READABLE_COLUMNS_ARRAY);
COLUMNS_MAP = new HashMap<>();
COLUMNS_MAP.put(OpenableColumns.DISPLAY_NAME, DownloadsColumnsBatches.COLUMN_TITLE + " AS " + OpenableColumns.DISPLAY_NAME);
COLUMNS_MAP.put(OpenableColumns.SIZE, DownloadsColumns.COLUMN_TOTAL_BYTES + " AS " + OpenableColumns.SIZE);
}
private static final List<String> DOWNLOAD_MANAGER_COLUMNS_LIST = Arrays.asList(DownloadManager.UNDERLYING_COLUMNS);
/**
* The database that lies underneath this content provider
*/
private SQLiteOpenHelper openHelper = null;
/**
* List of uids that can access the downloads
*/
private int systemUid = -1;
private int defcontaineruid = -1;
private File downloadsDataDir;
// @VisibleForTesting
SystemFacade systemFacade;
private DownloadsUriProvider downloadsUriProvider;
public DownloadProvider() {
downloadsUriProvider = DownloadsUriProvider.getInstance();
baseUris = new Uri[]{
downloadsUriProvider.getContentUri(),
downloadsUriProvider.getAllDownloadsUri(),
downloadsUriProvider.getBatchesUri()
};;
}
/**
* This class encapsulates a SQL where clause and its parameters. It makes it possible for
* shared methods (like {@link DownloadProvider#getWhereClause(Uri, String, String[], int)})
* to return both pieces of information, and provides some utility logic to ease piece-by-piece
* construction of selections.
*/
private static class SqlSelection {
public final StringBuilder whereClause = new StringBuilder();
public final List<String> parameters = new ArrayList<>();
public void appendClause(String newClause, final String... parameters) {
if (newClause == null || newClause.isEmpty()) {
return;
}
if (whereClause.length() != 0) {
whereClause.append(" AND ");
}
whereClause.append("(");
whereClause.append(newClause);
whereClause.append(")");
if (parameters != null) {
for (String parameter : parameters) {
this.parameters.add(parameter);
}
}
}
public String getSelection() {
return whereClause.toString();
}
public String[] getParameters() {
String[] array = new String[parameters.size()];
return parameters.toArray(array);
}
}
/**
* Initializes the content provider when it is created.
*/
@Override
public boolean onCreate() {
if (systemFacade == null) {
systemFacade = new RealSystemFacade(getContext());
}
Context context = getContext();
PackageManager packageManager = context.getPackageManager();
String packageName = context.getApplicationContext().getPackageName();
DatabaseFilenameProvider databaseFilenameProvider = new DatabaseFilenameProvider(packageManager, packageName, DB_NAME);
String databaseFilename = databaseFilenameProvider.getDatabaseFilename();
openHelper = new DatabaseHelper(context, databaseFilename);
// Initialize the system uid
systemUid = Process.SYSTEM_UID;
// Initialize the default container uid. Package name hardcoded
// for now.
ApplicationInfo appInfo = null;
try {
appInfo = getContext().getPackageManager().
getApplicationInfo("com.android.defcontainer", 0);
} catch (NameNotFoundException e) {
Log.wtf("Could not get ApplicationInfo for com.android.defconatiner", e);
}
if (appInfo != null) {
defcontaineruid = appInfo.uid;
}
// start the DownloadService class. don't wait for the 1st download to be issued.
// saves us by getting some initialization code in DownloadService out of the way.
context.startService(new Intent(context, DownloadService.class));
// downloadsDataDir = StorageManager.getDownloadDataDirectory(getContext());
downloadsDataDir = context.getCacheDir();
// try {
// android.os.SELinux.restorecon(downloadsDataDir.getCanonicalPath());
// } catch (IOException e) {
// Log.wtf("Could not get canonical path for download directory", e);
// }
return true;
}
/**
* Returns the content-provider-style MIME types of the various
* types accessible through this content provider.
*/
@NonNull
@Override
public String getType(@NonNull Uri uri) {
int match = URI_MATCHER.match(uri);
switch (match) {
case MY_DOWNLOADS:
case ALL_DOWNLOADS: {
return DOWNLOAD_LIST_TYPE;
}
case MY_DOWNLOADS_ID:
case ALL_DOWNLOADS_ID:
case PUBLIC_DOWNLOAD_ID: {
// return the mimetype of this id from the database
final String id = getDownloadIdFromUri(uri);
final SQLiteDatabase db = openHelper.getReadableDatabase();
final String mimeType = DatabaseUtils.stringForQuery(db,
"SELECT " + DownloadsColumns.COLUMN_MIME_TYPE + " FROM " + DownloadsTables.DOWNLOADS_TABLE_NAME +
" WHERE " + DownloadsColumns._ID + " = ?",
new String[]{id});
if (TextUtils.isEmpty(mimeType)) {
return DOWNLOAD_TYPE;
} else {
return mimeType;
}
}
case BATCHES: {
return BATCH_LIST_TYPE;
}
case BATCHES_ID: {
return BATCH_TYPE;
}
case DOWNLOADS_BY_BATCH: {
return DOWNLOADS_BY_BATCH_TYPE;
}
default: {
Log.v("calling getType on an unknown URI: " + uri);
throw new IllegalArgumentException("Unknown URI: " + uri);
}
}
}
/**
* Inserts a row in the database
*/
@Override
public Uri insert(@NonNull Uri uri, ContentValues values) {
SQLiteDatabase db = openHelper.getWritableDatabase();
// note we disallow inserting into ALL_DOWNLOADS
int match = URI_MATCHER.match(uri);
if (match == MY_DOWNLOADS) {
checkDownloadInsertPermissions(values);
return insertDownload(uri, values, db, match);
}
if (match == BATCHES) {
long rowId = db.insert(DownloadsColumnsBatches.BATCHES_TABLE_NAME, null, values);
return ContentUris.withAppendedId(downloadsUriProvider.getBatchesUri(), rowId);
}
Log.d("calling insert on an unknown/invalid URI: " + uri);
throw new IllegalArgumentException("Unknown/Invalid URI " + uri);
}
@Nullable
private Uri insertDownload(Uri uri, ContentValues values, SQLiteDatabase db, int match) {
// copy some of the input values as it
ContentValues filteredValues = new ContentValues();
copyString(DownloadsColumns.COLUMN_URI, values, filteredValues);
copyString(DownloadsColumns.COLUMN_APP_DATA, values, filteredValues);
copyBoolean(DownloadsColumns.COLUMN_NO_INTEGRITY, values, filteredValues);
copyString(DownloadsColumns.COLUMN_FILE_NAME_HINT, values, filteredValues);
copyString(DownloadsColumns.COLUMN_MIME_TYPE, values, filteredValues);
// validate the destination column
Integer dest = values.getAsInteger(DownloadsColumns.COLUMN_DESTINATION);
if (dest != null) {
if (getContext().checkCallingPermission(DownloadsPermission.PERMISSION_ACCESS_ADVANCED)
!= PackageManager.PERMISSION_GRANTED
&& (dest == DownloadsDestination.DESTINATION_CACHE_PARTITION
|| dest == DownloadsDestination.DESTINATION_CACHE_PARTITION_NOROAMING
|| dest == DownloadsDestination.DESTINATION_SYSTEMCACHE_PARTITION)) {
throw new SecurityException(
"setting destination to : " + dest +
" not allowed, unless PERMISSION_ACCESS_ADVANCED is granted");
}
// for public API behavior, if an app has CACHE_NON_PURGEABLE permission, automatically
// switch to non-purgeable download
boolean hasNonPurgeablePermission =
getContext().checkCallingPermission(DownloadsPermission.PERMISSION_CACHE_NON_PURGEABLE) == PackageManager.PERMISSION_GRANTED;
if (dest == DownloadsDestination.DESTINATION_CACHE_PARTITION_PURGEABLE && hasNonPurgeablePermission) {
dest = DownloadsDestination.DESTINATION_CACHE_PARTITION;
}
if (dest == DownloadsDestination.DESTINATION_FILE_URI) {
getContext().enforcePermission(android.Manifest.permission.WRITE_EXTERNAL_STORAGE, Binder.getCallingPid(), Binder.getCallingUid(),
"need WRITE_EXTERNAL_STORAGE permission to use DESTINATION_FILE_URI");
checkFileUriDestination(values);
} else if (dest == DownloadsDestination.DESTINATION_SYSTEMCACHE_PARTITION) {
getContext().enforcePermission("android.permission.ACCESS_CACHE_FILESYSTEM", Binder.getCallingPid(), Binder.getCallingUid(),
"need ACCESS_CACHE_FILESYSTEM permission to use system cache");
}
filteredValues.put(DownloadsColumns.COLUMN_DESTINATION, dest);
}
// copy the control column as is
copyInteger(DownloadsColumns.COLUMN_CONTROL, values, filteredValues);
/*
* requests coming from
* DownloadManager.addCompletedDownload(String, String, String,
* boolean, String, String, long) need special treatment
*/
if (values.getAsInteger(DownloadsColumns.COLUMN_DESTINATION) == DownloadsDestination.DESTINATION_NON_DOWNLOADMANAGER_DOWNLOAD) {
// these requests always are marked as 'completed'
filteredValues.put(DownloadsColumns.COLUMN_STATUS, DownloadsStatus.STATUS_SUCCESS);
filteredValues.put(DownloadsColumns.COLUMN_TOTAL_BYTES, values.getAsLong(DownloadsColumns.COLUMN_TOTAL_BYTES));
filteredValues.put(DownloadsColumns.COLUMN_CURRENT_BYTES, 0);
copyInteger(DownloadsColumns.COLUMN_MEDIA_SCANNED, values, filteredValues);
copyString(DownloadsColumns.COLUMN_DATA, values, filteredValues);
} else {
filteredValues.put(DownloadsColumns.COLUMN_STATUS, DownloadsStatus.STATUS_PENDING);
filteredValues.put(DownloadsColumns.COLUMN_TOTAL_BYTES, -1);
filteredValues.put(DownloadsColumns.COLUMN_CURRENT_BYTES, 0);
}
// set lastupdate to current time
long lastMod = systemFacade.currentTimeMillis();
filteredValues.put(DownloadsColumns.COLUMN_LAST_MODIFICATION, lastMod);
// use packagename of the caller to set the notification columns
String clazz = values.getAsString(DownloadsColumns.COLUMN_NOTIFICATION_CLASS);
if (clazz != null) {
int uid = Binder.getCallingUid();
try {
if ((uid == 0) || systemFacade.userOwnsPackage(uid, getContext().getPackageName())) {
filteredValues.put(DownloadsColumns.COLUMN_NOTIFICATION_CLASS, clazz);
}
} catch (NameNotFoundException ex) {
/* ignored for now */
}
}
// copy some more columns as is
copyString(DownloadsColumns.COLUMN_NOTIFICATION_EXTRAS, values, filteredValues);
copyString(DownloadsColumns.COLUMN_COOKIE_DATA, values, filteredValues);
copyString(DownloadsColumns.COLUMN_USER_AGENT, values, filteredValues);
copyString(DownloadsColumns.COLUMN_REFERER, values, filteredValues);
// UID, PID columns
if (getContext().checkCallingPermission(DownloadsPermission.PERMISSION_ACCESS_ADVANCED) == PackageManager.PERMISSION_GRANTED) {
copyInteger(DownloadsColumns.COLUMN_OTHER_UID, values, filteredValues);
}
filteredValues.put(Constants.UID, Binder.getCallingUid());
if (Binder.getCallingUid() == 0) {
copyInteger(Constants.UID, values, filteredValues);
}
// is_visible_in_downloads_ui column
if (values.containsKey(DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI)) {
copyBoolean(DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI, values, filteredValues);
} else {
// by default, make external downloads visible in the UI
boolean isExternal = (dest == null || dest == DownloadsDestination.DESTINATION_EXTERNAL);
filteredValues.put(DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI, isExternal);
}
// public api requests and networktypes/roaming columns
copyInteger(DownloadsColumns.COLUMN_ALLOWED_NETWORK_TYPES, values, filteredValues);
copyBoolean(DownloadsColumns.COLUMN_ALLOW_ROAMING, values, filteredValues);
copyBoolean(DownloadsColumns.COLUMN_ALLOW_METERED, values, filteredValues);
copyInteger(DownloadsColumns.COLUMN_BATCH_ID, values, filteredValues);
Log.v("initiating download with UID " + filteredValues.getAsInteger(Constants.UID));
if (filteredValues.containsKey(DownloadsColumns.COLUMN_OTHER_UID)) {
Log.v("other UID " + filteredValues.getAsInteger(DownloadsColumns.COLUMN_OTHER_UID));
}
long rowID = db.insert(DownloadsTables.DOWNLOADS_TABLE_NAME, null, filteredValues);
if (rowID == -1) {
Log.d("couldn't insert into downloads database");
return null;
}
insertRequestHeaders(db, rowID, values);
/*
* requests coming from
* DownloadManager.addCompletedDownload(String, String, String,
* boolean, String, String, long) need special treatment
*/
Context context = getContext();
context.startService(new Intent(context, DownloadService.class));
notifyContentChanged(uri, match);
return ContentUris.withAppendedId(downloadsUriProvider.getContentUri(), rowID);
}
/**
* Check that the file URI provided for DESTINATION_FILE_URI is valid.
*/
private void checkFileUriDestination(ContentValues values) {
String fileUri = values.getAsString(DownloadsColumns.COLUMN_FILE_NAME_HINT);
if (fileUri == null) {
throw new IllegalArgumentException(
"DESTINATION_FILE_URI must include a file URI under COLUMN_FILE_NAME_HINT");
}
Uri uri = Uri.parse(fileUri);
String scheme = uri.getScheme();
if (scheme == null || !scheme.equals("file")) {
throw new IllegalArgumentException("Not a file URI: " + uri);
}
final String path = uri.getPath();
if (path == null) {
throw new IllegalArgumentException("Invalid file URI: " + uri);
}
// try {
// final String canonicalPath = new File(path).getCanonicalPath();
// final String externalPath = Environment.getExternalStorageDirectory().getAbsolutePath();
// if (!canonicalPath.startsWith(externalPath)) {
// throw new SecurityException("Destination must be on external storage: " + uri);
// }
// } catch (IOException e) {
// throw new SecurityException("Problem resolving path: " + uri);
// }
}
/**
* Apps with the ACCESS_DOWNLOAD_MANAGER permission can access this provider freely, subject to
* constraints in the rest of the code. Apps without that may still access this provider through
* the public API, but additional restrictions are imposed. We check those restrictions here.
*
* @param values ContentValues provided to insert()
* @throws SecurityException if the caller has insufficient permissions
*/
private void checkDownloadInsertPermissions(ContentValues values) {
if (getContext().checkCallingOrSelfPermission(DownloadsPermission.PERMISSION_ACCESS) == PackageManager.PERMISSION_GRANTED) {
return;
}
getContext().enforceCallingOrSelfPermission(android.Manifest.permission.INTERNET, "INTERNET permission is required to use the download manager");
// ensure the request fits within the bounds of a public API request
// first copy so we can remove values
values = new ContentValues(values);
// validate the destination column
if (values.getAsInteger(DownloadsColumns.COLUMN_DESTINATION) == DownloadsDestination.DESTINATION_NON_DOWNLOADMANAGER_DOWNLOAD) {
/* this row is inserted by
* DownloadManager.addCompletedDownload(String, String, String, boolean, String, String, long)
*/
values.remove(DownloadsColumns.COLUMN_TOTAL_BYTES);
values.remove(DownloadsColumns.COLUMN_DATA);
values.remove(DownloadsColumns.COLUMN_STATUS);
}
enforceAllowedValues(
values, DownloadsColumns.COLUMN_DESTINATION,
DownloadsDestination.DESTINATION_CACHE_PARTITION_PURGEABLE,
DownloadsDestination.DESTINATION_FILE_URI,
DownloadsDestination.DESTINATION_NON_DOWNLOADMANAGER_DOWNLOAD);
// remove the rest of the columns that are allowed (with any value)
values.remove(DownloadsColumns.COLUMN_URI);
values.remove(DownloadsColumns.COLUMN_NOTIFICATION_EXTRAS);
values.remove(DownloadsColumns.COLUMN_BATCH_ID);
values.remove(DownloadsColumns.COLUMN_MIME_TYPE);
values.remove(DownloadsColumns.COLUMN_FILE_NAME_HINT); // checked later in insert()
values.remove(DownloadsColumns.COLUMN_ALLOWED_NETWORK_TYPES);
values.remove(DownloadsColumns.COLUMN_ALLOW_ROAMING);
values.remove(DownloadsColumns.COLUMN_ALLOW_METERED);
values.remove(DownloadsColumns.COLUMN_IS_VISIBLE_IN_DOWNLOADS_UI);
values.remove(DownloadsColumns.COLUMN_MEDIA_SCANNED);
Iterator<Map.Entry<String, Object>> iterator = values.valueSet().iterator();
while (iterator.hasNext()) {
String key = iterator.next().getKey();
if (key.startsWith(DownloadsColumnsRequestHeaders.INSERT_KEY_PREFIX)) {
iterator.remove();
}
}
// any extra columns are extraneous and disallowed
if (values.size() > 0) {
StringBuilder error = new StringBuilder("Invalid columns in request: ");
boolean first = true;
for (Map.Entry<String, Object> entry : values.valueSet()) {
if (!first) {
error.append(", ");
}
error.append(entry.getKey());
}
throw new SecurityException(error.toString());
}
}
/**
* Remove column from values, and throw a SecurityException if the value isn't within the
* specified allowedValues.
*/
private void enforceAllowedValues(ContentValues values, String column, Object... allowedValues) {
Object value = values.get(column);
values.remove(column);
for (Object allowedValue : allowedValues) {
if (value == null && allowedValue == null) {
return;
}
if (value != null && value.equals(allowedValue)) {
return;
}
}
throw new SecurityException("Invalid value for " + column + ": " + value);
}
/**
* Starts a database query
*/
@NonNull
@Override
public Cursor query(@NonNull Uri uri, String[] projection, String selection, String[] selectionArgs, String sort) {
Helpers.validateSelection(selection, APP_READABLE_COLUMNS_SET);
SQLiteDatabase db = openHelper.getReadableDatabase();
int match = URI_MATCHER.match(uri);
switch (match) {
case ALL_DOWNLOADS:
case ALL_DOWNLOADS_ID:
case MY_DOWNLOADS:
case MY_DOWNLOADS_ID:
return queryDownloads(uri, projection, selection, selectionArgs, sort, db, match);
case BATCHES:
case BATCHES_ID:
SqlSelection batchSelection = getWhereClause(uri, selection, selectionArgs, match);
return db.query(
DownloadsColumnsBatches.BATCHES_TABLE_NAME, projection, batchSelection.getSelection(),
batchSelection.getParameters(), null, null, sort);
case DOWNLOADS_BY_BATCH:
return db.query(DownloadsTables.VIEW_NAME_DOWNLOADS_BY_BATCH, projection, selection, selectionArgs, null, null, sort);
case REQUEST_HEADERS_URI:
if (projection != null || selection != null || sort != null) {
throw new UnsupportedOperationException(
"Request header queries do not support "
+ "projections, selections or sorting");
}
return queryRequestHeaders(db, uri);
default:
Log.v("querying unknown URI: " + uri);
throw new IllegalArgumentException("Unknown URI: " + uri);
}
}
@Nullable
private Cursor queryDownloads(Uri uri, String[] projection, String selection, String[] selectionArgs, String sort, SQLiteDatabase db, int match) {
SqlSelection fullSelection = getWhereClause(uri, selection, selectionArgs, match);
if (shouldRestrictVisibility()) {
if (projection == null) {
projection = APP_READABLE_COLUMNS_ARRAY.clone();
} else {
// check the validity of the columns in projection
for (int i = 0; i < projection.length; ++i) {
if (!APP_READABLE_COLUMNS_SET.contains(projection[i]) &&
!DOWNLOAD_MANAGER_COLUMNS_LIST.contains(projection[i])) {
throw new IllegalArgumentException(
"column " + projection[i] + " is not allowed in queries");
}
}
}
for (int i = 0; i < projection.length; i++) {
final String newColumn = COLUMNS_MAP.get(projection[i]);
if (newColumn != null) {
projection[i] = newColumn;
}
}
}
if (GlobalState.hasVerboseLogging()) {
logVerboseQueryInfo(projection, selection, selectionArgs, sort, db);
}
Cursor ret = db.query(
DownloadsTables.DOWNLOADS_TABLE_NAME, projection, fullSelection.getSelection(),
fullSelection.getParameters(), null, null, sort);
if (ret == null) {
Log.v("query failed in downloads database");
} else {
ret.setNotificationUri(getContext().getContentResolver(), uri);
Log.v("created cursor " + ret + " on behalf of " + Binder.getCallingPid());
}
return ret;
}
private void logVerboseQueryInfo(String[] projection,
final String selection,
final String[] selectionArgs,
final String sort,
SQLiteDatabase db) {
java.lang.StringBuilder sb = new java.lang.StringBuilder();
sb.append("starting query, database is ");
if (db != null) {
sb.append("not ");
}
sb.append("null; ");
if (projection == null) {
sb.append("projection is null; ");
} else if (projection.length == 0) {
sb.append("projection is empty; ");
} else {
for (int i = 0; i < projection.length; ++i) {
sb.append("projection[");
sb.append(i);
sb.append("] is ");
sb.append(projection[i]);
sb.append("; ");
}
}
sb.append("selection is ");
sb.append(selection);
sb.append("; ");
if (selectionArgs == null) {
sb.append("selectionArgs is null; ");
} else if (selectionArgs.length == 0) {
sb.append("selectionArgs is empty; ");
} else {
for (int i = 0; i < selectionArgs.length; ++i) {
sb.append("selectionArgs[");
sb.append(i);
sb.append("] is ");
sb.append(selectionArgs[i]);
sb.append("; ");
}
}
sb.append("sort is ");
sb.append(sort);
sb.append(".");
Log.v(sb.toString());
}
private String getDownloadIdFromUri(final Uri uri) {
return uri.getPathSegments().get(1);
}
/**
* Insert request headers for a download into the DB.
*/
private void insertRequestHeaders(SQLiteDatabase db, long downloadId, ContentValues values) {
ContentValues rowValues = new ContentValues();
rowValues.put(DownloadsColumnsRequestHeaders.COLUMN_DOWNLOAD_ID, downloadId);
for (Map.Entry<String, Object> entry : values.valueSet()) {
String key = entry.getKey();
if (key.startsWith(DownloadsColumnsRequestHeaders.INSERT_KEY_PREFIX)) {
String headerLine = entry.getValue().toString();
if (!headerLine.contains(":")) {
throw new IllegalArgumentException("Invalid HTTP header line: " + headerLine);
}
String[] parts = headerLine.split(":", 2);
rowValues.put(DownloadsColumnsRequestHeaders.COLUMN_HEADER, parts[0].trim());
rowValues.put(DownloadsColumnsRequestHeaders.COLUMN_VALUE, parts[1].trim());
db.insert(DownloadsColumnsRequestHeaders.HEADERS_DB_TABLE, null, rowValues);
}
}
}
/**
* Handle a query for the custom request headers registered for a download.
*/
private Cursor queryRequestHeaders(SQLiteDatabase db, Uri uri) {
String where = DownloadsColumnsRequestHeaders.COLUMN_DOWNLOAD_ID + "="
+ getDownloadIdFromUri(uri);
String[] projection = new String[]{DownloadsColumnsRequestHeaders.COLUMN_HEADER,
DownloadsColumnsRequestHeaders.COLUMN_VALUE};
return db.query(
DownloadsColumnsRequestHeaders.HEADERS_DB_TABLE, projection, where,
null, null, null, null);
}
/**
* Delete request headers for downloads matching the given query.
*/
private void deleteRequestHeaders(SQLiteDatabase db, String where, String[] whereArgs) {
String[] projection = new String[]{DownloadsColumns._ID};
Cursor cursor = db.query(DownloadsTables.DOWNLOADS_TABLE_NAME, projection, where, whereArgs, null, null, null, null);
try {
for (cursor.moveToFirst(); !cursor.isAfterLast(); cursor.moveToNext()) {
long id = cursor.getLong(0);
String idWhere = DownloadsColumnsRequestHeaders.COLUMN_DOWNLOAD_ID + "=" + id;
db.delete(DownloadsColumnsRequestHeaders.HEADERS_DB_TABLE, idWhere, null);
}
} finally {
cursor.close();
}
}
/**
* @return true if we should restrict the columns readable by this caller
*/
private boolean shouldRestrictVisibility() {
int callingUid = Binder.getCallingUid();
return Binder.getCallingPid() != Process.myPid() &&
callingUid != systemUid &&
callingUid != defcontaineruid;
}
/**
* Updates a row in the database
*/
@Override
public int update(final Uri uri, final ContentValues values, final String where, final String[] whereArgs) {
Helpers.validateSelection(where, APP_READABLE_COLUMNS_SET);
SQLiteDatabase db = openHelper.getWritableDatabase();
int count;
boolean startService = false;
if (values.containsKey(DownloadsColumns.COLUMN_DELETED)) {
if (values.getAsInteger(DownloadsColumns.COLUMN_DELETED) == 1) {
// some rows are to be 'deleted'. need to start DownloadService.
startService = true;
}
}
ContentValues filteredValues;
if (Binder.getCallingPid() != Process.myPid()) {
filteredValues = new ContentValues();
copyString(DownloadsColumns.COLUMN_APP_DATA, values, filteredValues);
Integer i = values.getAsInteger(DownloadsColumns.COLUMN_CONTROL);
if (i != null) {
filteredValues.put(DownloadsColumns.COLUMN_CONTROL, i);
startService = true;
}
copyInteger(DownloadsColumns.COLUMN_CONTROL, values, filteredValues);
copyString(DownloadsColumns.COLUMN_MEDIAPROVIDER_URI, values, filteredValues);
copyInteger(DownloadsColumns.COLUMN_DELETED, values, filteredValues);
} else {
filteredValues = values;
Integer status = values.getAsInteger(DownloadsColumns.COLUMN_STATUS);
boolean isRestart = status != null && status == DownloadsStatus.STATUS_PENDING;
boolean isUserBypassingSizeLimit =
values.containsKey(DownloadsColumns.COLUMN_BYPASS_RECOMMENDED_SIZE_LIMIT);
if (isRestart || isUserBypassingSizeLimit) {
startService = true;
}
}
int match = URI_MATCHER.match(uri);
switch (match) {
case MY_DOWNLOADS:
case MY_DOWNLOADS_ID:
case ALL_DOWNLOADS:
case ALL_DOWNLOADS_ID:
SqlSelection selection = getWhereClause(uri, where, whereArgs, match);
if (filteredValues.size() > 0) {
count = db.update(
DownloadsTables.DOWNLOADS_TABLE_NAME, filteredValues, selection.getSelection(),
selection.getParameters());
} else {
count = 0;
}
break;
case BATCHES:
case BATCHES_ID:
SqlSelection batchSelection = getWhereClause(uri, where, whereArgs, match);
count = db.update(
DownloadsColumnsBatches.BATCHES_TABLE_NAME, values, batchSelection.getSelection(),
batchSelection.getParameters());
break;
default:
Log.d("updating unknown/invalid URI: " + uri);
throw new UnsupportedOperationException("Cannot update URI: " + uri);
}
notifyContentChanged(uri, match);
if (startService) {
Context context = getContext();
context.startService(new Intent(context, DownloadService.class));
}
return count;
}
/**
* Notify of a change through both URIs (/my_downloads and /all_downloads)
*
* @param uri either URI for the changed download(s)
* @param uriMatch the match ID from {@link #URI_MATCHER}
*/
private void notifyContentChanged(final Uri uri, int uriMatch) {
Long downloadId = null;
if (uriMatch == MY_DOWNLOADS_ID || uriMatch == ALL_DOWNLOADS_ID) {
downloadId = Long.parseLong(getDownloadIdFromUri(uri));
}
for (Uri uriToNotify : baseUris) {
if (downloadId != null) {
uriToNotify = ContentUris.withAppendedId(uriToNotify, downloadId);
}
getContext().getContentResolver().notifyChange(uriToNotify, null);
}
}
private SqlSelection getWhereClause(final Uri uri, final String where, final String[] whereArgs,
int uriMatch) {
SqlSelection selection = new SqlSelection();
selection.appendClause(where, whereArgs);
if (uriMatch == MY_DOWNLOADS_ID || uriMatch == ALL_DOWNLOADS_ID ||
uriMatch == PUBLIC_DOWNLOAD_ID) {
selection.appendClause(DownloadsColumns._ID + " = ?", getDownloadIdFromUri(uri));
}
if (uriMatch == BATCHES_ID) {
selection.appendClause(DownloadsColumnsBatches._ID + " = ?", uri.getLastPathSegment());
}
if ((uriMatch == MY_DOWNLOADS || uriMatch == MY_DOWNLOADS_ID)
&& getContext().checkCallingPermission(DownloadsPermission.PERMISSION_ACCESS_ALL)
!= PackageManager.PERMISSION_GRANTED) {
String callingUid = String.valueOf(Binder.getCallingUid());
selection.appendClause(
Constants.UID + "= ? OR " + DownloadsColumns.COLUMN_OTHER_UID + "= ?",
callingUid, callingUid);
}
return selection;
}
/**
* Deletes a row in the database
*/
@Override
public int delete(@NonNull Uri uri, String where, String[] whereArgs) {
Helpers.validateSelection(where, APP_READABLE_COLUMNS_SET);
SQLiteDatabase db = openHelper.getWritableDatabase();
int count;
int match = URI_MATCHER.match(uri);
switch (match) {
case MY_DOWNLOADS:
case MY_DOWNLOADS_ID:
case ALL_DOWNLOADS:
case ALL_DOWNLOADS_ID:
SqlSelection selection = getWhereClause(uri, where, whereArgs, match);
deleteRequestHeaders(db, selection.getSelection(), selection.getParameters());
count = db.delete(DownloadsTables.DOWNLOADS_TABLE_NAME, selection.getSelection(), selection.getParameters());
break;
case BATCHES:
case BATCHES_ID:
SqlSelection batchSelection = getWhereClause(uri, where, whereArgs, match);
count = db.delete(DownloadsColumnsBatches.BATCHES_TABLE_NAME, batchSelection.getSelection(), batchSelection.getParameters());
break;
default:
Log.d("deleting unknown/invalid URI: " + uri);
throw new UnsupportedOperationException("Cannot delete URI: " + uri);
}
notifyContentChanged(uri, match);
return count;
}
/**
 * Remotely opens a download's backing file for reading.
 * <p>
 * Resolves the single row matching {@code uri}, reads its {@code _data} path
 * and returns a read-only descriptor for it. Fails with
 * {@link FileNotFoundException} when the row is missing or ambiguous, when no
 * path is stored, or when a mode other than {@code "r"} is requested.
 */
@Override
public ParcelFileDescriptor openFile(@NonNull Uri uri, String mode) throws FileNotFoundException {
if (GlobalState.hasVerboseLogging()) {
logVerboseOpenFileInfo(uri, mode);
}
// Look up the on-disk path stored in the row's "_data" column.
Cursor cursor = query(uri, new String[]{"_data"}, null, null, null);
String path;
try {
int count = (cursor != null) ? cursor.getCount() : 0;
if (count != 1) {
// If there is not exactly one result, throw an appropriate exception.
if (count == 0) {
throw new FileNotFoundException("No entry for " + uri);
}
throw new FileNotFoundException("Multiple items at " + uri);
}
cursor.moveToFirst();
path = cursor.getString(0);
} finally {
if (cursor != null) {
cursor.close();
}
}
if (path == null) {
throw new FileNotFoundException("No filename found.");
}
// NOTE(review): the filename-validity failure below is only logged -- the
// throw is deliberately commented out, so paths outside downloadsDataDir are
// still served. From a provider this is a potential arbitrary-file-read /
// path-traversal exposure; confirm this relaxation is intentional and scoped.
if (!Helpers.isFilenameValid(path, downloadsDataDir)) {
Log.d("INTERNAL FILE DOWNLOAD LOL COMMENTED EXCEPTION");
// throw new FileNotFoundException("Invalid filename: " + path);
}
// Only read-only access is supported by this provider.
if (!"r".equals(mode)) {
throw new FileNotFoundException("Bad mode for " + uri + ": " + mode);
}
ParcelFileDescriptor ret = ParcelFileDescriptor.open(new File(path),
ParcelFileDescriptor.MODE_READ_ONLY);
if (ret == null) {
Log.v("couldn't open file");
throw new FileNotFoundException("couldn't open file");
}
return ret;
}
// ContentProvider debug hook; this implementation keeps no dump-worthy state,
// so the override only logs that it was invoked instead of writing to `writer`.
@Override
public void dump(FileDescriptor fd, @NonNull PrintWriter writer, String[] args) {
Log.e("I want dump, but nothing to dump into");
}
/**
 * Verbose-logging aid for {@code openFile}: dumps the caller's uid, every
 * known download row id, and the filename stored for {@code uri}.
 */
private void logVerboseOpenFileInfo(Uri uri, String mode) {
    Log.v("openFile uri: " + uri + ", mode: " + mode
            + ", uid: " + Binder.getCallingUid());
    // First pass: enumerate all download ids known to this provider.
    Cursor allRows = query(
            downloadsUriProvider.getContentUri(),
            new String[]{"_id"}, null, null, "_id");
    if (allRows == null) {
        Log.v("null cursor in openFile");
    } else {
        if (allRows.moveToFirst()) {
            do {
                Log.v("row " + allRows.getInt(0) + " available");
            } while (allRows.moveToNext());
        } else {
            Log.v("empty cursor in openFile");
        }
        allRows.close();
    }
    // Second pass: report the stored path for the requested row, if any.
    Cursor pathRow = query(uri, new String[]{"_data"}, null, null, null);
    if (pathRow == null) {
        Log.v("null cursor in openFile");
    } else {
        if (pathRow.moveToFirst()) {
            String filename = pathRow.getString(0);
            Log.v("filename in openFile: " + filename);
            if (new java.io.File(filename).isFile()) {
                Log.v("file exists in openFile");
            }
        } else {
            Log.v("empty cursor in openFile");
        }
        pathRow.close();
    }
}
/** Copies the integer stored under {@code key} from one ContentValues to another; absent values are skipped. */
private static void copyInteger(String key, ContentValues from, ContentValues to) {
    Integer value = from.getAsInteger(key);
    if (value == null) {
        return;
    }
    to.put(key, value);
}
/** Copies the boolean stored under {@code key} from one ContentValues to another; absent values are skipped. */
private static void copyBoolean(String key, ContentValues from, ContentValues to) {
    Boolean value = from.getAsBoolean(key);
    if (value == null) {
        return;
    }
    to.put(key, value);
}
/** Copies the string stored under {@code key} from one ContentValues to another; absent values are skipped. */
private static void copyString(String key, ContentValues from, ContentValues to) {
    String value = from.getAsString(key);
    if (value == null) {
        return;
    }
    to.put(key, value);
}
}
| Move final baseUris near all the other final fields
| library/src/main/java/com/novoda/downloadmanager/lib/DownloadProvider.java | Move final baseUris near all the other final fields |
|
Java | apache-2.0 | 696f4a05f22a38079cdfa812d978b77b97e25b00 | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.actions;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorBundle;
import com.intellij.openapi.editor.actionSystem.EditorAction;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.impl.FontFallbackIterator;
import com.intellij.openapi.editor.impl.view.IterationState;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.JBTextArea;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import sun.font.CompositeFont;
import sun.font.Font2D;
import sun.font.FontSubstitution;
import javax.swing.*;
import java.awt.*;
import java.awt.font.FontRenderContext;
import java.lang.reflect.Method;
import java.util.List;
import java.util.*;
import java.util.function.IntUnaryOperator;
// IDE action that reports which fonts the current editor actually uses to
// render its document, presented as a sorted list in a read-only modal dialog.
public class ShowFontsUsedByEditorAction extends EditorAction {
private static final Logger LOG = Logger.getInstance(ShowFontsUsedByEditorAction.class);
public ShowFontsUsedByEditorAction() {
super(new Handler());
}
private static class Handler extends EditorActionHandler {
@Override
protected boolean isEnabledForCaret(@NotNull Editor editor, @NotNull Caret caret, DataContext dataContext) {
// Font collection below relies on EditorEx-only APIs (color scheme, iteration state).
return editor instanceof EditorEx;
}
@Override
protected void doExecute(@NotNull Editor editor, @Nullable Caret caret, DataContext dataContext) {
// Run under a cancellable modal progress task: scanning a large document may be slow.
new Task.Modal(editor.getProject(), EditorBundle.message("fonts.used.by.editor.progress"), true) {
private String textToShow;
@Override
public void run(@NotNull ProgressIndicator indicator) {
// Document/highlighting access must happen inside a read action.
Set<String> fontNames = ReadAction.compute(() -> collectFontNames((EditorEx)editor));
textToShow = StringUtil.join(fontNames, "\n");
}
@Override
public void onSuccess() {
if (textToShow != null) {
new MyDialog(textToShow).show();
}
}
}.queue();
}
// Walks the whole document, segmented by merged text attributes (font style),
// and accumulates the names of all fonts used; TreeSet keeps the output sorted.
private static Set<String> collectFontNames(@NotNull EditorEx editor) {
Set<String> result = new TreeSet<>();
Document document = editor.getDocument();
CharSequence text = document.getImmutableCharSequence();
IterationState it = new IterationState(editor, 0, document.getTextLength(), null, false, true, false, false);
FontFallbackIterator ffi = new FontFallbackIterator().setPreferredFonts(editor.getColorsScheme().getFontPreferences());
while (!it.atEnd()) {
ffi.setFontStyle(it.getMergedAttributes().getFontType());
int start = it.getStartOffset();
int end = it.getEndOffset();
for (int i = start; i < end; i++) {
// Split runs around CR/LF/TAB so those characters are excluded from font lookup.
if ("\r\n\t".indexOf(text.charAt(i)) >= 0) {
collectFontNames(result, text, start, i, ffi);
start = i + 1;
}
}
collectFontNames(result, text, start, end, ffi);
it.advance();
}
return result;
}
// Collects font names for one uniform-style run [startOffset, endOffset),
// iterating over the fallback fonts chosen for its characters.
private static void collectFontNames(@NotNull Set<String> result,
@NotNull CharSequence text,
int startOffset,
int endOffset,
@NotNull FontFallbackIterator ffi) {
if (startOffset >= endOffset) return;
ffi.start(text, startOffset, endOffset);
while (!ffi.atEnd()) {
Font font = ffi.getFont();
List<String> components = null;
try {
// Probes JDK-internal font APIs; may throw on unexpected runtimes.
components = AccessingInternalJdkFontApi.getRelevantComponents(font, text, ffi.getStart(), ffi.getEnd());
}
catch (Throwable e) {
LOG.debug(e);
}
if (components == null) {
// "(*)" marks entries where the exact physical font could not be resolved,
// so only the logical font name is reported.
result.add(font.getFontName() + " (*)");
}
else {
result.addAll(components);
}
ffi.advance();
}
}
// Minimal read-only dialog displaying the collected font names.
private static class MyDialog extends DialogWrapper {
private final JBTextArea myTextArea;
private MyDialog(String text) {
super(false);
setTitle(EditorBundle.message("fonts.used.by.editor.title"));
myTextArea = new JBTextArea(text, 10, 50);
myTextArea.setEditable(false);
init();
}
@Override
protected Action @NotNull [] createActions() {
// Informational dialog: no OK/Cancel buttons.
return new Action[0];
}
@Override
protected JComponent createCenterPanel() {
return new JBScrollPane(myTextArea);
}
}
}
}
// Reaches into JDK-internal sun.font APIs (Font.getFont2D obtained via
// reflection) to resolve which physical font components actually render a run.
class AccessingInternalJdkFontApi {
private static final Method GET_FONT_2D_METHOD = ReflectionUtil.getDeclaredMethod(Font.class, "getFont2D");
private static final FontRenderContext DUMMY_CONTEXT = new FontRenderContext(null, false, false);
@SuppressWarnings("InstanceofIncompatibleInterface")
static List<String> getRelevantComponents(@NotNull Font font, @NotNull CharSequence text, int startOffset, int endOffset)
throws Exception {
if (GET_FONT_2D_METHOD != null) {
Font2D font2D = (Font2D)GET_FONT_2D_METHOD.invoke(font);
if (font2D != null) {
CompositeFont compositeFont = null;
IntUnaryOperator charToGlyphMapper = null;
if (font2D instanceof CompositeFont) {
compositeFont = (CompositeFont)font2D;
charToGlyphMapper = c -> font2D.charToGlyph(c);
}
else if (font2D instanceof FontSubstitution) {
compositeFont = ((FontSubstitution)font2D).getCompositeFont2D();
// No direct char->glyph mapping here; derive the glyph code via a glyph vector.
charToGlyphMapper = c -> font.createGlyphVector(DUMMY_CONTEXT, new String(new int[]{c}, 0, 1)).getGlyphCode(0);
}
List<Font2D> components = new ArrayList<>();
if (compositeFont == null) {
components.add(font2D);
}
else {
// Map each code point to the composite-font component ("slot") that renders it.
for (int i = startOffset; i < endOffset; ) {
int codePoint = Character.codePointAt(text, i);
int glyph = charToGlyphMapper.applyAsInt(codePoint);
// Slot index is taken from the top byte of the glyph code -- relies on
// sun.font's composite-glyph encoding, an undocumented JDK internal.
int slot = glyph >>> 24;
components.add(compositeFont.getSlotFont(slot));
i += Character.charCount(codePoint);
}
}
return ContainerUtil.map(components, f -> f.getFontName(null));
}
}
// null signals to the caller that exact physical-font info is unavailable.
return null;
}
}
| platform/platform-impl/src/com/intellij/openapi/editor/actions/ShowFontsUsedByEditorAction.java | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.actions;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorBundle;
import com.intellij.openapi.editor.actionSystem.EditorAction;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.impl.FontFallbackIterator;
import com.intellij.openapi.editor.impl.view.IterationState;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.JBTextArea;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import sun.font.CompositeFont;
import sun.font.Font2D;
import sun.font.FontSubstitution;
import javax.swing.*;
import java.awt.*;
import java.awt.font.FontRenderContext;
import java.lang.reflect.Method;
import java.util.List;
import java.util.*;
import java.util.function.IntUnaryOperator;
// IDE action that reports which fonts the current editor actually uses to
// render its document, presented as a sorted list in a read-only modal dialog.
public class ShowFontsUsedByEditorAction extends EditorAction {
public ShowFontsUsedByEditorAction() {
super(new Handler());
}
private static class Handler extends EditorActionHandler {
@Override
protected boolean isEnabledForCaret(@NotNull Editor editor, @NotNull Caret caret, DataContext dataContext) {
// Font collection below relies on EditorEx-only APIs (color scheme, iteration state).
return editor instanceof EditorEx;
}
@Override
protected void doExecute(@NotNull Editor editor, @Nullable Caret caret, DataContext dataContext) {
// Run under a cancellable modal progress task: scanning a large document may be slow.
new Task.Modal(editor.getProject(), EditorBundle.message("fonts.used.by.editor.progress"), true) {
private String textToShow;
@Override
public void run(@NotNull ProgressIndicator indicator) {
// Document/highlighting access must happen inside a read action.
Set<String> fontNames = ReadAction.compute(() -> collectFontNames((EditorEx)editor));
textToShow = StringUtil.join(fontNames, "\n");
}
@Override
public void onSuccess() {
if (textToShow != null) {
new MyDialog(textToShow).show();
}
}
}.queue();
}
// Walks the whole document, segmented by merged text attributes (font style),
// and accumulates the names of all fonts used; TreeSet keeps the output sorted.
private static Set<String> collectFontNames(@NotNull EditorEx editor) {
Set<String> result = new TreeSet<>();
Document document = editor.getDocument();
CharSequence text = document.getImmutableCharSequence();
IterationState it = new IterationState(editor, 0, document.getTextLength(), null, false, true, false, false);
FontFallbackIterator ffi = new FontFallbackIterator().setPreferredFonts(editor.getColorsScheme().getFontPreferences());
while (!it.atEnd()) {
ffi.setFontStyle(it.getMergedAttributes().getFontType());
int start = it.getStartOffset();
int end = it.getEndOffset();
for (int i = start; i < end; i++) {
// Split runs around CR/LF/TAB so those characters are excluded from font lookup.
if ("\r\n\t".indexOf(text.charAt(i)) >= 0) {
collectFontNames(result, text, start, i, ffi);
start = i + 1;
}
}
collectFontNames(result, text, start, end, ffi);
it.advance();
}
return result;
}
// Collects font names for one uniform-style run [startOffset, endOffset),
// iterating over the fallback fonts chosen for its characters.
private static void collectFontNames(@NotNull Set<String> result,
@NotNull CharSequence text,
int startOffset,
int endOffset,
@NotNull FontFallbackIterator ffi) {
if (startOffset >= endOffset) return;
ffi.start(text, startOffset, endOffset);
while (!ffi.atEnd()) {
Font font = ffi.getFont();
List<String> components = null;
try {
// Probes JDK-internal font APIs; may throw on unexpected runtimes.
components = AccessingInternalJdkFontApi.getRelevantComponents(font, text, ffi.getStart(), ffi.getEnd());
}
// NOTE(review): failures of the internal-API probe are silently swallowed;
// consider at least debug-logging so resolution problems are diagnosable.
catch (Throwable ignored) {}
if (components == null) {
// Fall back to the logical font name when exact components are unknown.
result.add(font.getFontName());
}
else {
result.addAll(components);
}
ffi.advance();
}
}
// Minimal read-only dialog displaying the collected font names.
private static class MyDialog extends DialogWrapper {
private final JBTextArea myTextArea;
private MyDialog(String text) {
super(false);
setTitle(EditorBundle.message("fonts.used.by.editor.title"));
myTextArea = new JBTextArea(text, 10, 50);
myTextArea.setEditable(false);
init();
}
@Override
protected Action @NotNull [] createActions() {
// Informational dialog: no OK/Cancel buttons.
return new Action[0];
}
@Override
protected JComponent createCenterPanel() {
return new JBScrollPane(myTextArea);
}
}
}
}
// Reaches into JDK-internal sun.font APIs (Font.getFont2D obtained via
// reflection) to resolve which physical font components actually render a run.
class AccessingInternalJdkFontApi {
private static final Method GET_FONT_2D_METHOD = ReflectionUtil.getDeclaredMethod(Font.class, "getFont2D");
private static final FontRenderContext DUMMY_CONTEXT = new FontRenderContext(null, false, false);
@SuppressWarnings("InstanceofIncompatibleInterface")
static List<String> getRelevantComponents(@NotNull Font font, @NotNull CharSequence text, int startOffset, int endOffset) {
// NOTE(review): the blanket catch below hides every reflection/internal-API
// failure behind a null return; consider logging for diagnosability.
try {
if (GET_FONT_2D_METHOD != null) {
Font2D font2D = (Font2D)GET_FONT_2D_METHOD.invoke(font);
if (font2D != null) {
CompositeFont compositeFont = null;
IntUnaryOperator charToGlyphMapper = null;
if (font2D instanceof CompositeFont) {
compositeFont = (CompositeFont)font2D;
charToGlyphMapper = c -> font2D.charToGlyph(c);
}
else if (font2D instanceof FontSubstitution) {
compositeFont = ((FontSubstitution)font2D).getCompositeFont2D();
// No direct char->glyph mapping here; derive the glyph code via a glyph vector.
charToGlyphMapper = c -> font.createGlyphVector(DUMMY_CONTEXT, new String(new int[]{c}, 0, 1)).getGlyphCode(0);
}
List<Font2D> components = new ArrayList<>();
if (compositeFont == null) {
components.add(font2D);
}
else {
// Map each code point to the composite-font component ("slot") that renders it.
for (int i = startOffset; i < endOffset; ) {
int codePoint = Character.codePointAt(text, i);
int glyph = charToGlyphMapper.applyAsInt(codePoint);
// Slot index is taken from the top byte of the glyph code -- relies on
// sun.font's composite-glyph encoding, an undocumented JDK internal.
int slot = glyph >>> 24;
components.add(compositeFont.getSlotFont(slot));
i += Character.charCount(codePoint);
}
}
return ContainerUtil.map(components, f -> f.getFontName(null));
}
}
}
catch (Throwable ignored) {}
// null signals to the caller that exact physical-font info is unavailable.
return null;
}
}
| IDEA-232177 Add the way to find out what fonts are actually used in editor component to render the text
make it visible in UI when extracting exact font name using JDK internal APIs failed
GitOrigin-RevId: 351fbe0dbf5efec85f33c5d56a8fab5c8ce2fd14 | platform/platform-impl/src/com/intellij/openapi/editor/actions/ShowFontsUsedByEditorAction.java | IDEA-232177 Add the way to find out what fonts are actually used in editor component to render the text |
|
Java | apache-2.0 | fdfdf594f9d5d05f42092e2c13aba17402a667db | 0 | Shepard1212/elasticsearch,jimczi/elasticsearch,areek/elasticsearch,njlawton/elasticsearch,Kakakakakku/elasticsearch,kingaj/elasticsearch,wuranbo/elasticsearch,coding0011/elasticsearch,fekaputra/elasticsearch,zeroctu/elasticsearch,tahaemin/elasticsearch,caengcjd/elasticsearch,jw0201/elastic,dongjoon-hyun/elasticsearch,codebunt/elasticsearch,pozhidaevak/elasticsearch,snikch/elasticsearch,kenshin233/elasticsearch,kevinkluge/elasticsearch,vvcephei/elasticsearch,StefanGor/elasticsearch,wbowling/elasticsearch,kingaj/elasticsearch,cnfire/elasticsearch-1,yongminxia/elasticsearch,loconsolutions/elasticsearch,davidvgalbraith/elasticsearch,HarishAtGitHub/elasticsearch,episerver/elasticsearch,beiske/elasticsearch,18098924759/elasticsearch,andrestc/elasticsearch,acchen97/elasticsearch,Widen/elasticsearch,tahaemin/elasticsearch,fooljohnny/elasticsearch,zkidkid/elasticsearch,wenpos/elasticsearch,javachengwc/elasticsearch,lightslife/elasticsearch,LewayneNaidoo/elasticsearch,GlenRSmith/elasticsearch,IanvsPoplicola/elasticsearch,jbertouch/elasticsearch,schonfeld/elasticsearch,sarwarbhuiyan/elasticsearch,Chhunlong/elasticsearch,yongminxia/elasticsearch,Clairebi/ElasticsearchClone,yynil/elasticsearch,adrianbk/elasticsearch,ydsakyclguozi/elasticsearch,andrejserafim/elasticsearch,ulkas/elasticsearch,bawse/elasticsearch,EasonYi/elasticsearch,fforbeck/elasticsearch,apepper/elasticsearch,wangyuxue/elasticsearch,Clairebi/ElasticsearchClone,iantruslove/elasticsearch,trangvh/elasticsearch,awislowski/elasticsearch,artnowo/elasticsearch,pablocastro/elasticsearch,wangyuxue/elasticsearch,mrorii/elasticsearch,JSCooke/elasticsearch,C-Bish/elasticsearch,markwalkom/elasticsearch,mnylen/elasticsearch,polyfractal/elasticsearch,tkssharma/elasticsearch,fred84/elasticsearch,brandonkearby/elasticsearch,geidies/elasticsearch,lydonchandra/elasticsearch,golubev/elasticsearch,ouyangkongtong/elasticsearch,episerver/elasticsearch,bawse/elasticsear
ch,markharwood/elasticsearch,lzo/elasticsearch-1,ckclark/elasticsearch,TonyChai24/ESSource,dylan8902/elasticsearch,lchennup/elasticsearch,markllama/elasticsearch,queirozfcom/elasticsearch,linglaiyao1314/elasticsearch,skearns64/elasticsearch,nellicus/elasticsearch,huanzhong/elasticsearch,mjason3/elasticsearch,kenshin233/elasticsearch,jimhooker2002/elasticsearch,wayeast/elasticsearch,mapr/elasticsearch,MisterAndersen/elasticsearch,kalburgimanjunath/elasticsearch,hirdesh2008/elasticsearch,thecocce/elasticsearch,springning/elasticsearch,IanvsPoplicola/elasticsearch,wangtuo/elasticsearch,JervyShi/elasticsearch,franklanganke/elasticsearch,kimimj/elasticsearch,AshishThakur/elasticsearch,18098924759/elasticsearch,aglne/elasticsearch,masterweb121/elasticsearch,karthikjaps/elasticsearch,njlawton/elasticsearch,yuy168/elasticsearch,Kakakakakku/elasticsearch,koxa29/elasticsearch,Siddartha07/elasticsearch,mgalushka/elasticsearch,jpountz/elasticsearch,loconsolutions/elasticsearch,NBSW/elasticsearch,EasonYi/elasticsearch,robin13/elasticsearch,phani546/elasticsearch,i-am-Nathan/elasticsearch,s1monw/elasticsearch,springning/elasticsearch,mgalushka/elasticsearch,s1monw/elasticsearch,Rygbee/elasticsearch,nrkkalyan/elasticsearch,mohit/elasticsearch,snikch/elasticsearch,zeroctu/elasticsearch,slavau/elasticsearch,ivansun1010/elasticsearch,socialrank/elasticsearch,18098924759/elasticsearch,ulkas/elasticsearch,kalimatas/elasticsearch,TonyChai24/ESSource,maddin2016/elasticsearch,hirdesh2008/elasticsearch,HarishAtGitHub/elasticsearch,wbowling/elasticsearch,amit-shar/elasticsearch,dpursehouse/elasticsearch,Kakakakakku/elasticsearch,djschny/elasticsearch,iantruslove/elasticsearch,wimvds/elasticsearch,myelin/elasticsearch,tahaemin/elasticsearch,lydonchandra/elasticsearch,mikemccand/elasticsearch,YosuaMichael/elasticsearch,andrestc/elasticsearch,Chhunlong/elasticsearch,amit-shar/elasticsearch,winstonewert/elasticsearch,andrestc/elasticsearch,adrianbk/elasticsearch,palecur/elasticsearch,NBSW/elast
icsearch,TonyChai24/ESSource,Charlesdong/elasticsearch,andrestc/elasticsearch,rlugojr/elasticsearch,s1monw/elasticsearch,strapdata/elassandra,Liziyao/elasticsearch,linglaiyao1314/elasticsearch,mnylen/elasticsearch,masterweb121/elasticsearch,thecocce/elasticsearch,snikch/elasticsearch,awislowski/elasticsearch,weipinghe/elasticsearch,dpursehouse/elasticsearch,mapr/elasticsearch,xpandan/elasticsearch,MetSystem/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,nilabhsagar/elasticsearch,brandonkearby/elasticsearch,sauravmondallive/elasticsearch,umeshdangat/elasticsearch,wenpos/elasticsearch,NBSW/elasticsearch,vingupta3/elasticsearch,Liziyao/elasticsearch,ivansun1010/elasticsearch,glefloch/elasticsearch,kimimj/elasticsearch,feiqitian/elasticsearch,yanjunh/elasticsearch,zhiqinghuang/elasticsearch,spiegela/elasticsearch,ThalaivaStars/OrgRepo1,mute/elasticsearch,djschny/elasticsearch,hydro2k/elasticsearch,camilojd/elasticsearch,jango2015/elasticsearch,aglne/elasticsearch,milodky/elasticsearch,camilojd/elasticsearch,Collaborne/elasticsearch,dataduke/elasticsearch,fernandozhu/elasticsearch,easonC/elasticsearch,Shepard1212/elasticsearch,onegambler/elasticsearch,abibell/elasticsearch,aglne/elasticsearch,jango2015/elasticsearch,easonC/elasticsearch,fernandozhu/elasticsearch,yanjunh/elasticsearch,Uiho/elasticsearch,andrejserafim/elasticsearch,vrkansagara/elasticsearch,Stacey-Gammon/elasticsearch,weipinghe/elasticsearch,pozhidaevak/elasticsearch,cnfire/elasticsearch-1,Widen/elasticsearch,dongjoon-hyun/elasticsearch,sdauletau/elasticsearch,sposam/elasticsearch,beiske/elasticsearch,fooljohnny/elasticsearch,golubev/elasticsearch,hechunwen/elasticsearch,karthikjaps/elasticsearch,mohit/elasticsearch,geidies/elasticsearch,abibell/elasticsearch,kimimj/elasticsearch,socialrank/elasticsearch,schonfeld/elasticsearch,s1monw/elasticsearch,myelin/elasticsearch,markllama/elasticsearch,hafkensite/elasticsearch,fernandozhu/elasticsearch,mcku/elasticsearch,cnfire/elasticsearch-1,jbertouch/elastics
earch,ZTE-PaaS/elasticsearch,kunallimaye/elasticsearch,markllama/elasticsearch,jbertouch/elasticsearch,coding0011/elasticsearch,truemped/elasticsearch,uschindler/elasticsearch,kenshin233/elasticsearch,gfyoung/elasticsearch,markharwood/elasticsearch,kaneshin/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,iamjakob/elasticsearch,hechunwen/elasticsearch,ouyangkongtong/elasticsearch,shreejay/elasticsearch,jeteve/elasticsearch,KimTaehee/elasticsearch,btiernay/elasticsearch,beiske/elasticsearch,kunallimaye/elasticsearch,overcome/elasticsearch,markwalkom/elasticsearch,socialrank/elasticsearch,rmuir/elasticsearch,mmaracic/elasticsearch,GlenRSmith/elasticsearch,queirozfcom/elasticsearch,rmuir/elasticsearch,kimimj/elasticsearch,zeroctu/elasticsearch,mkis-/elasticsearch,jchampion/elasticsearch,davidvgalbraith/elasticsearch,iamjakob/elasticsearch,yynil/elasticsearch,karthikjaps/elasticsearch,sposam/elasticsearch,mjason3/elasticsearch,rento19962/elasticsearch,LeoYao/elasticsearch,mbrukman/elasticsearch,Stacey-Gammon/elasticsearch,MaineC/elasticsearch,truemped/elasticsearch,huanzhong/elasticsearch,elasticdog/elasticsearch,MisterAndersen/elasticsearch,amaliujia/elasticsearch,iacdingping/elasticsearch,areek/elasticsearch,infusionsoft/elasticsearch,infusionsoft/elasticsearch,MichaelLiZhou/elasticsearch,fred84/elasticsearch,trangvh/elasticsearch,KimTaehee/elasticsearch,YosuaMichael/elasticsearch,Liziyao/elasticsearch,wittyameta/elasticsearch,18098924759/elasticsearch,ZTE-PaaS/elasticsearch,golubev/elasticsearch,i-am-Nathan/elasticsearch,sauravmondallive/elasticsearch,fforbeck/elasticsearch,sreeramjayan/elasticsearch,mkis-/elasticsearch,truemped/elasticsearch,ydsakyclguozi/elasticsearch,rhoml/elasticsearch,18098924759/elasticsearch,Shekharrajak/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,lydonchandra/elasticsearch,karthikjaps/elasticsearch,zkidkid/elasticsearch,liweinan0423/elasticsearch,trangvh/elasticsearch,huypx1292/elasticsearch,Rygbee/elasticsearch,jaynblue/elasticsearc
h,markllama/elasticsearch,xingguang2013/elasticsearch,mm0/elasticsearch,sauravmondallive/elasticsearch,markharwood/elasticsearch,hanst/elasticsearch,areek/elasticsearch,MjAbuz/elasticsearch,MjAbuz/elasticsearch,mnylen/elasticsearch,jimhooker2002/elasticsearch,uschindler/elasticsearch,geidies/elasticsearch,Ansh90/elasticsearch,humandb/elasticsearch,khiraiwa/elasticsearch,yongminxia/elasticsearch,vietlq/elasticsearch,ricardocerq/elasticsearch,tebriel/elasticsearch,adrianbk/elasticsearch,loconsolutions/elasticsearch,lchennup/elasticsearch,xuzha/elasticsearch,markllama/elasticsearch,lightslife/elasticsearch,ulkas/elasticsearch,nknize/elasticsearch,jbertouch/elasticsearch,AndreKR/elasticsearch,gingerwizard/elasticsearch,ThalaivaStars/OrgRepo1,alexkuk/elasticsearch,pritishppai/elasticsearch,C-Bish/elasticsearch,NBSW/elasticsearch,mohit/elasticsearch,lydonchandra/elasticsearch,jprante/elasticsearch,diendt/elasticsearch,Ansh90/elasticsearch,bestwpw/elasticsearch,Helen-Zhao/elasticsearch,bawse/elasticsearch,lightslife/elasticsearch,ricardocerq/elasticsearch,markllama/elasticsearch,Helen-Zhao/elasticsearch,mbrukman/elasticsearch,mortonsykes/elasticsearch,strapdata/elassandra-test,pozhidaevak/elasticsearch,sc0ttkclark/elasticsearch,pranavraman/elasticsearch,alexshadow007/elasticsearch,davidvgalbraith/elasticsearch,Widen/elasticsearch,wenpos/elasticsearch,sneivandt/elasticsearch,Brijeshrpatel9/elasticsearch,nellicus/elasticsearch,hydro2k/elasticsearch,elancom/elasticsearch,Rygbee/elasticsearch,Widen/elasticsearch,PhaedrusTheGreek/elasticsearch,lydonchandra/elasticsearch,schonfeld/elasticsearch,chirilo/elasticsearch,sreeramjayan/elasticsearch,qwerty4030/elasticsearch,winstonewert/elasticsearch,szroland/elasticsearch,elancom/elasticsearch,javachengwc/elasticsearch,bestwpw/elasticsearch,avikurapati/elasticsearch,mm0/elasticsearch,geidies/elasticsearch,jeteve/elasticsearch,qwerty4030/elasticsearch,kevinkluge/elasticsearch,wenpos/elasticsearch,kalimatas/elasticsearch,hirdesh2008/ela
sticsearch,pranavraman/elasticsearch,wangtuo/elasticsearch,yongminxia/elasticsearch,strapdata/elassandra-test,jchampion/elasticsearch,apepper/elasticsearch,rento19962/elasticsearch,mnylen/elasticsearch,C-Bish/elasticsearch,abibell/elasticsearch,ThiagoGarciaAlves/elasticsearch,HonzaKral/elasticsearch,sreeramjayan/elasticsearch,jimhooker2002/elasticsearch,Shekharrajak/elasticsearch,jimczi/elasticsearch,alexshadow007/elasticsearch,petabytedata/elasticsearch,bawse/elasticsearch,MaineC/elasticsearch,TonyChai24/ESSource,achow/elasticsearch,dongjoon-hyun/elasticsearch,palecur/elasticsearch,springning/elasticsearch,mm0/elasticsearch,brandonkearby/elasticsearch,tebriel/elasticsearch,loconsolutions/elasticsearch,masaruh/elasticsearch,pritishppai/elasticsearch,JSCooke/elasticsearch,rento19962/elasticsearch,nrkkalyan/elasticsearch,Rygbee/elasticsearch,elasticdog/elasticsearch,milodky/elasticsearch,IanvsPoplicola/elasticsearch,cnfire/elasticsearch-1,kalburgimanjunath/elasticsearch,strapdata/elassandra-test,gfyoung/elasticsearch,linglaiyao1314/elasticsearch,naveenhooda2000/elasticsearch,knight1128/elasticsearch,queirozfcom/elasticsearch,yongminxia/elasticsearch,MisterAndersen/elasticsearch,overcome/elasticsearch,MisterAndersen/elasticsearch,kimimj/elasticsearch,xingguang2013/elasticsearch,nellicus/elasticsearch,Liziyao/elasticsearch,spiegela/elasticsearch,mgalushka/elasticsearch,knight1128/elasticsearch,hirdesh2008/elasticsearch,jimhooker2002/elasticsearch,vietlq/elasticsearch,pablocastro/elasticsearch,Ansh90/elasticsearch,caengcjd/elasticsearch,naveenhooda2000/elasticsearch,aglne/elasticsearch,xuzha/elasticsearch,hydro2k/elasticsearch,kingaj/elasticsearch,AndreKR/elasticsearch,huypx1292/elasticsearch,mbrukman/elasticsearch,adrianbk/elasticsearch,vingupta3/elasticsearch,pablocastro/elasticsearch,fekaputra/elasticsearch,nknize/elasticsearch,njlawton/elasticsearch,gmarz/elasticsearch,wayeast/elasticsearch,geidies/elasticsearch,vroyer/elassandra,strapdata/elassandra-test,ulkas/elast
icsearch,Brijeshrpatel9/elasticsearch,AndreKR/elasticsearch,lydonchandra/elasticsearch,yuy168/elasticsearch,YosuaMichael/elasticsearch,slavau/elasticsearch,dataduke/elasticsearch,fekaputra/elasticsearch,kalburgimanjunath/elasticsearch,caengcjd/elasticsearch,rajanm/elasticsearch,jango2015/elasticsearch,loconsolutions/elasticsearch,mm0/elasticsearch,artnowo/elasticsearch,Stacey-Gammon/elasticsearch,MjAbuz/elasticsearch,KimTaehee/elasticsearch,karthikjaps/elasticsearch,jchampion/elasticsearch,MaineC/elasticsearch,easonC/elasticsearch,wittyameta/elasticsearch,xuzha/elasticsearch,Uiho/elasticsearch,JervyShi/elasticsearch,luiseduardohdbackup/elasticsearch,weipinghe/elasticsearch,kcompher/elasticsearch,ckclark/elasticsearch,rhoml/elasticsearch,Ansh90/elasticsearch,liweinan0423/elasticsearch,lchennup/elasticsearch,sneivandt/elasticsearch,beiske/elasticsearch,szroland/elasticsearch,obourgain/elasticsearch,palecur/elasticsearch,AndreKR/elasticsearch,drewr/elasticsearch,Shepard1212/elasticsearch,gmarz/elasticsearch,luiseduardohdbackup/elasticsearch,himanshuag/elasticsearch,kevinkluge/elasticsearch,socialrank/elasticsearch,PhaedrusTheGreek/elasticsearch,Brijeshrpatel9/elasticsearch,likaiwalkman/elasticsearch,MetSystem/elasticsearch,iantruslove/elasticsearch,Fsero/elasticsearch,uschindler/elasticsearch,ivansun1010/elasticsearch,alexbrasetvik/elasticsearch,trangvh/elasticsearch,mkis-/elasticsearch,himanshuag/elasticsearch,sposam/elasticsearch,skearns64/elasticsearch,truemped/elasticsearch,sarwarbhuiyan/elasticsearch,djschny/elasticsearch,lmtwga/elasticsearch,andrestc/elasticsearch,dylan8902/elasticsearch,a2lin/elasticsearch,queirozfcom/elasticsearch,easonC/elasticsearch,andrejserafim/elasticsearch,mcku/elasticsearch,avikurapati/elasticsearch,knight1128/elasticsearch,gfyoung/elasticsearch,bestwpw/elasticsearch,Flipkart/elasticsearch,jango2015/elasticsearch,artnowo/elasticsearch,pablocastro/elasticsearch,Siddartha07/elasticsearch,mute/elasticsearch,jimczi/elasticsearch,scottsom/ela
sticsearch,rlugojr/elasticsearch,dongjoon-hyun/elasticsearch,GlenRSmith/elasticsearch,ckclark/elasticsearch,skearns64/elasticsearch,mjhennig/elasticsearch,jpountz/elasticsearch,MetSystem/elasticsearch,amit-shar/elasticsearch,wayeast/elasticsearch,Siddartha07/elasticsearch,xpandan/elasticsearch,avikurapati/elasticsearch,MjAbuz/elasticsearch,ESamir/elasticsearch,xuzha/elasticsearch,rlugojr/elasticsearch,hechunwen/elasticsearch,humandb/elasticsearch,dylan8902/elasticsearch,drewr/elasticsearch,vvcephei/elasticsearch,luiseduardohdbackup/elasticsearch,slavau/elasticsearch,anti-social/elasticsearch,mortonsykes/elasticsearch,hafkensite/elasticsearch,mbrukman/elasticsearch,JackyMai/elasticsearch,strapdata/elassandra5-rc,yanjunh/elasticsearch,humandb/elasticsearch,sreeramjayan/elasticsearch,kalburgimanjunath/elasticsearch,lightslife/elasticsearch,humandb/elasticsearch,episerver/elasticsearch,mikemccand/elasticsearch,TonyChai24/ESSource,drewr/elasticsearch,apepper/elasticsearch,apepper/elasticsearch,wimvds/elasticsearch,lydonchandra/elasticsearch,avikurapati/elasticsearch,lzo/elasticsearch-1,jsgao0/elasticsearch,sdauletau/elasticsearch,mmaracic/elasticsearch,elancom/elasticsearch,kenshin233/elasticsearch,dataduke/elasticsearch,kunallimaye/elasticsearch,jprante/elasticsearch,lmtwga/elasticsearch,hirdesh2008/elasticsearch,wbowling/elasticsearch,a2lin/elasticsearch,gmarz/elasticsearch,iantruslove/elasticsearch,rhoml/elasticsearch,sauravmondallive/elasticsearch,xpandan/elasticsearch,kevinkluge/elasticsearch,huypx1292/elasticsearch,zkidkid/elasticsearch,rhoml/elasticsearch,C-Bish/elasticsearch,kingaj/elasticsearch,EasonYi/elasticsearch,wimvds/elasticsearch,jimhooker2002/elasticsearch,obourgain/elasticsearch,mrorii/elasticsearch,mjason3/elasticsearch,masaruh/elasticsearch,tahaemin/elasticsearch,masterweb121/elasticsearch,jpountz/elasticsearch,F0lha/elasticsearch,hanst/elasticsearch,fooljohnny/elasticsearch,Fsero/elasticsearch,koxa29/elasticsearch,onegambler/elasticsearch,pozhidaevak
/elasticsearch,ESamir/elasticsearch,strapdata/elassandra-test,ZTE-PaaS/elasticsearch,glefloch/elasticsearch,ESamir/elasticsearch,mikemccand/elasticsearch,lks21c/elasticsearch,pablocastro/elasticsearch,Fsero/elasticsearch,chirilo/elasticsearch,18098924759/elasticsearch,abibell/elasticsearch,onegambler/elasticsearch,wbowling/elasticsearch,areek/elasticsearch,hafkensite/elasticsearch,linglaiyao1314/elasticsearch,polyfractal/elasticsearch,sneivandt/elasticsearch,scottsom/elasticsearch,SergVro/elasticsearch,jbertouch/elasticsearch,JackyMai/elasticsearch,ulkas/elasticsearch,vietlq/elasticsearch,gmarz/elasticsearch,himanshuag/elasticsearch,vroyer/elasticassandra,liweinan0423/elasticsearch,Shepard1212/elasticsearch,LeoYao/elasticsearch,himanshuag/elasticsearch,Ansh90/elasticsearch,lightslife/elasticsearch,ouyangkongtong/elasticsearch,mute/elasticsearch,Collaborne/elasticsearch,jchampion/elasticsearch,Collaborne/elasticsearch,kaneshin/elasticsearch,vrkansagara/elasticsearch,ivansun1010/elasticsearch,beiske/elasticsearch,ouyangkongtong/elasticsearch,maddin2016/elasticsearch,Clairebi/ElasticsearchClone,masterweb121/elasticsearch,markwalkom/elasticsearch,Charlesdong/elasticsearch,kaneshin/elasticsearch,jeteve/elasticsearch,nazarewk/elasticsearch,ydsakyclguozi/elasticsearch,fernandozhu/elasticsearch,18098924759/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,mm0/elasticsearch,AshishThakur/elasticsearch,onegambler/elasticsearch,humandb/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,tebriel/elasticsearch,tsohil/elasticsearch,glefloch/elasticsearch,LeoYao/elasticsearch,yuy168/elasticsearch,amaliujia/elasticsearch,pranavraman/elasticsearch,wittyameta/elasticsearch,umeshdangat/elasticsearch,mmaracic/elasticsearch,chirilo/elasticsearch,winstonewert/elasticsearch,loconsolutions/elasticsearch,andrejserafim/elasticsearch,huypx1292/elasticsearch,obourgain/elasticsearch,ImpressTV/elasticsearch,iantruslove/elasticsearch,Chhunlong/elasticsearch,avikurapati/elasticsearch,weipinghe/elas
ticsearch,kubum/elasticsearch,yongminxia/elasticsearch,Brijeshrpatel9/elasticsearch,EasonYi/elasticsearch,vroyer/elassandra,kevinkluge/elasticsearch,Collaborne/elasticsearch,strapdata/elassandra5-rc,martinstuga/elasticsearch,nilabhsagar/elasticsearch,fforbeck/elasticsearch,davidvgalbraith/elasticsearch,yynil/elasticsearch,ckclark/elasticsearch,ESamir/elasticsearch,andrestc/elasticsearch,jaynblue/elasticsearch,kalburgimanjunath/elasticsearch,JervyShi/elasticsearch,amaliujia/elasticsearch,cnfire/elasticsearch-1,clintongormley/elasticsearch,Kakakakakku/elasticsearch,knight1128/elasticsearch,nellicus/elasticsearch,sarwarbhuiyan/elasticsearch,njlawton/elasticsearch,xingguang2013/elasticsearch,HarishAtGitHub/elasticsearch,LeoYao/elasticsearch,vrkansagara/elasticsearch,wittyameta/elasticsearch,franklanganke/elasticsearch,lchennup/elasticsearch,HonzaKral/elasticsearch,alexshadow007/elasticsearch,mnylen/elasticsearch,masaruh/elasticsearch,ouyangkongtong/elasticsearch,yuy168/elasticsearch,fforbeck/elasticsearch,SergVro/elasticsearch,jchampion/elasticsearch,sneivandt/elasticsearch,artnowo/elasticsearch,vingupta3/elasticsearch,HarishAtGitHub/elasticsearch,maddin2016/elasticsearch,MetSystem/elasticsearch,elasticdog/elasticsearch,bestwpw/elasticsearch,wuranbo/elasticsearch,mcku/elasticsearch,szroland/elasticsearch,likaiwalkman/elasticsearch,sdauletau/elasticsearch,nrkkalyan/elasticsearch,MetSystem/elasticsearch,yuy168/elasticsearch,Chhunlong/elasticsearch,mbrukman/elasticsearch,lmtwga/elasticsearch,kevinkluge/elasticsearch,mortonsykes/elasticsearch,pranavraman/elasticsearch,cwurm/elasticsearch,nezirus/elasticsearch,javachengwc/elasticsearch,wuranbo/elasticsearch,schonfeld/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,elancom/elasticsearch,jsgao0/elasticsearch,ckclark/elasticsearch,codebunt/elasticsearch,girirajsharma/elasticsearch,smflorentino/elasticsearch,rajanm/elasticsearch,yanjunh/elasticsearch,milodky/elasticsearch,yynil/elasticsearch,Collaborne/elasticsearch,andrejser
afim/elasticsearch,girirajsharma/elasticsearch,gingerwizard/elasticsearch,amit-shar/elasticsearch,apepper/elasticsearch,dylan8902/elasticsearch,sdauletau/elasticsearch,Flipkart/elasticsearch,zeroctu/elasticsearch,nknize/elasticsearch,javachengwc/elasticsearch,kunallimaye/elasticsearch,zeroctu/elasticsearch,njlawton/elasticsearch,LewayneNaidoo/elasticsearch,wbowling/elasticsearch,lks21c/elasticsearch,Clairebi/ElasticsearchClone,wimvds/elasticsearch,gfyoung/elasticsearch,diendt/elasticsearch,spiegela/elasticsearch,chirilo/elasticsearch,yanjunh/elasticsearch,markharwood/elasticsearch,nomoa/elasticsearch,zhiqinghuang/elasticsearch,ydsakyclguozi/elasticsearch,hanswang/elasticsearch,Widen/elasticsearch,milodky/elasticsearch,mbrukman/elasticsearch,aglne/elasticsearch,nezirus/elasticsearch,queirozfcom/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,weipinghe/elasticsearch,nrkkalyan/elasticsearch,sc0ttkclark/elasticsearch,SergVro/elasticsearch,GlenRSmith/elasticsearch,dataduke/elasticsearch,nrkkalyan/elasticsearch,vrkansagara/elasticsearch,ESamir/elasticsearch,smflorentino/elasticsearch,tahaemin/elasticsearch,jango2015/elasticsearch,kalimatas/elasticsearch,kevinkluge/elasticsearch,gingerwizard/elasticsearch,davidvgalbraith/elasticsearch,tkssharma/elasticsearch,alexbrasetvik/elasticsearch,dylan8902/elasticsearch,C-Bish/elasticsearch,robin13/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,Charlesdong/elasticsearch,mm0/elasticsearch,F0lha/elasticsearch,coding0011/elasticsearch,strapdata/elassandra-test,Ansh90/elasticsearch,hafkensite/elasticsearch,Charlesdong/elasticsearch,pranavraman/elasticsearch,nilabhsagar/elasticsearch,MichaelLiZhou/elasticsearch,cwurm/elasticsearch,Fsero/elasticsearch,rhoml/elasticsearch,JackyMai/elasticsearch,koxa29/elasticsearch,kunallimaye/elasticsearch,codebunt/elasticsearch,jpountz/elasticsearch,AshishThakur/elasticsearch,hydro2k/elasticsearch,wbowling/elasticsearch,lightslife/elasticsearch,wayeast/elasticsearch,LeoYao/elasticsearch,jimczi/elas
ticsearch,strapdata/elassandra,mapr/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,ulkas/elasticsearch,awislowski/elasticsearch,wimvds/elasticsearch,koxa29/elasticsearch,hafkensite/elasticsearch,polyfractal/elasticsearch,ThiagoGarciaAlves/elasticsearch,artnowo/elasticsearch,masterweb121/elasticsearch,liweinan0423/elasticsearch,kimimj/elasticsearch,Chhunlong/elasticsearch,sdauletau/elasticsearch,vvcephei/elasticsearch,dpursehouse/elasticsearch,fred84/elasticsearch,a2lin/elasticsearch,shreejay/elasticsearch,MetSystem/elasticsearch,ImpressTV/elasticsearch,MaineC/elasticsearch,mnylen/elasticsearch,jaynblue/elasticsearch,mmaracic/elasticsearch,snikch/elasticsearch,jeteve/elasticsearch,overcome/elasticsearch,mcku/elasticsearch,Helen-Zhao/elasticsearch,likaiwalkman/elasticsearch,kingaj/elasticsearch,mrorii/elasticsearch,achow/elasticsearch,diendt/elasticsearch,kubum/elasticsearch,iantruslove/elasticsearch,Flipkart/elasticsearch,drewr/elasticsearch,mjhennig/elasticsearch,coding0011/elasticsearch,Chhunlong/elasticsearch,rento19962/elasticsearch,javachengwc/elasticsearch,Siddartha07/elasticsearch,ckclark/elasticsearch,achow/elasticsearch,clintongormley/elasticsearch,PhaedrusTheGreek/elasticsearch,socialrank/elasticsearch,thecocce/elasticsearch,hydro2k/elasticsearch,jpountz/elasticsearch,Clairebi/ElasticsearchClone,pritishppai/elasticsearch,Liziyao/elasticsearch,iacdingping/elasticsearch,dongjoon-hyun/elasticsearch,myelin/elasticsearch,Siddartha07/elasticsearch,winstonewert/elasticsearch,mmaracic/elasticsearch,martinstuga/elasticsearch,infusionsoft/elasticsearch,onegambler/elasticsearch,StefanGor/elasticsearch,Flipkart/elasticsearch,clintongormley/elasticsearch,scottsom/elasticsearch,Widen/elasticsearch,caengcjd/elasticsearch,jw0201/elastic,pranavraman/elasticsearch,mrorii/elasticsearch,IanvsPoplicola/elasticsearch,chirilo/elasticsearch,jprante/elasticsearch,LeoYao/elasticsearch,himanshuag/elasticsearch,SergVro/elasticsearch,yongminxia/elasticsearch,tkssharma/elasticsearch
,strapdata/elassandra5-rc,Kakakakakku/elasticsearch,amaliujia/elasticsearch,jw0201/elastic,sarwarbhuiyan/elasticsearch,KimTaehee/elasticsearch,karthikjaps/elasticsearch,xingguang2013/elasticsearch,nomoa/elasticsearch,elancom/elasticsearch,tsohil/elasticsearch,Helen-Zhao/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,hanst/elasticsearch,mjhennig/elasticsearch,jango2015/elasticsearch,wangyuxue/elasticsearch,luiseduardohdbackup/elasticsearch,kubum/elasticsearch,petabytedata/elasticsearch,dylan8902/elasticsearch,alexbrasetvik/elasticsearch,iacdingping/elasticsearch,AshishThakur/elasticsearch,wayeast/elasticsearch,TonyChai24/ESSource,zkidkid/elasticsearch,liweinan0423/elasticsearch,abibell/elasticsearch,ThiagoGarciaAlves/elasticsearch,feiqitian/elasticsearch,sc0ttkclark/elasticsearch,overcome/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,girirajsharma/elasticsearch,Rygbee/elasticsearch,tsohil/elasticsearch,xpandan/elasticsearch,myelin/elasticsearch,LewayneNaidoo/elasticsearch,sdauletau/elasticsearch,Uiho/elasticsearch,lmtwga/elasticsearch,Shekharrajak/elasticsearch,scorpionvicky/elasticsearch,ThalaivaStars/OrgRepo1,martinstuga/elasticsearch,vietlq/elasticsearch,henakamaMSFT/elasticsearch,wayeast/elasticsearch,hanswang/elasticsearch,caengcjd/elasticsearch,mcku/elasticsearch,Shekharrajak/elasticsearch,IanvsPoplicola/elasticsearch,areek/elasticsearch,wayeast/elasticsearch,fernandozhu/elasticsearch,MjAbuz/elasticsearch,franklanganke/elasticsearch,LeoYao/elasticsearch,queirozfcom/elasticsearch,nrkkalyan/elasticsearch,phani546/elasticsearch,franklanganke/elasticsearch,StefanGor/elasticsearch,PhaedrusTheGreek/elasticsearch,clintongormley/elasticsearch,alexbrasetvik/elasticsearch,SergVro/elasticsearch,clintongormley/elasticsearch,gfyoung/elasticsearch,sarwarbhuiyan/elasticsearch,mm0/elasticsearch,Helen-Zhao/elasticsearch,slavau/elasticsearch,sc0ttkclark/elasticsearch,Collaborne/elasticsearch,khiraiwa/elasticsearch,koxa29/elasticsearch,GlenRSmith/elasticsearch,caengcjd/e
lasticsearch,scorpionvicky/elasticsearch,luiseduardohdbackup/elasticsearch,aglne/elasticsearch,HarishAtGitHub/elasticsearch,khiraiwa/elasticsearch,AshishThakur/elasticsearch,xingguang2013/elasticsearch,masaruh/elasticsearch,iamjakob/elasticsearch,anti-social/elasticsearch,umeshdangat/elasticsearch,wimvds/elasticsearch,Collaborne/elasticsearch,lightslife/elasticsearch,HonzaKral/elasticsearch,fekaputra/elasticsearch,springning/elasticsearch,xpandan/elasticsearch,elasticdog/elasticsearch,drewr/elasticsearch,Shepard1212/elasticsearch,MetSystem/elasticsearch,wangtuo/elasticsearch,camilojd/elasticsearch,smflorentino/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,gingerwizard/elasticsearch,phani546/elasticsearch,jsgao0/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,fred84/elasticsearch,jango2015/elasticsearch,phani546/elasticsearch,bestwpw/elasticsearch,nezirus/elasticsearch,scorpionvicky/elasticsearch,wittyameta/elasticsearch,strapdata/elassandra-test,djschny/elasticsearch,MisterAndersen/elasticsearch,rlugojr/elasticsearch,markwalkom/elasticsearch,acchen97/elasticsearch,mbrukman/elasticsearch,ivansun1010/elasticsearch,huypx1292/elasticsearch,apepper/elasticsearch,SergVro/elasticsearch,iamjakob/elasticsearch,easonC/elasticsearch,huanzhong/elasticsearch,rajanm/elasticsearch,Uiho/elasticsearch,Uiho/elasticsearch,Ansh90/elasticsearch,henakamaMSFT/elasticsearch,jimhooker2002/elasticsearch,nilabhsagar/elasticsearch,StefanGor/elasticsearch,mrorii/elasticsearch,Shekharrajak/elasticsearch,ricardocerq/elasticsearch,anti-social/elasticsearch,jpountz/elasticsearch,franklanganke/elasticsearch,EasonYi/elasticsearch,martinstuga/elasticsearch,iantruslove/elasticsearch,anti-social/elasticsearch,smflorentino/elasticsearch,ricardocerq/elasticsearch,fekaputra/elasticsearch,mute/elasticsearch,lzo/elasticsearch-1,pritishppai/elasticsearch,LewayneNaidoo/elasticsearch,elancom/elasticsearch,nellicus/elasticsearch,coding0011/elasticsearch,kingaj/elasticsearch,szroland/elasticsearch,polyfra
ctal/elasticsearch,vroyer/elassandra,mute/elasticsearch,hanst/elasticsearch,mikemccand/elasticsearch,feiqitian/elasticsearch,fekaputra/elasticsearch,pozhidaevak/elasticsearch,markwalkom/elasticsearch,kingaj/elasticsearch,F0lha/elasticsearch,Rygbee/elasticsearch,s1monw/elasticsearch,slavau/elasticsearch,ouyangkongtong/elasticsearch,likaiwalkman/elasticsearch,brandonkearby/elasticsearch,girirajsharma/elasticsearch,khiraiwa/elasticsearch,kaneshin/elasticsearch,pablocastro/elasticsearch,markharwood/elasticsearch,LewayneNaidoo/elasticsearch,jsgao0/elasticsearch,luiseduardohdbackup/elasticsearch,sauravmondallive/elasticsearch,andrestc/elasticsearch,shreejay/elasticsearch,Liziyao/elasticsearch,mortonsykes/elasticsearch,sarwarbhuiyan/elasticsearch,trangvh/elasticsearch,shreejay/elasticsearch,mcku/elasticsearch,nrkkalyan/elasticsearch,glefloch/elasticsearch,acchen97/elasticsearch,NBSW/elasticsearch,acchen97/elasticsearch,iamjakob/elasticsearch,ThalaivaStars/OrgRepo1,zhiqinghuang/elasticsearch,mjhennig/elasticsearch,btiernay/elasticsearch,schonfeld/elasticsearch,djschny/elasticsearch,qwerty4030/elasticsearch,HarishAtGitHub/elasticsearch,springning/elasticsearch,ImpressTV/elasticsearch,drewr/elasticsearch,hirdesh2008/elasticsearch,F0lha/elasticsearch,linglaiyao1314/elasticsearch,kubum/elasticsearch,vingupta3/elasticsearch,vroyer/elasticassandra,sc0ttkclark/elasticsearch,mapr/elasticsearch,sreeramjayan/elasticsearch,likaiwalkman/elasticsearch,btiernay/elasticsearch,nomoa/elasticsearch,HarishAtGitHub/elasticsearch,rmuir/elasticsearch,F0lha/elasticsearch,tsohil/elasticsearch,kimimj/elasticsearch,wangtuo/elasticsearch,fforbeck/elasticsearch,zhiqinghuang/elasticsearch,ThiagoGarciaAlves/elasticsearch,glefloch/elasticsearch,truemped/elasticsearch,huanzhong/elasticsearch,rmuir/elasticsearch,awislowski/elasticsearch,tsohil/elasticsearch,khiraiwa/elasticsearch,alexkuk/elasticsearch,cnfire/elasticsearch-1,szroland/elasticsearch,wangtuo/elasticsearch,cwurm/elasticsearch,fooljohnny/elastic
search,strapdata/elassandra5-rc,weipinghe/elasticsearch,Rygbee/elasticsearch,wbowling/elasticsearch,hanswang/elasticsearch,djschny/elasticsearch,ydsakyclguozi/elasticsearch,Charlesdong/elasticsearch,szroland/elasticsearch,masterweb121/elasticsearch,zeroctu/elasticsearch,nazarewk/elasticsearch,sc0ttkclark/elasticsearch,xuzha/elasticsearch,ZTE-PaaS/elasticsearch,umeshdangat/elasticsearch,pritishppai/elasticsearch,ImpressTV/elasticsearch,feiqitian/elasticsearch,jeteve/elasticsearch,Stacey-Gammon/elasticsearch,rajanm/elasticsearch,onegambler/elasticsearch,MaineC/elasticsearch,kalimatas/elasticsearch,golubev/elasticsearch,vrkansagara/elasticsearch,bestwpw/elasticsearch,weipinghe/elasticsearch,snikch/elasticsearch,camilojd/elasticsearch,wittyameta/elasticsearch,EasonYi/elasticsearch,JSCooke/elasticsearch,zkidkid/elasticsearch,i-am-Nathan/elasticsearch,kalimatas/elasticsearch,henakamaMSFT/elasticsearch,a2lin/elasticsearch,skearns64/elasticsearch,MjAbuz/elasticsearch,AndreKR/elasticsearch,shreejay/elasticsearch,jaynblue/elasticsearch,areek/elasticsearch,tahaemin/elasticsearch,jw0201/elastic,btiernay/elasticsearch,alexshadow007/elasticsearch,fooljohnny/elasticsearch,MichaelLiZhou/elasticsearch,huanzhong/elasticsearch,masterweb121/elasticsearch,hanswang/elasticsearch,jw0201/elastic,infusionsoft/elasticsearch,iacdingping/elasticsearch,AndreKR/elasticsearch,alexshadow007/elasticsearch,achow/elasticsearch,likaiwalkman/elasticsearch,hanswang/elasticsearch,milodky/elasticsearch,alexkuk/elasticsearch,truemped/elasticsearch,cwurm/elasticsearch,TonyChai24/ESSource,martinstuga/elasticsearch,springning/elasticsearch,lzo/elasticsearch-1,dpursehouse/elasticsearch,xpandan/elasticsearch,rmuir/elasticsearch,JackyMai/elasticsearch,springning/elasticsearch,mmaracic/elasticsearch,uschindler/elasticsearch,yuy168/elasticsearch,gingerwizard/elasticsearch,camilojd/elasticsearch,Siddartha07/elasticsearch,btiernay/elasticsearch,knight1128/elasticsearch,KimTaehee/elasticsearch,davidvgalbraith/elastic
search,thecocce/elasticsearch,luiseduardohdbackup/elasticsearch,sdauletau/elasticsearch,wuranbo/elasticsearch,jimhooker2002/elasticsearch,sc0ttkclark/elasticsearch,wittyameta/elasticsearch,ThiagoGarciaAlves/elasticsearch,tsohil/elasticsearch,thecocce/elasticsearch,robin13/elasticsearch,hydro2k/elasticsearch,alexkuk/elasticsearch,achow/elasticsearch,pritishppai/elasticsearch,infusionsoft/elasticsearch,lzo/elasticsearch-1,fred84/elasticsearch,skearns64/elasticsearch,apepper/elasticsearch,martinstuga/elasticsearch,mkis-/elasticsearch,feiqitian/elasticsearch,Flipkart/elasticsearch,schonfeld/elasticsearch,rento19962/elasticsearch,markwalkom/elasticsearch,lzo/elasticsearch-1,hanst/elasticsearch,KimTaehee/elasticsearch,yuy168/elasticsearch,nazarewk/elasticsearch,franklanganke/elasticsearch,strapdata/elassandra,slavau/elasticsearch,phani546/elasticsearch,strapdata/elassandra,schonfeld/elasticsearch,scottsom/elasticsearch,yynil/elasticsearch,smflorentino/elasticsearch,jeteve/elasticsearch,codebunt/elasticsearch,Flipkart/elasticsearch,ImpressTV/elasticsearch,gingerwizard/elasticsearch,codebunt/elasticsearch,nezirus/elasticsearch,golubev/elasticsearch,anti-social/elasticsearch,geidies/elasticsearch,MichaelLiZhou/elasticsearch,qwerty4030/elasticsearch,MichaelLiZhou/elasticsearch,caengcjd/elasticsearch,sposam/elasticsearch,kubum/elasticsearch,codebunt/elasticsearch,Charlesdong/elasticsearch,petabytedata/elasticsearch,xingguang2013/elasticsearch,overcome/elasticsearch,Widen/elasticsearch,spiegela/elasticsearch,Chhunlong/elasticsearch,Uiho/elasticsearch,MjAbuz/elasticsearch,mkis-/elasticsearch,mgalushka/elasticsearch,wimvds/elasticsearch,girirajsharma/elasticsearch,mjason3/elasticsearch,kalburgimanjunath/elasticsearch,scorpionvicky/elasticsearch,ImpressTV/elasticsearch,achow/elasticsearch,kcompher/elasticsearch,xingguang2013/elasticsearch,gingerwizard/elasticsearch,Brijeshrpatel9/elasticsearch,lchennup/elasticsearch,tsohil/elasticsearch,obourgain/elasticsearch,adrianbk/elasticsear
ch,kubum/elasticsearch,jsgao0/elasticsearch,iacdingping/elasticsearch,lmtwga/elasticsearch,spiegela/elasticsearch,naveenhooda2000/elasticsearch,mnylen/elasticsearch,hanswang/elasticsearch,girirajsharma/elasticsearch,vroyer/elasticassandra,ThalaivaStars/OrgRepo1,adrianbk/elasticsearch,nazarewk/elasticsearch,skearns64/elasticsearch,ivansun1010/elasticsearch,nomoa/elasticsearch,lmtwga/elasticsearch,rajanm/elasticsearch,humandb/elasticsearch,Liziyao/elasticsearch,rhoml/elasticsearch,ouyangkongtong/elasticsearch,polyfractal/elasticsearch,PhaedrusTheGreek/elasticsearch,golubev/elasticsearch,scottsom/elasticsearch,rento19962/elasticsearch,hafkensite/elasticsearch,zeroctu/elasticsearch,iacdingping/elasticsearch,cnfire/elasticsearch-1,JervyShi/elasticsearch,mute/elasticsearch,amit-shar/elasticsearch,strapdata/elassandra5-rc,amit-shar/elasticsearch,Fsero/elasticsearch,alexbrasetvik/elasticsearch,obourgain/elasticsearch,episerver/elasticsearch,chirilo/elasticsearch,alexbrasetvik/elasticsearch,humandb/elasticsearch,snikch/elasticsearch,mjhennig/elasticsearch,markharwood/elasticsearch,phani546/elasticsearch,mcku/elasticsearch,JervyShi/elasticsearch,likaiwalkman/elasticsearch,mgalushka/elasticsearch,vvcephei/elasticsearch,rento19962/elasticsearch,kaneshin/elasticsearch,henakamaMSFT/elasticsearch,btiernay/elasticsearch,PhaedrusTheGreek/elasticsearch,tkssharma/elasticsearch,huanzhong/elasticsearch,Charlesdong/elasticsearch,maddin2016/elasticsearch,masaruh/elasticsearch,andrejserafim/elasticsearch,vingupta3/elasticsearch,amit-shar/elasticsearch,Siddartha07/elasticsearch,infusionsoft/elasticsearch,a2lin/elasticsearch,diendt/elasticsearch,amaliujia/elasticsearch,mgalushka/elasticsearch,robin13/elasticsearch,jchampion/elasticsearch,mjhennig/elasticsearch,nknize/elasticsearch,sneivandt/elasticsearch,rajanm/elasticsearch,rlugojr/elasticsearch,jw0201/elastic,petabytedata/elasticsearch,palecur/elasticsearch,robin13/elasticsearch,palecur/elasticsearch,knight1128/elasticsearch,lchennup/elast
icsearch,hydro2k/elasticsearch,ricardocerq/elasticsearch,lks21c/elasticsearch,himanshuag/elasticsearch,Uiho/elasticsearch,markllama/elasticsearch,sposam/elasticsearch,dylan8902/elasticsearch,zhiqinghuang/elasticsearch,tkssharma/elasticsearch,mohit/elasticsearch,nomoa/elasticsearch,diendt/elasticsearch,iamjakob/elasticsearch,sposam/elasticsearch,AshishThakur/elasticsearch,infusionsoft/elasticsearch,fekaputra/elasticsearch,naveenhooda2000/elasticsearch,Stacey-Gammon/elasticsearch,Shekharrajak/elasticsearch,mikemccand/elasticsearch,kcompher/elasticsearch,vietlq/elasticsearch,wenpos/elasticsearch,karthikjaps/elasticsearch,zhiqinghuang/elasticsearch,sposam/elasticsearch,mohit/elasticsearch,vingupta3/elasticsearch,zhiqinghuang/elasticsearch,kunallimaye/elasticsearch,pritishppai/elasticsearch,acchen97/elasticsearch,PhaedrusTheGreek/elasticsearch,EasonYi/elasticsearch,dataduke/elasticsearch,kenshin233/elasticsearch,iamjakob/elasticsearch,NBSW/elasticsearch,ZTE-PaaS/elasticsearch,StefanGor/elasticsearch,kcompher/elasticsearch,milodky/elasticsearch,pranavraman/elasticsearch,linglaiyao1314/elasticsearch,beiske/elasticsearch,franklanganke/elasticsearch,hirdesh2008/elasticsearch,areek/elasticsearch,linglaiyao1314/elasticsearch,jeteve/elasticsearch,abibell/elasticsearch,hechunwen/elasticsearch,mjhennig/elasticsearch,hechunwen/elasticsearch,nellicus/elasticsearch,HonzaKral/elasticsearch,acchen97/elasticsearch,clintongormley/elasticsearch,ThiagoGarciaAlves/elasticsearch,dataduke/elasticsearch,hanswang/elasticsearch,nknize/elasticsearch,polyfractal/elasticsearch,YosuaMichael/elasticsearch,sreeramjayan/elasticsearch,tkssharma/elasticsearch,btiernay/elasticsearch,diendt/elasticsearch,lmtwga/elasticsearch,vietlq/elasticsearch,sauravmondallive/elasticsearch,NBSW/elasticsearch,JSCooke/elasticsearch,pablocastro/elasticsearch,vingupta3/elasticsearch,cwurm/elasticsearch,jaynblue/elasticsearch,mgalushka/elasticsearch,mortonsykes/elasticsearch,MichaelLiZhou/elasticsearch,sarwarbhuiyan/elastic
search,iacdingping/elasticsearch,JSCooke/elasticsearch,amaliujia/elasticsearch,uschindler/elasticsearch,Shekharrajak/elasticsearch,socialrank/elasticsearch,kubum/elasticsearch,kcompher/elasticsearch,koxa29/elasticsearch,abibell/elasticsearch,adrianbk/elasticsearch,henakamaMSFT/elasticsearch,drewr/elasticsearch,thecocce/elasticsearch,JervyShi/elasticsearch,lks21c/elasticsearch,javachengwc/elasticsearch,onegambler/elasticsearch,alexkuk/elasticsearch,huanzhong/elasticsearch,beiske/elasticsearch,bestwpw/elasticsearch,umeshdangat/elasticsearch,tebriel/elasticsearch,Clairebi/ElasticsearchClone,jprante/elasticsearch,maddin2016/elasticsearch,camilojd/elasticsearch,kenshin233/elasticsearch,lks21c/elasticsearch,dataduke/elasticsearch,scorpionvicky/elasticsearch,ESamir/elasticsearch,mrorii/elasticsearch,ImpressTV/elasticsearch,tebriel/elasticsearch,acchen97/elasticsearch,Brijeshrpatel9/elasticsearch,gmarz/elasticsearch,truemped/elasticsearch,slavau/elasticsearch,tkssharma/elasticsearch,achow/elasticsearch,Fsero/elasticsearch,lzo/elasticsearch-1,ckclark/elasticsearch,kaneshin/elasticsearch,mapr/elasticsearch,socialrank/elasticsearch,kcompher/elasticsearch,vietlq/elasticsearch,i-am-Nathan/elasticsearch,Kakakakakku/elasticsearch,F0lha/elasticsearch,knight1128/elasticsearch,huypx1292/elasticsearch,djschny/elasticsearch,yynil/elasticsearch,dpursehouse/elasticsearch,elancom/elasticsearch,mute/elasticsearch,hanst/elasticsearch,mapr/elasticsearch,hafkensite/elasticsearch,vvcephei/elasticsearch,feiqitian/elasticsearch,jaynblue/elasticsearch,jsgao0/elasticsearch,Brijeshrpatel9/elasticsearch,smflorentino/elasticsearch,ulkas/elasticsearch,himanshuag/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,nezirus/elasticsearch,qwerty4030/elasticsearch,petabytedata/elasticsearch,vvcephei/elasticsearch,nilabhsagar/elasticsearch,YosuaMichael/elasticsearch,rmuir/elasticsearch,JackyMai/elasticsearch,ThalaivaStars/OrgRepo1,jprante/elasticsearch,alexkuk/elasticsearch,vrkansagara/elasticsearch,episerve
r/elasticsearch,MichaelLiZhou/elasticsearch,brandonkearby/elasticsearch,mjason3/elasticsearch,overcome/elasticsearch,YosuaMichael/elasticsearch,i-am-Nathan/elasticsearch,petabytedata/elasticsearch,naveenhooda2000/elasticsearch,easonC/elasticsearch,mkis-/elasticsearch,bawse/elasticsearch,winstonewert/elasticsearch,khiraiwa/elasticsearch,kenshin233/elasticsearch,YosuaMichael/elasticsearch,elasticdog/elasticsearch,kunallimaye/elasticsearch,KimTaehee/elasticsearch,xuzha/elasticsearch,myelin/elasticsearch,lchennup/elasticsearch,tahaemin/elasticsearch,hechunwen/elasticsearch,kcompher/elasticsearch,ydsakyclguozi/elasticsearch,fooljohnny/elasticsearch,jbertouch/elasticsearch,nellicus/elasticsearch,tebriel/elasticsearch,kalburgimanjunath/elasticsearch,Fsero/elasticsearch,anti-social/elasticsearch,awislowski/elasticsearch,strapdata/elassandra,petabytedata/elasticsearch,wuranbo/elasticsearch,nazarewk/elasticsearch,jimczi/elasticsearch,queirozfcom/elasticsearch | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.junit.rule;
import org.elasticsearch.common.logging.ESLogger;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
/**
 * A helper {@link TestRule} that catches a configured, expected exception type
 * and reruns the test for a configured number of times.
 * <p>
 * Any throwable that is not exactly of the expected type is rethrown immediately
 * instead of triggering a retry.
 * <p>
 * Note: Be aware, that when a test is repeated, the @After and @Before
 * annotated methods are not run a second time.
 */
public class RepeatOnExceptionRule implements TestRule {

    // All configuration is set once in the constructor, so keep the fields final.
    private final ESLogger logger;
    private final int retryCount;
    private final Class<?> expectedException;

    /**
     * @param logger the es logger from the test class
     * @param retryCount number of times to try a single test before failing
     * @param expectedException the exception class that triggers a rerun; any other throwable is rethrown
     */
    public RepeatOnExceptionRule(ESLogger logger, int retryCount, Class<?> expectedException) {
        this.logger = logger;
        this.retryCount = retryCount;
        this.expectedException = expectedException;
    }

    @Override
    public Statement apply(final Statement base, Description description) {
        return new Statement() {
            @Override
            public void evaluate() throws Throwable {
                Throwable caughtThrowable = null;
                for (int i = 0; i < retryCount; i++) {
                    try {
                        base.evaluate();
                        return; // test passed, no retry needed
                    } catch (Throwable t) {
                        if (t.getClass().equals(expectedException)) {
                            caughtThrowable = t;
                            // FIX: the original call passed three arguments for two "{}" placeholders,
                            // so the failure count was silently dropped and the message showed the
                            // class name in the "failures" slot.
                            logger.info("Exception [{}] occurred, rerunning the test after [{}] failures",
                                    t.getClass().getSimpleName(), i + 1);
                        } else {
                            // Unexpected throwable (e.g. an assumption violation): bubble it up unchanged.
                            throw t;
                        }
                    }
                }
                logger.error("Giving up after [{}] failures... marking test as failed", retryCount);
                // Non-null here: every loop iteration either returned, rethrew, or set caughtThrowable.
                throw caughtThrowable;
            }
        };
    }
}
| src/test/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.junit.rule;
import org.elasticsearch.common.logging.ESLogger;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
/**
 * A helper {@link TestRule} that catches a configured, expected exception type
 * and reruns the test for a configured number of times.
 * <p>
 * Any throwable that is not exactly of the expected type is rethrown immediately
 * instead of triggering a retry.
 * <p>
 * Note: Be aware, that when a test is repeated, the @After and @Before
 * annotated methods are not run a second time.
 */
public class RepeatOnExceptionRule implements TestRule {

    // All configuration is set once in the constructor, so keep the fields final.
    private final ESLogger logger;
    private final int retryCount;
    private final Class<?> expectedException;

    /**
     * @param logger the es logger from the test class
     * @param retryCount number of times to try a single test before failing
     * @param expectedException the exception class that triggers a rerun; any other throwable is rethrown
     */
    public RepeatOnExceptionRule(ESLogger logger, int retryCount, Class<?> expectedException) {
        this.logger = logger;
        this.retryCount = retryCount;
        this.expectedException = expectedException;
    }

    @Override
    public Statement apply(final Statement base, Description description) {
        return new Statement() {
            @Override
            public void evaluate() throws Throwable {
                Throwable caughtThrowable = null;
                for (int i = 0; i < retryCount; i++) {
                    try {
                        base.evaluate();
                        return; // test passed, no retry needed
                    } catch (Throwable t) {
                        if (t.getClass().equals(expectedException)) {
                            caughtThrowable = t;
                            // FIX: arguments now match the two "{}" placeholders (the original passed
                            // an extra argument, so the failure count was never logged).
                            logger.info("Exception [{}] occurred, rerunning the test after [{}] failures",
                                    t.getClass().getSimpleName(), i + 1);
                        } else {
                            // FIX: the original swallowed unexpected throwables, which both hid the real
                            // failure (e.g. an AssumptionViolatedException meant to skip the test) and
                            // made "throw caughtThrowable" below fail with an NPE when no expected
                            // exception had ever been caught. Rethrow anything we do not retry on.
                            throw t;
                        }
                    }
                }
                logger.error("Giving up after [{}] failures... marking test as failed", retryCount);
                // Non-null here: every loop iteration either returned, rethrew, or set caughtThrowable.
                throw caughtThrowable;
            }
        };
    }
}
| [TEST] RepeatOnExceptionRule to rethrow unexpected exception
In case an exception was caught by the repeat rule, the retry mechanism would kick in only if the exception was the expected one. If not, an NPE got thrown, while we should rather just bubble the original exception up to the caller. This makes `NettyTransportMultiPortTests` run from a plane. An assumption would kick in to make sure that the test gets ignored, but the `AssumptionViolatedException` was caught and not properly re-thrown.
| src/test/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java | [TEST] RepeatOnExceptionRule to rethrow unexpected exception |
|
Java | apache-2.0 | 6e28b6cb2d41e32c46a0f3dc94e9a66cf39ac28f | 0 | mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs | /*
* Copyright 2002-2018 Barcelona Supercomputing Center (www.bsc.es)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package es.bsc.compss.executor.utils;
import es.bsc.compss.log.Loggers;
import es.bsc.compss.types.execution.InvocationContext;
import es.bsc.compss.types.execution.exceptions.UnsufficientAvailableComputingUnitsException;
import es.bsc.compss.types.resources.ResourceDescription;
import es.bsc.compss.executor.Executor;
import es.bsc.compss.executor.ExecutorContext;
import es.bsc.compss.executor.external.ExecutionPlatformMirror;
import es.bsc.compss.executor.types.Execution;
import es.bsc.compss.executor.utils.ResourceManager.InvocationResources;
import es.bsc.compss.invokers.util.JobQueue;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Semaphore;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.TreeSet;
/**
 * The thread pool is a utility to manage a set of threads for job execution.
 * <p>
 * Worker threads dequeue {@link Execution} requests from a shared {@link JobQueue};
 * a {@code null} entry in the queue is the poison pill that tells a worker to stop.
 * The pool can be resized at runtime and also keeps one {@link ExecutionPlatformMirror}
 * per invoker class.
 */
public class ExecutionPlatform implements ExecutorContext {

    private static final Logger LOGGER = LogManager.getLogger(Loggers.WORKER_POOL);

    private final String platformName;
    private final InvocationContext context;
    private final ResourceManager rm;
    // Shared queue all worker threads pull jobs from (see getJob()).
    private final JobQueue queue;

    // Both fields are only touched inside synchronized methods of this class.
    private boolean started = false;
    private int nextThreadId = 0;

    // Worker threads ordered by thread id (comparator set in the constructor).
    private final TreeSet<Thread> workerThreads;
    // Threads whose run() has completed and that still need to be join()ed (see joinThreads()).
    private final LinkedList<Thread> finishedWorkerThreads;
    // Released once by each worker as soon as its run() begins; awaited in start()/addWorkerThreads().
    private final Semaphore startSemaphore;
    // Released once by each worker when its run() finishes; awaited in removeWorkerThreads().
    private final Semaphore stopSemaphore;
    // One mirror per invoker class. NOTE(review): read/written without synchronization in
    // getMirror/registerMirror/getMirrors — confirm those are only called from a single thread.
    private final Map<Class<?>, ExecutionPlatformMirror> mirrors;

    /**
     * Constructs a new thread pool but not the threads inside it.
     * The {@code initialSize} worker threads are created (via {@link #addWorkerThreads(int)})
     * but not started until {@link #start()} is invoked.
     *
     * @param platformName name used in log messages and in the compute threads' names
     * @param context invocation context handed to every {@link Executor}
     * @param initialSize number of worker threads to create up front
     * @param resManager manager used to acquire/release computing resources per job
     */
    public ExecutionPlatform(String platformName, InvocationContext context, int initialSize, ResourceManager resManager) {
        LOGGER.info("Initializing execution platform " + platformName);
        this.platformName = platformName;
        this.context = context;
        this.rm = resManager;
        this.mirrors = new HashMap<>();
        // Make system properties local to each thread
        System.setProperties(new ThreadedProperties(System.getProperties()));
        // Instantiate the message queue and the stop semaphore
        this.queue = new JobQueue();
        this.startSemaphore = new Semaphore(0);
        this.stopSemaphore = new Semaphore(0);
        // Instantiate worker thread structure
        this.workerThreads = new TreeSet<>(new Comparator<Thread>() {
            @Override
            public int compare(Thread t1, Thread t2) {
                // Order threads by their id so descendingSet() in start() is well defined.
                return Long.compare(t1.getId(), t2.getId());
            }
        });
        this.finishedWorkerThreads = new LinkedList<>();
        addWorkerThreads(initialSize);
    }

    /**
     * Adds a new task to the queue.
     *
     * @param exec the execution request to enqueue for the worker threads
     */
    public void execute(Execution exec) {
        this.queue.enqueue(exec);
    }

    /**
     * Creates and starts the threads of the pool and waits until they are created.
     * Blocks until every worker has released its permit on {@code startSemaphore}.
     */
    public final synchronized void start() {
        LOGGER.info("Starting execution platform " + this.platformName);
        // Start is in inverse order so that Thread 1 is the last available
        for (Thread t : this.workerThreads.descendingSet()) {
            t.start();
        }
        int size = this.workerThreads.size();
        // Wait for one release per worker: each run() releases before processing jobs.
        this.startSemaphore.acquireUninterruptibly(size);
        this.started = true;
        LOGGER.info("Started execution platform " + this.platformName + " with " + size);
    }

    /**
     * Stops all the threads. Inserts as many null objects to the queue as threads are managed. It wakes up all the
     * threads and wait until they process the null objects inserted which will stop them.
     * Afterwards all registered mirrors are stopped and discarded.
     */
    public final synchronized void stop() {
        LOGGER.info("Stopping execution platform " + this.platformName);
        /*
         * Empty queue to discard any pending requests and make threads finish
         */
        int size = this.workerThreads.size();
        removeWorkerThreads(size);
        LOGGER.info("Stopping mirrors for execution platform " + this.platformName);
        for (ExecutionPlatformMirror mirror : this.mirrors.values()) {
            mirror.stop();
        }
        mirrors.clear();
        started = false;
        LOGGER.info("Stopped execution platform " + this.platformName);
    }

    /**
     * Creates {@code numWorkerThreads} additional executor threads. If the platform is already
     * started, the new threads are started immediately; otherwise {@link #start()} starts them later.
     *
     * @param numWorkerThreads number of worker threads to add
     */
    public final synchronized void addWorkerThreads(int numWorkerThreads) {
        Semaphore startSem;
        if (started) {
            // NOTE(review): this semaphore is created with numWorkerThreads permits, so the
            // acquireUninterruptibly(numWorkerThreads) at the end of this method returns
            // immediately without actually waiting for the new threads to start — presumably
            // this was meant to be new Semaphore(0); confirm intended behavior.
            startSem = new Semaphore(numWorkerThreads);
        } else {
            // Before start(): reuse the platform-wide semaphore that start() will wait on.
            startSem = this.startSemaphore;
        }
        for (int i = 0; i < numWorkerThreads; i++) {
            int id = nextThreadId++;
            Executor executor = new Executor(context, this, "compute" + id) {
                @Override
                public void run() {
                    // Announce that this worker is up before it starts consuming jobs.
                    startSem.release();
                    super.run();
                    // run() finished: register for joining. Guarded because several workers
                    // may terminate concurrently.
                    synchronized (ExecutionPlatform.this.finishedWorkerThreads) {
                        ExecutionPlatform.this.finishedWorkerThreads.add(Thread.currentThread());
                    }
                    ExecutionPlatform.this.stopSemaphore.release();
                }
            };
            Thread t = new Thread(executor);
            t.setName(platformName + " compute thread # " + id);
            workerThreads.add(t);
            if (started) {
                t.start();
            }
        }
        if (started) {
            startSem.acquireUninterruptibly(numWorkerThreads);
        }
    }

    /**
     * Stops {@code numWorkerThreads} executor threads by enqueueing that many poison pills
     * (null jobs), then blocks until the threads have finished and been joined.
     *
     * @param numWorkerThreads number of worker threads to remove
     */
    public synchronized final void removeWorkerThreads(int numWorkerThreads) {
        LOGGER.info("Stopping " + numWorkerThreads + " executors from execution platform " + this.platformName);
        //Request N threads to finish
        for (int i = 0; i < numWorkerThreads; i++) {
            this.queue.enqueue(null);
        }
        this.queue.wakeUpAll();
        // Wait until all threads have completed their last request
        this.stopSemaphore.acquireUninterruptibly(numWorkerThreads);
        // Stop specific language components
        joinThreads();
        LOGGER.info("Stopped " + numWorkerThreads + " executors from execution platform " + this.platformName);
    }

    /**
     * Joins every thread registered in {@code finishedWorkerThreads} and removes it from the
     * pool bookkeeping. Safe to iterate without a lock here: the semaphore acquire in
     * removeWorkerThreads() happens after the finishing threads' additions.
     */
    private void joinThreads() {
        Iterator<Thread> iter = this.finishedWorkerThreads.iterator();
        while (iter.hasNext()) {
            Thread t = iter.next();
            if (t != null) {
                try {
                    t.join();
                    iter.remove();
                    this.workerThreads.remove(t);
                    t = null;
                } catch (InterruptedException e) {
                    // Preserve the interrupt flag; the thread stays listed for a later join attempt.
                    Thread.currentThread().interrupt();
                }
            }
        }
        // For tracing
        Runtime.getRuntime().gc();
    }

    /** @return the current number of worker threads (running or not yet started). */
    @Override
    public int getSize() {
        return this.workerThreads.size();
    }

    /** Blocks on the shared queue and returns the next job; null means "shut down". */
    @Override
    public Execution getJob() {
        return this.queue.dequeue();
    }

    /**
     * Acquires the computing resources a job needs, delegating to the resource manager.
     *
     * @throws UnsufficientAvailableComputingUnitsException if the requirements cannot be met
     */
    @Override
    public InvocationResources acquireResources(int jobId, ResourceDescription requirements)
            throws UnsufficientAvailableComputingUnitsException {
        return this.rm.acquireResources(jobId, requirements);
    }

    /** Returns the resources previously acquired for the given job to the resource manager. */
    @Override
    public void releaseResources(int jobId) {
        this.rm.releaseResources(jobId);
    }

    /** @return the mirror registered for the given invoker class, or null if none. */
    @Override
    public ExecutionPlatformMirror getMirror(Class<?> invoker) {
        return this.mirrors.get(invoker);
    }

    /** Registers (or replaces) the mirror associated with the given invoker class. */
    @Override
    public void registerMirror(Class<?> invoker, ExecutionPlatformMirror mirror) {
        this.mirrors.put(invoker, mirror);
    }

    /** @return all registered mirrors; stopped and cleared by stop(). */
    @Override
    public Collection<ExecutionPlatformMirror> getMirrors() {
        return mirrors.values();
    }
}
| compss/runtime/adaptors/execution/src/main/java/es/bsc/compss/executor/utils/ExecutionPlatform.java | /*
* Copyright 2002-2018 Barcelona Supercomputing Center (www.bsc.es)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package es.bsc.compss.executor.utils;
import es.bsc.compss.log.Loggers;
import es.bsc.compss.types.execution.InvocationContext;
import es.bsc.compss.types.execution.exceptions.UnsufficientAvailableComputingUnitsException;
import es.bsc.compss.types.resources.ResourceDescription;
import es.bsc.compss.executor.Executor;
import es.bsc.compss.executor.ExecutorContext;
import es.bsc.compss.executor.external.ExecutionPlatformMirror;
import es.bsc.compss.executor.types.Execution;
import es.bsc.compss.executor.utils.ResourceManager.InvocationResources;
import es.bsc.compss.invokers.util.JobQueue;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Semaphore;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.TreeSet;
/**
 * The thread pool is a utility to manage a set of threads for job execution.
 * <p>
 * Worker threads dequeue {@link Execution} requests from a shared {@link JobQueue};
 * a {@code null} entry in the queue is the poison pill that tells a worker to stop.
 * The pool can be resized at runtime and also keeps one {@link ExecutionPlatformMirror}
 * per invoker class.
 */
public class ExecutionPlatform implements ExecutorContext {

    private static final Logger LOGGER = LogManager.getLogger(Loggers.WORKER_POOL);

    private final String platformName;
    private final InvocationContext context;
    private final ResourceManager rm;
    // Shared queue all worker threads pull jobs from (see getJob()).
    private final JobQueue queue;

    // Both fields are only touched inside synchronized methods of this class.
    private boolean started = false;
    private int nextThreadId = 0;

    // Worker threads ordered by thread id (comparator set in the constructor).
    private final TreeSet<Thread> workerThreads;
    // Threads whose run() has completed and that still need to be join()ed (see joinThreads()).
    private final LinkedList<Thread> finishedWorkerThreads;
    // Released once by each worker as soon as its run() begins; awaited in start()/addWorkerThreads().
    private final Semaphore startSemaphore;
    // Released once by each worker when its run() finishes; awaited in removeWorkerThreads().
    private final Semaphore stopSemaphore;
    private final Map<Class<?>, ExecutionPlatformMirror> mirrors;

    /**
     * Constructs a new thread pool but not the threads inside it.
     *
     * @param platformName name used in log messages and in the compute threads' names
     * @param context invocation context handed to every {@link Executor}
     * @param initialSize number of worker threads to create up front (not started yet)
     * @param resManager manager used to acquire/release computing resources per job
     */
    public ExecutionPlatform(String platformName, InvocationContext context, int initialSize, ResourceManager resManager) {
        LOGGER.info("Initializing execution platform " + platformName);
        this.platformName = platformName;
        this.context = context;
        this.rm = resManager;
        this.mirrors = new HashMap<>();
        // Make system properties local to each thread
        System.setProperties(new ThreadedProperties(System.getProperties()));
        // Instantiate the message queue and the stop semaphore
        this.queue = new JobQueue();
        this.startSemaphore = new Semaphore(0);
        this.stopSemaphore = new Semaphore(0);
        // Instantiate worker thread structure
        this.workerThreads = new TreeSet<>(new Comparator<Thread>() {
            @Override
            public int compare(Thread t1, Thread t2) {
                return Long.compare(t1.getId(), t2.getId());
            }
        });
        this.finishedWorkerThreads = new LinkedList<>();
        addWorkerThreads(initialSize);
    }

    /**
     * Adds a new task to the queue.
     *
     * @param exec the execution request to enqueue for the worker threads
     */
    public void execute(Execution exec) {
        this.queue.enqueue(exec);
    }

    /**
     * Creates and starts the threads of the pool and waits until they are created.
     */
    public final synchronized void start() {
        LOGGER.info("Starting execution platform " + this.platformName);
        // Start is in inverse order so that Thread 1 is the last available
        for (Thread t : this.workerThreads.descendingSet()) {
            t.start();
        }
        int size = this.workerThreads.size();
        // Wait for one release per worker: each run() releases before processing jobs.
        this.startSemaphore.acquireUninterruptibly(size);
        this.started = true;
        LOGGER.info("Started execution platform " + this.platformName + " with " + size);
    }

    /**
     * Stops all the threads. Inserts as many null objects to the queue as threads are managed. It wakes up all the
     * threads and wait until they process the null objects inserted which will stop them.
     */
    public final synchronized void stop() {
        LOGGER.info("Stopping execution platform " + this.platformName);
        /*
         * Empty queue to discard any pending requests and make threads finish
         */
        int size = this.workerThreads.size();
        removeWorkerThreads(size);
        LOGGER.info("Stopping mirrors for execution platform " + this.platformName);
        for (ExecutionPlatformMirror mirror : this.mirrors.values()) {
            mirror.stop();
        }
        mirrors.clear();
        started = false;
        LOGGER.info("Stopped execution platform " + this.platformName);
    }

    /**
     * Creates {@code numWorkerThreads} additional executor threads. If the platform is already
     * started, the new threads are started immediately and this method blocks until all of
     * them are running; otherwise {@link #start()} starts them later.
     *
     * @param numWorkerThreads number of worker threads to add
     */
    public final synchronized void addWorkerThreads(int numWorkerThreads) {
        Semaphore startSem;
        if (started) {
            // FIX: was new Semaphore(numWorkerThreads), which pre-loaded N permits and made the
            // acquireUninterruptibly(numWorkerThreads) below return immediately instead of
            // waiting for the new threads to actually start. Start with 0 permits so every
            // permit comes from a worker's release().
            startSem = new Semaphore(0);
        } else {
            // Before start(): reuse the platform-wide semaphore that start() will wait on.
            startSem = this.startSemaphore;
        }
        for (int i = 0; i < numWorkerThreads; i++) {
            int id = nextThreadId++;
            Executor executor = new Executor(context, this, "compute" + id) {
                @Override
                public void run() {
                    // Announce that this worker is up before it starts consuming jobs.
                    startSem.release();
                    super.run();
                    // FIX: guard the shared LinkedList — several workers may finish concurrently,
                    // and unsynchronized adds can corrupt the list joinThreads() later iterates.
                    synchronized (ExecutionPlatform.this.finishedWorkerThreads) {
                        ExecutionPlatform.this.finishedWorkerThreads.add(Thread.currentThread());
                    }
                    ExecutionPlatform.this.stopSemaphore.release();
                }
            };
            Thread t = new Thread(executor);
            t.setName(platformName + " compute thread # " + id);
            workerThreads.add(t);
            if (started) {
                t.start();
            }
        }
        if (started) {
            startSem.acquireUninterruptibly(numWorkerThreads);
        }
    }

    /**
     * Stops {@code numWorkerThreads} executor threads by enqueueing that many poison pills
     * (null jobs), then blocks until the threads have finished and been joined.
     *
     * @param numWorkerThreads number of worker threads to remove
     */
    public synchronized final void removeWorkerThreads(int numWorkerThreads) {
        LOGGER.info("Stopping " + numWorkerThreads + " executors from execution platform " + this.platformName);
        //Request N threads to finish
        for (int i = 0; i < numWorkerThreads; i++) {
            this.queue.enqueue(null);
        }
        this.queue.wakeUpAll();
        // Wait until all threads have completed their last request
        this.stopSemaphore.acquireUninterruptibly(numWorkerThreads);
        // Stop specific language components
        joinThreads();
        LOGGER.info("Stopped " + numWorkerThreads + " executors from execution platform " + this.platformName);
    }

    /**
     * Joins every thread registered in {@code finishedWorkerThreads} and removes it from the
     * pool bookkeeping. The semaphore acquire in removeWorkerThreads() happens after the
     * finishing threads' additions, so iterating here without the lock is safe.
     */
    private void joinThreads() {
        Iterator<Thread> iter = this.finishedWorkerThreads.iterator();
        while (iter.hasNext()) {
            Thread t = iter.next();
            if (t != null) {
                try {
                    t.join();
                    iter.remove();
                    this.workerThreads.remove(t);
                    t = null;
                } catch (InterruptedException e) {
                    // Preserve the interrupt flag; the thread stays listed for a later join attempt.
                    Thread.currentThread().interrupt();
                }
            }
        }
        // For tracing
        Runtime.getRuntime().gc();
    }

    /** @return the current number of worker threads (running or not yet started). */
    @Override
    public int getSize() {
        return this.workerThreads.size();
    }

    /** Blocks on the shared queue and returns the next job; null means "shut down". */
    @Override
    public Execution getJob() {
        return this.queue.dequeue();
    }

    /**
     * Acquires the computing resources a job needs, delegating to the resource manager.
     *
     * @throws UnsufficientAvailableComputingUnitsException if the requirements cannot be met
     */
    @Override
    public InvocationResources acquireResources(int jobId, ResourceDescription requirements)
            throws UnsufficientAvailableComputingUnitsException {
        return this.rm.acquireResources(jobId, requirements);
    }

    /** Returns the resources previously acquired for the given job to the resource manager. */
    @Override
    public void releaseResources(int jobId) {
        this.rm.releaseResources(jobId);
    }

    /** @return the mirror registered for the given invoker class, or null if none. */
    @Override
    public ExecutionPlatformMirror getMirror(Class<?> invoker) {
        return this.mirrors.get(invoker);
    }

    /** Registers (or replaces) the mirror associated with the given invoker class. */
    @Override
    public void registerMirror(Class<?> invoker, ExecutionPlatformMirror mirror) {
        this.mirrors.put(invoker, mirror);
    }

    /** @return all registered mirrors; stopped and cleared by stop(). */
    @Override
    public Collection<ExecutionPlatformMirror> getMirrors() {
        return mirrors.values();
    }
}
| Dynamic Executor Bugfix
| compss/runtime/adaptors/execution/src/main/java/es/bsc/compss/executor/utils/ExecutionPlatform.java | Dynamic Executor Bugfix |
|
Java | apache-2.0 | 96317bdae38c9bea91c2ba32da14ace3272c0adb | 0 | tagbangers/wallride,tagbangers/wallride,tagbangers/wallride | /*
* Copyright 2014 Tagbangers, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wallride.service;
import org.apache.commons.lang.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.validation.BeanPropertyBindingResult;
import org.springframework.validation.BindException;
import org.springframework.validation.BindingResult;
import org.springframework.validation.MessageCodesResolver;
import org.wallride.autoconfigure.WallRideCacheConfiguration;
import org.wallride.autoconfigure.WallRideProperties;
import org.wallride.domain.*;
import org.wallride.exception.DuplicateCodeException;
import org.wallride.exception.EmptyCodeException;
import org.wallride.exception.NotNullException;
import org.wallride.exception.ServiceException;
import org.wallride.model.*;
import org.wallride.repository.*;
import org.wallride.support.AuthorizedUser;
import org.wallride.support.CodeFormatter;
import org.wallride.web.controller.admin.article.CustomFieldValueEditForm;
import javax.annotation.Resource;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import java.text.ParseException;
import java.time.LocalDateTime;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Application service that manages the full lifecycle of {@link Article} entities:
 * creation, draft handling, publishing/unpublishing (single and bulk), deletion,
 * and the various read/count queries used by the admin UI and public site.
 * All write operations evict the shared article cache.
 */
@Service
@Transactional(rollbackFor=Exception.class)
public class ArticleService {
	// Blog-level settings service (currently only referenced from commented-out code below).
	@Resource
	private BlogService blogService;
	// Repository over the Post superclass; used for pessimistic locks and code-uniqueness checks.
	@Resource
	private PostRepository postRepository;
	@Resource
	private ArticleRepository articleRepository;
	@Resource
	private TagRepository tagRepository;
	@Resource
	private MediaRepository mediaRepository;
	@Inject
	private MessageCodesResolver messageCodesResolver;
	// Used to run per-item REQUIRES_NEW transactions in bulkDeleteArticle.
	@Inject
	private PlatformTransactionManager transactionManager;
	// Supplies the media URL prefix used to extract media references from article bodies.
	@Inject
	private WallRideProperties wallRideProperties;
	@PersistenceContext
	private EntityManager entityManager;
	// NOTE(review): conventionally this would be `private static final`; left as-is here.
	private static Logger logger = LoggerFactory.getLogger(ArticleService.class);
/**
 * Creates and persists a new {@link Article} from the given request.
 * <p>
 * A missing code is derived from the title; non-draft articles must have a
 * unique, non-empty code per language. A PUBLISHED status with a future date
 * is demoted to SCHEDULED. Categories, tags (created on demand), related
 * posts, SEO data, referenced media and custom field values are all wired up
 * before saving.
 *
 * @param request        the creation payload
 * @param status         the initial {@link Post.Status} for the article
 * @param authorizedUser the acting user, recorded as author/creator
 * @return the persisted article
 * @throws EmptyCodeException     if a non-draft article has no code
 * @throws DuplicateCodeException if the code is already used in the language
 * @throws ServiceException       if code derivation from the title fails
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article createArticle(ArticleCreateRequest request, Post.Status status, AuthorizedUser authorizedUser) {
	LocalDateTime now = LocalDateTime.now();

	// Derive a code from the title when none was supplied explicitly.
	String code = request.getCode();
	if (code == null) {
		try {
			code = new CodeFormatter().parse(request.getTitle(), LocaleContextHolder.getLocale());
		} catch (ParseException e) {
			throw new ServiceException(e);
		}
	}
	// Only drafts may be saved without a code.
	if (!StringUtils.hasText(code)) {
		if (!status.equals(Post.Status.DRAFT)) {
			throw new EmptyCodeException();
		}
	}
	// Non-draft codes must be unique within the language.
	if (!status.equals(Post.Status.DRAFT)) {
		Post duplicate = postRepository.findOneByCodeAndLanguage(code, request.getLanguage());
		if (duplicate != null) {
			throw new DuplicateCodeException(code);
		}
	}

	Article article = new Article();
	// Drafts keep their code in draftedCode so the live code slot stays free.
	if (!status.equals(Post.Status.DRAFT)) {
		article.setCode(code);
		article.setDraftedCode(null);
	}
	else {
		article.setCode(null);
		article.setDraftedCode(code);
	}

	Media cover = null;
	if (request.getCoverId() != null) {
		cover = entityManager.getReference(Media.class, request.getCoverId());
	}
	article.setCover(cover);
	article.setTitle(request.getTitle());
	article.setBody(request.getBody());
	article.setAuthor(entityManager.getReference(User.class, authorizedUser.getId()));

	// A future publication date demotes PUBLISHED to SCHEDULED; a missing date means "now".
	LocalDateTime date = request.getDate();
	if (Post.Status.PUBLISHED.equals(status)) {
		if (date == null) {
			date = now;
		}
		else if (date.isAfter(now)) {
			status = Post.Status.SCHEDULED;
		}
	}
	article.setDate(date);
	article.setStatus(status);
	article.setLanguage(request.getLanguage());

	article.getCategories().clear();
	for (long categoryId : request.getCategoryIds()) {
		article.getCategories().add(entityManager.getReference(Category.class, categoryId));
	}

	article.getTags().clear();
	Set<String> tagNames = StringUtils.commaDelimitedListToSet(request.getTags());
	if (!CollectionUtils.isEmpty(tagNames)) {
		for (String tagName : tagNames) {
			Tag tag = tagRepository.findOneForUpdateByNameAndLanguage(tagName, request.getLanguage());
			if (tag == null) {
				tag = new Tag();
				tag.setName(tagName);
				tag.setLanguage(request.getLanguage());
				// BUGFIX: audit columns must be stamped on the new tag, not on the
				// article (the previous code called article.setCreatedAt(...) etc.
				// here, leaving new tags without audit data).
				tag.setCreatedAt(now);
				tag.setCreatedBy(authorizedUser.toString());
				tag.setUpdatedAt(now);
				tag.setUpdatedBy(authorizedUser.toString());
				tag = tagRepository.saveAndFlush(tag);
			}
			article.getTags().add(tag);
		}
	}

	article.getRelatedPosts().clear();
	Set<Post> relatedPosts = new HashSet<>();
	for (long relatedId : request.getRelatedPostIds()) {
		relatedPosts.add(entityManager.getReference(Post.class, relatedId));
	}
	article.setRelatedToPosts(relatedPosts);

	Seo seo = new Seo();
	seo.setTitle(request.getSeoTitle());
	seo.setDescription(request.getSeoDescription());
	seo.setKeywords(request.getSeoKeywords());
	article.setSeo(seo);

	// Collect every media entity whose URL (mediaUrlPrefix + id) appears in the body.
	List<Media> medias = new ArrayList<>();
	if (StringUtils.hasText(request.getBody())) {
		// Blog blog = blogService.getBlogById(Blog.DEFAULT_ID);
		String mediaUrlPrefix = wallRideProperties.getMediaUrlPrefix();
		Pattern mediaUrlPattern = Pattern.compile(String.format("%s([0-9a-zA-Z\\-]+)", mediaUrlPrefix));
		Matcher mediaUrlMatcher = mediaUrlPattern.matcher(request.getBody());
		while (mediaUrlMatcher.find()) {
			Media media = mediaRepository.findOneById(mediaUrlMatcher.group(1));
			medias.add(media);
		}
	}
	article.setMedias(medias);

	article.setCreatedAt(now);
	article.setCreatedBy(authorizedUser.toString());
	article.setUpdatedAt(now);
	article.setUpdatedBy(authorizedUser.toString());

	// Rebuild custom field values; empty values are dropped.
	article.getCustomFieldValues().clear();
	if (!CollectionUtils.isEmpty(request.getCustomFieldValues())) {
		for (CustomFieldValueEditForm valueForm : request.getCustomFieldValues()) {
			CustomFieldValue value = new CustomFieldValue();
			value.setCustomField(entityManager.getReference(CustomField.class, valueForm.getCustomFieldId()));
			value.setPost(article);
			// Checkbox fields store their selections as a comma-joined string.
			if (valueForm.getFieldType().equals(CustomField.FieldType.CHECKBOX)) {
				if (!ArrayUtils.isEmpty(valueForm.getTextValues())) {
					value.setTextValue(String.join(",", valueForm.getTextValues()));
				} else {
					value.setTextValue(null);
				}
			} else {
				value.setTextValue(valueForm.getTextValue());
			}
			value.setStringValue(valueForm.getStringValue());
			value.setNumberValue(valueForm.getNumberValue());
			value.setDateValue(valueForm.getDateValue());
			value.setDatetimeValue(valueForm.getDatetimeValue());
			if (!value.isEmpty()) {
				article.getCustomFieldValues().add(value);
			}
		}
	}
	return articleRepository.save(article);
}
/**
 * Saves the request as a draft without disturbing a live (published) article.
 * <p>
 * If the target article is already a draft, the update is applied in place.
 * Otherwise the changes go to a separate draft entity linked to the published
 * article via {@code setDrafted(...)}: one is created on first save and
 * updated on subsequent saves.
 *
 * @param request        the update payload (id identifies the live article)
 * @param authorizedUser the acting user
 * @return the saved draft (or the updated article when it was already a draft)
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article saveArticleAsDraft(ArticleUpdateRequest request, AuthorizedUser authorizedUser) {
	postRepository.lock(request.getId());
	Article article = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
	if (!article.getStatus().equals(Post.Status.DRAFT)) {
		// The article is live: look for an existing draft shadowing it.
		Article draft = articleRepository.findOne(ArticleSpecifications.draft(article));
		if (draft == null) {
			// No draft yet — create one from the request payload and link it back.
			ArticleCreateRequest createRequest = new ArticleCreateRequest.Builder()
					.code(request.getCode())
					.coverId(request.getCoverId())
					.title(request.getTitle())
					.body(request.getBody())
					.authorId(request.getAuthorId())
					.date(request.getDate())
					.categoryIds(request.getCategoryIds())
					.tags(request.getTags())
					.seoTitle(request.getSeoTitle())
					.seoDescription(request.getSeoDescription())
					.seoKeywords(request.getSeoKeywords())
					.customFieldValues(new ArrayList<>(request.getCustomFieldValues()))
					.language(request.getLanguage())
					.build();
			draft = createArticle(createRequest, Post.Status.DRAFT, authorizedUser);
			draft.setDrafted(article);
			return articleRepository.save(draft);
		}
		else {
			// A draft already exists — redirect the update to the draft's id.
			ArticleUpdateRequest updateRequest = new ArticleUpdateRequest.Builder()
					.id(draft.getId())
					.code(request.getCode())
					.coverId(request.getCoverId())
					.title(request.getTitle())
					.body(request.getBody())
					.authorId(request.getAuthorId())
					.date(request.getDate())
					.categoryIds(request.getCategoryIds())
					.tags(request.getTags())
					.seoTitle(request.getSeoTitle())
					.seoDescription(request.getSeoDescription())
					.seoKeywords(request.getSeoKeywords())
					.customFieldValues(request.getCustomFieldValues())
					.language(request.getLanguage())
					.build();
			return saveArticle(updateRequest, authorizedUser);
		}
	}
	else {
		// Already a draft — update it directly.
		return saveArticle(request, authorizedUser);
	}
}
/**
 * Publishes the article identified by the request and applies the update.
 *
 * @param request        the update payload
 * @param authorizedUser the acting user
 * @return the saved article
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article saveArticleAsPublished(ArticleUpdateRequest request, AuthorizedUser authorizedUser) {
	postRepository.lock(request.getId());
	Article target = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
	publishArticle(target);
	return saveArticle(request, authorizedUser);
}

/**
 * Marks the article PUBLISHED, removing any shadow draft and detaching the
 * drafted link before saving.
 */
private Article publishArticle(Article article) {
	// A published article must not keep a stale draft around.
	Article staleDraft = getDraftById(article.getId());
	if (staleDraft != null) {
		articleRepository.delete(staleDraft);
	}
	article.setDrafted(null);
	article.setStatus(Post.Status.PUBLISHED);
	return articleRepository.save(article);
}

/**
 * Unpublishes the article identified by the request and applies the update.
 *
 * @param request        the update payload
 * @param authorizedUser the acting user
 * @return the saved article
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article saveArticleAsUnpublished(ArticleUpdateRequest request, AuthorizedUser authorizedUser) {
	postRepository.lock(request.getId());
	Article target = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
	unpublishArticle(target);
	return saveArticle(request, authorizedUser);
}

/**
 * Demotes the article to DRAFT, removing any shadow draft and detaching the
 * drafted link before saving.
 */
private Article unpublishArticle(Article article) {
	Article staleDraft = getDraftById(article.getId());
	if (staleDraft != null) {
		articleRepository.delete(staleDraft);
	}
	article.setDrafted(null);
	article.setStatus(Post.Status.DRAFT);
	return articleRepository.save(article);
}
/**
 * Applies the given update to an existing {@link Article}.
 * <p>
 * Mirrors {@link #createArticle} for code derivation, uniqueness checks,
 * status/date adjustment, categories, tags, related posts, SEO, media and
 * custom fields — but reuses existing {@link CustomFieldValue} rows where
 * possible and only touches the updated-* audit columns.
 *
 * @param request        the update payload (id identifies the article)
 * @param authorizedUser the acting user, recorded as updater
 * @return the persisted article
 * @throws EmptyCodeException     if a non-draft article has no code
 * @throws DuplicateCodeException if the code is used by a different post
 * @throws ServiceException       if code derivation from the title fails
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article saveArticle(ArticleUpdateRequest request, AuthorizedUser authorizedUser) {
	postRepository.lock(request.getId());
	Article article = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
	LocalDateTime now = LocalDateTime.now();

	// Derive a code from the title when none was supplied explicitly.
	String code = request.getCode();
	if (code == null) {
		try {
			code = new CodeFormatter().parse(request.getTitle(), LocaleContextHolder.getLocale());
		} catch (ParseException e) {
			throw new ServiceException(e);
		}
	}
	// Only drafts may be saved without a code.
	if (!StringUtils.hasText(code)) {
		if (!article.getStatus().equals(Post.Status.DRAFT)) {
			throw new EmptyCodeException();
		}
	}
	// The code must be unique within the language, excluding this article itself.
	if (!article.getStatus().equals(Post.Status.DRAFT)) {
		Post duplicate = postRepository.findOneByCodeAndLanguage(code, request.getLanguage());
		if (duplicate != null && !duplicate.equals(article)) {
			throw new DuplicateCodeException(code);
		}
	}
	// Drafts keep their code in draftedCode so the live code slot stays free.
	if (!article.getStatus().equals(Post.Status.DRAFT)) {
		article.setCode(code);
		article.setDraftedCode(null);
	}
	else {
		article.setCode(null);
		article.setDraftedCode(code);
	}

	Media cover = null;
	if (request.getCoverId() != null) {
		cover = entityManager.getReference(Media.class, request.getCoverId());
	}
	article.setCover(cover);
	article.setTitle(request.getTitle());
	article.setBody(request.getBody());
//	User author = null;
//	if (request.getAuthorId() != null) {
//		author = entityManager.getReference(User.class, request.getAuthorId());
//	}
//	article.setAuthor(author);

	// For non-drafts: no date means "now"; a future date schedules, otherwise publish.
	LocalDateTime date = request.getDate();
	if (!Post.Status.DRAFT.equals(article.getStatus())) {
		if (date == null) {
			date = now;
		} else if (date.isAfter(now)) {
			article.setStatus(Post.Status.SCHEDULED);
		} else {
			article.setStatus(Post.Status.PUBLISHED);
		}
	}
	article.setDate(date);
	article.setLanguage(request.getLanguage());

	article.getCategories().clear();
	for (long categoryId : request.getCategoryIds()) {
		article.getCategories().add(entityManager.getReference(Category.class, categoryId));
	}

	article.getTags().clear();
	Set<String> tagNames = StringUtils.commaDelimitedListToSet(request.getTags());
	if (!CollectionUtils.isEmpty(tagNames)) {
		for (String tagName : tagNames) {
			Tag tag = tagRepository.findOneForUpdateByNameAndLanguage(tagName, request.getLanguage());
			if (tag == null) {
				tag = new Tag();
				tag.setName(tagName);
				tag.setLanguage(request.getLanguage());
				// BUGFIX: audit columns must be stamped on the new tag, not on the
				// article being updated (the previous code called
				// article.setCreatedAt(...) etc. here, which also corrupted the
				// article's created-* audit fields on every new tag).
				tag.setCreatedAt(now);
				tag.setCreatedBy(authorizedUser.toString());
				tag.setUpdatedAt(now);
				tag.setUpdatedBy(authorizedUser.toString());
				tag = tagRepository.saveAndFlush(tag);
			}
			article.getTags().add(tag);
		}
	}

	article.getRelatedPosts().clear();
	Set<Post> relatedPosts = new HashSet<>();
	for (long relatedId : request.getRelatedPostIds()) {
		relatedPosts.add(entityManager.getReference(Post.class, relatedId));
	}
	article.setRelatedToPosts(relatedPosts);

	Seo seo = new Seo();
	seo.setTitle(request.getSeoTitle());
	seo.setDescription(request.getSeoDescription());
	seo.setKeywords(request.getSeoKeywords());
	article.setSeo(seo);

	// Collect every media entity whose URL (mediaUrlPrefix + id) appears in the body.
	List<Media> medias = new ArrayList<>();
	if (StringUtils.hasText(request.getBody())) {
		// Blog blog = blogService.getBlogById(Blog.DEFAULT_ID);
		String mediaUrlPrefix = wallRideProperties.getMediaUrlPrefix();
		Pattern mediaUrlPattern = Pattern.compile(String.format("%s([0-9a-zA-Z\\-]+)", mediaUrlPrefix));
		Matcher mediaUrlMatcher = mediaUrlPattern.matcher(request.getBody());
		while (mediaUrlMatcher.find()) {
			Media media = mediaRepository.findOneById(mediaUrlMatcher.group(1));
			medias.add(media);
		}
	}
	article.setMedias(medias);

	article.setUpdatedAt(now);
	article.setUpdatedBy(authorizedUser.toString());

	// Rebuild custom field values, reusing existing rows keyed by custom field.
	Map<CustomField, CustomFieldValue> valueMap = new LinkedHashMap<>();
	for (CustomFieldValue value : article.getCustomFieldValues()) {
		valueMap.put(value.getCustomField(), value);
	}
	article.getCustomFieldValues().clear();
	if (!CollectionUtils.isEmpty(request.getCustomFieldValues())) {
		for (CustomFieldValueEditForm valueForm : request.getCustomFieldValues()) {
			CustomField customField = entityManager.getReference(CustomField.class, valueForm.getCustomFieldId());
			CustomFieldValue value = valueMap.get(customField);
			if (value == null) {
				value = new CustomFieldValue();
			}
			value.setCustomField(customField);
			value.setPost(article);
			// Checkbox fields store their selections as a comma-joined string.
			if (valueForm.getFieldType().equals(CustomField.FieldType.CHECKBOX)) {
				if (!ArrayUtils.isEmpty(valueForm.getTextValues())) {
					value.setTextValue(String.join(",", valueForm.getTextValues()));
				} else {
					value.setTextValue(null);
				}
			} else {
				value.setTextValue(valueForm.getTextValue());
			}
			value.setStringValue(valueForm.getStringValue());
			value.setNumberValue(valueForm.getNumberValue());
			value.setDateValue(valueForm.getDateValue());
			value.setDatetimeValue(valueForm.getDatetimeValue());
			if (!value.isEmpty()) {
				article.getCustomFieldValues().add(value);
			}
		}
	}
	return articleRepository.save(article);
}
/**
 * Deletes the article identified by the request under a pessimistic lock.
 * <p>
 * The {@code result} parameter and declared {@link BindException} are part of
 * the public contract (callers such as {@code bulkDeleteArticle} pass a
 * binding result), but this implementation neither reads the result nor
 * throws the exception.
 *
 * @param request the delete payload (id + language)
 * @param result  binding result supplied by callers; currently unused
 * @return the deleted article
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article deleteArticle(ArticleDeleteRequest request, BindingResult result) throws BindException {
	postRepository.lock(request.getId());
	Article article = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
	articleRepository.delete(article);
	return article;
}
/**
 * Publishes several articles at once.
 * <p>
 * Non-draft articles are skipped unless an explicit date is supplied.
 * Each article must have a code, title and body. After
 * {@code publishArticle(...)} sets the status to PUBLISHED, the status is
 * re-adjusted to SCHEDULED when the effective date lies in the future.
 *
 * @param request        ids, language and an optional common publication date
 * @param authorizedUser the acting user, recorded as updater
 * @return the articles that were actually published (skipped ids excluded)
 * @throws NotNullException if a selected article lacks code, title or body
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public List<Article> bulkPublishArticle(ArticleBulkPublishRequest request, AuthorizedUser authorizedUser) {
	List<Article> articles = new ArrayList<>();
	for (long id : request.getIds()) {
		postRepository.lock(id);
		Article article = articleRepository.findOneByIdAndLanguage(id, request.getLanguage());
		// Already published/scheduled items are only re-processed when a date override is given.
		if (article.getStatus() != Post.Status.DRAFT && request.getDate() == null) {
			continue;
		}
		if (!StringUtils.hasText(article.getCode())) {
			throw new NotNullException();
		}
		if (!StringUtils.hasText(article.getTitle())) {
			throw new NotNullException();
		}
		if (!StringUtils.hasText(article.getBody())) {
			throw new NotNullException();
		}
		LocalDateTime now = LocalDateTime.now();
		// Date precedence: request override > article's own date > now.
		LocalDateTime date = article.getDate();
		if (request.getDate() != null) {
			date = request.getDate();
		}
		if (date == null) {
			date = now;
		}
		article.setDate(date);
		article.setUpdatedAt(now);
		article.setUpdatedBy(authorizedUser.toString());
		// publishArticle saves with status PUBLISHED; the status is corrected
		// below and saved again when the date is in the future.
		article = publishArticle(article);
		if (article.getDate().isAfter(now)) {
			article.setStatus(Post.Status.SCHEDULED);
		} else {
			article.setStatus(Post.Status.PUBLISHED);
		}
		article = articleRepository.saveAndFlush(article);
		articles.add(article);
	}
	return articles;
}
/**
 * Demotes several published articles to drafts in one operation.
 * Articles that are already drafts are skipped.
 *
 * @param request        ids and language of the articles to unpublish
 * @param authorizedUser the acting user, recorded as updater
 * @return the articles that were actually unpublished
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public List<Article> bulkUnpublishArticle(ArticleBulkUnpublishRequest request, AuthorizedUser authorizedUser) {
	List<Article> unpublished = new ArrayList<>();
	for (long id : request.getIds()) {
		postRepository.lock(id);
		Article article = articleRepository.findOneByIdAndLanguage(id, request.getLanguage());
		if (article.getStatus() == Post.Status.DRAFT) {
			// Nothing to do for articles that are already drafts.
			continue;
		}
		article.setUpdatedAt(LocalDateTime.now());
		article.setUpdatedBy(authorizedUser.toString());
		unpublished.add(unpublishArticle(article));
	}
	return unpublished;
}
/**
 * Deletes several articles, each in its own REQUIRES_NEW transaction so one
 * failure does not roll back the others. Errors from a failed deletion are
 * copied into the caller's {@code result}.
 *
 * @param bulkDeleteRequest ids and language of the articles to delete
 * @param result            receives binding errors from failed deletions
 * @return the articles that were successfully deleted
 */
@Transactional(propagation=Propagation.NOT_SUPPORTED)
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public List<Article> bulkDeleteArticle(ArticleBulkDeleteRequest bulkDeleteRequest, BindingResult result) {
	List<Article> deleted = new ArrayList<>();
	for (long id : bulkDeleteRequest.getIds()) {
		final ArticleDeleteRequest deleteRequest = new ArticleDeleteRequest.Builder()
				.id(id)
				.language(bulkDeleteRequest.getLanguage())
				.build();
		final BeanPropertyBindingResult bindingResult =
				new BeanPropertyBindingResult(deleteRequest, "request");
		bindingResult.setMessageCodesResolver(messageCodesResolver);

		// Each deletion runs in its own new transaction.
		TransactionTemplate template = new TransactionTemplate(transactionManager);
		template.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
		try {
			Article article = template.execute(status -> {
				try {
					return deleteArticle(deleteRequest, bindingResult);
				} catch (BindException e) {
					// Tunnel the checked exception out of the callback.
					throw new RuntimeException(e);
				}
			});
			deleted.add(article);
		} catch (Exception e) {
			logger.debug("Errors: {}", bindingResult);
			result.addAllErrors(bindingResult);
		}
	}
	return deleted;
}
/**
 * Returns only the ids of the articles matching the search request.
 */
public List<Long> getArticleIds(ArticleSearchRequest request) {
	return articleRepository.searchForId(request);
}

/**
 * Searches articles with a default page (first page, 10 items).
 */
public Page<Article> getArticles(ArticleSearchRequest request) {
	Pageable pageable = new PageRequest(0, 10);
	return getArticles(request, pageable);
}

/**
 * Searches articles with explicit paging; results are cached per
 * request/pageable combination.
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public Page<Article> getArticles(ArticleSearchRequest request, Pageable pageable) {
	return articleRepository.search(request, pageable);
}
/**
 * Loads the articles for the given ids, preserving the order of {@code ids}.
 * Ids with no matching article are silently skipped.
 *
 * @param ids the article ids, in the desired output order
 * @return the matching articles, ordered like {@code ids}
 */
public List<Article> getArticles(Collection<Long> ids) {
	Set<Article> results = new LinkedHashSet<Article>(articleRepository.findAllByIdIn(ids));
	// Index the results by id once instead of rescanning the whole result
	// set for every requested id (O(n + m) instead of O(n * m)).
	Map<Long, Article> articlesById = new HashMap<>();
	for (Article article : results) {
		articlesById.put(article.getId(), article);
	}
	List<Article> articles = new ArrayList<>();
	for (long id : ids) {
		Article article = articlesById.get(id);
		if (article != null) {
			articles.add(article);
		}
	}
	return articles;
}
/**
 * Returns articles in the given category, using the historical default page
 * size of 10.
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public SortedSet<Article> getArticlesByCategoryCode(String language, String code, Post.Status status) {
	return getArticlesByCategoryCode(language, code, status, 10);
}

/**
 * Returns up to {@code size} articles in the given category, sorted by the
 * articles' natural ordering.
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public SortedSet<Article> getArticlesByCategoryCode(String language, String code, Post.Status status, int size) {
	ArticleSearchRequest searchRequest = new ArticleSearchRequest()
			.withLanguage(language)
			.withCategoryCodes(code)
			.withStatus(status);
	Page<Article> found = articleRepository.search(searchRequest, new PageRequest(0, size));
	return new TreeSet<>(found.getContent());
}

/**
 * Returns up to {@code size} of the most recent articles for the language,
 * sorted by the articles' natural ordering.
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public SortedSet<Article> getLatestArticles(String language, Post.Status status, int size) {
	ArticleSearchRequest searchRequest = new ArticleSearchRequest()
			.withLanguage(language)
			.withStatus(status);
	Page<Article> found = articleRepository.search(searchRequest, new PageRequest(0, size));
	return new TreeSet<>(found.getContent());
}

/** Looks up an article by id, any language. */
public Article getArticleById(long id) {
	return articleRepository.findOneById(id);
}

/** Looks up an article by id and language. */
public Article getArticleById(long id, String language) {
	return articleRepository.findOneByIdAndLanguage(id, language);
}

/** Looks up an article by its public code and language; result is cached. */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public Article getArticleByCode(String code, String language) {
	return articleRepository.findOneByCodeAndLanguage(code, language);
}

/** Returns the draft shadowing the article with the given id, or null. */
public Article getDraftById(long id) {
	Article published = entityManager.getReference(Article.class, id);
	return articleRepository.findOne(ArticleSpecifications.draft(published));
}
/** Total number of articles for the given language. */
public long countArticles(String language) {
	return articleRepository.count(language);
}

/** Number of articles with the given status for the given language. */
public long countArticlesByStatus(Post.Status status, String language) {
	return articleRepository.countByStatus(status, language);
}

/** Article counts keyed by author (user) id. */
public Map<Long, Long> countArticlesByAuthorIdGrouped(Post.Status status, String language) {
	return toCountMap(articleRepository.countByAuthorIdGrouped(status, language), "userId");
}

/** Article counts keyed by category id. */
public Map<Long, Long> countArticlesByCategoryIdGrouped(Post.Status status, String language) {
	return toCountMap(articleRepository.countByCategoryIdGrouped(status, language), "categoryId");
}

/** Article counts keyed by tag id. */
public Map<Long, Long> countArticlesByTagIdGrouped(Post.Status status, String language) {
	return toCountMap(articleRepository.countByTagIdGrouped(status, language), "tagId");
}

/**
 * Converts grouped count rows ({@code {<idKey>: id, "count": n}}) into an
 * id-to-count map. Shared by the three grouped counters above.
 */
private Map<Long, Long> toCountMap(List<Map<String, Object>> rows, String idKey) {
	Map<Long, Long> counts = new HashMap<>();
	for (Map<String, Object> row : rows) {
		counts.put((Long) row.get(idKey), (Long) row.get("count"));
	}
	return counts;
}
}
| wallride-core/src/main/java/org/wallride/service/ArticleService.java | /*
* Copyright 2014 Tagbangers, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wallride.service;
import org.apache.commons.lang.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.validation.BeanPropertyBindingResult;
import org.springframework.validation.BindException;
import org.springframework.validation.BindingResult;
import org.springframework.validation.MessageCodesResolver;
import org.wallride.autoconfigure.WallRideCacheConfiguration;
import org.wallride.autoconfigure.WallRideProperties;
import org.wallride.domain.*;
import org.wallride.exception.DuplicateCodeException;
import org.wallride.exception.EmptyCodeException;
import org.wallride.exception.NotNullException;
import org.wallride.exception.ServiceException;
import org.wallride.model.*;
import org.wallride.repository.*;
import org.wallride.support.AuthorizedUser;
import org.wallride.support.CodeFormatter;
import org.wallride.web.controller.admin.article.CustomFieldValueEditForm;
import javax.annotation.Resource;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import java.text.ParseException;
import java.time.LocalDateTime;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Service
@Transactional(rollbackFor=Exception.class)
public class ArticleService {
@Resource
private BlogService blogService;
@Resource
private PostRepository postRepository;
@Resource
private ArticleRepository articleRepository;
@Resource
private TagRepository tagRepository;
@Resource
private MediaRepository mediaRepository;
@Inject
private MessageCodesResolver messageCodesResolver;
@Inject
private PlatformTransactionManager transactionManager;
@Inject
private WallRideProperties wallRideProperties;
@PersistenceContext
private EntityManager entityManager;
private static Logger logger = LoggerFactory.getLogger(ArticleService.class);
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article createArticle(ArticleCreateRequest request, Post.Status status, AuthorizedUser authorizedUser) {
LocalDateTime now = LocalDateTime.now();
String code = request.getCode();
if (code == null) {
try {
code = new CodeFormatter().parse(request.getTitle(), LocaleContextHolder.getLocale());
} catch (ParseException e) {
throw new ServiceException(e);
}
}
if (!StringUtils.hasText(code)) {
if (!status.equals(Post.Status.DRAFT)) {
throw new EmptyCodeException();
}
}
if (!status.equals(Post.Status.DRAFT)) {
Post duplicate = postRepository.findOneByCodeAndLanguage(code, request.getLanguage());
if (duplicate != null) {
throw new DuplicateCodeException(code);
}
}
Article article = new Article();
if (!status.equals(Post.Status.DRAFT)) {
article.setCode(code);
article.setDraftedCode(null);
}
else {
article.setCode(null);
article.setDraftedCode(code);
}
Media cover = null;
if (request.getCoverId() != null) {
cover = entityManager.getReference(Media.class, request.getCoverId());
}
article.setCover(cover);
article.setTitle(request.getTitle());
article.setBody(request.getBody());
article.setAuthor(entityManager.getReference(User.class, authorizedUser.getId()));
LocalDateTime date = request.getDate();
if (Post.Status.PUBLISHED.equals(status)) {
if (date == null) {
date = now;
}
else if (date.isAfter(now)) {
status = Post.Status.SCHEDULED;
}
}
article.setDate(date);
article.setStatus(status);
article.setLanguage(request.getLanguage());
article.getCategories().clear();
for (long categoryId : request.getCategoryIds()) {
article.getCategories().add(entityManager.getReference(Category.class, categoryId));
}
article.getTags().clear();
Set<String> tagNames = StringUtils.commaDelimitedListToSet(request.getTags());
if (!CollectionUtils.isEmpty(tagNames)) {
for (String tagName : tagNames) {
Tag tag = tagRepository.findOneForUpdateByNameAndLanguage(tagName, request.getLanguage());
if (tag == null) {
tag = new Tag();
tag.setName(tagName);
tag.setLanguage(request.getLanguage());
article.setCreatedAt(now);
article.setCreatedBy(authorizedUser.toString());
article.setUpdatedAt(now);
article.setUpdatedBy(authorizedUser.toString());
tag = tagRepository.saveAndFlush(tag);
}
article.getTags().add(tag);
}
}
article.getRelatedPosts().clear();
Set<Post> relatedPosts = new HashSet<>();
for (long relatedId : request.getRelatedPostIds()) {
relatedPosts.add(entityManager.getReference(Post.class, relatedId));
}
article.setRelatedToPosts(relatedPosts);
Seo seo = new Seo();
seo.setTitle(request.getSeoTitle());
seo.setDescription(request.getSeoDescription());
seo.setKeywords(request.getSeoKeywords());
article.setSeo(seo);
List<Media> medias = new ArrayList<>();
if (StringUtils.hasText(request.getBody())) {
// Blog blog = blogService.getBlogById(Blog.DEFAULT_ID);
String mediaUrlPrefix = wallRideProperties.getMediaUrlPrefix();
Pattern mediaUrlPattern = Pattern.compile(String.format("%s([0-9a-zA-Z\\-]+)", mediaUrlPrefix));
Matcher mediaUrlMatcher = mediaUrlPattern.matcher(request.getBody());
while (mediaUrlMatcher.find()) {
Media media = mediaRepository.findOneById(mediaUrlMatcher.group(1));
medias.add(media);
}
}
article.setMedias(medias);
article.setCreatedAt(now);
article.setCreatedBy(authorizedUser.toString());
article.setUpdatedAt(now);
article.setUpdatedBy(authorizedUser.toString());
article.getCustomFieldValues().clear();
if (!CollectionUtils.isEmpty(request.getCustomFieldValues())) {
for (CustomFieldValueEditForm valueForm : request.getCustomFieldValues()) {
CustomFieldValue value = new CustomFieldValue();
value.setCustomField(entityManager.getReference(CustomField.class, valueForm.getCustomFieldId()));
value.setPost(article);
if (valueForm.getFieldType().equals(CustomField.FieldType.CHECKBOX)) {
if (!ArrayUtils.isEmpty(valueForm.getTextValues())) {
value.setTextValue(String.join(",", valueForm.getTextValues()));
} else {
value.setTextValue(null);
}
} else {
value.setTextValue(valueForm.getTextValue());
}
value.setStringValue(valueForm.getStringValue());
value.setNumberValue(valueForm.getNumberValue());
value.setDateValue(valueForm.getDateValue());
value.setDatetimeValue(valueForm.getDatetimeValue());
if (!value.isEmpty()) {
article.getCustomFieldValues().add(value);
}
}
}
return articleRepository.save(article);
}
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article saveArticleAsDraft(ArticleUpdateRequest request, AuthorizedUser authorizedUser) {
postRepository.lock(request.getId());
Article article = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
if (!article.getStatus().equals(Post.Status.DRAFT)) {
Article draft = articleRepository.findOne(ArticleSpecifications.draft(article));
if (draft == null) {
ArticleCreateRequest createRequest = new ArticleCreateRequest.Builder()
.code(request.getCode())
.coverId(request.getCoverId())
.title(request.getTitle())
.body(request.getBody())
.authorId(request.getAuthorId())
.date(request.getDate())
.categoryIds(request.getCategoryIds())
.tags(request.getTags())
.seoTitle(request.getSeoTitle())
.seoDescription(request.getSeoDescription())
.seoKeywords(request.getSeoKeywords())
.customFieldValues(new ArrayList<>(request.getCustomFieldValues()))
.language(request.getLanguage())
.build();
draft = createArticle(createRequest, Post.Status.DRAFT, authorizedUser);
draft.setDrafted(article);
return articleRepository.save(draft);
}
else {
ArticleUpdateRequest updateRequest = new ArticleUpdateRequest.Builder()
.id(draft.getId())
.code(request.getCode())
.coverId(request.getCoverId())
.title(request.getTitle())
.body(request.getBody())
.authorId(request.getAuthorId())
.date(request.getDate())
.categoryIds(request.getCategoryIds())
.tags(request.getTags())
.seoTitle(request.getSeoTitle())
.seoDescription(request.getSeoDescription())
.seoKeywords(request.getSeoKeywords())
.customFieldValues(request.getCustomFieldValues())
.language(request.getLanguage())
.build();
return saveArticle(updateRequest, authorizedUser);
}
}
else {
return saveArticle(request, authorizedUser);
}
}
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article saveArticleAsPublished(ArticleUpdateRequest request, AuthorizedUser authorizedUser) {
postRepository.lock(request.getId());
Article article = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
publishArticle(article);
return saveArticle(request, authorizedUser);
}
private Article publishArticle(Article article) {
Article deleteTarget = getDraftById(article.getId());
if (deleteTarget != null) {
articleRepository.delete(deleteTarget);
}
article.setDrafted(null);
article.setStatus(Post.Status.PUBLISHED);
Article published = articleRepository.save(article);
return published;
}
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article saveArticleAsUnpublished(ArticleUpdateRequest request, AuthorizedUser authorizedUser) {
postRepository.lock(request.getId());
Article article = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
unpublishArticle(article);
return saveArticle(request, authorizedUser);
}
private Article unpublishArticle(Article article) {
Article deleteTarget = getDraftById(article.getId());
if (deleteTarget != null) {
articleRepository.delete(deleteTarget);
}
article.setDrafted(null);
article.setStatus(Post.Status.DRAFT);
Article unpublished = articleRepository.save(article);
return unpublished;
}
/**
 * Persists the full state of an article from the edit form: code, cover,
 * title/body, publication date/status, categories, tags, related posts,
 * SEO fields, referenced media and custom field values.
 *
 * @param request the update request (id + language identify the article)
 * @param authorizedUser the acting user, recorded in audit fields
 * @return the saved article
 * @throws EmptyCodeException if a non-draft article ends up with a blank code
 * @throws DuplicateCodeException if another post already uses the code
 * @throws ServiceException if the code cannot be derived from the title
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article saveArticle(ArticleUpdateRequest request, AuthorizedUser authorizedUser) {
    postRepository.lock(request.getId());
    Article article = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
    LocalDateTime now = LocalDateTime.now();

    // Derive a code from the title when none was supplied.
    String code = request.getCode();
    if (code == null) {
        try {
            code = new CodeFormatter().parse(request.getTitle(), LocaleContextHolder.getLocale());
        } catch (ParseException e) {
            throw new ServiceException(e);
        }
    }
    // Only drafts may have an empty code.
    if (!StringUtils.hasText(code)) {
        if (!article.getStatus().equals(Post.Status.DRAFT)) {
            throw new EmptyCodeException();
        }
    }
    // Non-drafts must not collide with another post's code.
    if (!article.getStatus().equals(Post.Status.DRAFT)) {
        Post duplicate = postRepository.findOneByCodeAndLanguage(code, request.getLanguage());
        if (duplicate != null && !duplicate.equals(article)) {
            throw new DuplicateCodeException(code);
        }
    }
    // Drafts keep the code in draftedCode; published articles in code.
    if (!article.getStatus().equals(Post.Status.DRAFT)) {
        article.setCode(code);
        article.setDraftedCode(null);
    }
    else {
        article.setCode(null);
        article.setDraftedCode(code);
    }

    Media cover = null;
    if (request.getCoverId() != null) {
        cover = entityManager.getReference(Media.class, request.getCoverId());
    }
    article.setCover(cover);
    article.setTitle(request.getTitle());
    article.setBody(request.getBody());

//    User author = null;
//    if (request.getAuthorId() != null) {
//        author = entityManager.getReference(User.class, request.getAuthorId());
//    }
//    article.setAuthor(author);

    // For non-drafts, the date decides SCHEDULED (future) vs PUBLISHED (past/now).
    LocalDateTime date = request.getDate();
    if (!Post.Status.DRAFT.equals(article.getStatus())) {
        if (date == null) {
            date = now;
        } else if (date.isAfter(now)) {
            article.setStatus(Post.Status.SCHEDULED);
        } else {
            article.setStatus(Post.Status.PUBLISHED);
        }
    }
    article.setDate(date);
    article.setLanguage(request.getLanguage());

    article.getCategories().clear();
    for (long categoryId : request.getCategoryIds()) {
        article.getCategories().add(entityManager.getReference(Category.class, categoryId));
    }

    article.getTags().clear();
    Set<String> tagNames = StringUtils.commaDelimitedListToSet(request.getTags());
    if (!CollectionUtils.isEmpty(tagNames)) {
        for (String tagName : tagNames) {
            Tag tag = tagRepository.findOneForUpdateByNameAndLanguage(tagName, request.getLanguage());
            if (tag == null) {
                tag = new Tag();
                tag.setName(tagName);
                tag.setLanguage(request.getLanguage());
                // BUGFIX: audit fields were previously set on `article` here instead
                // of the freshly created tag, leaving the new tag without audit data
                // and prematurely stamping the article as created "now".
                // Assumes Tag exposes the same audit setters — it is saved below.
                tag.setCreatedAt(now);
                tag.setCreatedBy(authorizedUser.toString());
                tag.setUpdatedAt(now);
                tag.setUpdatedBy(authorizedUser.toString());
                tag = tagRepository.saveAndFlush(tag);
            }
            article.getTags().add(tag);
        }
    }

    article.getRelatedPosts().clear();
    Set<Post> relatedPosts = new HashSet<>();
    for (long relatedId : request.getRelatedPostIds()) {
        relatedPosts.add(entityManager.getReference(Post.class, relatedId));
    }
    article.setRelatedToPosts(relatedPosts);

    Seo seo = new Seo();
    seo.setTitle(request.getSeoTitle());
    seo.setDescription(request.getSeoDescription());
    seo.setKeywords(request.getSeoKeywords());
    article.setSeo(seo);

    // Collect every media entity referenced by URL inside the body.
    List<Media> medias = new ArrayList<>();
    if (StringUtils.hasText(request.getBody())) {
//        Blog blog = blogService.getBlogById(Blog.DEFAULT_ID);
        String mediaUrlPrefix = wallRideProperties.getMediaUrlPrefix();
        Pattern mediaUrlPattern = Pattern.compile(String.format("%s([0-9a-zA-Z\\-]+)", mediaUrlPrefix));
        Matcher mediaUrlMatcher = mediaUrlPattern.matcher(request.getBody());
        while (mediaUrlMatcher.find()) {
            Media media = mediaRepository.findOneById(mediaUrlMatcher.group(1));
            medias.add(media);
        }
    }
    article.setMedias(medias);

    article.setUpdatedAt(now);
    article.setUpdatedBy(authorizedUser.toString());

    // Reuse existing CustomFieldValue rows (keyed by their CustomField) so
    // values are updated in place instead of recreated on every save.
    Map<CustomField, CustomFieldValue> valueMap = new LinkedHashMap<>();
    for (CustomFieldValue value : article.getCustomFieldValues()) {
        valueMap.put(value.getCustomField(), value);
    }
    article.getCustomFieldValues().clear();
    SortedSet<CustomFieldValue> fieldValues = null;
    if (!CollectionUtils.isEmpty(request.getCustomFieldValues())) {
        fieldValues = new TreeSet<>();
        for (CustomFieldValueEditForm valueForm : request.getCustomFieldValues()) {
            CustomField customField = entityManager.getReference(CustomField.class, valueForm.getCustomFieldId());
            CustomFieldValue value = valueMap.get(customField);
            if (value == null) {
                value = new CustomFieldValue();
            }
            value.setCustomField(customField);
            value.setPost(article);
            if (valueForm.getFieldType().equals(CustomField.FieldType.CHECKBOX)) {
                // Checkbox selections are stored comma-joined in the text value.
                if (!ArrayUtils.isEmpty(valueForm.getTextValues())) {
                    value.setTextValue(String.join(",", valueForm.getTextValues()));
                } else {
                    value.setTextValue(null);
                }
            } else {
                value.setTextValue(valueForm.getTextValue());
            }
            value.setStringValue(valueForm.getStringValue());
            value.setNumberValue(valueForm.getNumberValue());
            value.setDateValue(valueForm.getDateValue());
            value.setDatetimeValue(valueForm.getDatetimeValue());
            if (!value.isEmpty()) {
                fieldValues.add(value);
            }
        }
    }
    article.setCustomFieldValues(fieldValues);

    return articleRepository.save(article);
}
/**
 * Deletes a single article identified by id and language.
 *
 * @param request the delete request (id + language)
 * @param result binding result — currently unused in this method; kept for the
 *               caller contract (bulkDeleteArticle passes one in) — TODO confirm
 * @return the deleted article entity
 * @throws BindException declared but not thrown here
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public Article deleteArticle(ArticleDeleteRequest request, BindingResult result) throws BindException {
    postRepository.lock(request.getId());
    Article article = articleRepository.findOneByIdAndLanguage(request.getId(), request.getLanguage());
    articleRepository.delete(article);
    return article;
}
/**
 * Publishes several articles at once. Each article is locked, validated
 * (code, title and body must be non-blank), dated, published via
 * {@code publishArticle}, then stamped SCHEDULED or PUBLISHED by date.
 *
 * @param request ids, language and an optional explicit publish date
 * @param authorizedUser the acting user, recorded in audit fields
 * @return the articles actually processed (skipped ones are excluded)
 * @throws NotNullException if code, title or body is blank
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public List<Article> bulkPublishArticle(ArticleBulkPublishRequest request, AuthorizedUser authorizedUser) {
    List<Article> articles = new ArrayList<>();
    for (long id : request.getIds()) {
        postRepository.lock(id);
        Article article = articleRepository.findOneByIdAndLanguage(id, request.getLanguage());
        // Already-published articles are skipped unless an explicit date was given.
        if (article.getStatus() != Post.Status.DRAFT && request.getDate() == null) {
            continue;
        }
        if (!StringUtils.hasText(article.getCode())) {
            throw new NotNullException();
        }
        if (!StringUtils.hasText(article.getTitle())) {
            throw new NotNullException();
        }
        if (!StringUtils.hasText(article.getBody())) {
            throw new NotNullException();
        }
        LocalDateTime now = LocalDateTime.now();
        // Precedence: explicit request date > article's own date > now.
        LocalDateTime date = article.getDate();
        if (request.getDate() != null) {
            date = request.getDate();
        }
        if (date == null) {
            date = now;
        }
        article.setDate(date);
        article.setUpdatedAt(now);
        article.setUpdatedBy(authorizedUser.toString());
        // publishArticle is defined elsewhere in this class (not in this view).
        article = publishArticle(article);
        if (article.getDate().isAfter(now)) {
            article.setStatus(Post.Status.SCHEDULED);
        } else {
            article.setStatus(Post.Status.PUBLISHED);
        }
        article = articleRepository.saveAndFlush(article);
        articles.add(article);
    }
    return articles;
}
/**
 * Reverts several articles to draft state at once; articles that are
 * already drafts are left untouched.
 *
 * @param request ids and language of the articles to unpublish
 * @param authorizedUser the acting user, recorded in audit fields
 * @return the articles that were actually unpublished
 */
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public List<Article> bulkUnpublishArticle(ArticleBulkUnpublishRequest request, AuthorizedUser authorizedUser) {
    List<Article> unpublished = new ArrayList<>();
    for (long id : request.getIds()) {
        postRepository.lock(id);
        Article target = articleRepository.findOneByIdAndLanguage(id, request.getLanguage());
        if (target.getStatus() != Post.Status.DRAFT) {
            target.setUpdatedAt(LocalDateTime.now());
            target.setUpdatedBy(authorizedUser.toString());
            unpublished.add(unpublishArticle(target));
        }
    }
    return unpublished;
}
/**
 * Deletes several articles, each inside its own REQUIRES_NEW transaction so
 * one failure does not roll back the others. The surrounding method runs
 * without a transaction (NOT_SUPPORTED) to make that possible.
 *
 * Errors from individual deletes are collected into {@code result} instead
 * of aborting the batch.
 *
 * @param bulkDeleteRequest ids and language of the articles to delete
 * @param result receives binding errors from failed deletions
 * @return the successfully deleted articles
 */
@Transactional(propagation=Propagation.NOT_SUPPORTED)
@CacheEvict(value = WallRideCacheConfiguration.ARTICLE_CACHE, allEntries = true)
public List<Article> bulkDeleteArticle(ArticleBulkDeleteRequest bulkDeleteRequest, BindingResult result) {
    List<Article> articles = new ArrayList<>();
    for (long id : bulkDeleteRequest.getIds()) {
        final ArticleDeleteRequest deleteRequest = new ArticleDeleteRequest.Builder()
                .id(id)
                .language(bulkDeleteRequest.getLanguage())
                .build();
        final BeanPropertyBindingResult r = new BeanPropertyBindingResult(deleteRequest, "request");
        r.setMessageCodesResolver(messageCodesResolver);
        // New transaction per article so each delete commits/rolls back independently.
        TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager);
        transactionTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
        Article article = null;
        try {
            article = transactionTemplate.execute(new TransactionCallback<Article>() {
                public Article doInTransaction(TransactionStatus status) {
                    try {
                        return deleteArticle(deleteRequest, r);
                    }
                    catch (BindException e) {
                        // Checked exception cannot cross the callback boundary; wrap it.
                        throw new RuntimeException(e);
                    }
                }
            });
            articles.add(article);
        }
        catch (Exception e) {
            // Record the failure and continue with the remaining ids.
            logger.debug("Errors: {}", r);
            result.addAllErrors(r);
        }
    }
    return articles;
}
/**
 * Returns only the ids of articles matching the search request.
 *
 * @param request the search criteria
 * @return matching article ids
 */
public List<Long> getArticleIds(ArticleSearchRequest request) {
    return articleRepository.searchForId(request);
}
/**
 * Searches articles with a default pagination of the first page of 10 items.
 *
 * @param request the search criteria
 * @return the first page (size 10) of matching articles
 */
public Page<Article> getArticles(ArticleSearchRequest request) {
    Pageable pageable = new PageRequest(0, 10);
    return getArticles(request, pageable);
}
/**
 * Searches articles with explicit pagination; results are cached in the
 * article cache (evicted by every write operation in this service).
 *
 * @param request the search criteria
 * @param pageable page number/size
 * @return the requested page of matching articles
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public Page<Article> getArticles(ArticleSearchRequest request, Pageable pageable) {
    return articleRepository.search(request, pageable);
}
/**
 * Loads the given articles and returns them in the order of {@code ids}.
 * Ids with no matching article are silently dropped; a duplicated id yields
 * the same article twice, matching the original behavior.
 *
 * Improvement: the original scanned the result set once per id (O(n*m));
 * an id-keyed map makes the reordering O(n + m).
 *
 * @param ids article ids, in the desired output order
 * @return matching articles ordered like {@code ids}
 */
public List<Article> getArticles(Collection<Long> ids) {
    Map<Long, Article> byId = new LinkedHashMap<>();
    for (Article article : articleRepository.findAllByIdIn(ids)) {
        byId.put(article.getId(), article);
    }
    List<Article> articles = new ArrayList<>();
    for (long id : ids) {
        Article article = byId.get(id);
        if (article != null) {
            articles.add(article);
        }
    }
    return articles;
}
/**
 * Convenience overload: up to 10 articles of the given category/status.
 *
 * @param language the article language
 * @param code the category code
 * @param status the post status filter
 * @return at most 10 matching articles, sorted by Article's natural order
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public SortedSet<Article> getArticlesByCategoryCode(String language, String code, Post.Status status) {
    return getArticlesByCategoryCode(language, code, status, 10);
}
/**
 * Returns up to {@code size} articles of the given category and status,
 * sorted by Article's natural ordering. Results are cached.
 *
 * @param language the article language
 * @param code the category code
 * @param status the post status filter
 * @param size maximum number of articles
 * @return matching articles as a sorted set
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public SortedSet<Article> getArticlesByCategoryCode(String language, String code, Post.Status status, int size) {
    ArticleSearchRequest condition = new ArticleSearchRequest()
            .withLanguage(language)
            .withCategoryCodes(code)
            .withStatus(status);
    Page<Article> firstPage = articleRepository.search(condition, new PageRequest(0, size));
    return new TreeSet<>(firstPage.getContent());
}
/**
 * Returns up to {@code size} of the latest articles for the language/status,
 * sorted by Article's natural ordering. Results are cached.
 *
 * @param language the article language
 * @param status the post status filter
 * @param size maximum number of articles
 * @return matching articles as a sorted set
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public SortedSet<Article> getLatestArticles(String language, Post.Status status, int size) {
    ArticleSearchRequest request = new ArticleSearchRequest()
            .withLanguage(language)
            .withStatus(status);
    Pageable pageable = new PageRequest(0, size);
    Page<Article> page = articleRepository.search(request, pageable);
    return new TreeSet<>(page.getContent());
}
/**
 * Looks up an article by id only (any language).
 *
 * @param id the article id
 * @return the article, or whatever the repository returns when absent
 */
public Article getArticleById(long id) {
    return articleRepository.findOneById(id);
}
/**
 * Looks up an article by id and language.
 *
 * @param id the article id
 * @param language the article language
 * @return the matching article
 */
public Article getArticleById(long id, String language) {
    return articleRepository.findOneByIdAndLanguage(id, language);
}
/**
 * Looks up an article by its public code and language. Cached.
 *
 * @param code the article code
 * @param language the article language
 * @return the matching article
 */
@Cacheable(value = WallRideCacheConfiguration.ARTICLE_CACHE)
public Article getArticleByCode(String code, String language) {
    return articleRepository.findOneByCodeAndLanguage(code, language);
}
/**
 * Returns the draft entity associated with the given article, or null-ish
 * repository result when none exists. Uses a lazy reference, so the article
 * row itself is not loaded.
 *
 * @param id the id of the published article
 * @return the draft article, if any
 */
public Article getDraftById(long id) {
    return articleRepository.findOne(ArticleSpecifications.draft(entityManager.getReference(Article.class, id)));
}
/**
 * Counts all articles in the given language.
 *
 * @param language the article language
 * @return the article count
 */
public long countArticles(String language) {
    return articleRepository.count(language);
}
/**
 * Counts articles with the given status in the given language.
 *
 * @param status the post status filter
 * @param language the article language
 * @return the article count
 */
public long countArticlesByStatus(Post.Status status, String language) {
    return articleRepository.countByStatus(status, language);
}
/**
 * Returns article counts grouped by author id. Each repository row is
 * expected to carry "userId" and "count" keys.
 *
 * @param status the post status filter
 * @param language the article language
 * @return map of author id to article count
 */
public Map<Long, Long> countArticlesByAuthorIdGrouped(Post.Status status, String language) {
    Map<Long, Long> countsByAuthor = new HashMap<>();
    for (Map<String, Object> row : articleRepository.countByAuthorIdGrouped(status, language)) {
        countsByAuthor.put((Long) row.get("userId"), (Long) row.get("count"));
    }
    return countsByAuthor;
}
/**
 * Returns article counts grouped by category id. Each repository row is
 * expected to carry "categoryId" and "count" keys.
 *
 * @param status the post status filter
 * @param language the article language
 * @return map of category id to article count
 */
public Map<Long, Long> countArticlesByCategoryIdGrouped(Post.Status status, String language) {
    Map<Long, Long> countsByCategory = new HashMap<>();
    for (Map<String, Object> row : articleRepository.countByCategoryIdGrouped(status, language)) {
        countsByCategory.put((Long) row.get("categoryId"), (Long) row.get("count"));
    }
    return countsByCategory;
}
/**
 * Returns article counts grouped by tag id. Each repository row is
 * expected to carry "tagId" and "count" keys.
 *
 * @param status the post status filter
 * @param language the article language
 * @return map of tag id to article count
 */
public Map<Long, Long> countArticlesByTagIdGrouped(Post.Status status, String language) {
    Map<Long, Long> countsByTag = new HashMap<>();
    for (Map<String, Object> row : articleRepository.countByTagIdGrouped(status, language)) {
        countsByTag.put((Long) row.get("tagId"), (Long) row.get("count"));
    }
    return countsByTag;
}
}
| Fix save customFieldValue
| wallride-core/src/main/java/org/wallride/service/ArticleService.java | Fix save customFieldValue |
|
Java | apache-2.0 | acdbbb0508e118eab27faf09881293cccc84bd47 | 0 | LightSun/android-drag-FlowLayout | package com.heaven7.android.dragflowlayout;
import android.annotation.TargetApi;
import android.content.Context;
import android.support.annotation.IntDef;
import android.support.annotation.NonNull;
import android.support.v4.view.GestureDetectorCompat;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.HapticFeedbackConstants;
import android.view.MotionEvent;
import android.view.SoundEffectConstants;
import android.view.View;
import android.view.accessibility.AccessibilityEvent;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
/**
* the drag flow layout
* Created by heaven7 on 2016/8/1.
*/
public class DragFlowLayout extends FlowLayout {
private static final String TAG = "DragGridLayout";
private static final Debugger sDebugger = new Debugger(TAG);
private static final int INVALID_INDXE = -1;
/** indicate currrent is idle, and can't draggable */
public static final int DRAG_STATE_IDLE = 1;
/** indicate currrent is dragging */
public static final int DRAG_STATE_DRAGGING = 2;
/** indicate currrent is not dragging but can drag */
public static final int DRAG_STATE_DRAGGABLE = 3;
@Retention(RetentionPolicy.SOURCE)
@IntDef({DRAG_STATE_IDLE, DRAG_STATE_DRAGGING , DRAG_STATE_DRAGGABLE })
public @interface DragState{
}
private static final Comparator<Item> sComparator = new Comparator<Item>() {
@Override
public int compare(Item lhs, Item rhs) {
return compareImpl(lhs.index, rhs.index);
}
public int compareImpl(int lhs, int rhs) {
return lhs < rhs ? -1 : (lhs == rhs ? 0 : 1);
}
};
private final InternalItemHelper mItemManager = new InternalItemHelper();
private AlertWindowHelper mWindomHelper;
private @DragState int mDragState = DRAG_STATE_IDLE;
private DragItemManager mDragManager;
private DefaultDragCallback mCallback;
private OnItemClickListener mClickListener;
/** indicate whether dispatch the event to the alert window or not. */
private boolean mDispatchToAlertWindow;
private final int[] mTempLocation = new int[2];
private CheckForDrag mCheckForDrag ;
private CheckForRelease mCheckForRelease;
private boolean mReDrag ;
private volatile boolean mCancelled ;
private GestureDetectorCompat mGestureDetector;
private volatile View mTouchChild;
private final AlertWindowHelper.ICallback mWindowCallback = new AlertWindowHelper.ICallback() {
@Override
public void onCancel(View view, MotionEvent event) {
sDebugger.i("onCancel","------------->");
releaseDragInternal();
}
@Override
public boolean onMove(View view, MotionEvent event) {
//infoWhenDebug("onMove","------------->");
return processOverlap(view);
}
};
/** Listener for click events on the direct children of the DragFlowLayout. */
public interface OnItemClickListener {
    /**
     * called when a click event occurs; perform the click if you need to, and
     * return true if you performed it (a sound effect is played by the caller).
     * @param dragFlowLayout the DragFlowLayout
     * @param child the direct child of DragFlowLayout.
     * @param event the event that triggered this click
     * @param dragState indicate current drag state , see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
     * @return true, if you performed the click event
     */
    boolean performClick(DragFlowLayout dragFlowLayout, View child,
                         MotionEvent event, int dragState);
}
/**
 * the callback of DragFlowLayout: adapts child views to drag states and
 * supplies the floating window view shown while dragging.
 */
static abstract class Callback {
    /**
     * set the child data by target drag state.
     * @param child the direct child of DragFlowLayout
     * @param dragState the drag state of current, see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
     */
    public abstract void setChildByDragState(View child, int dragState);
    /**
     * create a child view from the target child view.
     * @param child the direct child of DragFlowLayout
     * @param index the index of this child view, or -1 for unknown index.
     * @param dragState current drag state. see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
     * @return the new child view
     */
    @NonNull
    public abstract View createChildView(View child, int index, int dragState);
    /**
     * set the window view by target child view.
     * @param windowView the window view, often like the child view.
     * @param child the direct child view of DragFlowLayout
     * @param dragState current drag state. see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
     */
    public abstract void setWindowViewByChild(View windowView, View child, int dragState);
    /**
     * create window view by target child view; by default delegates to
     * {@link #createChildView} with an unknown (-1) index.
     * @param child the direct child view of DragFlowLayout
     * @param dragState current drag state. see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
     * @return a window view that will attach to application.
     */
    public View createWindowView(View child, int dragState){
        return createChildView(child, -1, dragState);
    }
    /**
     * is the child draggable, default is true.
     * @param child the direct child of DragFlowLayout
     * @return true if the child is draggable
     */
    public boolean isChildDraggable(View child) {
        return true;
    }
}
/** Programmatic constructor; delegates to the (context, attrs) constructor. */
public DragFlowLayout(Context context) {
    this(context,null);
}
/** XML-inflation constructor. */
public DragFlowLayout(Context context, AttributeSet attrs) {
    super(context, attrs);
    init(context,attrs);
}
/** XML-inflation constructor with a default style attribute. */
public DragFlowLayout(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    init(context,attrs);
}
/** API 21+ constructor with a default style resource. */
@TargetApi(21)
public DragFlowLayout(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
    super(context, attrs, defStyleAttr, defStyleRes);
    init(context, attrs);
}
// Shared constructor body: creates the floating-window helper and the
// gesture detector that drives click/long-press/drag behavior.
private void init(Context context, AttributeSet attrs) {
    mWindomHelper = new AlertWindowHelper(context);
    mGestureDetector = new GestureDetectorCompat(context, new GestureListenerImpl());
}
/** get the drag state, one of {@link #DRAG_STATE_IDLE}, {@link #DRAG_STATE_DRAGGING}, {@link #DRAG_STATE_DRAGGABLE} */
public @DragState int getDragState(){
    return mDragState;
}
/***
 * set the item click listener used by {@link GestureListenerImpl#onSingleTapUp}
 * @param l the item listener
 */
public void setOnItemClickListener(OnItemClickListener l) {
    this.mClickListener = l;
}
/**
 * set the adapter; this also installs the internal {@link Callback} that every
 * drag operation requires (see {@code checkCallback()}).
 * @param adapter the non-null drag adapter
 * @param <T> the item data type
 * @throws NullPointerException if adapter is null
 */
public <T> void setDragAdapter(DragAdapter<T> adapter){
    if(adapter == null){
        // FIX: include a message so the stack trace identifies the bad argument.
        throw new NullPointerException("adapter == null");
    }
    this.mCallback = new DefaultDragCallback<T>(adapter);
}
/**
 * get the {@link DragItemManager} for manage the item of DragFlowLayout. eg 'CRUD'.
 * Lazily created on first call; not thread-safe (main-thread use assumed — confirm).
 * @return the DragItemManager.
 */
public DragItemManager getDragItemManager(){
    if(mDragManager == null){
        mDragManager = new DragItemManager();
    }
    return mDragManager;
}
/**
 * set the drag state and push it to every direct child via the callback.
 * @param dragState the drag state of current, see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc
 * @param showChildren if true, force all direct children visible (the dragged
 *                     child is hidden while its floating copy is shown)
 */
private void setDragState(@DragState int dragState, boolean showChildren){
    checkCallback();
    this.mDragState = dragState;
    final Callback mCallback = this.mCallback;
    View view;
    for(int i=0, size = getChildCount(); i < size ;i++){
        view = getChildAt(i);
        if(showChildren && view.getVisibility() != View.VISIBLE){
            view.setVisibility(View.VISIBLE);
        }
        mCallback.setChildByDragState(view, dragState);
    }
}
/** finish the drag state: back to IDLE and make all children visible again */
public void finishDrag(){
    setDragState(DragFlowLayout.DRAG_STATE_IDLE, true);
}
// Schedules a delayed check that releases the drag if the gesture was
// cancelled (see CheckForRelease and the mCancelled flag).
private void checkForRelease(){
    if(mCheckForRelease == null){
        mCheckForRelease = new CheckForRelease();
    }
    postDelayed(mCheckForRelease, 100);
}
// Schedules the actual drag start after `delay` ms; optionally also schedules
// the release check so a cancelled gesture does not leave a stuck drag.
private void checkForDrag(long delay, boolean checkRelease){
    if(mCheckForDrag == null) {
        mCheckForDrag = new CheckForDrag();
    }
    postDelayed(mCheckForDrag, delay);
    if(checkRelease){
        checkForRelease();
    }
}
// Starts the drag: hides the real child, routes subsequent touch events to
// the floating alert window, and shows a window copy at the child's position.
private void beginDragImpl(View childView){
    checkCallback();
    //impl
    childView.setVisibility(View.INVISIBLE);
    mDispatchToAlertWindow = true;
    mItemManager.findDragItem(childView);
    childView.getLocationInWindow(mTempLocation);
    mWindomHelper.showView(mCallback.createWindowView(childView, mDragState), mTempLocation[0],
            mTempLocation[1], true, mWindowCallback);
    mDragState = DRAG_STATE_DRAGGING;
}
/**
 * Handles overlap between the floating drag window and the children of this
 * layout: when the window covers another draggable child's center, the drag
 * item is re-inserted at that child's index.
 * @param view the floating window view being dragged
 * @return true if an overlapping child was found and the drag item was moved.
 */
private boolean processOverlap(View view) {
    final List<Item> mItems = mItemManager.mItems;
    final Callback mCallback = this.mCallback;
    Item item = null;
    int centerX, centerY;
    boolean found = false;
    // Find the first child whose center lies under the dragged window,
    // excluding the drag item itself and non-draggable children.
    for(int i=0, size = mItems.size() ; i < size ; i++){
        item = mItems.get(i);
        item.view.getLocationOnScreen(mTempLocation);
        centerX = mTempLocation[0] + item.view.getWidth()/2;
        centerY = mTempLocation[1] + item.view.getHeight()/2;
        if(isViewUnderInScreen(view, centerX, centerY, false) && item != mItemManager.mDragItem
                && mCallback.isChildDraggable(item.view) ){
            sDebugger.i("onMove_isViewUnderInScreen","index = " + item.index );
            /**
             * When dragged over a target's center: remove the previous
             * placeholder before inserting a new one at the target index.
             */
            found = true;
            break;
        }
    }
    if(found ){
        //the really index to add
        final int index = item.index;
        Item dragItem = mItemManager.mDragItem;
        // remove old
        removeView(mItemManager.mDragItem.view);
        //add hold
        View hold = mCallback.createChildView(dragItem.view, dragItem.index, mDragState);
        hold.setVisibility(View.INVISIBLE); // keep hidden: the alert window is the visible copy
        addView(hold, index);
        //reset drag item and alert view
        mItemManager.findDragItem(hold);
        mCallback.setWindowViewByChild(mWindomHelper.getView(), mItemManager.mDragItem.view, mDragState);
        sDebugger.i("onMove","hold index = " + mItemManager.mDragItem.index);
    }
    return found;
}
// Ends the drag: re-shows the hidden child, dismisses the floating window,
// stops routing events to it, and drops back to DRAGGABLE state.
private void releaseDragInternal(){
    checkCallback();
    if(mItemManager.mDragItem!=null) {
        mItemManager.mDragItem.view.setVisibility(View.VISIBLE);
        mCallback.setChildByDragState(mItemManager.mDragItem.view, mDragState);
    }
    mWindomHelper.releaseView();
    mDispatchToAlertWindow = false;
    mTouchChild = null;
    mDragState = DRAG_STATE_DRAGGABLE;
}
// Guards every drag operation: mCallback is only assigned in setDragAdapter().
private void checkCallback() {
    if(mCallback == null){
        // FIX: the old message referenced a nonexistent #setCallback method;
        // the public API that installs the callback is setDragAdapter().
        throw new IllegalStateException("you must call #setDragAdapter first.");
    }
}
/**
 * Find the topmost child under the given point within the parent view's coordinate system.
 * Iterates from the last child backwards since later children draw on top.
 *
 * @param x X position to test in the parent's coordinate system
 * @param y Y position to test in the parent's coordinate system
 * @return The topmost child view under (x, y) or null if none found.
 */
public View findTopChildUnder(int x, int y) {
    checkCallback();
    final int childCount = getChildCount();
    for (int i = childCount - 1; i >= 0; i--) {
        final View child = getChildAt(i);
        if (ViewUtils.isViewIntersect(child,x, y))
            return child;
    }
    return null;
}
// Hit test in *screen* coordinates: true if (x, y) falls inside the view's
// on-screen bounds. `log` enables debug output of the computed bounds.
private boolean isViewUnderInScreen(View view, int x, int y, boolean log) {
    if (view == null) {
        return false;
    }
    int w = view.getWidth();
    int h = view.getHeight();
    view.getLocationOnScreen(mTempLocation);
    int viewX = mTempLocation[0];
    int viewY = mTempLocation[1];
    if(log) {
        sDebugger.i("isViewUnderInScreen", String.format(Locale.getDefault(),
                "viewX = %d ,viewY = %d ,width = %d ,height = %d", viewX, viewY, w, h));
    }
    return x >= viewX && x < viewX + w
            && y >= viewY && y < viewY + h;
}
// When the last child is removed there is nothing left to drag:
// release any active drag and fall back to IDLE.
private void checkIfAutoReleaseDrag() {
    if(getChildCount()==0){
        releaseDragInternal();
        mDragState = DRAG_STATE_IDLE;
    }
}
//=================================== override method ===================================== //
// Click handling is owned by the internal gesture detector; plain View
// click listeners would conflict with drag detection.
@Override
public void setOnClickListener(View.OnClickListener l) {
    throw new UnsupportedOperationException("you should use DragFlowLayout.OnItemClickListener instead..");
}
// Keeps the internal item bookkeeping in sync and styles the new child
// for the current drag state.
@Override
public void addView(View child, int index, LayoutParams params) {
    super.addView(child, index, params);
    checkCallback();
    mItemManager.onAddView(child, index, params) ;
    mCallback.setChildByDragState(child, mDragState);
}
// Syncs item bookkeeping on removal and auto-releases the drag when empty.
@Override
public void removeViewAt(int index) {
    super.removeViewAt(index);
    mItemManager.onRemoveViewAt(index);
    checkIfAutoReleaseDrag();
}
// Syncs item bookkeeping on removal and auto-releases the drag when empty.
@Override
public void removeView(View view) {
    super.removeView(view);
    mItemManager.onRemoveView(view);
    checkIfAutoReleaseDrag();
}
// Clears all children and item bookkeeping; always auto-releases the drag.
@Override
public void removeAllViews() {
    super.removeAllViews();
    mItemManager.onRemoveAllViews();
    checkIfAutoReleaseDrag();
}
// Cancel pending delayed drag/release runnables so they cannot fire after
// the view is detached.
@Override
protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    removeCallbacks(mCheckForDrag);
    removeCallbacks(mCheckForRelease);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
    //infoWhenDebug("onTouchEvent", event.toString());
    //infoWhenDebug("onTouchEvent", "------> mDispatchToAlertWindow = " + mDispatchToAlertWindow +" ,mIsDragState = " + mIsDragState);
    mCancelled = event.getAction() == MotionEvent.ACTION_CANCEL || event.getAction() == MotionEvent.ACTION_UP;
    final boolean handled = mGestureDetector.onTouchEvent(event);
    // Avoid touch-event conflicts when this DragFlowLayout is nested in a
    // ScrollView: while dragging, keep the parent from intercepting.
    if(getParent()!=null){
        getParent().requestDisallowInterceptTouchEvent(mDispatchToAlertWindow);
    }
    // While dragging, forward events to the floating alert window too.
    if(mDispatchToAlertWindow){
        mWindomHelper.getView().dispatchTouchEvent(event);
        if(mCancelled){
            mDispatchToAlertWindow = false;
        }
    }
    return handled;
}
//=================================== end -- override method ===================================== //
//================================================================================
// Delayed runnable that starts the drag on the last-touched child (if any).
private class CheckForDrag implements Runnable{
    @Override
    public void run() {
        if(mTouchChild != null){
            beginDragImpl(mTouchChild);
        }
    }
}
// Delayed runnable that releases the drag if the gesture was cancelled
// (ACTION_UP/ACTION_CANCEL seen before this runs).
private class CheckForRelease implements Runnable{
    @Override
    public void run() {
        if(mCancelled) {
            releaseDragInternal();
        }
    }
}
// Pairs a direct child view with its logical index in the layout.
private static class Item{
    // logical position of the view among the layout's children
    int index;
    // the direct child view this entry tracks
    View view;
    @Override
    public String toString() {
        return "Item{" +
                "index=" + index +
                '}';
    }
}
/**
 * Bookkeeping mirror of the layout's children: keeps an index-sorted list of
 * {@link Item} entries in sync with add/remove operations and tracks which
 * item is currently being dragged.
 */
private static class InternalItemHelper {
    // items sorted ascending by index (see sComparator)
    final List<Item> mItems = new ArrayList<>();
    /** the item currently being dragged, or null */
    Item mDragItem = null;
    // Shift indexes >= insertion point up by one, then record the new child.
    public void onAddView(View child, int index, LayoutParams params) {
        index = index != -1 ? index : mItems.size();
        sDebugger.d("onAddView", "index = " + index );
        Item item;
        for(int i=0,size = mItems.size() ;i<size ;i++){
            item = mItems.get(i);
            if(item.index >= index){
                item.index ++;
            }
        }
        //add
        item = new Item();
        item.index = index;
        item.view = child;
        mItems.add(item);
        Collections.sort(mItems, sComparator);
        //debugWhenDebug("onAddView",mItems.toString());
    }
    // Shift indexes above the removed position down, then drop the entry.
    // List position equals index because indexes are kept contiguous/sorted.
    public void onRemoveViewAt(int index) {
        sDebugger.d("onRemoveViewAt", "index = " + index );
        Item item;
        for(int i=0,size = mItems.size() ;i<size ;i++){
            item = mItems.get(i);
            if(item.index > index){
                item.index --;
            }
        }
        mItems.remove(index);
        Collections.sort(mItems, sComparator);
        // debugWhenDebug("onAddView",mItems.toString());
    }
    // Locate the entry for the view, then behave like onRemoveViewAt.
    public void onRemoveView(View view) {
        Item item;
        int targetIndex = INVALID_INDXE;
        for(int i=0, size = mItems.size() ;i<size ;i++){
            item = mItems.get(i);
            if(item.view == view){
                targetIndex = item.index ;
                break;
            }
        }
        sDebugger.d("onRemoveView", "targetIndex = " + targetIndex );
        if(targetIndex == -1){
            throw new IllegalStateException("caused by targetIndex == -1");
        }
        // -- index if need
        for(int i=0,size = mItems.size() ;i<size ;i++){
            item = mItems.get(i);
            if(item.index > targetIndex){
                item.index --;
            }
        }
        mItems.remove(targetIndex);
        Collections.sort(mItems, sComparator);
        //debugWhenDebug("onAddView",mItems.toString());
    }
    public void onRemoveAllViews() {
        mItems.clear();
    }
    // Mark the entry wrapping `touchView` as the current drag item.
    public void findDragItem(View touchView) {
        Item item;
        for(int i=0 ,size = mItems.size() ;i<size ;i++){
            item = mItems.get(i);
            if(item.view == touchView){
                mDragItem = item;
                break;
            }
        }
    }
}
// Translates raw touch gestures into drag/click semantics:
// onDown picks the touched child, onSingleTapUp dispatches clicks,
// onLongPress enters drag mode.
private class GestureListenerImpl extends GestureDetector.SimpleOnGestureListener{
    @Override
    public boolean onDown(MotionEvent e) {
        mTouchChild = findTopChildUnder((int) e.getX(), (int) e.getY());
        sDebugger.i("mGestureDetector_onDown","----------------- > after find : mTouchChild = "
                + mTouchChild);
        mReDrag = false;
        // Already in a draggable/dragging state: schedule a quick re-drag of
        // the newly touched child.
        if(mTouchChild!=null && !mDispatchToAlertWindow && mDragState != DRAG_STATE_IDLE){
            if(mCallback.isChildDraggable(mTouchChild)) {
                mReDrag = true;
                checkForDrag(130, false);
            }
        }
        return mTouchChild != null;
    }
    @Override
    public boolean onSingleTapUp(MotionEvent e) {
        sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_CLICKED);
        if(mClickListener == null){
            return false;
        }
        // Cancel the pending drag first, otherwise the tap looks odd
        // (the view appears to shift slightly off its position).
        removeCallbacks(mCheckForDrag);
        boolean performed = mClickListener.performClick(DragFlowLayout.this, mTouchChild, e , mDragState);
        sDebugger.i("mGestureDetector_onSingleTapUp","----------------- > performed = " + performed);
        if(performed){
            playSoundEffect(SoundEffectConstants.CLICK);
        }else if (mReDrag) {
            checkForDrag(0, true);
        }
        return performed;
    }
    @Override
    public void onLongPress(MotionEvent e) {
        sDebugger.i("mGestureDetector_onLongPress","----------------- >");
        sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_LONG_CLICKED);
        performHapticFeedback(HapticFeedbackConstants.LONG_PRESS);
        if(mDragState!= DRAG_STATE_DRAGGING && mTouchChild!=null && mCallback.isChildDraggable(mTouchChild)) {
            setDragState(DRAG_STATE_DRAGGING, false);
            checkForDrag(0, false);
        }
    }
}
/**
 * the drag item manager: a CRUD facade over the direct children of the
 * DragFlowLayout, mapping between item data (via the DragAdapter) and views.
 */
public class DragItemManager {

    DragAdapter getDragAdapter() {
        return mCallback.getDragAdapter();
    }
    /** get the item count
     * @return the item count */
    public int getItemCount(){
        return getChildCount();
    }
    /**
     * get all items
     * @param <T> the t
     * @return the items that not removed
     */
    public <T> List<T> getItems(){
        final DragAdapter adapter = getDragAdapter();
        List<T> list = new ArrayList<>();
        T t;
        for (int i=0 ,size = getChildCount(); i < size; i++) {
            t = (T) adapter.getData(getChildAt(i));
            list.add(t);
        }
        return list;
    }
    /**
     * add order items to the last.
     * @param datas the datas
     */
    public void addItems(Object...datas){
        // FIX: previously addItem(i, datas[i]) inserted at positions 0..n-1,
        // which *prepends* when the layout already has children, contradicting
        // this method's "add to the last" contract. Index -1 appends.
        for(int i=0,size = datas.length ;i<size ; i++){
            addItem(-1, datas[i]);
        }
    }
    /**
     * add order items to the last.
     * @param list the list data
     * @param <T> the t
     */
    public <T> void addItems(List<T> list){
        // FIX: same append bug as addItems(Object...); -1 appends to the end.
        for(int i=0,size = list.size() ;i<size ; i++){
            addItem(-1, list.get(i));
        }
    }
    /**
     * add items from target startIndex and data.
     * @param startIndex the start index to add
     * @param data the data.
     */
    public void addItems(int startIndex, Object...data){
        if(startIndex > getItemCount()){
            throw new IllegalArgumentException();
        }
        for(int i=0,size = data.length ;i<size ; i++){
            addItem(startIndex + i, data[i]);
        }
    }
    /**
     * add items from target startIndex and data.
     * @param startIndex the start index to add
     * @param data the data.
     * @param <T> the t
     */
    public <T> void addItems(int startIndex, List<T> data){
        if(startIndex > getItemCount()){
            throw new IllegalArgumentException();
        }
        for(int i=0,size = data.size() ;i<size ; i++){
            addItem(startIndex + i, data.get(i));
        }
    }
    /**
     * add a item to the DragFlowLayout
     * @param index the index , can be -1 if add last.
     * @param data the data
     */
    public void addItem(int index, Object data) {
        if (index < -1) {
            throw new IllegalArgumentException("index can't < -1.");
        }
        final DragAdapter mAdapter = getDragAdapter();
        final View view = View.inflate(getContext(), mAdapter.getItemLayoutId(), null);
        mAdapter.onBindData(view, getDragState(), data);
        addView(view, index);
    }
    /**
     * remove item by index
     * @param index the index , you should be careful of the drag state.
     */
    public void removeItem(int index) {
        removeViewAt(index);
    }
    /**
     * remove item by data; removes the last child whose bound data equals it.
     * @param data the data
     */
    public void removeItem(Object data) {
        final DragAdapter adapter = getDragAdapter();
        Object rawData;
        int index = INVALID_INDXE;
        for (int size = getChildCount(), i = size - 1; i >= 0; i--) {
            rawData = adapter.getData(getChildAt(i));
            if (rawData.equals(data)) {
                index = i;
                break;
            }
        }
        if (index >= 0) {
            removeViewAt(index);
        }
    }
    /**
     * update item by index and new data.
     * @param index the index
     * @param data the data
     */
    public void updateItem(int index, Object data) {
        final View view = getChildAt(index);
        getDragAdapter().onBindData(view, getDragState(), data);
    }
    /**
     * update item by previous data and new data; rebinds the last child whose
     * bound data equals {@code preData} (or, if none matches, the first child —
     * preserved from the original loop's fall-through behavior).
     * @param preData the previous data
     * @param newData the new data
     */
    public void updateItem(Object preData, Object newData) {
        final DragAdapter adapter = getDragAdapter();
        Object rawData;
        View view = null;
        for (int size = getChildCount(), i = size - 1; i >= 0; i--) {
            view = getChildAt(i);
            rawData = adapter.getData(view);
            if (rawData.equals(preData)) {
                break;
            }
        }
        if (view != null) {
            getDragAdapter().onBindData(view,getDragState(),newData);
        }
    }
}
}
| Drag-FlowLayout/dragflowlayout/src/main/java/com/heaven7/android/dragflowlayout/DragFlowLayout.java | package com.heaven7.android.dragflowlayout;
import android.annotation.TargetApi;
import android.content.Context;
import android.support.annotation.IntDef;
import android.support.annotation.NonNull;
import android.support.v4.view.GestureDetectorCompat;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.HapticFeedbackConstants;
import android.view.MotionEvent;
import android.view.SoundEffectConstants;
import android.view.View;
import android.view.accessibility.AccessibilityEvent;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
/**
* the drag flow layout
* Created by heaven7 on 2016/8/1.
*/
public class DragFlowLayout extends FlowLayout {
private static final String TAG = "DragGridLayout";
private static final Debugger sDebugger = new Debugger(TAG);
private static final int INVALID_INDXE = -1;
/** indicate currrent is idle, and can't draggable */
public static final int DRAG_STATE_IDLE = 1;
/** indicate currrent is dragging */
public static final int DRAG_STATE_DRAGGING = 2;
/** indicate currrent is not dragging but can drag */
public static final int DRAG_STATE_DRAGGABLE = 3;
@Retention(RetentionPolicy.SOURCE)
@IntDef({DRAG_STATE_IDLE, DRAG_STATE_DRAGGING , DRAG_STATE_DRAGGABLE })
public @interface DragState{
}
private static final Comparator<Item> sComparator = new Comparator<Item>() {
@Override
public int compare(Item lhs, Item rhs) {
return compareImpl(lhs.index, rhs.index);
}
public int compareImpl(int lhs, int rhs) {
return lhs < rhs ? -1 : (lhs == rhs ? 0 : 1);
}
};
private final InternalItemHelper mItemManager = new InternalItemHelper();
private AlertWindowHelper mWindomHelper;
private @DragState int mDragState = DRAG_STATE_IDLE;
private DragItemManager mDragManager;
private DefaultDragCallback mCallback;
private OnItemClickListener mClickListener;
/** indicate whether dispatch the event to the alert window or not. */
private boolean mDispatchToAlertWindow;
private final int[] mTempLocation = new int[2];
private CheckForDrag mCheckForDrag ;
private CheckForRelease mCheckForRelease;
private boolean mReDrag ;
private volatile boolean mCancelled ;
private GestureDetectorCompat mGestureDetector;
private volatile View mTouchChild;
private final AlertWindowHelper.ICallback mWindowCallback = new AlertWindowHelper.ICallback() {
@Override
public void onCancel(View view, MotionEvent event) {
sDebugger.i("onCancel","------------->");
releaseDragInternal();
}
@Override
public boolean onMove(View view, MotionEvent event) {
//infoWhenDebug("onMove","------------->");
return processOverlap(view);
}
};
public interface OnItemClickListener {
/**
* called when a click event occurrence ,perform the click event if you need. and return true if you performed the click event.
* @param dragFlowLayout the DragFlowLayout
* @param child the direct child of DragFlowLayout.
* @param event the event of trigger this click event
* @param dragState indicate current drag state , see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
* @return true,if you performed the click event
*/
boolean performClick(DragFlowLayout dragFlowLayout, View child,
MotionEvent event, int dragState);
}
/**
* the callback of DragFlowLayout.
*/
static abstract class Callback {
/**
* set the child data by target drag state.
* @param child the direct child of DragFlowLayout
* @param dragState the drag state of current,see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
*/
public abstract void setChildByDragState(View child, int dragState);
/**
* create a child view from the target child view.
* @param child the direct child of DragFlowLayout
* @param index the index of this child view, or -1 for unknown index.
* @param dragState current drag state. see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
* @return the new child view
*/
@NonNull
public abstract View createChildView(View child, int index, int dragState);
/**
* set the window view by target child view.
* @param windowView the window view, often like the child view.
* @param child the direct child view of DragFlowLayout
* @param dragState current drag state. see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
*/
public abstract void setWindowViewByChild(View windowView, View child, int dragState);
/**
* create window view by target child view
* @param child the direct child view of DragFlowLayout
* @param dragState current drag state. see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc.
* @return a window view that will attach to application.
*/
public View createWindowView(View child, int dragState){
return createChildView(child, -1, dragState);
}
/**
* is the child draggable,default is true.
* @param child the direct child of DragFlowLayout
* @return true if the child is draggable
*/
public boolean isChildDraggable(View child) {
return true;
}
}
public DragFlowLayout(Context context) {
this(context,null);
}
public DragFlowLayout(Context context, AttributeSet attrs) {
super(context, attrs);
init(context,attrs);
}
public DragFlowLayout(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context,attrs);
}
@TargetApi(21)
public DragFlowLayout(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init(context, attrs);
}
private void init(Context context, AttributeSet attrs) {
mWindomHelper = new AlertWindowHelper(context);
mGestureDetector = new GestureDetectorCompat(context, new GestureListenerImpl());
}
/** get the drag state */
public @DragState int getDragState(){
return mDragState;
}
/***
* set the item click listenr
* @param l the item listener
*/
public void setOnItemClickListener(OnItemClickListener l) {
this.mClickListener = l;
}
/** set the adapter */
public <T> void setDragAdapter(DragAdapter<T> adapter){
if(adapter == null){
throw new NullPointerException();
}
this.mCallback = new DefaultDragCallback<T>(adapter);
}
/**
* get the {@link DragItemManager} for manage the item of DragFlowLayout. eg 'CRUD'
* @return the DragItemManager.
*/
public DragItemManager getDragItemManager(){
if(mDragManager == null){
mDragManager = new DragItemManager();
}
return mDragManager;
}
/**
* set the drag state
* @param dragState the drag state of current,see {@link DragFlowLayout#DRAG_STATE_DRAGGING} and etc
* @param showChildren if show all direct children of DragFlowLayout
*/
private void setDragState(@DragState int dragState, boolean showChildren){
checkCallback();
this.mDragState = dragState;
final Callback mCallback = this.mCallback;
View view;
for(int i=0, size = getChildCount(); i < size ;i++){
view = getChildAt(i);
if(showChildren && view.getVisibility() != View.VISIBLE){
view.setVisibility(View.VISIBLE);
}
mCallback.setChildByDragState(view, dragState);
}
}
/** tag finish the drag state */
public void finishDrag(){
setDragState(DragFlowLayout.DRAG_STATE_IDLE, true);
}
private void checkForRelease(){
if(mCheckForRelease == null){
mCheckForRelease = new CheckForRelease();
}
postDelayed(mCheckForRelease, 100);
}
private void checkForDrag(long delay, boolean checkRelease){
if(mCheckForDrag == null) {
mCheckForDrag = new CheckForDrag();
}
postDelayed(mCheckForDrag, delay);
if(checkRelease){
checkForRelease();
}
}
private void beginDragImpl(View childView){
checkCallback();
//impl
childView.setVisibility(View.INVISIBLE);
mDispatchToAlertWindow = true;
mItemManager.findDragItem(childView);
childView.getLocationInWindow(mTempLocation);
mWindomHelper.showView(mCallback.createWindowView(childView, mDragState), mTempLocation[0],
mTempLocation[1], true, mWindowCallback);
mDragState = DRAG_STATE_DRAGGING;
}
/**
* 根据指定的view,处理重叠事件
* @param view the target view
* @return true 如果处理重叠成功。
*/
private boolean processOverlap(View view) {
final List<Item> mItems = mItemManager.mItems;
final Callback mCallback = this.mCallback;
Item item = null;
int centerX, centerY;
boolean found = false;
for(int i=0, size = mItems.size() ; i < size ; i++){
item = mItems.get(i);
item.view.getLocationOnScreen(mTempLocation);
centerX = mTempLocation[0] + item.view.getWidth()/2;
centerY = mTempLocation[1] + item.view.getHeight()/2;
if(isViewUnderInScreen(view, centerX, centerY, false) && item != mItemManager.mDragItem
&& mCallback.isChildDraggable(item.view) ){
sDebugger.i("onMove_isViewUnderInScreen","index = " + item.index );
/**
* Drag到target目标的center时,判断有没有已经hold item, 有的话,先删除旧的,
*/
found = true;
break;
}
}
if(found ){
//the really index to add
final int index = item.index;
Item dragItem = mItemManager.mDragItem;
// remove old
removeView(mItemManager.mDragItem.view);
//add hold
View hold = mCallback.createChildView(dragItem.view, dragItem.index, mDragState);
hold.setVisibility(View.INVISIBLE); //隐藏
addView(hold, index);
//reset drag item and alert view
mItemManager.findDragItem(hold);
mCallback.setWindowViewByChild(mWindomHelper.getView(), mItemManager.mDragItem.view, mDragState);
sDebugger.i("onMove","hold index = " + mItemManager.mDragItem.index);
}
return found;
}
private void releaseDragInternal(){
checkCallback();
if(mItemManager.mDragItem!=null) {
mItemManager.mDragItem.view.setVisibility(View.VISIBLE);
mCallback.setChildByDragState(mItemManager.mDragItem.view, mDragState);
}
mWindomHelper.releaseView();
mDispatchToAlertWindow = false;
mTouchChild = null;
mDragState = DRAG_STATE_DRAGGABLE;
}
private void checkCallback() {
if(mCallback == null){
throw new IllegalStateException("you must call #setCallback first.");
}
}
/**
* Find the topmost child under the given point within the parent view's coordinate system.
*
* @param x X position to test in the parent's coordinate system
* @param y Y position to test in the parent's coordinate system
* @return The topmost child view under (x, y) or null if none found.
*/
public View findTopChildUnder(int x, int y) {
checkCallback();
final int childCount = getChildCount();
for (int i = childCount - 1; i >= 0; i--) {
final View child = getChildAt(i);
if (ViewUtils.isViewIntersect(child,x, y))
return child;
}
return null;
}
private boolean isViewUnderInScreen(View view, int x, int y, boolean log) {
if (view == null) {
return false;
}
int w = view.getWidth();
int h = view.getHeight();
view.getLocationOnScreen(mTempLocation);
int viewX = mTempLocation[0];
int viewY = mTempLocation[1];
if(log) {
sDebugger.i("isViewUnderInScreen", String.format(Locale.getDefault(),
"viewX = %d ,viewY = %d ,width = %d ,height = %d", viewX, viewY, w, h));
}
return x >= viewX && x < viewX + w
&& y >= viewY && y < viewY + h;
}
private void checkIfAutoReleaseDrag() {
if(getChildCount()==0){
releaseDragInternal();
mDragState = DRAG_STATE_IDLE;
}
}
//=================================== override method ===================================== //
@Override
public void setOnClickListener(View.OnClickListener l) {
throw new UnsupportedOperationException("you should use DragFlowLayout.OnItemClickListener instead..");
}
@Override
public void addView(View child, int index, LayoutParams params) {
super.addView(child, index, params);
checkCallback();
mItemManager.onAddView(child, index, params) ;
mCallback.setChildByDragState(child, mDragState);
}
@Override
public void removeViewAt(int index) {
super.removeViewAt(index);
mItemManager.onRemoveViewAt(index);
checkIfAutoReleaseDrag();
}
@Override
public void removeView(View view) {
super.removeView(view);
mItemManager.onRemoveView(view);
checkIfAutoReleaseDrag();
}
@Override
public void removeAllViews() {
super.removeAllViews();
mItemManager.onRemoveAllViews();
checkIfAutoReleaseDrag();
}
@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
removeCallbacks(mCheckForDrag);
removeCallbacks(mCheckForRelease);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
//infoWhenDebug("onTouchEvent", event.toString());
//infoWhenDebug("onTouchEvent", "------> mDispatchToAlertWindow = " + mDispatchToAlertWindow +" ,mIsDragState = " + mIsDragState);
mCancelled = event.getAction() == MotionEvent.ACTION_CANCEL || event.getAction() == MotionEvent.ACTION_UP;
final boolean handled = mGestureDetector.onTouchEvent(event);
//解决ScrollView嵌套DragFlowLayout时,引起的事件冲突
if(getParent()!=null){
getParent().requestDisallowInterceptTouchEvent(mDispatchToAlertWindow);
}
if(mDispatchToAlertWindow){
mWindomHelper.getView().dispatchTouchEvent(event);
if(mCancelled){
mDispatchToAlertWindow = false;
}
}
return handled;
}
//=================================== end -- override method ===================================== //
//================================================================================
private class CheckForDrag implements Runnable{
@Override
public void run() {
if(mTouchChild != null){
beginDragImpl(mTouchChild);
}
}
}
private class CheckForRelease implements Runnable{
@Override
public void run() {
if(mCancelled) {
releaseDragInternal();
}
}
}
private static class Item{
int index;
View view;
@Override
public String toString() {
return "Item{" +
"index=" + index +
'}';
}
}
private static class InternalItemHelper {
final List<Item> mItems = new ArrayList<>();
/** 对应的拖拽item */
Item mDragItem = null;
public void onAddView(View child, int index, LayoutParams params) {
index = index != -1 ? index : mItems.size();
sDebugger.d("onAddView", "index = " + index );
Item item;
for(int i=0,size = mItems.size() ;i<size ;i++){
item = mItems.get(i);
if(item.index >= index){
item.index ++;
}
}
//add
item = new Item();
item.index = index;
item.view = child;
mItems.add(item);
Collections.sort(mItems, sComparator);
//debugWhenDebug("onAddView",mItems.toString());
}
public void onRemoveViewAt(int index) {
sDebugger.d("onRemoveViewAt", "index = " + index );
Item item;
for(int i=0,size = mItems.size() ;i<size ;i++){
item = mItems.get(i);
if(item.index > index){
item.index --;
}
}
mItems.remove(index);
Collections.sort(mItems, sComparator);
// debugWhenDebug("onAddView",mItems.toString());
}
public void onRemoveView(View view) {
Item item;
int targetIndex = INVALID_INDXE;
for(int i=0, size = mItems.size() ;i<size ;i++){
item = mItems.get(i);
if(item.view == view){
targetIndex = item.index ;
break;
}
}
sDebugger.d("onRemoveView", "targetIndex = " + targetIndex );
if(targetIndex == -1){
throw new IllegalStateException("caused by targetIndex == -1");
}
// -- index if need
for(int i=0,size = mItems.size() ;i<size ;i++){
item = mItems.get(i);
if(item.index > targetIndex){
item.index --;
}
}
mItems.remove(targetIndex);
Collections.sort(mItems, sComparator);
//debugWhenDebug("onAddView",mItems.toString());
}
public void onRemoveAllViews() {
mItems.clear();
}
public void findDragItem(View touchView) {
Item item;
for(int i=0 ,size = mItems.size() ;i<size ;i++){
item = mItems.get(i);
if(item.view == touchView){
mDragItem = item;
break;
}
}
}
}
private class GestureListenerImpl extends GestureDetector.SimpleOnGestureListener{
@Override
public boolean onDown(MotionEvent e) {
mTouchChild = findTopChildUnder((int) e.getX(), (int) e.getY());
sDebugger.i("mGestureDetector_onDown","----------------- > after find : mTouchChild = "
+ mTouchChild);
mReDrag = false;
if(mTouchChild!=null && !mDispatchToAlertWindow && mDragState != DRAG_STATE_IDLE){
if(mCallback.isChildDraggable(mTouchChild)) {
mReDrag = true;
checkForDrag(130, false);
}
}
return mTouchChild != null;
}
@Override
public boolean onSingleTapUp(MotionEvent e) {
sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_CLICKED);
if(mClickListener == null){
return false;
}
//处理点击时,看起来有点怪异的感觉(控件偏离了点位置)
removeCallbacks(mCheckForDrag);
boolean performed = mClickListener.performClick(DragFlowLayout.this, mTouchChild, e , mDragState);
sDebugger.i("mGestureDetector_onSingleTapUp","----------------- > performed = " + performed);
if(performed){
playSoundEffect(SoundEffectConstants.CLICK);
}else if (mReDrag) {
checkForDrag(0, true);
}
return performed;
}
@Override
public void onLongPress(MotionEvent e) {
sDebugger.i("mGestureDetector_onLongPress","----------------- >");
sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_LONG_CLICKED);
performHapticFeedback(HapticFeedbackConstants.LONG_PRESS);
if(mDragState!= DRAG_STATE_DRAGGING && mTouchChild!=null && mCallback.isChildDraggable(mTouchChild)) {
setDragState(DRAG_STATE_DRAGGING, false);
checkForDrag(0, false);
}
}
}
/**
* the drag item manager
*/
public class DragItemManager {
DragAdapter getDragAdapter() {
return mCallback.getDragAdapter();
}
/** get the item count
* @return the item count */
public int getItemCount(){
return getChildCount();
}
/**
* get all items
* @param <T> the t
* @return the items that not removed
*/
public <T> List<T> getItems(){
final DragAdapter adapter = getDragAdapter();
List<T> list = new ArrayList<>();
T t;
for (int i=0 ,size = getChildCount(); i < size; i++) {
t = (T) adapter.getData(getChildAt(i));
list.add(t);
}
return list;
}
/**
* add order items to the last.
*/
public void addItems(Object...datas){
for(int i=0,size = datas.length ;i<size ; i++){
addItem(i, datas[i]);
}
}
/**
* add order items to the last.
*/
public <T> void addItems(List<T> list){
for(int i=0,size = list.size() ;i<size ; i++){
addItem(i, list.get(i));
}
}
/**
* add items from target startIndex and data.
* @param startIndex the start index to add
* @param data the data.
*/
public void addItems(int startIndex, Object...data){
if(startIndex > getItemCount()){
throw new IllegalArgumentException();
}
for(int i=0,size = data.length ;i<size ; i++){
addItem(startIndex + i, data[i]);
}
}
/**
* add items from target startIndex and data.
* @param startIndex the start index to add
* @param data the data.
*/
public <T> void addItems(int startIndex, List<T> data){
if(startIndex > getItemCount()){
throw new IllegalArgumentException();
}
for(int i=0,size = data.size() ;i<size ; i++){
addItem(startIndex + i, data.get(i));
}
}
/**
* add a item to the DragFlowLayout
* @param index the index , can be -1 if add last.
* @param data the data
*/
public void addItem(int index, Object data) {
if (index < -1) {
throw new IllegalArgumentException("index can't < -1.");
}
final DragAdapter mAdapter = getDragAdapter();
final View view = View.inflate(getContext(), mAdapter.getItemLayoutId(), null);
mAdapter.onBindData(view, getDragState(), data);
addView(view, index);
}
/**
* remove item by index
* @param index the index , you should be careful of the drag state.
*/
public void removeItem(int index) {
removeViewAt(index);
}
/**
* remove item by data
* @param data the data
*/
public void removeItem(Object data) {
final DragAdapter adapter = getDragAdapter();
Object rawData;
int index = INVALID_INDXE;
for (int size = getChildCount(), i = size - 1; i >= 0; i--) {
rawData = adapter.getData(getChildAt(i));
if (rawData.equals(data)) {
index = i;
break;
}
}
if (index >= 0) {
removeViewAt(index);
}
}
/**
* update item by index and new data.
* @param index the index
* @param data the data
*/
public void updateItem(int index, Object data) {
final View view = getChildAt(index);
getDragAdapter().onBindData(view, getDragState(), data);
}
/**
* update item by previous data and new data.
* @param preData the previous data
* @param newData the new data
*/
public void updateItem(Object preData, Object newData) {
final DragAdapter adapter = getDragAdapter();
Object rawData;
View view = null;
for (int size = getChildCount(), i = size - 1; i >= 0; i--) {
view = getChildAt(i);
rawData = adapter.getData(view);
if (rawData.equals(preData)) {
break;
}
}
if (view != null) {
getDragAdapter().onBindData(view,getDragState(),newData);
}
}
}
}
| change doc
| Drag-FlowLayout/dragflowlayout/src/main/java/com/heaven7/android/dragflowlayout/DragFlowLayout.java | change doc |
|
Java | apache-2.0 | 1f048a606837326267558bb3ccb38efdfc2aa672 | 0 | mcqueentc/chronix.benchmark,ChronixDB/chronix.benchmark | package de.qaware.chronix.client.benchmark.benchmarkrunner.util;
import de.qaware.chronix.shared.QueryUtil.JsonTimeSeriesHandler;
import de.qaware.chronix.database.TimeSeriesMetaData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
/**
* Created by mcqueen666 on 13.09.16.
*/
public class TimeSeriesCounter {
private static TimeSeriesCounter instance;
private final Logger logger = LoggerFactory.getLogger(TimeSeriesCounter.class);
private JsonTimeSeriesHandler jsonTimeSeriesHandler;
private TimeSeriesCounter(){
jsonTimeSeriesHandler = JsonTimeSeriesHandler.getInstance();
}
public static TimeSeriesCounter getInstance(){
if(instance == null){
instance = new TimeSeriesCounter();
}
return instance;
}
/**
* Returns meta data for all previously imported time series.
*
* @return list of all imported meta data. Empty if nothing was imported previously.
*/
public List<TimeSeriesMetaData> getAll(){
List<TimeSeriesMetaData> metaDataList = new ArrayList<>();
File directory = new File(jsonTimeSeriesHandler.getTimeSeriesMetaDataRecordDirectoryPath());
if(directory.exists() && directory.isDirectory()){
File[] measurements = directory.listFiles();
if(measurements != null && measurements.length > 0) {
for (File measurement : measurements) {
if (measurement.isFile() && measurement.getName().endsWith(".json")) {
metaDataList.addAll(jsonTimeSeriesHandler.readTimeSeriesMetaDatafromJson(measurement.getName().replace(".json","")));
}
}
} else {
logger.error("TimeSeriesCounter: No meta data to read.");
}
}
return metaDataList;
}
/**
* Returns meta data for a random time series size times in a list. (for cache testing)
*
* @param size number of how many elements the list should have.
* @return list if meta data.
*/
public List<TimeSeriesMetaData> getCachingTestMetaData(int size){
List<TimeSeriesMetaData> allMetaData = this.getAll();
return this.getCachingTestMetaData(allMetaData, size);
}
/**
* Returns meta data for a random time series size times in a list from given meta data. (for cache testing)
*
* @param timeSeriesMetaDataList meta data from which a random time series should be chosen.
* @param size number of how many elements the list should have.
* @return list if meta data.
*/
public List<TimeSeriesMetaData> getCachingTestMetaData(List<TimeSeriesMetaData> timeSeriesMetaDataList, int size){
if(size < 0) size *= -1;
List<TimeSeriesMetaData> cachingTestMetaData = new ArrayList<>(size);
if(!timeSeriesMetaDataList.isEmpty()){
Random random = new Random();
TimeSeriesMetaData metaData = timeSeriesMetaDataList.get(random.nextInt(timeSeriesMetaDataList.size()));
for(int i = 0; i < size; i++){
cachingTestMetaData.add(metaData);
}
}
return cachingTestMetaData;
}
/**
* Returns meta data for random time series of all previously imported time series.
*
* @param size number of how many elements the list should have.
* @return list of meta data.
*/
public List<TimeSeriesMetaData> getRandomTimeSeriesMetaData(int size){
List<TimeSeriesMetaData> allMetaData = this.getAll();
return this.getRandomTimeSeriesMetaData(allMetaData, size);
}
/**
* Returns meta data for random time series from given meta data.
*
* @param timeSeriesMetaDataList meta data from which the random time series should be chosen.
* @param size number of how many elements the list should have.
* @return list of meta data.
*/
public List<TimeSeriesMetaData> getRandomTimeSeriesMetaData(List<TimeSeriesMetaData> timeSeriesMetaDataList, int size){
if(size < 0) size *= -1;
List<TimeSeriesMetaData> metaDataList = new ArrayList<>(size);
if(!timeSeriesMetaDataList.isEmpty()){
Random random = new Random();
for(int i = 0; i < size; i++){
TimeSeriesMetaData randomMetaData = timeSeriesMetaDataList.get(random.nextInt(timeSeriesMetaDataList.size()));
if(randomMetaData.getStart() > randomMetaData.getEnd()) {
logger.info("Start: {}, End: {}", randomMetaData.getStart(), randomMetaData.getEnd());
long randomStart = random.longs(randomMetaData.getStart(), randomMetaData.getEnd()).iterator().next();
long randomEnd = random.longs(randomStart, randomMetaData.getEnd() + 1L).iterator().next();
randomMetaData.setStart(randomStart);
randomMetaData.setEnd(randomEnd);
metaDataList.add(randomMetaData);
}
else {
metaDataList.add(randomMetaData);
}
}
}
return metaDataList;
}
}
| client/src/main/java/de/qaware/chronix/client/benchmark/benchmarkrunner/util/TimeSeriesCounter.java | package de.qaware.chronix.client.benchmark.benchmarkrunner.util;
import de.qaware.chronix.shared.QueryUtil.JsonTimeSeriesHandler;
import de.qaware.chronix.database.TimeSeriesMetaData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
/**
* Created by mcqueen666 on 13.09.16.
*/
public class TimeSeriesCounter {
private static TimeSeriesCounter instance;
private final Logger logger = LoggerFactory.getLogger(TimeSeriesCounter.class);
private JsonTimeSeriesHandler jsonTimeSeriesHandler;
private TimeSeriesCounter(){
jsonTimeSeriesHandler = JsonTimeSeriesHandler.getInstance();
}
public static TimeSeriesCounter getInstance(){
if(instance == null){
instance = new TimeSeriesCounter();
}
return instance;
}
/**
* Returns meta data for all previously imported time series.
*
* @return list of all imported meta data. Empty if nothing was imported previously.
*/
public List<TimeSeriesMetaData> getAll(){
List<TimeSeriesMetaData> metaDataList = new ArrayList<>();
File directory = new File(jsonTimeSeriesHandler.getTimeSeriesMetaDataRecordDirectoryPath());
if(directory.exists() && directory.isDirectory()){
File[] measurements = directory.listFiles();
if(measurements != null && measurements.length > 0) {
for (File measurement : measurements) {
if (measurement.isFile() && measurement.getName().endsWith(".json")) {
metaDataList.addAll(jsonTimeSeriesHandler.readTimeSeriesMetaDatafromJson(measurement.getName().replace(".json","")));
}
}
} else {
logger.error("TimeSeriesCounter: No meta data to read.");
}
}
return metaDataList;
}
/**
* Returns meta data for a random time series size times in a list. (for cache testing)
*
* @param size number of how many elements the list should have.
* @return list if meta data.
*/
public List<TimeSeriesMetaData> getCachingTestMetaData(int size){
List<TimeSeriesMetaData> allMetaData = this.getAll();
return this.getCachingTestMetaData(allMetaData, size);
}
/**
* Returns meta data for a random time series size times in a list from given meta data. (for cache testing)
*
* @param timeSeriesMetaDataList meta data from which a random time series should be chosen.
* @param size number of how many elements the list should have.
* @return list if meta data.
*/
public List<TimeSeriesMetaData> getCachingTestMetaData(List<TimeSeriesMetaData> timeSeriesMetaDataList, int size){
if(size < 0) size *= -1;
List<TimeSeriesMetaData> cachingTestMetaData = new ArrayList<>(size);
if(!timeSeriesMetaDataList.isEmpty()){
Random random = new Random();
TimeSeriesMetaData metaData = timeSeriesMetaDataList.get(random.nextInt(timeSeriesMetaDataList.size()));
for(int i = 0; i < size; i++){
cachingTestMetaData.add(metaData);
}
}
return cachingTestMetaData;
}
/**
* Returns meta data for random time series of all previously imported time series.
*
* @param size number of how many elements the list should have.
* @return list of meta data.
*/
public List<TimeSeriesMetaData> getRandomTimeSeriesMetaData(int size){
List<TimeSeriesMetaData> allMetaData = this.getAll();
return this.getRandomTimeSeriesMetaData(allMetaData, size);
}
/**
* Returns meta data for random time series from given meta data.
*
* @param timeSeriesMetaDataList meta data from which the random time series should be chosen.
* @param size number of how many elements the list should have.
* @return list of meta data.
*/
public List<TimeSeriesMetaData> getRandomTimeSeriesMetaData(List<TimeSeriesMetaData> timeSeriesMetaDataList, int size){
if(size < 0) size *= -1;
List<TimeSeriesMetaData> metaDataList = new ArrayList<>(size);
if(!timeSeriesMetaDataList.isEmpty()){
Random random = new Random();
for(int i = 0; i < size; i++){
TimeSeriesMetaData randomMetaData = timeSeriesMetaDataList.get(random.nextInt(timeSeriesMetaDataList.size()));
// get at least a time span of 2000ms
long randomStart = random.longs(randomMetaData.getStart(), randomMetaData.getEnd() - 2000L).iterator().next();
long randomEnd = random.longs(randomStart, randomMetaData.getEnd() + 1L).iterator().next();
randomMetaData.setStart(randomStart);
randomMetaData.setEnd(randomEnd);
metaDataList.add(randomMetaData);
}
}
return metaDataList;
}
}
| some time series have only one point and in that case, random start / end times are not needed (and do actually cause exception because origin and bound must not be equal)
| client/src/main/java/de/qaware/chronix/client/benchmark/benchmarkrunner/util/TimeSeriesCounter.java | some time series have only one point and in that case, random start / end times are not needed (and do actually cause exception because origin and bound must not be equal) |
|
Java | bsd-2-clause | 1938a1ec44a4db188e4413d2b53969abc83c4c75 | 0 | scifio/scifio | //
// LeicaHandler.java
//
/*
OME Bio-Formats package for reading and converting biological file formats.
Copyright (C) 2005-@year@ UW-Madison LOCI and Glencoe Software, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package loci.formats.in;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.StringTokenizer;
import java.util.Vector;
import loci.common.DateTools;
import loci.formats.CoreMetadata;
import loci.formats.FormatTools;
import loci.formats.MetadataTools;
import loci.formats.meta.MetadataStore;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;
/**
* SAX handler for parsing XML in Leica LIF and Leica TCS files.
*
* <dl><dt><b>Source code:</b></dt>
* <dd><a href="https://skyking.microscopy.wisc.edu/trac/java/browser/trunk/components/bio-formats/src/loci/formats/in/LeicaHandler.java">Trac</a>,
* <a href="https://skyking.microscopy.wisc.edu/svn/java/trunk/components/bio-formats/src/loci/formats/in/LeicaHandler.java">SVN</a></dd></dl>
*
* @author Melissa Linkert linkert at wisc.edu
*/
public class LeicaHandler extends DefaultHandler {
// -- Fields --
private String elementName, collection;
private int count = 0, numChannels, extras;
private Vector<String> lutNames;
private Vector<Float> xPos, yPos, zPos;
private int numDatasets = -1;
private Hashtable globalMetadata;
private MetadataStore store;
private int nextLaser, channel, nextDetector = -1;
private Float zoom, pinhole, readOutRate;
private Vector<Integer> detectorIndices;
private String filterWheelName;
private int nextFilter = 0, filterIndex;
private Vector<CoreMetadata> core;
private boolean canParse = true;
private long firstStamp = 0;
private Hashtable<Integer, String> bytesPerAxis;
// -- Constructor --
public LeicaHandler(MetadataStore store) {
super();
globalMetadata = new Hashtable();
lutNames = new Vector<String>();
this.store = store;
core = new Vector<CoreMetadata>();
detectorIndices = new Vector<Integer>();
xPos = new Vector<Float>();
yPos = new Vector<Float>();
zPos = new Vector<Float>();
bytesPerAxis = new Hashtable<Integer, String>();
}
// -- LeicaHandler API methods --
public Vector<CoreMetadata> getCoreMetadata() { return core; }
public Hashtable getGlobalMetadata() { return globalMetadata; }
public Vector<String> getLutNames() { return lutNames; }
// -- DefaultHandler API methods --
/**
 * Handles an XML close tag.  On &lt;/ImageDescription&gt; the state
 * accumulated for the current image (channel count, extra dimensions,
 * stage positions, detector links) is flushed into the core metadata
 * list and the MetadataStore; on &lt;/Element&gt; the per-element
 * hardware counters are reset; on &lt;/LDM_Block_Sequential_Master&gt;
 * parsing is re-enabled (see startElement, which disables it).
 */
public void endElement(String uri, String localName, String qName) {
  if (qName.equals("ImageDescription")) {
    CoreMetadata coreMeta = core.get(numDatasets);
    // every image has at least one channel
    if (numChannels == 0) numChannels = 1;
    coreMeta.sizeC = numChannels;
    // fold unrecognized dimension lengths into Z (if unset) or T
    if (extras > 1) {
      if (coreMeta.sizeZ == 1) coreMeta.sizeZ = extras;
      else coreMeta.sizeT *= extras;
    }
    if (coreMeta.sizeX == 0 && coreMeta.sizeY == 0) {
      // no pixel data for this image; discard the dataset slot
      numDatasets--;
    }
    else {
      // default any unset dimensions to 1
      if (coreMeta.sizeX == 0) coreMeta.sizeX = 1;
      if (coreMeta.sizeZ == 0) coreMeta.sizeZ = 1;
      if (coreMeta.sizeT == 0) coreMeta.sizeT = 1;
      coreMeta.orderCertain = true;
      coreMeta.metadataComplete = true;
      coreMeta.littleEndian = true;
      coreMeta.interleaved = coreMeta.rgb;
      coreMeta.imageCount = coreMeta.sizeZ * coreMeta.sizeT;
      if (!coreMeta.rgb) coreMeta.imageCount *= coreMeta.sizeC;
      coreMeta.indexed = !coreMeta.rgb;
      coreMeta.falseColor = true;
      // dimension order is XY plus the remaining axes sorted by their
      // byte increment (smaller increment = faster-varying axis)
      Integer[] bytes = bytesPerAxis.keySet().toArray(new Integer[0]);
      Arrays.sort(bytes);
      coreMeta.dimensionOrder = "XY";
      for (Integer nBytes : bytes) {
        String axis = bytesPerAxis.get(nBytes);
        if (coreMeta.dimensionOrder.indexOf(axis) == -1) {
          coreMeta.dimensionOrder += axis;
        }
      }
      // append any axes that had no recorded byte increment
      String[] axes = new String[] {"Z", "C", "T"};
      for (String axis : axes) {
        if (coreMeta.dimensionOrder.indexOf(axis) == -1) {
          coreMeta.dimensionOrder += axis;
        }
      }
      core.setElementAt(coreMeta, numDatasets);
    }
    // NOTE(review): nChannels is deliberately 0 for RGB data, which
    // skips all of the per-channel loops below — confirm intended.
    int nChannels = coreMeta.rgb ? 0 : numChannels;
    // link each channel to its detector and record the read-out rate,
    // but only for detector indices that were actually seen
    if (readOutRate != null) {
      for (int c=0; c<nChannels; c++) {
        if (c < detectorIndices.size()) {
          int index = detectorIndices.get(c).intValue();
          if (index < nChannels && index <= nextDetector) {
            store.setDetectorSettingsReadOutRate(readOutRate, numDatasets, c);
            String id =
              MetadataTools.createLSID("Detector", numDatasets, index);
            store.setDetectorSettingsDetector(id, numDatasets, c);
          }
        }
      }
    }
    for (int c=0; c<nChannels; c++) {
      store.setLogicalChannelPinholeSize(pinhole, numDatasets, c);
    }
    // flush stage positions accumulated before channels were known
    for (int i=0; i<xPos.size(); i++) {
      int nPlanes = coreMeta.imageCount / (coreMeta.rgb ? 1 : coreMeta.sizeC);
      for (int image=0; image<nPlanes; image++) {
        int offset = image * nChannels + i;
        store.setStagePositionPositionX(xPos.get(i), numDatasets, 0, offset);
        store.setStagePositionPositionY(yPos.get(i), numDatasets, 0, offset);
        store.setStagePositionPositionZ(zPos.get(i), numDatasets, 0, offset);
      }
    }
    // fall-back detector linkage for channels without an explicit index
    for (int c=0; c<nChannels; c++) {
      int index = c < detectorIndices.size() ?
        detectorIndices.get(c).intValue() : detectorIndices.size() - 1;
      if (index < 0 || index >= nChannels || index > nextDetector) break;
      String id = MetadataTools.createLSID("Detector", numDatasets, index);
      store.setDetectorSettingsDetector(id, numDatasets, c);
    }
    xPos.clear();
    yPos.clear();
    zPos.clear();
    detectorIndices.clear();
  }
  else if (qName.equals("Element")) {
    // reset per-element hardware counters
    nextLaser = 0;
    nextFilter = 0;
    nextDetector = -1;
    int nChannels = core.get(numDatasets).rgb ? 1 : numChannels;
    for (int c=0; c<detectorIndices.size(); c++) {
      int index = detectorIndices.get(c).intValue();
      if (c >= nChannels || index >= nChannels || index > nextDetector) break;
      String id = MetadataTools.createLSID("Detector", numDatasets, index);
      // NOTE(review): channel argument is 'index' here but 'c' in the
      // analogous ImageDescription loop — confirm which is intended.
      store.setDetectorSettingsDetector(id, numDatasets, index);
    }
    for (int c=0; c<nChannels; c++) {
      store.setLogicalChannelPinholeSize(pinhole, numDatasets, c);
    }
  }
  else if (qName.equals("LDM_Block_Sequential_Master")) {
    canParse = true;
  }
}
/**
 * Handles an XML open tag, dispatching on the element name and
 * accumulating image dimensions, instrument settings and timing
 * information into the handler state and the MetadataStore.
 *
 * Everything inside an LDM_Block_Sequential_Master element is skipped
 * (canParse is cleared here and restored in endElement).
 *
 * Fix: plane delta-T values are now computed with float division
 * (ms / 1000f); the previous integer division truncated sub-second
 * frame intervals to whole seconds.
 */
public void startElement(String uri, String localName, String qName,
  Attributes attributes)
{
  // NOTE(review): h is fetched and stored back unchanged in this method
  // as visible here — presumably subclasses or future code populate it.
  Hashtable h = getSeriesHashtable(numDatasets);
  if (qName.equals("LDM_Block_Sequential_Master")) {
    canParse = false;
  }
  if (!canParse) return;
  if (qName.equals("Element")) {
    elementName = attributes.getValue("Name");
  }
  else if (qName.equals("Collection")) {
    collection = elementName;
  }
  else if (qName.equals("Image")) {
    // begin a new dataset (series)
    core.add(new CoreMetadata());
    numDatasets++;
    String name = elementName;
    if (collection != null) name = collection + "/" + name;
    store.setImageName(name, numDatasets);
    String instrumentID = MetadataTools.createLSID("Instrument", numDatasets);
    store.setInstrumentID(instrumentID, numDatasets);
    store.setImageInstrumentRef(instrumentID, numDatasets);
    channel = 0;
    numChannels = 0;
    extras = 1;
  }
  else if (qName.equals("Attachment")) {
    if (attributes.getValue("Name").equals("ContextDescription")) {
      store.setImageDescription(attributes.getValue("Content"), numDatasets);
    }
  }
  else if (qName.equals("ChannelDescription")) {
    count++;
    numChannels++;
    lutNames.add(attributes.getValue("LUTName"));
    int bytes = Integer.parseInt(attributes.getValue("BytesInc"));
    if (bytes > 0) {
      bytesPerAxis.put(new Integer(bytes), "C");
    }
  }
  else if (qName.equals("DimensionDescription")) {
    int len = Integer.parseInt(attributes.getValue("NumberOfElements"));
    int id = Integer.parseInt(attributes.getValue("DimID"));
    float physicalLen = Float.parseFloat(attributes.getValue("Length"));
    String unit = attributes.getValue("Unit");
    int nBytes = Integer.parseInt(attributes.getValue("BytesInc"));
    // convert total length to per-element size, normalized to microns
    physicalLen /= len;
    if (unit.equals("Ks")) {
      physicalLen /= 1000;
    }
    else if (unit.equals("m")) {
      physicalLen *= 1000000;
    }
    Float physicalSize = new Float(physicalLen);
    CoreMetadata coreMeta = core.get(core.size() - 1);
    switch (id) {
      case 1: // X axis
        coreMeta.sizeX = len;
        // a byte increment divisible by 3 is taken to mean RGB data
        coreMeta.rgb = (nBytes % 3) == 0;
        if (coreMeta.rgb) nBytes /= 3;
        switch (nBytes) {
          case 1:
            coreMeta.pixelType = FormatTools.UINT8;
            break;
          case 2:
            coreMeta.pixelType = FormatTools.UINT16;
            break;
          case 4:
            coreMeta.pixelType = FormatTools.FLOAT;
            break;
        }
        store.setDimensionsPhysicalSizeX(physicalSize, numDatasets, 0);
        break;
      case 2: // Y axis
        if (coreMeta.sizeY != 0) {
          // second Y dimension: treat as Z or T instead
          if (coreMeta.sizeZ == 1) {
            coreMeta.sizeZ = len;
          }
          else if (coreMeta.sizeT == 1) {
            coreMeta.sizeT = len;
          }
        }
        else {
          coreMeta.sizeY = len;
          store.setDimensionsPhysicalSizeY(physicalSize, numDatasets, 0);
        }
        break;
      case 3: // Z axis
        if (coreMeta.sizeY == 0) {
          // XZ scan - swap Y and Z
          coreMeta.sizeY = len;
          coreMeta.sizeZ = 1;
          store.setDimensionsPhysicalSizeY(physicalSize, numDatasets, 0);
        }
        else {
          coreMeta.sizeZ = len;
        }
        bytesPerAxis.put(new Integer(nBytes), "Z");
        break;
      case 4: // T axis
        if (coreMeta.sizeY == 0) {
          // XT scan - swap Y and T
          coreMeta.sizeY = len;
          coreMeta.sizeT = 1;
          store.setDimensionsPhysicalSizeY(physicalSize, numDatasets, 0);
        }
        else {
          coreMeta.sizeT = len;
        }
        bytesPerAxis.put(new Integer(nBytes), "T");
        break;
      default:
        // unknown axis; fold its length in during endElement
        extras *= len;
    }
    count++;
  }
  else if (qName.equals("ScannerSettingRecord")) {
    String id = attributes.getValue("Identifier");
    String value = attributes.getValue("Variant");
    if (id.equals("SystemType")) {
      store.setMicroscopeModel(value, numDatasets);
      store.setMicroscopeType("Unknown", numDatasets);
    }
    else if (id.equals("dblPinhole")) {
      // meters -> microns
      pinhole = new Float(Float.parseFloat(value) * 1000000);
    }
    else if (id.equals("dblZoom")) {
      zoom = new Float(value);
    }
    else if (id.equals("dblStepSize")) {
      // meters -> microns
      float zStep = (float) (Double.parseDouble(value) * 1000000);
      store.setDimensionsPhysicalSizeZ(new Float(zStep), numDatasets, 0);
    }
    else if (id.equals("nDelayTime_s")) {
      store.setDimensionsTimeIncrement(new Float(value), numDatasets, 0);
    }
    else if (id.equals("CameraName")) {
      store.setDetectorModel(value, numDatasets, 0);
    }
    else if (id.indexOf("WFC") == 1) {
      // wide-field channel settings; channel index is the digits in the id
      int c = 0;
      try {
        c = Integer.parseInt(id.replaceAll("\\D", ""));
      }
      catch (NumberFormatException e) { }
      if (id.endsWith("ExposureTime")) {
        store.setPlaneTimingExposureTime(new Float(value), numDatasets, 0, c);
      }
      else if (id.endsWith("Gain")) {
        store.setDetectorSettingsGain(new Float(value), numDatasets, c);
        String detectorID =
          MetadataTools.createLSID("Detector", numDatasets, 0);
        store.setDetectorSettingsDetector(detectorID, numDatasets, c);
        store.setDetectorID(detectorID, numDatasets, 0);
      }
      else if (id.endsWith("WaveLength")) {
        store.setLogicalChannelExWave(new Integer(value), numDatasets, c);
      }
      // 'UesrDefName' is the (misspelled) key used in the file format
      else if (id.endsWith("UesrDefName")) {
        store.setLogicalChannelName(value, numDatasets, c);
      }
    }
  }
  else if (qName.equals("FilterSettingRecord")) {
    String object = attributes.getValue("ObjectName");
    String attribute = attributes.getValue("Attribute");
    String objectClass = attributes.getValue("ClassName");
    String variant = attributes.getValue("Variant");
    CoreMetadata coreMeta = core.get(numDatasets);
    if (attribute.equals("NumericalAperture")) {
      store.setObjectiveLensNA(new Float(variant), numDatasets, 0);
    }
    else if (attribute.equals("OrderNumber")) {
      store.setObjectiveSerialNumber(variant, numDatasets, 0);
    }
    else if (objectClass.equals("CLaser")) {
      if (attribute.equals("Wavelength")) {
        String id =
          MetadataTools.createLSID("LightSource", numDatasets, nextLaser);
        store.setLightSourceID(id, numDatasets, nextLaser);
        store.setLaserWavelength(
          new Integer(variant), numDatasets, nextLaser);
        // model name is the parenthesized part of the object name
        String model =
          object.substring(object.indexOf("(") + 1, object.indexOf(")"));
        store.setLightSourceModel(model, numDatasets, nextLaser);
        nextLaser++;
      }
      else if (attribute.equals("Output Power")) {
        store.setLightSourcePower(
          new Float(variant), numDatasets, nextLaser - 1);
      }
    }
    else if (objectClass.equals("CDetectionUnit")) {
      if (attribute.equals("State")) {
        nextDetector++;
        String id =
          MetadataTools.createLSID("Detector", numDatasets, nextDetector);
        store.setDetectorID(id, numDatasets, nextDetector);
        store.setDetectorModel(object, numDatasets, nextDetector);
        store.setDetectorType("Unknown", numDatasets, nextDetector);
        store.setDetectorZoom(zoom, numDatasets, nextDetector);
      }
      else if (attribute.equals("HighVoltage")) {
        store.setDetectorVoltage(
          new Float(variant), numDatasets, nextDetector);
      }
      else if (attribute.equals("VideoOffset")) {
        store.setDetectorOffset(
          new Float(variant), numDatasets, nextDetector);
      }
    }
    else if (attribute.equals("Objective")) {
      // objective string: "<model words> <mag>x<NA> [immersion] [correction]"
      StringTokenizer tokens = new StringTokenizer(variant, " ");
      boolean foundMag = false;
      StringBuffer model = new StringBuffer();
      while (!foundMag) {
        String token = tokens.nextToken();
        int x = token.indexOf("x");
        if (x != -1) {
          foundMag = true;
          int mag = (int) Float.parseFloat(token.substring(0, x));
          String na = token.substring(x + 1);
          store.setObjectiveNominalMagnification(
            new Integer(mag), numDatasets, 0);
          store.setObjectiveLensNA(new Float(na), numDatasets, 0);
        }
        else {
          model.append(token);
          model.append(" ");
        }
      }
      if (tokens.hasMoreTokens()) {
        String immersion = tokens.nextToken();
        if (immersion == null || immersion.trim().equals("")) {
          immersion = "Unknown";
        }
        store.setObjectiveImmersion(immersion, numDatasets, 0);
      }
      if (tokens.hasMoreTokens()) {
        String correction = tokens.nextToken();
        if (correction == null || correction.trim().equals("")) {
          correction = "Unknown";
        }
        store.setObjectiveCorrection(correction, numDatasets, 0);
      }
      store.setObjectiveModel(model.toString().trim(), numDatasets, 0);
    }
    else if (attribute.equals("RefractionIndex")) {
      String id = MetadataTools.createLSID("Objective", numDatasets, 0);
      store.setObjectiveID(id, numDatasets, 0);
      store.setObjectiveSettingsObjective(id, numDatasets);
      store.setObjectiveSettingsRefractiveIndex(new Float(variant),
        numDatasets);
    }
    else if (attribute.equals("XPos")) {
      int c = coreMeta.rgb || coreMeta.sizeC == 0 ? 1 : coreMeta.sizeC;
      int nPlanes = coreMeta.imageCount / c;
      Float posX = new Float(variant);
      for (int image=0; image<nPlanes; image++) {
        int index = image * (coreMeta.rgb ? 1 : coreMeta.sizeC) + channel;
        if (index >= nPlanes) continue;
        store.setStagePositionPositionX(posX, numDatasets, 0, index);
      }
      // channels not yet known; defer until endElement
      if (numChannels == 0) xPos.add(posX);
    }
    else if (attribute.equals("YPos")) {
      int c = coreMeta.rgb || coreMeta.sizeC == 0 ? 1 : coreMeta.sizeC;
      int nPlanes = coreMeta.imageCount / c;
      Float posY = new Float(variant);
      for (int image=0; image<nPlanes; image++) {
        int index = image * (coreMeta.rgb ? 1 : coreMeta.sizeC) + channel;
        if (index >= nPlanes) continue;
        store.setStagePositionPositionY(posY, numDatasets, 0, index);
      }
      if (numChannels == 0) yPos.add(posY);
    }
    else if (attribute.equals("ZPos")) {
      int c = coreMeta.rgb || coreMeta.sizeC == 0 ? 1 : coreMeta.sizeC;
      int nPlanes = coreMeta.imageCount / c;
      Float posZ = new Float(variant);
      for (int image=0; image<nPlanes; image++) {
        int index = image * (coreMeta.rgb ? 1 : coreMeta.sizeC) + channel;
        if (index >= nPlanes) continue;
        store.setStagePositionPositionZ(posZ, numDatasets, 0, index);
      }
      if (numChannels == 0) zPos.add(posZ);
    }
    else if (attribute.equals("Speed")) {
      readOutRate = new Float(Float.parseFloat(variant) / 1000000);
    }
  }
  else if (qName.equals("MultiBand")) {
    if (channel >= core.get(numDatasets).sizeC) return;
    String em = attributes.getValue("LeftWorld");
    String ex = attributes.getValue("RightWorld");
    Integer emWave = new Integer((int) Float.parseFloat(em));
    Integer exWave = new Integer((int) Float.parseFloat(ex));
    String name = attributes.getValue("DyeName");
    store.setLogicalChannelEmWave(emWave, numDatasets, channel);
    store.setLogicalChannelExWave(exWave, numDatasets, channel);
    store.setLogicalChannelName(name, numDatasets, channel);
    channel++;
  }
  else if (qName.equals("Detector")) {
    Float gain = new Float(attributes.getValue("Gain"));
    Float offset = new Float(attributes.getValue("Offset"));
    // 'Channel' attribute is 1-based
    int index = Integer.parseInt(attributes.getValue("Channel")) - 1;
    int c = channel - 1;
    if (c >= 0) {
      store.setDetectorSettingsGain(gain, numDatasets, c);
      store.setDetectorSettingsOffset(offset, numDatasets, c);
      store.setDetectorSettingsReadOutRate(readOutRate, numDatasets, c);
      int detectorIndex = nextDetector < 0 ? 0 : nextDetector;
      String detectorID =
        MetadataTools.createLSID("Detector", numDatasets, detectorIndex);
      store.setDetectorSettingsDetector(detectorID, numDatasets, c);
      detectorIndices.add(new Integer(index));
    }
  }
  else if (qName.equals("LaserLineSetting")) {
    String wavelength = attributes.getValue("LaserLine");
    int index = Integer.parseInt(attributes.getValue("LineIndex"));
    String id = MetadataTools.createLSID("LightSource", numDatasets, index);
    store.setLightSourceID(id, numDatasets, index);
    store.setLaserWavelength(new Integer(wavelength), numDatasets, index);
    store.setLaserType("Unknown", numDatasets, index);
    store.setLaserLaserMedium("Unknown", numDatasets, index);
    float intensity = Float.parseFloat(attributes.getValue("IntensityDev"));
    if (intensity > 0f && channel > 0) {
      store.setLightSourceSettingsLightSource(id, numDatasets, channel - 1);
      // attenuation is stored as a 0-1 fraction
      store.setLightSourceSettingsAttenuation(
        new Float(intensity / 100f), numDatasets, channel - 1);
    }
  }
  else if (qName.equals("TimeStamp")) {
    // HighInteger/LowInteger form a 64-bit value in 100 ns ticks
    long high = Long.parseLong(attributes.getValue("HighInteger"));
    long low = Long.parseLong(attributes.getValue("LowInteger"));
    high <<= 32;
    if ((int) low < 0) {
      // keep low as an unsigned 32-bit quantity
      low &= 0xffffffffL;
    }
    long ms = (high + low) / 10000;
    if (count == 0) {
      String date = DateTools.convertDate(ms, DateTools.COBOL);
      // reject timestamps that claim to be in the future
      if (DateTools.getTime(date, DateTools.ISO8601_FORMAT) <
        System.currentTimeMillis())
      {
        store.setImageCreationDate(date, numDatasets);
      }
      firstStamp = ms;
      store.setPlaneTimingDeltaT(new Float(0), numDatasets, 0, count);
    }
    else {
      CoreMetadata coreMeta = core.get(numDatasets);
      int nImages = coreMeta.sizeZ * coreMeta.sizeT * coreMeta.sizeC;
      if (count < nImages) {
        ms -= firstStamp;
        // float division: integer 'ms / 1000' would truncate
        // sub-second frame intervals to whole seconds
        store.setPlaneTimingDeltaT(
          new Float(ms / 1000f), numDatasets, 0, count);
      }
    }
    count++;
  }
  else if (qName.equals("RelTimeStamp")) {
    CoreMetadata coreMeta = core.get(numDatasets);
    int nImages = coreMeta.sizeZ * coreMeta.sizeT * coreMeta.sizeC;
    if (count < nImages) {
      Float time = new Float(attributes.getValue("Time"));
      store.setPlaneTimingDeltaT(time, numDatasets, 0, count++);
    }
  }
  else if (qName.equals("Wheel")) {
    filterIndex = Integer.parseInt(attributes.getValue("FilterIndex"));
  }
  else if (qName.equals("WheelName")) {
    String id = MetadataTools.createLSID("Dichroic", numDatasets, nextFilter);
    store.setDichroicID(id, numDatasets, nextFilter);
    store.setDichroicModel(
      attributes.getValue("FilterName").trim(), numDatasets, nextFilter);
    // link the active wheel position into a filter set for this channel
    if (nextFilter == filterIndex) {
      String filterSet =
        MetadataTools.createLSID("FilterSet", numDatasets, channel);
      store.setFilterSetID(filterSet, numDatasets, channel);
      store.setFilterSetDichroic(id, numDatasets, channel);
      if (channel < core.get(numDatasets).sizeC) {
        store.setLogicalChannelFilterSet(filterSet, numDatasets, channel);
      }
    }
    nextFilter++;
  }
  else count = 0;
  storeSeriesHashtable(numDatasets, h);
}
// -- Helper methods --
/**
 * Returns the series metadata table for the given series, or a fresh
 * empty table when the index is out of range.
 */
private Hashtable getSeriesHashtable(int series) {
  boolean valid = series >= 0 && series < core.size();
  return valid ? core.get(series).seriesMetadata : new Hashtable();
}
/**
 * Stores h as the series metadata table for the given series;
 * a negative series index is silently ignored.
 */
private void storeSeriesHashtable(int series, Hashtable h) {
  if (series >= 0) {
    CoreMetadata coreMeta = core.get(series);
    coreMeta.seriesMetadata = h;
    core.setElementAt(coreMeta, series);
  }
}
}
| components/bio-formats/src/loci/formats/in/LeicaHandler.java | //
// LeicaHandler.java
//
/*
OME Bio-Formats package for reading and converting biological file formats.
Copyright (C) 2005-@year@ UW-Madison LOCI and Glencoe Software, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package loci.formats.in;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.StringTokenizer;
import java.util.Vector;
import loci.common.DateTools;
import loci.formats.CoreMetadata;
import loci.formats.FormatTools;
import loci.formats.MetadataTools;
import loci.formats.meta.MetadataStore;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;
/**
* SAX handler for parsing XML in Leica LIF and Leica TCS files.
*
* <dl><dt><b>Source code:</b></dt>
* <dd><a href="https://skyking.microscopy.wisc.edu/trac/java/browser/trunk/components/bio-formats/src/loci/formats/in/LeicaHandler.java">Trac</a>,
* <a href="https://skyking.microscopy.wisc.edu/svn/java/trunk/components/bio-formats/src/loci/formats/in/LeicaHandler.java">SVN</a></dd></dl>
*
* @author Melissa Linkert linkert at wisc.edu
*/
public class LeicaHandler extends DefaultHandler {
// -- Fields --
// Name of the current Element node, and the collection it belongs to.
private String elementName, collection;
// count: planes/timestamps seen for the current image;
// numChannels: channels seen; extras: product of unknown-axis lengths.
private int count = 0, numChannels, extras;
// LUT name for each channel, across all datasets.
private Vector<String> lutNames;
// Stage positions accumulated before the channel count is known;
// flushed when an ImageDescription element ends.
private Vector<Float> xPos, yPos, zPos;
// Index of the current dataset (series); -1 until the first Image.
private int numDatasets = -1;
// Global (non-series) metadata table.
private Hashtable globalMetadata;
// Destination for all OME metadata parsed from the XML.
private MetadataStore store;
// Running hardware indices for the current image.
private int nextLaser, channel, nextDetector = -1;
// Most recently parsed optics settings.
private Float zoom, pinhole, readOutRate;
// Detector channel indices parsed from Detector elements (1-based in
// the file, stored 0-based).
private Vector<Integer> detectorIndices;
// NOTE(review): never assigned in the visible code — confirm it is used.
private String filterWheelName;
// Filter wheel bookkeeping: next wheel position and the active position.
private int nextFilter = 0, filterIndex;
// Core metadata for each dataset (series).
private Vector<CoreMetadata> core;
// False while inside LDM_Block_Sequential_Master, whose content is skipped.
private boolean canParse = true;
// Millisecond timestamp of the first TimeStamp element; plane delta-T
// values are computed relative to this.
private long firstStamp = 0;
// Maps a dimension's byte increment ("BytesInc") to its axis label
// ("C", "Z" or "T"); sorting the keys by size recovers dimension order.
private Hashtable<Integer, String> bytesPerAxis;
// -- Constructor --
/**
 * Constructs a handler that writes all parsed metadata into the
 * given MetadataStore.
 *
 * @param store destination for OME metadata parsed from the XML
 */
public LeicaHandler(MetadataStore store) {
  super();
  this.store = store;
  core = new Vector<CoreMetadata>();
  globalMetadata = new Hashtable();
  lutNames = new Vector<String>();
  bytesPerAxis = new Hashtable<Integer, String>();
  detectorIndices = new Vector<Integer>();
  xPos = new Vector<Float>();
  yPos = new Vector<Float>();
  zPos = new Vector<Float>();
}
// -- LeicaHandler API methods --
/** Returns the per-series core metadata collected while parsing. */
public Vector<CoreMetadata> getCoreMetadata() { return core; }
/** Returns the global (non-series) metadata table. */
public Hashtable getGlobalMetadata() { return globalMetadata; }
/** Returns the LUT name recorded for each channel, in channel order. */
public Vector<String> getLutNames() { return lutNames; }
// -- DefaultHandler API methods --
/**
 * Handles an XML close tag.  On &lt;/ImageDescription&gt; the state
 * accumulated for the current image (channel count, extra dimensions,
 * stage positions, detector links) is flushed into the core metadata
 * list and the MetadataStore; on &lt;/Element&gt; the per-element
 * hardware counters are reset; on &lt;/LDM_Block_Sequential_Master&gt;
 * parsing is re-enabled (see startElement, which disables it).
 */
public void endElement(String uri, String localName, String qName) {
  if (qName.equals("ImageDescription")) {
    CoreMetadata coreMeta = core.get(numDatasets);
    // every image has at least one channel
    if (numChannels == 0) numChannels = 1;
    coreMeta.sizeC = numChannels;
    // fold unrecognized dimension lengths into Z (if unset) or T
    if (extras > 1) {
      if (coreMeta.sizeZ == 1) coreMeta.sizeZ = extras;
      else coreMeta.sizeT *= extras;
    }
    if (coreMeta.sizeX == 0 && coreMeta.sizeY == 0) {
      // no pixel data for this image; discard the dataset slot
      numDatasets--;
    }
    else {
      // default any unset dimensions to 1
      if (coreMeta.sizeX == 0) coreMeta.sizeX = 1;
      if (coreMeta.sizeZ == 0) coreMeta.sizeZ = 1;
      if (coreMeta.sizeT == 0) coreMeta.sizeT = 1;
      coreMeta.orderCertain = true;
      coreMeta.metadataComplete = true;
      coreMeta.littleEndian = true;
      coreMeta.interleaved = coreMeta.rgb;
      coreMeta.imageCount = coreMeta.sizeZ * coreMeta.sizeT;
      if (!coreMeta.rgb) coreMeta.imageCount *= coreMeta.sizeC;
      coreMeta.indexed = !coreMeta.rgb;
      coreMeta.falseColor = true;
      // dimension order is XY plus the remaining axes sorted by their
      // byte increment (smaller increment = faster-varying axis)
      Integer[] bytes = bytesPerAxis.keySet().toArray(new Integer[0]);
      Arrays.sort(bytes);
      coreMeta.dimensionOrder = "XY";
      for (Integer nBytes : bytes) {
        String axis = bytesPerAxis.get(nBytes);
        if (coreMeta.dimensionOrder.indexOf(axis) == -1) {
          coreMeta.dimensionOrder += axis;
        }
      }
      // append any axes that had no recorded byte increment
      String[] axes = new String[] {"Z", "C", "T"};
      for (String axis : axes) {
        if (coreMeta.dimensionOrder.indexOf(axis) == -1) {
          coreMeta.dimensionOrder += axis;
        }
      }
      core.setElementAt(coreMeta, numDatasets);
    }
    // NOTE(review): nChannels is deliberately 0 for RGB data, which
    // skips all of the per-channel loops below — confirm intended.
    int nChannels = coreMeta.rgb ? 0 : numChannels;
    // link each channel to its detector and record the read-out rate,
    // but only for detector indices that were actually seen
    if (readOutRate != null) {
      for (int c=0; c<nChannels; c++) {
        if (c < detectorIndices.size()) {
          int index = detectorIndices.get(c).intValue();
          if (index < nChannels && index <= nextDetector) {
            store.setDetectorSettingsReadOutRate(readOutRate, numDatasets, c);
            String id =
              MetadataTools.createLSID("Detector", numDatasets, index);
            store.setDetectorSettingsDetector(id, numDatasets, c);
          }
        }
      }
    }
    for (int c=0; c<nChannels; c++) {
      store.setLogicalChannelPinholeSize(pinhole, numDatasets, c);
    }
    // flush stage positions accumulated before channels were known
    for (int i=0; i<xPos.size(); i++) {
      int nPlanes = coreMeta.imageCount / (coreMeta.rgb ? 1 : coreMeta.sizeC);
      for (int image=0; image<nPlanes; image++) {
        int offset = image * nChannels + i;
        store.setStagePositionPositionX(xPos.get(i), numDatasets, 0, offset);
        store.setStagePositionPositionY(yPos.get(i), numDatasets, 0, offset);
        store.setStagePositionPositionZ(zPos.get(i), numDatasets, 0, offset);
      }
    }
    // fall-back detector linkage for channels without an explicit index
    for (int c=0; c<nChannels; c++) {
      int index = c < detectorIndices.size() ?
        detectorIndices.get(c).intValue() : detectorIndices.size() - 1;
      if (index < 0 || index >= nChannels || index > nextDetector) break;
      String id = MetadataTools.createLSID("Detector", numDatasets, index);
      store.setDetectorSettingsDetector(id, numDatasets, c);
    }
    xPos.clear();
    yPos.clear();
    zPos.clear();
    detectorIndices.clear();
  }
  else if (qName.equals("Element")) {
    // reset per-element hardware counters
    nextLaser = 0;
    nextFilter = 0;
    nextDetector = -1;
    int nChannels = core.get(numDatasets).rgb ? 1 : numChannels;
    for (int c=0; c<detectorIndices.size(); c++) {
      int index = detectorIndices.get(c).intValue();
      if (c >= nChannels || index >= nChannels || index > nextDetector) break;
      String id = MetadataTools.createLSID("Detector", numDatasets, index);
      // NOTE(review): channel argument is 'index' here but 'c' in the
      // analogous ImageDescription loop — confirm which is intended.
      store.setDetectorSettingsDetector(id, numDatasets, index);
    }
    for (int c=0; c<nChannels; c++) {
      store.setLogicalChannelPinholeSize(pinhole, numDatasets, c);
    }
  }
  else if (qName.equals("LDM_Block_Sequential_Master")) {
    canParse = true;
  }
}
public void startElement(String uri, String localName, String qName,
Attributes attributes)
{
Hashtable h = getSeriesHashtable(numDatasets);
if (qName.equals("LDM_Block_Sequential_Master")) {
canParse = false;
}
if (!canParse) return;
if (qName.equals("Element")) {
elementName = attributes.getValue("Name");
}
else if (qName.equals("Collection")) {
collection = elementName;
}
else if (qName.equals("Image")) {
core.add(new CoreMetadata());
numDatasets++;
String name = elementName;
if (collection != null) name = collection + "/" + name;
store.setImageName(name, numDatasets);
String instrumentID = MetadataTools.createLSID("Instrument", numDatasets);
store.setInstrumentID(instrumentID, numDatasets);
store.setImageInstrumentRef(instrumentID, numDatasets);
channel = 0;
numChannels = 0;
extras = 1;
}
else if (qName.equals("Attachment")) {
if (attributes.getValue("Name").equals("ContextDescription")) {
store.setImageDescription(attributes.getValue("Content"), numDatasets);
}
}
else if (qName.equals("ChannelDescription")) {
count++;
numChannels++;
lutNames.add(attributes.getValue("LUTName"));
int bytes = Integer.parseInt(attributes.getValue("BytesInc"));
if (bytes > 0) {
bytesPerAxis.put(new Integer(bytes), "C");
}
}
else if (qName.equals("DimensionDescription")) {
int len = Integer.parseInt(attributes.getValue("NumberOfElements"));
int id = Integer.parseInt(attributes.getValue("DimID"));
float physicalLen = Float.parseFloat(attributes.getValue("Length"));
String unit = attributes.getValue("Unit");
int nBytes = Integer.parseInt(attributes.getValue("BytesInc"));
physicalLen /= len;
if (unit.equals("Ks")) {
physicalLen /= 1000;
}
else if (unit.equals("m")) {
physicalLen *= 1000000;
}
Float physicalSize = new Float(physicalLen);
CoreMetadata coreMeta = core.get(core.size() - 1);
switch (id) {
case 1: // X axis
coreMeta.sizeX = len;
coreMeta.rgb = (nBytes % 3) == 0;
if (coreMeta.rgb) nBytes /= 3;
switch (nBytes) {
case 1:
coreMeta.pixelType = FormatTools.UINT8;
break;
case 2:
coreMeta.pixelType = FormatTools.UINT16;
break;
case 4:
coreMeta.pixelType = FormatTools.FLOAT;
break;
}
store.setDimensionsPhysicalSizeX(physicalSize, numDatasets, 0);
break;
case 2: // Y axis
if (coreMeta.sizeY != 0) {
if (coreMeta.sizeZ == 1) {
coreMeta.sizeZ = len;
store.setDimensionsPhysicalSizeZ(physicalSize, numDatasets, 0);
}
else if (coreMeta.sizeT == 1) {
coreMeta.sizeT = len;
store.setDimensionsTimeIncrement(physicalSize, numDatasets, 0);
}
}
else {
coreMeta.sizeY = len;
store.setDimensionsPhysicalSizeY(physicalSize, numDatasets, 0);
}
break;
case 3: // Z axis
if (coreMeta.sizeY == 0) {
// XZ scan - swap Y and Z
coreMeta.sizeY = len;
coreMeta.sizeZ = 1;
store.setDimensionsPhysicalSizeY(physicalSize, numDatasets, 0);
}
else {
coreMeta.sizeZ = len;
store.setDimensionsPhysicalSizeZ(physicalSize, numDatasets, 0);
}
bytesPerAxis.put(new Integer(nBytes), "Z");
break;
case 4: // T axis
if (coreMeta.sizeY == 0) {
// XT scan - swap Y and T
coreMeta.sizeY = len;
coreMeta.sizeT = 1;
store.setDimensionsPhysicalSizeY(physicalSize, numDatasets, 0);
}
else {
coreMeta.sizeT = len;
store.setDimensionsTimeIncrement(physicalSize, numDatasets, 0);
}
bytesPerAxis.put(new Integer(nBytes), "T");
break;
default:
extras *= len;
}
count++;
}
else if (qName.equals("ScannerSettingRecord")) {
String id = attributes.getValue("Identifier");
String value = attributes.getValue("Variant");
if (id.equals("SystemType")) {
store.setMicroscopeModel(value, numDatasets);
store.setMicroscopeType("Unknown", numDatasets);
}
else if (id.equals("dblPinhole")) {
pinhole = new Float(Float.parseFloat(value) * 1000000);
}
else if (id.equals("dblZoom")) {
zoom = new Float(value);
}
else if (id.equals("CameraName")) {
store.setDetectorModel(value, numDatasets, 0);
}
else if (id.indexOf("WFC") == 1) {
int c = 0;
try {
c = Integer.parseInt(id.replaceAll("\\D", ""));
}
catch (NumberFormatException e) { }
if (id.endsWith("ExposureTime")) {
store.setPlaneTimingExposureTime(new Float(value), numDatasets, 0, c);
}
else if (id.endsWith("Gain")) {
store.setDetectorSettingsGain(new Float(value), numDatasets, c);
String detectorID =
MetadataTools.createLSID("Detector", numDatasets, 0);
store.setDetectorSettingsDetector(detectorID, numDatasets, c);
store.setDetectorID(detectorID, numDatasets, 0);
}
else if (id.endsWith("WaveLength")) {
store.setLogicalChannelExWave(new Integer(value), numDatasets, c);
}
else if (id.endsWith("UesrDefName")) {
store.setLogicalChannelName(value, numDatasets, c);
}
}
}
else if (qName.equals("FilterSettingRecord")) {
String object = attributes.getValue("ObjectName");
String attribute = attributes.getValue("Attribute");
String objectClass = attributes.getValue("ClassName");
String variant = attributes.getValue("Variant");
CoreMetadata coreMeta = core.get(numDatasets);
if (attribute.equals("NumericalAperture")) {
store.setObjectiveLensNA(new Float(variant), numDatasets, 0);
}
else if (attribute.equals("OrderNumber")) {
store.setObjectiveSerialNumber(variant, numDatasets, 0);
}
else if (objectClass.equals("CLaser")) {
if (attribute.equals("Wavelength")) {
String id =
MetadataTools.createLSID("LightSource", numDatasets, nextLaser);
store.setLightSourceID(id, numDatasets, nextLaser);
store.setLaserWavelength(
new Integer(variant), numDatasets, nextLaser);
String model =
object.substring(object.indexOf("(") + 1, object.indexOf(")"));
store.setLightSourceModel(model, numDatasets, nextLaser);
nextLaser++;
}
else if (attribute.equals("Output Power")) {
store.setLightSourcePower(
new Float(variant), numDatasets, nextLaser - 1);
}
}
else if (objectClass.equals("CDetectionUnit")) {
if (attribute.equals("State")) {
nextDetector++;
String id =
MetadataTools.createLSID("Detector", numDatasets, nextDetector);
store.setDetectorID(id, numDatasets, nextDetector);
store.setDetectorModel(object, numDatasets, nextDetector);
store.setDetectorType("Unknown", numDatasets, nextDetector);
store.setDetectorZoom(zoom, numDatasets, nextDetector);
}
else if (attribute.equals("HighVoltage")) {
store.setDetectorVoltage(
new Float(variant), numDatasets, nextDetector);
}
else if (attribute.equals("VideoOffset")) {
store.setDetectorOffset(
new Float(variant), numDatasets, nextDetector);
}
}
else if (attribute.equals("Objective")) {
StringTokenizer tokens = new StringTokenizer(variant, " ");
boolean foundMag = false;
StringBuffer model = new StringBuffer();
while (!foundMag) {
String token = tokens.nextToken();
int x = token.indexOf("x");
if (x != -1) {
foundMag = true;
int mag = (int) Float.parseFloat(token.substring(0, x));
String na = token.substring(x + 1);
store.setObjectiveNominalMagnification(
new Integer(mag), numDatasets, 0);
store.setObjectiveLensNA(new Float(na), numDatasets, 0);
}
else {
model.append(token);
model.append(" ");
}
}
if (tokens.hasMoreTokens()) {
String immersion = tokens.nextToken();
if (immersion == null || immersion.trim().equals("")) {
immersion = "Unknown";
}
store.setObjectiveImmersion(immersion, numDatasets, 0);
}
if (tokens.hasMoreTokens()) {
String correction = tokens.nextToken();
if (correction == null || correction.trim().equals("")) {
correction = "Unknown";
}
store.setObjectiveCorrection(correction, numDatasets, 0);
}
store.setObjectiveModel(model.toString().trim(), numDatasets, 0);
}
else if (attribute.equals("RefractionIndex")) {
String id = MetadataTools.createLSID("Objective", numDatasets, 0);
store.setObjectiveID(id, numDatasets, 0);
store.setObjectiveSettingsObjective(id, numDatasets);
store.setObjectiveSettingsRefractiveIndex(new Float(variant),
numDatasets);
}
else if (attribute.equals("XPos")) {
int c = coreMeta.rgb || coreMeta.sizeC == 0 ? 1 : coreMeta.sizeC;
int nPlanes = coreMeta.imageCount / c;
Float posX = new Float(variant);
for (int image=0; image<nPlanes; image++) {
int index = image * (coreMeta.rgb ? 1 : coreMeta.sizeC) + channel;
if (index >= nPlanes) continue;
store.setStagePositionPositionX(posX, numDatasets, 0, index);
}
if (numChannels == 0) xPos.add(posX);
}
else if (attribute.equals("YPos")) {
int c = coreMeta.rgb || coreMeta.sizeC == 0 ? 1 : coreMeta.sizeC;
int nPlanes = coreMeta.imageCount / c;
Float posY = new Float(variant);
for (int image=0; image<nPlanes; image++) {
int index = image * (coreMeta.rgb ? 1 : coreMeta.sizeC) + channel;
if (index >= nPlanes) continue;
store.setStagePositionPositionY(posY, numDatasets, 0, index);
}
if (numChannels == 0) yPos.add(posY);
}
else if (attribute.equals("ZPos")) {
int c = coreMeta.rgb || coreMeta.sizeC == 0 ? 1 : coreMeta.sizeC;
int nPlanes = coreMeta.imageCount / c;
Float posZ = new Float(variant);
for (int image=0; image<nPlanes; image++) {
int index = image * (coreMeta.rgb ? 1 : coreMeta.sizeC) + channel;
if (index >= nPlanes) continue;
store.setStagePositionPositionZ(posZ, numDatasets, 0, index);
}
if (numChannels == 0) zPos.add(posZ);
}
else if (attribute.equals("Speed")) {
readOutRate = new Float(Float.parseFloat(variant) / 1000000);
}
}
else if (qName.equals("MultiBand")) {
if (channel >= core.get(numDatasets).sizeC) return;
String em = attributes.getValue("LeftWorld");
String ex = attributes.getValue("RightWorld");
Integer emWave = new Integer((int) Float.parseFloat(em));
Integer exWave = new Integer((int) Float.parseFloat(ex));
String name = attributes.getValue("DyeName");
store.setLogicalChannelEmWave(emWave, numDatasets, channel);
store.setLogicalChannelExWave(exWave, numDatasets, channel);
store.setLogicalChannelName(name, numDatasets, channel);
channel++;
}
else if (qName.equals("Detector")) {
Float gain = new Float(attributes.getValue("Gain"));
Float offset = new Float(attributes.getValue("Offset"));
int index = Integer.parseInt(attributes.getValue("Channel")) - 1;
int c = channel - 1;
if (c >= 0) {
store.setDetectorSettingsGain(gain, numDatasets, c);
store.setDetectorSettingsOffset(offset, numDatasets, c);
store.setDetectorSettingsReadOutRate(readOutRate, numDatasets, c);
int detectorIndex = nextDetector < 0 ? 0 : nextDetector;
String detectorID =
MetadataTools.createLSID("Detector", numDatasets, detectorIndex);
store.setDetectorSettingsDetector(detectorID, numDatasets, c);
detectorIndices.add(new Integer(index));
}
}
else if (qName.equals("LaserLineSetting")) {
String wavelength = attributes.getValue("LaserLine");
int index = Integer.parseInt(attributes.getValue("LineIndex"));
String id = MetadataTools.createLSID("LightSource", numDatasets, index);
store.setLightSourceID(id, numDatasets, index);
store.setLaserWavelength(new Integer(wavelength), numDatasets, index);
store.setLaserType("Unknown", numDatasets, index);
store.setLaserLaserMedium("Unknown", numDatasets, index);
float intensity = Float.parseFloat(attributes.getValue("IntensityDev"));
if (intensity > 0f && channel > 0) {
store.setLightSourceSettingsLightSource(id, numDatasets, channel - 1);
store.setLightSourceSettingsAttenuation(
new Float(intensity / 100f), numDatasets, channel - 1);
}
}
else if (qName.equals("TimeStamp")) {
long high = Long.parseLong(attributes.getValue("HighInteger"));
long low = Long.parseLong(attributes.getValue("LowInteger"));
high <<= 32;
if ((int) low < 0) {
low &= 0xffffffffL;
}
long ms = (high + low) / 10000;
if (count == 0) {
String date = DateTools.convertDate(ms, DateTools.COBOL);
if (DateTools.getTime(date, DateTools.ISO8601_FORMAT) <
System.currentTimeMillis())
{
store.setImageCreationDate(date, numDatasets);
}
firstStamp = ms;
store.setPlaneTimingDeltaT(new Float(0), numDatasets, 0, count);
}
else {
CoreMetadata coreMeta = core.get(numDatasets);
int nImages = coreMeta.sizeZ * coreMeta.sizeT * coreMeta.sizeC;
if (count < nImages) {
ms -= firstStamp;
store.setPlaneTimingDeltaT(
new Float(ms / 1000), numDatasets, 0, count);
}
}
count++;
}
else if (qName.equals("RelTimeStamp")) {
CoreMetadata coreMeta = core.get(numDatasets);
int nImages = coreMeta.sizeZ * coreMeta.sizeT * coreMeta.sizeC;
if (count < nImages) {
Float time = new Float(attributes.getValue("Time"));
store.setPlaneTimingDeltaT(time, numDatasets, 0, count++);
}
}
else if (qName.equals("Wheel")) {
filterIndex = Integer.parseInt(attributes.getValue("FilterIndex"));
}
else if (qName.equals("WheelName")) {
String id = MetadataTools.createLSID("Dichroic", numDatasets, nextFilter);
store.setDichroicID(id, numDatasets, nextFilter);
store.setDichroicModel(
attributes.getValue("FilterName").trim(), numDatasets, nextFilter);
if (nextFilter == filterIndex) {
String filterSet =
MetadataTools.createLSID("FilterSet", numDatasets, channel);
store.setFilterSetID(filterSet, numDatasets, channel);
store.setFilterSetDichroic(id, numDatasets, channel);
if (channel < core.get(numDatasets).sizeC) {
store.setLogicalChannelFilterSet(filterSet, numDatasets, channel);
}
}
nextFilter++;
}
else count = 0;
storeSeriesHashtable(numDatasets, h);
}
// -- Helper methods --
/**
 * Returns the per-series metadata table, or a fresh empty table when the
 * series index is outside the range of {@code core}.
 */
private Hashtable getSeriesHashtable(int series) {
  boolean inRange = series >= 0 && series < core.size();
  return inRange ? core.get(series).seriesMetadata : new Hashtable();
}
/**
 * Stores {@code h} as the series metadata table for the given series.
 * Invalid series indices are ignored, mirroring the bounds behavior of
 * getSeriesHashtable(int).
 *
 * @param series index into {@code core}; out-of-range values are a no-op
 * @param h the metadata table to store
 */
private void storeSeriesHashtable(int series, Hashtable h) {
  // Guard both bounds: the paired getter tolerates series >= core.size(),
  // so the setter should not throw ArrayIndexOutOfBoundsException for it.
  if (series < 0 || series >= core.size()) return;
  CoreMetadata coreMeta = core.get(series);
  coreMeta.seriesMetadata = h;
  core.setElementAt(coreMeta, series);
}
}
| Use stored Z step and time increment instead of calculating these values.
| components/bio-formats/src/loci/formats/in/LeicaHandler.java | Use stored Z step and time increment instead of calculating these values. |
|
Java | bsd-3-clause | 95d316d28e4c3e7d2fa357954b34fa296dc1a0c6 | 0 | threerings/depot | //
// $Id$
//
// samskivert library - useful routines for java programs
// Copyright (C) 2006 Michael Bayne, Pär Winzell
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.jdbc.depot.expression;
import com.samskivert.jdbc.depot.Key;
import com.samskivert.jdbc.depot.MultiKey;
import com.samskivert.jdbc.depot.PersistentRecord;
import com.samskivert.jdbc.depot.WhereClause;
import com.samskivert.jdbc.depot.clause.DeleteClause;
import com.samskivert.jdbc.depot.clause.FieldDefinition;
import com.samskivert.jdbc.depot.clause.ForUpdate;
import com.samskivert.jdbc.depot.clause.FromOverride;
import com.samskivert.jdbc.depot.clause.GroupBy;
import com.samskivert.jdbc.depot.clause.InsertClause;
import com.samskivert.jdbc.depot.clause.Join;
import com.samskivert.jdbc.depot.clause.Limit;
import com.samskivert.jdbc.depot.clause.OrderBy;
import com.samskivert.jdbc.depot.clause.SelectClause;
import com.samskivert.jdbc.depot.clause.UpdateClause;
import com.samskivert.jdbc.depot.operator.Conditionals.Exists;
import com.samskivert.jdbc.depot.operator.Conditionals.In;
import com.samskivert.jdbc.depot.operator.Conditionals.IsNull;
import com.samskivert.jdbc.depot.operator.Conditionals.FullTextMatch;
import com.samskivert.jdbc.depot.operator.Logic.Not;
import com.samskivert.jdbc.depot.operator.SQLOperator.BinaryOperator;
import com.samskivert.jdbc.depot.operator.SQLOperator.MultiOperator;
/**
 * Enumerates visitation methods for every possible SQL expression type.
 * An expression tree calls back into the overload matching each node's
 * concrete type, letting implementations handle every construct explicitly.
 */
public interface ExpressionVisitor
{
    // Field and function expressions.
    public void visit (FieldDefinition fieldOverride);
    public void visit (FunctionExp functionExp);
    public void visit (EpochSeconds epochSeconds);
    // FROM-clause override.
    public void visit (FromOverride fromOverride);
    // Conditional operators (n-ary and binary).
    public void visit (MultiOperator multiOperator);
    public void visit (BinaryOperator binaryOperator);
    public void visit (IsNull isNull);
    public void visit (In in);
    public void visit (FullTextMatch match);
    // Column reference expression.
    public void visit (ColumnExp columnExp);
    // Logical negation.
    public void visit (Not not);
    // Query modifier clauses.
    public void visit (GroupBy groupBy);
    public void visit (ForUpdate forUpdate);
    public void visit (OrderBy orderBy);
    public void visit (Join join);
    public void visit (Limit limit);
    // Literal and bound-value expressions.
    public void visit (LiteralExp literalExp);
    public void visit (ValueExp valueExp);
    // Where clauses and record keys (single- and multi-column).
    public void visit (WhereClause where);
    public void visit (Key<? extends PersistentRecord> key);
    public void visit (MultiKey<? extends PersistentRecord> key);
    public void visit (Exists<? extends PersistentRecord> exists);
    // Top-level statement clauses.
    public void visit (SelectClause<? extends PersistentRecord> selectClause);
    public void visit (UpdateClause<? extends PersistentRecord> updateClause);
    public void visit (DeleteClause<? extends PersistentRecord> deleteClause);
    public void visit (InsertClause<? extends PersistentRecord> insertClause);
}
| src/java/com/samskivert/jdbc/depot/expression/ExpressionVisitor.java | //
// $Id$
//
// samskivert library - useful routines for java programs
// Copyright (C) 2006 Michael Bayne, Pär Winzell
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.jdbc.depot.expression;
import com.samskivert.jdbc.depot.Key;
import com.samskivert.jdbc.depot.MultiKey;
import com.samskivert.jdbc.depot.PersistentRecord;
import com.samskivert.jdbc.depot.WhereClause;
import com.samskivert.jdbc.depot.clause.DeleteClause;
import com.samskivert.jdbc.depot.clause.FieldDefinition;
import com.samskivert.jdbc.depot.clause.ForUpdate;
import com.samskivert.jdbc.depot.clause.FromOverride;
import com.samskivert.jdbc.depot.clause.GroupBy;
import com.samskivert.jdbc.depot.clause.InsertClause;
import com.samskivert.jdbc.depot.clause.Join;
import com.samskivert.jdbc.depot.clause.Limit;
import com.samskivert.jdbc.depot.clause.OrderBy;
import com.samskivert.jdbc.depot.clause.SelectClause;
import com.samskivert.jdbc.depot.clause.UpdateClause;
import com.samskivert.jdbc.depot.clause.Where;
import com.samskivert.jdbc.depot.operator.Conditionals.Exists;
import com.samskivert.jdbc.depot.operator.Conditionals.In;
import com.samskivert.jdbc.depot.operator.Conditionals.IsNull;
import com.samskivert.jdbc.depot.operator.Conditionals.FullTextMatch;
import com.samskivert.jdbc.depot.operator.Logic.Not;
import com.samskivert.jdbc.depot.operator.SQLOperator.BinaryOperator;
import com.samskivert.jdbc.depot.operator.SQLOperator.MultiOperator;
/**
 * Enumerates visitation methods for every possible SQL expression type.
 * Callers dispatch each node of an expression tree to the overload for
 * its concrete type.
 */
public interface ExpressionVisitor
{
    // Field/function expressions and FROM override.
    public void visit (FieldDefinition fieldOverride);
    public void visit (FunctionExp functionExp);
    public void visit (EpochSeconds epochSeconds);
    public void visit (FromOverride fromOverride);
    // Conditional operators.
    public void visit (MultiOperator multiOperator);
    public void visit (BinaryOperator binaryOperator);
    public void visit (IsNull isNull);
    public void visit (In in);
    public void visit (FullTextMatch match);
    // Column reference and logical negation.
    public void visit (ColumnExp columnExp);
    public void visit (Not not);
    // Query modifier clauses.
    public void visit (GroupBy groupBy);
    public void visit (ForUpdate forUpdate);
    public void visit (OrderBy orderBy);
    public void visit (Join join);
    public void visit (Limit limit);
    // Literal and bound-value expressions.
    public void visit (LiteralExp literalExp);
    public void visit (ValueExp valueExp);
    // Where clauses and record keys.
    public void visit (WhereClause where);
    public void visit (Key<? extends PersistentRecord> key);
    public void visit (MultiKey<? extends PersistentRecord> key);
    public void visit (Exists<? extends PersistentRecord> exists);
    // Top-level statement clauses.
    public void visit (SelectClause<? extends PersistentRecord> selectClause);
    public void visit (UpdateClause<? extends PersistentRecord> updateClause);
    public void visit (DeleteClause<? extends PersistentRecord> deleteClause);
    public void visit (InsertClause<? extends PersistentRecord> insertClause);
}
| One more unused import.
| src/java/com/samskivert/jdbc/depot/expression/ExpressionVisitor.java | One more unused import. |
|
Java | bsd-3-clause | 388743fe1d742ef14ade8ca089cfa60a088260d8 | 0 | NCIP/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers | /*******************************************************************************
* Copyright SemanticBits, Northwestern University and Akaza Research
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caaers/LICENSE.txt for details.
******************************************************************************/
package gov.nih.nci.cabig.caaers.api.impl;
import gov.nih.nci.cabig.caaers.CaaersSystemException;
import gov.nih.nci.cabig.caaers.api.AbstractImportService;
import gov.nih.nci.cabig.caaers.dao.ParticipantDao;
import gov.nih.nci.cabig.caaers.dao.StudyDao;
import gov.nih.nci.cabig.caaers.dao.query.ParticipantQuery;
import gov.nih.nci.cabig.caaers.domain.Identifier;
import gov.nih.nci.cabig.caaers.domain.Organization;
import gov.nih.nci.cabig.caaers.domain.Participant;
import gov.nih.nci.cabig.caaers.domain.Study;
import gov.nih.nci.cabig.caaers.domain.StudyParticipantAssignment;
import gov.nih.nci.cabig.caaers.domain.ajax.StudySearchableAjaxableDomainObject;
import gov.nih.nci.cabig.caaers.event.EventFactory;
import gov.nih.nci.cabig.caaers.integration.schema.common.CaaersServiceResponse;
import gov.nih.nci.cabig.caaers.integration.schema.common.OrganizationType;
import gov.nih.nci.cabig.caaers.integration.schema.common.ResponseDataType;
import gov.nih.nci.cabig.caaers.integration.schema.common.Status;
import gov.nih.nci.cabig.caaers.integration.schema.common.WsError;
import gov.nih.nci.cabig.caaers.integration.schema.participant.AssignmentType;
import gov.nih.nci.cabig.caaers.integration.schema.participant.ParticipantRef;
import gov.nih.nci.cabig.caaers.integration.schema.participant.ParticipantType;
import gov.nih.nci.cabig.caaers.integration.schema.participant.ParticipantType.Assignments;
import gov.nih.nci.cabig.caaers.integration.schema.participant.Participants;
import gov.nih.nci.cabig.caaers.service.DomainObjectImportOutcome;
import gov.nih.nci.cabig.caaers.service.DomainObjectImportOutcome.Message;
import gov.nih.nci.cabig.caaers.service.ParticipantImportServiceImpl;
import gov.nih.nci.cabig.caaers.service.migrator.ParticipantConverter;
import gov.nih.nci.cabig.caaers.service.synchronizer.ParticipantSynchronizer;
import gov.nih.nci.security.util.StringUtilities;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import javax.validation.groups.Default;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.MessageSource;
import org.springframework.transaction.annotation.Transactional;
/**
 * Web-service implementation for creating, updating, deleting and fetching
 * participants (study subjects) in caAERS.
 */
public class ParticipantServiceImpl extends AbstractImportService implements ApplicationContextAware {
    private static Log logger = LogFactory.getLog(ParticipantServiceImpl.class);
    private ApplicationContext applicationContext;   // injected Spring application context
    private MessageSource messageSource;             // resolves WS_* status/error message codes
    private ParticipantDao participantDao;           // participant persistence (save/delete/search/evict)
    private StudyDao studyDao;                       // study persistence; usage not shown in this chunk — presumably used by fetchStudy, confirm
    private ParticipantImportServiceImpl participantImportServiceImpl; // import pipeline producing DomainObjectImportOutcome
    private ParticipantConverter participantConverter;     // JAXB DTO <-> domain conversion
    private ParticipantSynchronizer participantSynchronizer; // migrates imported state onto persistent records
    //private DomainObjectValidator domainObjectValidator;
    private Validator validator;                     // JSR-303 bean validation (Default group)
    private EventFactory eventFactory;               // publishes entity-modified events; null-checked before use
/**
 * Converts the given JAXB participant to a domain {@link Participant}, imports it,
 * and — when a matching participant already exists — migrates the imported state
 * onto the persistent instance. Exists only to be called from ImportController.
 *
 * @param xmlParticipant the incoming participant payload
 * @return the import outcome, carrying either the (possibly merged) domain object
 *         or error messages describing why conversion/import failed
 */
public DomainObjectImportOutcome<Participant> processParticipant(ParticipantType xmlParticipant){
    logger.info("Entering processParticipant() in ParticipantServiceImpl");
    CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
    DomainObjectImportOutcome<Participant> participantImportOutcome = null;
    Participant participant = new Participant();
    try{
        // DTO -> domain conversion; failure is recorded on the outcome rather than thrown.
        participantConverter.convertParticipantDtoToParticipantDomain(xmlParticipant, participant);
    }catch(CaaersSystemException caEX){
        participantImportOutcome = new DomainObjectImportOutcome<Participant>();
        logger.error("ParticipantDto to ParticipantDomain Conversion Failed " , caEX);
        participantImportOutcome.addErrorMessage("ParticipantDto to ParticipantDomain Conversion Failed " , DomainObjectImportOutcome.Severity.ERROR);
    }
    if(participantImportOutcome == null){
        participantImportOutcome = participantImportServiceImpl.importParticipant(participant);
        if(participantImportOutcome.isSavable()){
            // Look for an existing registration matching one of the participant's assignments.
            Participant dbParticipant = fetchParticipantByAssignment(participantImportOutcome.getImportedDomainObject(), caaersServiceResponse);
            if(dbParticipant != null){
                logger.info("Participant Exists in caAERS trying to Update");
                // Merge imported values onto the persistent participant and hand that back.
                participantSynchronizer.migrate(dbParticipant, participantImportOutcome.getImportedDomainObject(), participantImportOutcome);
                participantImportOutcome.setImportedDomainObject(dbParticipant);
                logger.info("Participant in caAERS Updated");
            }else if (caaersServiceResponse.getServiceResponse().getStatus() == Status.FAILED_TO_PROCESS) {
                // Lookup itself failed (bad identifiers etc.); surface its message on the outcome.
                participantImportOutcome.addErrorMessage(caaersServiceResponse.getServiceResponse().getMessage(), DomainObjectImportOutcome.Severity.ERROR);
            }else{
                logger.info("New Participant to be Created");
            }
        }
    }
    logger.info("Leaving processParticipant() in ParticipantServiceImpl");
    return participantImportOutcome;
}
/**
 * Returns the first non-empty study subject identifier found among the
 * participant's assignments, or null when none is present.
 */
public String getStudySubjectIdentifierFromInXML(ParticipantType xmlParticipant) {
    for (AssignmentType assignment : xmlParticipant.getAssignments().getAssignment()) {
        String subjectId = assignment.getStudySubjectIdentifier();
        if (StringUtils.isNotEmpty(subjectId)) {
            return subjectId;
        }
    }
    return null;
}
/**
 * Builds an {@link Identifier} from the study identifier carried on the
 * participant's first assignment; returns null when there are no assignments.
 * (The original loop returned on its first iteration, so only the first
 * assignment was ever consulted.)
 */
public Identifier getStudyIdentifierFromInXML(ParticipantType xmlParticipant) {
    List<AssignmentType> assignmentList = xmlParticipant.getAssignments().getAssignment();
    if (assignmentList.isEmpty()) {
        return null;
    }
    AssignmentType first = assignmentList.get(0);
    Identifier studyId = new Identifier();
    studyId.setType(first.getStudySite().getStudy().getIdentifiers().getIdentifier().getType().value());
    studyId.setValue(first.getStudySite().getStudy().getIdentifiers().getIdentifier().getValue());
    return studyId;
}
/** Delegates to the string-keyed overload using the identifier's value. */
public List<StudySearchableAjaxableDomainObject> getAuthorizedStudies(Identifier identifier) {
    return getAuthorizedStudies(identifier.getValue());
}
/**
 * Verifies that every organization referenced by the participant's assignments
 * is visible to the caller. Returns the sentinel "ALL_ORGS_AUTH" when all pass,
 * otherwise "nciCode : name" describing the first unauthorized organization.
 */
private String checkAuthorizedOrganizations (ParticipantType xmlParticipant) {
    for (AssignmentType assignment : xmlParticipant.getAssignments().getAssignment()) {
        String orgName = assignment.getStudySite().getOrganization().getName();
        String nciCode = assignment.getStudySite().getOrganization().getNciInstituteCode();
        // Prefer a lookup by NCI code when present; fall back to a name-based lookup.
        List<Organization> matches = StringUtilities.isBlank(nciCode)
            ? getAuthorizedOrganizationsByNameOrNciId(orgName, null)
            : getAuthorizedOrganizationsByNameOrNciId(null, nciCode);
        if (matches.isEmpty()) {
            return nciCode + " : " + orgName;
        }
    }
    return "ALL_ORGS_AUTH";
}
/**
 * Validates the study/organization references in the input and, when valid,
 * fetches the participant registered under the study subject identifier.
 * Each failed check writes its error onto the response and aborts with null.
 *
 * @param studySubjectIdentifier subject identifier within the study
 * @param studyIdentifier study identifier; study checks are skipped when null
 * @param xmlParticipant incoming participant payload (for organization checks)
 * @param caaersServiceResponse response that collects validation errors
 * @return the persistent participant, or null (response then carries the reason)
 */
private Participant validateInputsAndFetchParticipant(String studySubjectIdentifier, Identifier studyIdentifier, ParticipantType xmlParticipant,
        CaaersServiceResponse caaersServiceResponse) {
    if (studyIdentifier != null ) {
        // Study must exist...
        Study study = fetchStudy(studyIdentifier);
        if(study == null){
            createNoStudyFoundResponse(caaersServiceResponse,studyIdentifier);
            return null;
        }
        // ...and the caller must be authorized for it.
        List<StudySearchableAjaxableDomainObject> authorizedStudies = getAuthorizedStudies(studyIdentifier);
        if(authorizedStudies.size() == 0) {
            createNoStudyAuthorizationResponse(caaersServiceResponse, studyIdentifier);
            return null;
        }
    }
    // "ALL_ORGS_AUTH" is the sentinel for "every referenced organization is authorized".
    String errorMsg = checkAuthorizedOrganizations(xmlParticipant);
    if(!errorMsg.equals("ALL_ORGS_AUTH")) {
        createNoOrganizationAuthorizationResponse(caaersServiceResponse, errorMsg);
        return null;
    }
    // All checks passed; look up the registration (may still populate WS_PMS_003 if absent).
    return fetchParticipantByAssignment(studySubjectIdentifier, studyIdentifier, caaersServiceResponse);
}
/**
 * Converts a JAXB participant into a validated {@link DomainObjectImportOutcome}.
 * Conversion failure, import failure or bean-validation violations populate the
 * response with WS_PMS_* errors plus the individual messages, and null is returned.
 *
 * @param xmlParticipant incoming participant payload
 * @param studySubjectIdentifier used only in error messages
 * @param caaersServiceResponse response populated with errors on failure
 * @param processStr verb for error messages ("created"/"updated"/"deleted")
 * @return the successful, savable import outcome, or null when any step failed
 */
private DomainObjectImportOutcome<Participant> convertToImportedDomainObject(ParticipantType xmlParticipant, String studySubjectIdentifier,
        CaaersServiceResponse caaersServiceResponse, String processStr) {
    Participant participant = new Participant();
    try{
        participantConverter.convertParticipantDtoToParticipantDomain(xmlParticipant, participant);
    }catch(CaaersSystemException caEX){
        String message = messageSource.getMessage("WS_PMS_005", new String[] { caEX.getMessage() }, "", Locale
                .getDefault());
        logger.error(message, caEX);
        populateError(caaersServiceResponse, "WS_PMS_005", message);
        return null;
    }
    DomainObjectImportOutcome<Participant> participantImportOutcome =
            participantImportServiceImpl.importParticipant(participant);
    Participant importedDomainObject = participantImportOutcome.getImportedDomainObject();
    //List<String> errors = domainObjectValidator.validate(importedDomainObject);
    // JSR-303 validation of the imported domain object (Default group only).
    Set<ConstraintViolation<Participant>> constraintViolations = validator.validate(importedDomainObject, Default.class);
    if( !participantImportOutcome.isSavable() || constraintViolations.size() > 0) {
        String errMessage = messageSource.getMessage("WS_PMS_007",
                new String[] { importedDomainObject.getFirstName(), importedDomainObject.getLastName(),
                        studySubjectIdentifier, processStr },
                "", Locale.getDefault());
        populateError(caaersServiceResponse, "WS_PMS_007", errMessage);
        logger.info(errMessage);
        // Aggregate both import messages and constraint violations for the caller.
        List<String> messages = new ArrayList<String>();
        for(Message message : participantImportOutcome.getMessages()){
            messages.add(message.getMessage());
        }
        String valErrmsg = null;
        for (ConstraintViolation<Participant> violation : constraintViolations) {
            // e.g. "may not be null (firstName) in Participant(John Doe)"
            valErrmsg = violation.getMessage()
                    + " (" + violation.getPropertyPath()
                    + ") in " + participant.getClass().getSimpleName()
                    + "(" + participant.getFullName() + ")";
            messages.add(valErrmsg);
        }
        Helper.populateErrorOutcome(caaersServiceResponse, null, null, null, messages);
        return null;
    }
    return participantImportOutcome;
}
/**
 * Creates a new participant registration from the first participant in the payload.
 * Fails when the study cannot be found/authorized, when the assignment site is
 * invalid, or when a participant with the same study subject identifier already
 * exists on the study (WS_PMS_004).
 *
 * @param xmlParticipants payload; only the first participant entry is processed
 * @return response carrying either a success message (WS_PMS_006) or errors
 */
@Transactional(readOnly=false)
public CaaersServiceResponse createParticipant(
        Participants xmlParticipants) {
    CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
    ParticipantType xmlParticipant = xmlParticipants.getParticipant().get(0);
    Identifier studyIdentifier = getStudyIdentifierFromInXML(xmlParticipant);
    String studySubjectIdentifier = getStudySubjectIdentifierFromInXML(xmlParticipant);
    Participant dbParticipant = validateInputsAndFetchParticipant(studySubjectIdentifier, studyIdentifier, xmlParticipant, caaersServiceResponse);
    if( dbParticipant != null) {
        // Duplicate registration: this study subject identifier already exists on the study.
        String message = messageSource.getMessage("WS_PMS_004", new String[] { studySubjectIdentifier, studyIdentifier.getValue() }, "", Locale
                .getDefault());
        logger.error(message);
        populateError(caaersServiceResponse, "WS_PMS_004", message);
        return caaersServiceResponse;
    } else {
        //remove the error message for participant not found, as this is create flow
        List<WsError> wsErrors = caaersServiceResponse.getServiceResponse().getWsError();
        if(wsErrors != null && wsErrors.size() == 1 && "WS_PMS_003".equals(wsErrors.get(0).getErrorCode()) ) {
            wsErrors.remove(0);
            Helper.populateMessage(caaersServiceResponse, "");
        }
    }
    // null dbParticipant => create flow; site must still be well-formed.
    validateAssignmentSite(caaersServiceResponse, xmlParticipant, null);
    if(caaersServiceResponse.getServiceResponse().getStatus() == Status.FAILED_TO_PROCESS ) {
        return caaersServiceResponse;
    }
    //resetting the response object
    caaersServiceResponse = Helper.createResponse();
    DomainObjectImportOutcome<Participant> participantImportOutcome =
            convertToImportedDomainObject(xmlParticipant, studySubjectIdentifier, caaersServiceResponse, "created");
    if(participantImportOutcome != null) {
        Participant importedDomainObject = participantImportOutcome.getImportedDomainObject();
        participantDao.save(importedDomainObject);
        String message = messageSource.getMessage("WS_PMS_006",
                new String[] { importedDomainObject.getFirstName(), importedDomainObject.getLastName(), studySubjectIdentifier },
                "", Locale.getDefault());
        Helper.populateMessage(caaersServiceResponse, message);
        logger.info(message);
        if(eventFactory != null) {
            // Notify listeners that a new participant entity was persisted.
            eventFactory.publishEntityModifiedEvent(importedDomainObject, false);
        }
    }
    return caaersServiceResponse;
}
/**
 * Updates an existing participant registration: validates inputs, locates the
 * persistent participant, converts/validates the payload, then migrates the
 * imported state onto the persistent instance and saves it (WS_PMS_008 on success).
 *
 * @param xmlParticipants payload; only the first participant entry is processed
 * @return response carrying either a success message or errors
 */
@Transactional(readOnly=false)
public CaaersServiceResponse updateParticipant(
        Participants xmlParticipants) {
    CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
    ParticipantType xmlParticipant = xmlParticipants.getParticipant().get(0);
    Identifier studyIdentifier = getStudyIdentifierFromInXML(xmlParticipant);
    String studySubjectIdentifier = getStudySubjectIdentifierFromInXML(xmlParticipant);
    Participant dbParticipant = validateInputsAndFetchParticipant(studySubjectIdentifier, studyIdentifier, xmlParticipant, caaersServiceResponse);
    if( dbParticipant == null) {
        // Not found or validation failed; errors already populated on the response.
        return caaersServiceResponse;
    }
    // Update flow: the assignment site must also match the persisted one.
    validateAssignmentSite(caaersServiceResponse, xmlParticipant, dbParticipant);
    if(caaersServiceResponse.getServiceResponse().getStatus() == Status.FAILED_TO_PROCESS) {
        return caaersServiceResponse;
    }
    //resetting the response object
    caaersServiceResponse = Helper.createResponse();
    DomainObjectImportOutcome<Participant> participantImportOutcome =
            convertToImportedDomainObject(xmlParticipant, studySubjectIdentifier, caaersServiceResponse, "updated");
    if(participantImportOutcome != null){
        Participant importedDomainObject = participantImportOutcome.getImportedDomainObject();
        // Merge imported state onto the persistent participant, then save that instance.
        participantSynchronizer.migrate(dbParticipant, participantImportOutcome.getImportedDomainObject(), participantImportOutcome);
        participantImportOutcome.setImportedDomainObject(dbParticipant);
        participantDao.save(participantImportOutcome.getImportedDomainObject());
        String message = messageSource.getMessage("WS_PMS_008",
                new String[] { importedDomainObject.getFirstName(), importedDomainObject.getLastName(), studySubjectIdentifier },
                "", Locale.getDefault());
        Helper.populateMessage(caaersServiceResponse, message);
        logger.info(message);
        if(eventFactory != null) {
            eventFactory.publishEntityModifiedEvent(importedDomainObject, false);
        }
    }
    return caaersServiceResponse;
}
/**
 * Validates the organization on the payload's first assignment. Adds WS_PMS_017
 * when the name/NCI code is missing or a ":" placeholder, and — in the update
 * flow (dbParticipant non-null) — WS_PMS_018 when the organization differs from
 * the persisted assignment's organization.
 */
private void validateAssignmentSite(CaaersServiceResponse caaersServiceResponse, ParticipantType xmlParticipant,
        Participant dbParticipant) {
    OrganizationType xmlOrg = xmlParticipant.getAssignments().getAssignment().get(0).getStudySite().getOrganization();
    boolean missingName = StringUtils.isEmpty(xmlOrg.getName());
    boolean missingCode = StringUtils.isEmpty(xmlOrg.getNciInstituteCode());
    boolean placeholderCode = !missingCode && ":".equals(xmlOrg.getNciInstituteCode().trim());
    if (missingName || missingCode || placeholderCode) {
        String message = messageSource.getMessage("WS_PMS_017", new String[] {}, "", Locale
                .getDefault());
        logger.error(message);
        populateError(caaersServiceResponse, "WS_PMS_017", message);
    }
    if (dbParticipant == null) {
        // Create flow: nothing persisted yet to compare against.
        return;
    }
    Organization dbOrg = dbParticipant.getAssignments().get(0).getStudySite().getOrganization();
    boolean nameChanged = dbOrg.getName() != null && !dbOrg.getName().equals(xmlOrg.getName());
    boolean codeChanged = dbOrg.getNciInstituteCode() != null
            && !dbOrg.getNciInstituteCode().equals(xmlOrg.getNciInstituteCode());
    if (nameChanged || codeChanged) {
        String message = messageSource.getMessage("WS_PMS_018", new String[] {}, "", Locale
                .getDefault());
        logger.error(message);
        populateError(caaersServiceResponse, "WS_PMS_018", message);
    }
}
/**
 * Deletes an existing participant registration. Deletion is refused with
 * WS_PMS_009 when the participant already has reporting periods.
 *
 * @param xmlParticipants payload; only the first participant entry is processed
 * @return response carrying either a success message (WS_PMS_010) or errors
 */
@Transactional(readOnly=false)
public CaaersServiceResponse deleteParticipant(Participants xmlParticipants) {
    CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
    ParticipantType xmlParticipant = xmlParticipants.getParticipant().get(0);
    Identifier studyIdentifier = getStudyIdentifierFromInXML(xmlParticipant);
    String studySubjectIdentifier = getStudySubjectIdentifierFromInXML(xmlParticipant);
    Participant dbParticipant = validateInputsAndFetchParticipant(studySubjectIdentifier, studyIdentifier, xmlParticipant, caaersServiceResponse);
    if( dbParticipant == null) {
        // Not found or validation failed; errors already populated on the response.
        return caaersServiceResponse;
    } else if(dbParticipant.getHasReportingPeriods()) {
        // Participants with reporting periods cannot be deleted.
        String message = messageSource.getMessage("WS_PMS_009", new String[] { studySubjectIdentifier, studyIdentifier.getValue() }, "", Locale
                .getDefault());
        logger.error(message);
        populateError(caaersServiceResponse, "WS_PMS_009", message);
    }
    if(caaersServiceResponse.getServiceResponse().getStatus() == Status.FAILED_TO_PROCESS) {
        return caaersServiceResponse;
    }
    //resetting the response object
    caaersServiceResponse = Helper.createResponse();
    // The payload is still converted/validated before deletion is carried out.
    DomainObjectImportOutcome<Participant> participantImportOutcome =
            convertToImportedDomainObject(xmlParticipant, studySubjectIdentifier, caaersServiceResponse, "deleted");
    if(participantImportOutcome != null){
        Participant importedDomainObject = participantImportOutcome.getImportedDomainObject();
        participantDao.delete(dbParticipant);
        String message = messageSource.getMessage("WS_PMS_010",
                new String[] { importedDomainObject.getFirstName(), importedDomainObject.getLastName(), studySubjectIdentifier },
                "", Locale.getDefault());
        Helper.populateMessage(caaersServiceResponse, message);
        logger.info(message);
        if(eventFactory != null) {
            eventFactory.publishEntityModifiedEvent(importedDomainObject, false);
        }
    }
    return caaersServiceResponse;
}
/**
 * Fetches a participant by (study identifier, study subject identifier) and
 * returns it converted to the JAXB representation inside the response payload.
 *
 * @param xmlParticipantRefType reference carrying the assignment identifiers
 * @return response with the participant under responseData on success,
 *         WS_PMS_* errors otherwise
 */
public CaaersServiceResponse getParticipant(ParticipantRef xmlParticipantRefType) {
    CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
    Participant dbParticipant = null;
    //TODO : Only fetch By Assignment works..Need to add fetch by Identifier(mostly not required)
    ParticipantRef.ParticipantAssignment assignment = xmlParticipantRefType.getParticipantAssignment();
    if(assignment == null || assignment.getStudyIdentifier() == null || assignment.getStudyIdentifier().getType() == null) {
        // Malformed reference: an assignment block with a typed study identifier is required.
        populateError(caaersServiceResponse, "WS_PMS_013", messageSource.getMessage("WS_PMS_013",
                new String[]{},"",Locale.getDefault()));
        return caaersServiceResponse;
    }
    Identifier studyId = new Identifier();
    studyId.setType(assignment.getStudyIdentifier().getType().value());
    studyId.setValue(assignment.getStudyIdentifier().getValue());
    dbParticipant = fetchParticipantByAssignment(assignment.getStudySubjectIdentifier(), studyId, caaersServiceResponse);
    if(dbParticipant != null ){
        caaersServiceResponse.getServiceResponse().setResponsecode("0");
        ParticipantType dbParticipantType = new ParticipantType();
        // Domain -> DTO conversion for the wire format.
        participantConverter.convertDomainParticipantToParticipantDto(dbParticipant, dbParticipantType);
        caaersServiceResponse.getServiceResponse().setResponseData(new ResponseDataType());
        Participants participants = new Participants();
        participants.getParticipant().add(dbParticipantType);
        caaersServiceResponse.getServiceResponse().getResponseData().setAny(participants);
    }
    return caaersServiceResponse;
}
/** Records a WS_GEN_003 (study not authorized) error on the response and returns it. */
private CaaersServiceResponse createNoStudyAuthorizationResponse(CaaersServiceResponse caaersServiceResponse, Identifier identifier){
    String message = messageSource.getMessage("WS_GEN_003", new String[]{identifier.getValue()}, "", Locale.getDefault());
    populateError(caaersServiceResponse, "WS_GEN_003", message);
    return caaersServiceResponse;
}
/** Records a WS_PMS_002 (study not found) error on the response and returns it. */
private CaaersServiceResponse createNoStudyFoundResponse(CaaersServiceResponse caaersServiceResponse, Identifier identifier){
    String message = messageSource.getMessage("WS_PMS_002", new String[]{identifier.getValue()}, "", Locale.getDefault());
    populateError(caaersServiceResponse, "WS_PMS_002", message);
    return caaersServiceResponse;
}
/** Records a WS_GEN_005 (organization not authorized) error on the response and returns it. */
private CaaersServiceResponse createNoOrganizationAuthorizationResponse(CaaersServiceResponse caaersServiceResponse, String errorMsg){
    String message = messageSource.getMessage("WS_GEN_005", new String[]{errorMsg}, "", Locale.getDefault());
    populateError(caaersServiceResponse, "WS_GEN_005", message);
    return caaersServiceResponse;
}
/**
 * Looks up a persistent participant matching any of the given participant's
 * identifiers, returning the first hit or null when none match. The match is
 * evicted from the session so that later migration does not flush stale state.
 *
 * @param participant candidate whose identifiers are searched one at a time
 * @return the matching persistent participant (detached), or null
 */
private Participant fetchParticipant(Participant participant){
    for (Identifier identifier : participant.getIdentifiers()) {
        Participant dbParticipant = participantDao.getParticipantDesignByIdentifier(identifier);
        if (dbParticipant != null) {
            // Detach the found instance, mirroring fetchParticipantByAssignment(Participant, ...).
            // The original placed evict() after the null-check break, so it only
            // ever ran as evict(null) and never detached the actual match.
            participantDao.evict(dbParticipant);
            return dbParticipant;
        }
    }
    return null;
}
/**
 * Searches every (assignment, study identifier) pair of the given participant
 * for an existing registration. The first persistent match is evicted from the
 * session and returned; null when no pair resolves to a participant.
 */
private Participant fetchParticipantByAssignment(Participant participant,
        CaaersServiceResponse caaersServiceResponse) {
    for (StudyParticipantAssignment assignment : participant.getAssignments()) {
        String subjectId = assignment.getStudySubjectIdentifier();
        for (Identifier studyId : assignment.getStudySite().getStudy().getIdentifiers()) {
            Participant match = fetchParticipantByAssignment(subjectId, studyId, caaersServiceResponse);
            if (match != null) {
                participantDao.evict(match);
                return match;
            }
        }
    }
    return null;
}
private Participant fetchParticipantByAssignment(String studySubjectIdentifier,
Identifier studyIdentifier, CaaersServiceResponse caaersServiceResponse) {
Participant dbParticipant = null;
if (StringUtils.isEmpty(studySubjectIdentifier)) {
String message = messageSource.getMessage("WS_PMS_014", new String[] { studySubjectIdentifier }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_014", message);
return null;
}
if (studyIdentifier == null || StringUtils.isEmpty(studyIdentifier.getValue())) {
String message = messageSource.getMessage("WS_PMS_015", new String[] { studyIdentifier.getValue() }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_015", message);
return null;
}
try {
Study study = fetchStudy(studyIdentifier);
if(study == null) {
String message = messageSource.getMessage("WS_PMS_002", new String[] { studyIdentifier.getValue() }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_002", message);
return dbParticipant;
}
ParticipantQuery pq = new ParticipantQuery();
pq.joinStudy();
pq.filterByStudySubjectIdentifier(studySubjectIdentifier, "=");
pq.filterByStudyId(study.getId(), "=");
List<Participant> dbParticipants = participantDao.searchParticipant(pq);
if (dbParticipants != null && dbParticipants.size() == 1) {
logger.info("Participant registered to this study in caAERS");
dbParticipant = dbParticipants.get(0);
} else {
String message = messageSource.getMessage("WS_PMS_003", new String[] { studySubjectIdentifier, studyIdentifier.getValue() }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_003", message);
}
} catch (Exception e) {
String message = messageSource.getMessage("WS_PMS_016", new String[] { e.getMessage() }, "", Locale
.getDefault());
logger.error("Error retrieving participant", e);
populateError(caaersServiceResponse, "WS_PMS_016", message);
dbParticipant = null;
}
return dbParticipant;
}
    /**
     * Adds an error (code + message) to the response via {@link Helper} and mirrors
     * the message onto the top-level service-response message field.
     */
    private void populateError(CaaersServiceResponse caaersServiceResponse, String errorCode, String message) {
        Helper.populateError(caaersServiceResponse, errorCode, message);
        caaersServiceResponse.getServiceResponse().setMessage(message);
    }
private Study fetchStudy(Identifier identifier) {
Study dbStudy = null;
dbStudy = studyDao.getByIdentifier(identifier);
if(dbStudy != null){
return dbStudy;
}
studyDao.evict(dbStudy);
return dbStudy;
}
    // --- Plain accessors for Spring-injected collaborators --------------------

    public ParticipantDao getParticipantDao() {
        return participantDao;
    }
    public void setParticipantDao(ParticipantDao participantDao) {
        this.participantDao = participantDao;
    }
    public ParticipantImportServiceImpl getParticipantImportServiceImpl() {
        return participantImportServiceImpl;
    }
    public void setParticipantImportServiceImpl(
            ParticipantImportServiceImpl participantImportServiceImpl) {
        this.participantImportServiceImpl = participantImportServiceImpl;
    }
    public ParticipantConverter getParticipantConverter() {
        return participantConverter;
    }
    public void setParticipantConverter(ParticipantConverter participantConverter) {
        this.participantConverter = participantConverter;
    }
    public ParticipantSynchronizer getParticipantSynchronizer() {
        return participantSynchronizer;
    }
    public void setParticipantSynchronizer(
            ParticipantSynchronizer participantSynchronizer) {
        this.participantSynchronizer = participantSynchronizer;
    }
    /*public void setDomainObjectValidator(DomainObjectValidator domainObjectValidator) {
        this.domainObjectValidator = domainObjectValidator;
    }*/
    public void setMessageSource(MessageSource messageSource) {
        this.messageSource = messageSource;
    }
    public Validator getValidator() {
        return validator;
    }
    public void setValidator(Validator validator) {
        this.validator = validator;
    }
    public void setStudyDao(StudyDao studyDao) {
        this.studyDao = studyDao;
    }
    public EventFactory getEventFactory() {
        return eventFactory;
    }
    public void setEventFactory(EventFactory eventFactory) {
        this.eventFactory = eventFactory;
    }
    /** Stores the context supplied by Spring (see {@code ApplicationContextAware}). */
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }
}
| caAERS/software/core/src/main/java/gov/nih/nci/cabig/caaers/api/impl/ParticipantServiceImpl.java | /*******************************************************************************
* Copyright SemanticBits, Northwestern University and Akaza Research
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caaers/LICENSE.txt for details.
******************************************************************************/
package gov.nih.nci.cabig.caaers.api.impl;
import gov.nih.nci.cabig.caaers.CaaersSystemException;
import gov.nih.nci.cabig.caaers.api.AbstractImportService;
import gov.nih.nci.cabig.caaers.dao.ParticipantDao;
import gov.nih.nci.cabig.caaers.dao.StudyDao;
import gov.nih.nci.cabig.caaers.dao.query.ParticipantQuery;
import gov.nih.nci.cabig.caaers.domain.Identifier;
import gov.nih.nci.cabig.caaers.domain.Organization;
import gov.nih.nci.cabig.caaers.domain.Participant;
import gov.nih.nci.cabig.caaers.domain.Study;
import gov.nih.nci.cabig.caaers.domain.StudyParticipantAssignment;
import gov.nih.nci.cabig.caaers.domain.ajax.StudySearchableAjaxableDomainObject;
import gov.nih.nci.cabig.caaers.event.EventFactory;
import gov.nih.nci.cabig.caaers.integration.schema.common.CaaersServiceResponse;
import gov.nih.nci.cabig.caaers.integration.schema.common.OrganizationType;
import gov.nih.nci.cabig.caaers.integration.schema.common.ResponseDataType;
import gov.nih.nci.cabig.caaers.integration.schema.common.Status;
import gov.nih.nci.cabig.caaers.integration.schema.common.WsError;
import gov.nih.nci.cabig.caaers.integration.schema.participant.AssignmentType;
import gov.nih.nci.cabig.caaers.integration.schema.participant.ParticipantRef;
import gov.nih.nci.cabig.caaers.integration.schema.participant.ParticipantType;
import gov.nih.nci.cabig.caaers.integration.schema.participant.ParticipantType.Assignments;
import gov.nih.nci.cabig.caaers.integration.schema.participant.Participants;
import gov.nih.nci.cabig.caaers.service.DomainObjectImportOutcome;
import gov.nih.nci.cabig.caaers.service.DomainObjectImportOutcome.Message;
import gov.nih.nci.cabig.caaers.service.ParticipantImportServiceImpl;
import gov.nih.nci.cabig.caaers.service.migrator.ParticipantConverter;
import gov.nih.nci.cabig.caaers.service.synchronizer.ParticipantSynchronizer;
import gov.nih.nci.security.util.StringUtilities;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import javax.validation.groups.Default;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.MessageSource;
public class ParticipantServiceImpl extends AbstractImportService implements ApplicationContextAware {
private static Log logger = LogFactory.getLog(ParticipantServiceImpl.class);
private ApplicationContext applicationContext;
private MessageSource messageSource;
private ParticipantDao participantDao;
private StudyDao studyDao;
private ParticipantImportServiceImpl participantImportServiceImpl;
private ParticipantConverter participantConverter;
private ParticipantSynchronizer participantSynchronizer;
//private DomainObjectValidator domainObjectValidator;
private Validator validator;
private EventFactory eventFactory;
/**
* Method exisits only to be called from ImportController
*/
public DomainObjectImportOutcome<Participant> processParticipant(ParticipantType xmlParticipant){
logger.info("Entering processParticipant() in ParticipantServiceImpl");
CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
DomainObjectImportOutcome<Participant> participantImportOutcome = null;
Participant participant = new Participant();
try{
participantConverter.convertParticipantDtoToParticipantDomain(xmlParticipant, participant);
}catch(CaaersSystemException caEX){
participantImportOutcome = new DomainObjectImportOutcome<Participant>();
logger.error("ParticipantDto to ParticipantDomain Conversion Failed " , caEX);
participantImportOutcome.addErrorMessage("ParticipantDto to ParticipantDomain Conversion Failed " , DomainObjectImportOutcome.Severity.ERROR);
}
if(participantImportOutcome == null){
participantImportOutcome = participantImportServiceImpl.importParticipant(participant);
if(participantImportOutcome.isSavable()){
Participant dbParticipant = fetchParticipantByAssignment(participantImportOutcome.getImportedDomainObject(), caaersServiceResponse);
if(dbParticipant != null){
logger.info("Participant Exists in caAERS trying to Update");
participantSynchronizer.migrate(dbParticipant, participantImportOutcome.getImportedDomainObject(), participantImportOutcome);
participantImportOutcome.setImportedDomainObject(dbParticipant);
logger.info("Participant in caAERS Updated");
}else if (caaersServiceResponse.getServiceResponse().getStatus() == Status.FAILED_TO_PROCESS) {
participantImportOutcome.addErrorMessage(caaersServiceResponse.getServiceResponse().getMessage(), DomainObjectImportOutcome.Severity.ERROR);
}else{
logger.info("New Participant to be Created");
}
}
}
logger.info("Leaving processParticipant() in ParticipantServiceImpl");
return participantImportOutcome;
}
public String getStudySubjectIdentifierFromInXML(ParticipantType xmlParticipant) {
String identifier = null;
Assignments assignments = xmlParticipant.getAssignments();
for(AssignmentType assignmentType : assignments.getAssignment()){
identifier = assignmentType.getStudySubjectIdentifier();
if(StringUtils.isNotEmpty(identifier) ){
return identifier;
}
}
return null;
}
public Identifier getStudyIdentifierFromInXML(ParticipantType xmlParticipant) {
Identifier identifier = null;
Assignments assignments = xmlParticipant.getAssignments();
for(AssignmentType assignmentType : assignments.getAssignment()){
identifier = new Identifier();
identifier.setType(assignmentType.getStudySite().getStudy().getIdentifiers().getIdentifier().getType().value());
identifier.setValue(assignmentType.getStudySite().getStudy().getIdentifiers().getIdentifier().getValue());
return identifier;
}
return identifier;
}
public List<StudySearchableAjaxableDomainObject> getAuthorizedStudies(Identifier identifier) {
List<StudySearchableAjaxableDomainObject> authorizedStudies = new ArrayList<StudySearchableAjaxableDomainObject>();
authorizedStudies = getAuthorizedStudies(identifier.getValue());
return authorizedStudies;
}
private String checkAuthorizedOrganizations (ParticipantType xmlParticipant) {
Assignments assignments = xmlParticipant.getAssignments();
for(AssignmentType assignmentType : assignments.getAssignment()){
String organizationName = assignmentType.getStudySite().getOrganization().getName();
String organizationNciInstituteCode = assignmentType.getStudySite().getOrganization().getNciInstituteCode();
List<Organization> organizations = new ArrayList<Organization>();
if (StringUtilities.isBlank(organizationNciInstituteCode)) {
//System.out.println("looking by name");
organizations = getAuthorizedOrganizationsByNameOrNciId(organizationName,null);
} else {
//System.out.println("looking by id");
organizations = getAuthorizedOrganizationsByNameOrNciId(null,organizationNciInstituteCode);
}
if (organizations.size() == 0 ) {
return organizationNciInstituteCode + " : " + organizationName;
}
}
return "ALL_ORGS_AUTH";
}
/**
* validates xml input and fetches participant based on study identifier and study subject identifier
* @param studySubjectIdentifier - string
* @param studyIdentifier - String
* @param xmlParticipant - xml participant object
* @param caaersServiceResponse - response
* @return Participant - returns retrieve participant, if not returns null, with response filled with appropriate messages
*/
private Participant validateInputsAndFetchParticipant(String studySubjectIdentifier, Identifier studyIdentifier, ParticipantType xmlParticipant,
CaaersServiceResponse caaersServiceResponse) {
if (studyIdentifier != null ) {
Study study = fetchStudy(studyIdentifier);
if(study == null){
createNoStudyFoundResponse(caaersServiceResponse,studyIdentifier);
return null;
}
List<StudySearchableAjaxableDomainObject> authorizedStudies = getAuthorizedStudies(studyIdentifier);
if(authorizedStudies.size() == 0) {
createNoStudyAuthorizationResponse(caaersServiceResponse, studyIdentifier);
return null;
}
}
String errorMsg = checkAuthorizedOrganizations(xmlParticipant);
if(!errorMsg.equals("ALL_ORGS_AUTH")) {
createNoOrganizationAuthorizationResponse(caaersServiceResponse, errorMsg);
return null;
}
return fetchParticipantByAssignment(studySubjectIdentifier, studyIdentifier, caaersServiceResponse);
}
/**
* converts xml participant to the DomainImportOutcome<Participant>
* @param xmlParticipant - xml participant object
* @param studySubjectIdentifier - string
* @param caaersServiceResponse - response
* @param processStr - should be created/updated/deleted to be used in response message
* @return converted and imported domain object
*/
private DomainObjectImportOutcome<Participant> convertToImportedDomainObject(ParticipantType xmlParticipant, String studySubjectIdentifier,
CaaersServiceResponse caaersServiceResponse, String processStr) {
Participant participant = new Participant();
try{
participantConverter.convertParticipantDtoToParticipantDomain(xmlParticipant, participant);
}catch(CaaersSystemException caEX){
String message = messageSource.getMessage("WS_PMS_005", new String[] { caEX.getMessage() }, "", Locale
.getDefault());
logger.error(message, caEX);
populateError(caaersServiceResponse, "WS_PMS_005", message);
return null;
}
DomainObjectImportOutcome<Participant> participantImportOutcome =
participantImportServiceImpl.importParticipant(participant);
Participant importedDomainObject = participantImportOutcome.getImportedDomainObject();
//List<String> errors = domainObjectValidator.validate(importedDomainObject);
Set<ConstraintViolation<Participant>> constraintViolations = validator.validate(importedDomainObject, Default.class);
if( !participantImportOutcome.isSavable() || constraintViolations.size() > 0) {
String errMessage = messageSource.getMessage("WS_PMS_007",
new String[] { importedDomainObject.getFirstName(), importedDomainObject.getLastName(),
studySubjectIdentifier, processStr },
"", Locale.getDefault());
populateError(caaersServiceResponse, "WS_PMS_007", errMessage);
logger.info(errMessage);
List<String> messages = new ArrayList<String>();
for(Message message : participantImportOutcome.getMessages()){
messages.add(message.getMessage());
}
String valErrmsg = null;
for (ConstraintViolation<Participant> violation : constraintViolations) {
valErrmsg = violation.getMessage()
+ " (" + violation.getPropertyPath()
+ ") in " + participant.getClass().getSimpleName()
+ "(" + participant.getFullName() + ")";
messages.add(valErrmsg);
}
Helper.populateErrorOutcome(caaersServiceResponse, null, null, null, messages);
return null;
}
return participantImportOutcome;
}
public CaaersServiceResponse createParticipant(
Participants xmlParticipants) {
CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
ParticipantType xmlParticipant = xmlParticipants.getParticipant().get(0);
Identifier studyIdentifier = getStudyIdentifierFromInXML(xmlParticipant);
String studySubjectIdentifier = getStudySubjectIdentifierFromInXML(xmlParticipant);
Participant dbParticipant = validateInputsAndFetchParticipant(studySubjectIdentifier, studyIdentifier, xmlParticipant, caaersServiceResponse);
if( dbParticipant != null) {
String message = messageSource.getMessage("WS_PMS_004", new String[] { studySubjectIdentifier, studyIdentifier.getValue() }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_004", message);
return caaersServiceResponse;
} else {
//remove the error message for participant not found, as this is create flow
List<WsError> wsErrors = caaersServiceResponse.getServiceResponse().getWsError();
if(wsErrors != null && wsErrors.size() == 1 && "WS_PMS_003".equals(wsErrors.get(0).getErrorCode()) ) {
wsErrors.remove(0);
Helper.populateMessage(caaersServiceResponse, "");
}
}
validateAssignmentSite(caaersServiceResponse, xmlParticipant, null);
if(caaersServiceResponse.getServiceResponse().getStatus() == Status.FAILED_TO_PROCESS ) {
return caaersServiceResponse;
}
//resetting the response object
caaersServiceResponse = Helper.createResponse();
DomainObjectImportOutcome<Participant> participantImportOutcome =
convertToImportedDomainObject(xmlParticipant, studySubjectIdentifier, caaersServiceResponse, "created");
if(participantImportOutcome != null) {
Participant importedDomainObject = participantImportOutcome.getImportedDomainObject();
participantDao.save(importedDomainObject);
String message = messageSource.getMessage("WS_PMS_006",
new String[] { importedDomainObject.getFirstName(), importedDomainObject.getLastName(), studySubjectIdentifier },
"", Locale.getDefault());
Helper.populateMessage(caaersServiceResponse, message);
logger.info(message);
if(eventFactory != null) {
eventFactory.publishEntityModifiedEvent(importedDomainObject, false);
}
}
return caaersServiceResponse;
}
public CaaersServiceResponse updateParticipant(
Participants xmlParticipants) {
CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
ParticipantType xmlParticipant = xmlParticipants.getParticipant().get(0);
Identifier studyIdentifier = getStudyIdentifierFromInXML(xmlParticipant);
String studySubjectIdentifier = getStudySubjectIdentifierFromInXML(xmlParticipant);
Participant dbParticipant = validateInputsAndFetchParticipant(studySubjectIdentifier, studyIdentifier, xmlParticipant, caaersServiceResponse);
if( dbParticipant == null) {
return caaersServiceResponse;
}
validateAssignmentSite(caaersServiceResponse, xmlParticipant, dbParticipant);
if(caaersServiceResponse.getServiceResponse().getStatus() == Status.FAILED_TO_PROCESS) {
return caaersServiceResponse;
}
//resetting the response object
caaersServiceResponse = Helper.createResponse();
DomainObjectImportOutcome<Participant> participantImportOutcome =
convertToImportedDomainObject(xmlParticipant, studySubjectIdentifier, caaersServiceResponse, "updated");
if(participantImportOutcome != null){
Participant importedDomainObject = participantImportOutcome.getImportedDomainObject();
participantSynchronizer.migrate(dbParticipant, participantImportOutcome.getImportedDomainObject(), participantImportOutcome);
participantImportOutcome.setImportedDomainObject(dbParticipant);
participantDao.save(participantImportOutcome.getImportedDomainObject());
String message = messageSource.getMessage("WS_PMS_008",
new String[] { importedDomainObject.getFirstName(), importedDomainObject.getLastName(), studySubjectIdentifier },
"", Locale.getDefault());
Helper.populateMessage(caaersServiceResponse, message);
logger.info(message);
if(eventFactory != null) {
eventFactory.publishEntityModifiedEvent(importedDomainObject, false);
}
}
return caaersServiceResponse;
}
private void validateAssignmentSite(CaaersServiceResponse caaersServiceResponse, ParticipantType xmlParticipant,
Participant dbParticipant) {
OrganizationType xmlOrg = xmlParticipant.getAssignments().getAssignment().get(0).getStudySite().getOrganization();
if (StringUtils.isEmpty(xmlOrg.getName()) || StringUtils.isEmpty(xmlOrg.getNciInstituteCode())
|| ":".equals(xmlOrg.getNciInstituteCode().trim())) {
String message = messageSource.getMessage("WS_PMS_017", new String[] {}, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_017", message);
}
if (dbParticipant == null) { //for create flow
return;
}
Organization dbOrg = dbParticipant.getAssignments().get(0).getStudySite().getOrganization();
if ( (dbOrg.getName() != null && !dbOrg.getName().equals(xmlOrg.getName()) )
|| (dbOrg.getNciInstituteCode() != null && !dbOrg.getNciInstituteCode().equals(xmlOrg.getNciInstituteCode()) )
) {
String message = messageSource.getMessage("WS_PMS_018", new String[] {}, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_018", message);
}
}
public CaaersServiceResponse deleteParticipant(Participants xmlParticipants) {
CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
ParticipantType xmlParticipant = xmlParticipants.getParticipant().get(0);
Identifier studyIdentifier = getStudyIdentifierFromInXML(xmlParticipant);
String studySubjectIdentifier = getStudySubjectIdentifierFromInXML(xmlParticipant);
Participant dbParticipant = validateInputsAndFetchParticipant(studySubjectIdentifier, studyIdentifier, xmlParticipant, caaersServiceResponse);
if( dbParticipant == null) {
return caaersServiceResponse;
} else if(dbParticipant.getHasReportingPeriods()) {
String message = messageSource.getMessage("WS_PMS_009", new String[] { studySubjectIdentifier, studyIdentifier.getValue() }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_009", message);
}
if(caaersServiceResponse.getServiceResponse().getStatus() == Status.FAILED_TO_PROCESS) {
return caaersServiceResponse;
}
//resetting the response object
caaersServiceResponse = Helper.createResponse();
DomainObjectImportOutcome<Participant> participantImportOutcome =
convertToImportedDomainObject(xmlParticipant, studySubjectIdentifier, caaersServiceResponse, "deleted");
if(participantImportOutcome != null){
Participant importedDomainObject = participantImportOutcome.getImportedDomainObject();
participantDao.delete(dbParticipant);
String message = messageSource.getMessage("WS_PMS_010",
new String[] { importedDomainObject.getFirstName(), importedDomainObject.getLastName(), studySubjectIdentifier },
"", Locale.getDefault());
Helper.populateMessage(caaersServiceResponse, message);
logger.info(message);
if(eventFactory != null) {
eventFactory.publishEntityModifiedEvent(importedDomainObject, false);
}
}
return caaersServiceResponse;
}
// returns the domain participant after converting to jaxb participant based on the input identifiers
public CaaersServiceResponse getParticipant(ParticipantRef xmlParticipantRefType) {
CaaersServiceResponse caaersServiceResponse = Helper.createResponse();
Participant dbParticipant = null;
//TODO : Only fetch By Assignment works..Need to add fetch by Identifier(mostly not required)
ParticipantRef.ParticipantAssignment assignment = xmlParticipantRefType.getParticipantAssignment();
if(assignment == null || assignment.getStudyIdentifier() == null || assignment.getStudyIdentifier().getType() == null) {
populateError(caaersServiceResponse, "WS_PMS_013", messageSource.getMessage("WS_PMS_013",
new String[]{},"",Locale.getDefault()));
return caaersServiceResponse;
}
Identifier studyId = new Identifier();
studyId.setType(assignment.getStudyIdentifier().getType().value());
studyId.setValue(assignment.getStudyIdentifier().getValue());
dbParticipant = fetchParticipantByAssignment(assignment.getStudySubjectIdentifier(), studyId, caaersServiceResponse);
if(dbParticipant != null ){
caaersServiceResponse.getServiceResponse().setResponsecode("0");
ParticipantType dbParticipantType = new ParticipantType();
participantConverter.convertDomainParticipantToParticipantDto(dbParticipant, dbParticipantType);
caaersServiceResponse.getServiceResponse().setResponseData(new ResponseDataType());
Participants participants = new Participants();
participants.getParticipant().add(dbParticipantType);
caaersServiceResponse.getServiceResponse().getResponseData().setAny(participants);
}
return caaersServiceResponse;
}
private CaaersServiceResponse createNoStudyAuthorizationResponse(CaaersServiceResponse caaersServiceResponse, Identifier identifier){
populateError(caaersServiceResponse, "WS_GEN_003", messageSource.getMessage("WS_GEN_003", new String[]{identifier.getValue()},"",Locale.getDefault()));
return caaersServiceResponse;
}
private CaaersServiceResponse createNoStudyFoundResponse(CaaersServiceResponse caaersServiceResponse, Identifier identifier){
populateError(caaersServiceResponse, "WS_PMS_002", messageSource.getMessage("WS_PMS_002", new String[]{identifier.getValue()},"",Locale.getDefault()));
return caaersServiceResponse;
}
private CaaersServiceResponse createNoOrganizationAuthorizationResponse(CaaersServiceResponse caaersServiceResponse, String errorMsg){
populateError(caaersServiceResponse, "WS_GEN_005", messageSource.getMessage("WS_GEN_005", new String[]{errorMsg},"",Locale.getDefault()));
return caaersServiceResponse;
}
private Participant fetchParticipant(Participant participant){
Participant dbParticipant = null;
for(Identifier identifier : participant.getIdentifiers()){
dbParticipant = participantDao.getParticipantDesignByIdentifier(identifier);
if(dbParticipant != null){
break;
}
participantDao.evict(dbParticipant);
}
return dbParticipant;
}
private Participant fetchParticipantByAssignment(Participant participant,
CaaersServiceResponse caaersServiceResponse) {
Participant dbParticipant = null;
for(StudyParticipantAssignment assignment : participant.getAssignments()){
for(Identifier identifier : assignment.getStudySite().getStudy().getIdentifiers()) {
dbParticipant = fetchParticipantByAssignment(assignment.getStudySubjectIdentifier(),
identifier, caaersServiceResponse);
if(dbParticipant != null){
participantDao.evict(dbParticipant);
return dbParticipant;
}
}
}
return dbParticipant;
}
private Participant fetchParticipantByAssignment(String studySubjectIdentifier,
Identifier studyIdentifier, CaaersServiceResponse caaersServiceResponse) {
Participant dbParticipant = null;
if (StringUtils.isEmpty(studySubjectIdentifier)) {
String message = messageSource.getMessage("WS_PMS_014", new String[] { studySubjectIdentifier }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_014", message);
return null;
}
if (studyIdentifier == null || StringUtils.isEmpty(studyIdentifier.getValue())) {
String message = messageSource.getMessage("WS_PMS_015", new String[] { studyIdentifier.getValue() }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_015", message);
return null;
}
try {
Study study = fetchStudy(studyIdentifier);
if(study == null) {
String message = messageSource.getMessage("WS_PMS_002", new String[] { studyIdentifier.getValue() }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_002", message);
return dbParticipant;
}
ParticipantQuery pq = new ParticipantQuery();
pq.joinStudy();
pq.filterByStudySubjectIdentifier(studySubjectIdentifier, "=");
pq.filterByStudyId(study.getId(), "=");
List<Participant> dbParticipants = participantDao.searchParticipant(pq);
if (dbParticipants != null && dbParticipants.size() == 1) {
logger.info("Participant registered to this study in caAERS");
dbParticipant = dbParticipants.get(0);
} else {
String message = messageSource.getMessage("WS_PMS_003", new String[] { studySubjectIdentifier, studyIdentifier.getValue() }, "", Locale
.getDefault());
logger.error(message);
populateError(caaersServiceResponse, "WS_PMS_003", message);
}
} catch (Exception e) {
String message = messageSource.getMessage("WS_PMS_016", new String[] { e.getMessage() }, "", Locale
.getDefault());
logger.error("Error retrieving participant", e);
populateError(caaersServiceResponse, "WS_PMS_016", message);
dbParticipant = null;
}
return dbParticipant;
}
private void populateError(CaaersServiceResponse caaersServiceResponse, String errorCode, String message) {
Helper.populateError(caaersServiceResponse, errorCode, message);
caaersServiceResponse.getServiceResponse().setMessage(message);
}
private Study fetchStudy(Identifier identifier) {
Study dbStudy = null;
dbStudy = studyDao.getByIdentifier(identifier);
if(dbStudy != null){
return dbStudy;
}
studyDao.evict(dbStudy);
return dbStudy;
}
public ParticipantDao getParticipantDao() {
return participantDao;
}
public void setParticipantDao(ParticipantDao participantDao) {
this.participantDao = participantDao;
}
public ParticipantImportServiceImpl getParticipantImportServiceImpl() {
return participantImportServiceImpl;
}
public void setParticipantImportServiceImpl(
ParticipantImportServiceImpl participantImportServiceImpl) {
this.participantImportServiceImpl = participantImportServiceImpl;
}
public ParticipantConverter getParticipantConverter() {
return participantConverter;
}
public void setParticipantConverter(ParticipantConverter participantConverter) {
this.participantConverter = participantConverter;
}
public ParticipantSynchronizer getParticipantSynchronizer() {
return participantSynchronizer;
}
public void setParticipantSynchronizer(
ParticipantSynchronizer participantSynchronizer) {
this.participantSynchronizer = participantSynchronizer;
}
/*public void setDomainObjectValidator(DomainObjectValidator domainObjectValidator) {
this.domainObjectValidator = domainObjectValidator;
}*/
public void setMessageSource(MessageSource messageSource) {
this.messageSource = messageSource;
}
public Validator getValidator() {
return validator;
}
public void setValidator(Validator validator) {
this.validator = validator;
}
public void setStudyDao(StudyDao studyDao) {
this.studyDao = studyDao;
}
public EventFactory getEventFactory() {
return eventFactory;
}
public void setEventFactory(EventFactory eventFactory) {
this.eventFactory = eventFactory;
}
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
}
| CAAERS-6350
| caAERS/software/core/src/main/java/gov/nih/nci/cabig/caaers/api/impl/ParticipantServiceImpl.java | CAAERS-6350 |
|
Java | isc | f183511bb2299dfe4ca420d32ecdc7bc3bea3206 | 0 | semantic-dependency-parsing/toolkit,semantic-dependency-parsing/toolkit | /*
* See the file "LICENSE" for the full license governing this code.
*/
package se.liu.ida.nlp.sdp.tools;
import se.liu.ida.nlp.sdp.graph.Graph;
import se.liu.ida.nlp.sdp.graph.GraphInspector;
import se.liu.ida.nlp.sdp.graph.Node;
import se.liu.ida.nlp.sdp.io.GraphReader;
/**
 * Print statistics about a collection of graphs.
 * <p>
 * One line per graph is printed while reading (id, number of top nodes, number
 * of edges leaving top nodes, cyclicity, semi-connectedness); aggregate fields
 * accumulate running sums that {@link #finish()} turns into averages.
 *
 * @author Marco Kuhlmann &lt;[email protected]&gt;
 */
public class Analyzer {

    // Aggregate counters. The avg*/pc* fields hold running sums until finish()
    // divides them by the relevant denominator (nNodes or nGraphs).
    private int nGraphs;
    private int nNodes;
    private double avgNTopNodes;
    private double avgNStructuralRoots;
    private double pcSemiConnected;
    private double pcReentrant;
    private double pcCyclic;
    private int maxIndegreeGlobal;
    private double avgIndegreeGlobal;
    private int maxOutdegreeGlobal;
    private double avgOutdegreeGlobal;
    private double avgSingletons;

    /**
     * Folds one graph into the running statistics and prints its per-graph line.
     *
     * @param graph the graph to analyze
     */
    public void update(Graph graph) {
        GraphInspector inspector = new GraphInspector(graph);
        int nTopNodes = 0;
        int nEdgesFromTopNode = 0;
        int nStructuralRoots = 0;
        boolean isReentrant = false;
        // Cyclicity is a property of the whole graph; the original recomputed
        // inspector.isCyclic() once per node inside the loop below.
        boolean isCyclic = inspector.isCyclic();
        int maxIndegree = 0;
        int maxOutdegree = 0;
        int nSingletons = 0;
        for (Node node : graph.getNodes()) {
            if (node.isTop) {
                nTopNodes++;
                nEdgesFromTopNode += node.getNOutgoingEdges();
            }
            if (!node.hasIncomingEdges()) {
                nStructuralRoots++;
            }
            if (node.getNIncomingEdges() >= 2) {
                isReentrant = true;
            }
            maxIndegree = Math.max(maxIndegree, node.getNIncomingEdges());
            maxOutdegree = Math.max(maxOutdegree, node.getNOutgoingEdges());
            if (!node.hasIncomingEdges() && !node.hasOutgoingEdges()) {
                nSingletons++;
            }
            avgIndegreeGlobal += node.getNIncomingEdges();
            avgOutdegreeGlobal += node.getNOutgoingEdges();
        }
        // Semi-connected here means: at most one component besides singletons.
        boolean isSemiConnected = inspector.getNComponents() - nSingletons <= 1;

        nGraphs++;
        nNodes += graph.getNNodes();
        avgNTopNodes += nTopNodes;
        // BUG FIX: the original accumulated nTopNodes into avgNStructuralRoots,
        // leaving the nStructuralRoots count it had just computed unused.
        avgNStructuralRoots += nStructuralRoots;
        pcSemiConnected += isSemiConnected ? 1.0 : 0.0;
        pcReentrant += isReentrant ? 1.0 : 0.0;
        pcCyclic += isCyclic ? 1.0 : 0.0;
        maxIndegreeGlobal = Math.max(maxIndegreeGlobal, maxIndegree);
        // BUG FIX: maxOutdegreeGlobal was declared but never updated.
        maxOutdegreeGlobal = Math.max(maxOutdegreeGlobal, maxOutdegree);
        avgSingletons += nSingletons;

        System.out.format("%s", graph.id);
        // number of top nodes
        System.out.format("\t%d", nTopNodes);
        // number of outgoing arcs from top
        System.out.format("\t%d", nEdgesFromTopNode);
        // cyclic?
        System.out.format("\t%s", isCyclic ? "+" : "-");
        // semiconnected?
        System.out.format("\t%s", isSemiConnected ? "+" : "-");
        System.out.println();
    }

    /**
     * Converts the accumulated sums into averages/percentages. Call exactly once,
     * after all graphs have been passed to {@link #update(Graph)}.
     */
    public void finish() {
        // per-node averages
        avgIndegreeGlobal /= nNodes;
        avgOutdegreeGlobal /= nNodes;
        // per-graph averages and percentages
        avgNTopNodes /= nGraphs;
        avgNStructuralRoots /= nGraphs;
        pcSemiConnected /= nGraphs;
        pcReentrant /= nGraphs;
        pcCyclic /= nGraphs;
        avgSingletons /= nGraphs;
    }

    public static void main(String[] args) throws Exception {
        Analyzer analyzer = new Analyzer();
        for (String arg : args) {
            GraphReader reader = new GraphReader(arg);
            Graph graph;
            while ((graph = reader.readGraph()) != null) {
                analyzer.update(graph);
            }
            reader.close();
        }
        // BUG FIX: finish() was previously called inside the loop, i.e. once per
        // input file; each extra call divided the running sums again and corrupted
        // the averages whenever more than one file was given.
        analyzer.finish();
    }
}
| src/main/java/se/liu/ida/nlp/sdp/tools/Analyzer.java | /*
* See the file "LICENSE" for the full license governing this code.
*/
package se.liu.ida.nlp.sdp.tools;
import se.liu.ida.nlp.sdp.graph.Graph;
import se.liu.ida.nlp.sdp.graph.GraphInspector;
import se.liu.ida.nlp.sdp.graph.Node;
import se.liu.ida.nlp.sdp.io.GraphReader;
/**
* Print statistics about a collection of graphs.
*
* @author Marco Kuhlmann <[email protected]>
*/
public class Analyzer {

    // Aggregate counters. The "avg*" and "pc*" fields hold raw sums until
    // finish() normalizes them into averages/percentages.
    private int nGraphs;
    private int nNodes;
    private double avgNRoots;
    private double avgNStructuralRoots;
    private double pcSemiConnected;
    private double pcReentrant;
    private double pcCyclic;
    private int maxIndegreeGlobal;
    private double avgIndegreeGlobal;
    private int maxOutdegreeGlobal;
    private double avgOutdegreeGlobal;
    private double avgSingletons;

    /**
     * Folds one graph into the running statistics. Graphs that are not
     * semi-connected have their id echoed to stderr for inspection.
     *
     * @param graph the graph to analyze
     */
    public void update(Graph graph) {
        GraphInspector analyzer = new GraphInspector(graph);
        int nRoots = 0;
        int nStructuralRoots = 0;
        boolean isSemiConnected = true;
        boolean isReentrant = false;
        // Cyclicity is a whole-graph property; query it once instead of
        // once per node as the old code did.
        boolean isCyclic = analyzer.isCyclic();
        int maxIndegree = 0;
        int maxOutdegree = 0;
        int nSingletons = 0;
        for (Node node : graph.getNodes()) {
            if (node.isTop) {
                nRoots++;
            }
            if (!node.hasIncomingEdges()) {
                nStructuralRoots++;
            }
            if (node.getNIncomingEdges() >= 2) {
                isReentrant = true;
            }
            maxIndegree = Math.max(maxIndegree, node.getNIncomingEdges());
            maxOutdegree = Math.max(maxOutdegree, node.getNOutgoingEdges());
            if (!node.hasIncomingEdges() && !node.hasOutgoingEdges()) {
                nSingletons++;
            }
            avgIndegreeGlobal += node.getNIncomingEdges();
            avgOutdegreeGlobal += node.getNOutgoingEdges();
        }
        if (analyzer.getNComponents() - nSingletons > 1) {
            // Not semi-connected: report the graph id for inspection.
            System.err.format("%s%n", graph.id);
            isSemiConnected = false;
        }
        nGraphs++;
        nNodes += graph.getNNodes();
        avgNRoots += nRoots;
        // BUG FIX: previously added nRoots here, which made the
        // structural-roots average a duplicate of the roots average.
        avgNStructuralRoots += nStructuralRoots;
        pcSemiConnected += isSemiConnected ? 1.0 : 0.0;
        pcReentrant += isReentrant ? 1.0 : 0.0;
        pcCyclic += isCyclic ? 1.0 : 0.0;
        maxIndegreeGlobal = Math.max(maxIndegreeGlobal, maxIndegree);
        // BUG FIX: the global outdegree maximum was never updated.
        maxOutdegreeGlobal = Math.max(maxOutdegreeGlobal, maxOutdegree);
        avgSingletons += nSingletons;
    }

    /**
     * Normalizes the accumulated sums into averages. Call exactly once,
     * after all input has been processed.
     */
    public void finish() {
        avgIndegreeGlobal /= nNodes;
        avgOutdegreeGlobal /= nNodes;
        //
        avgNRoots /= nGraphs;
        avgNStructuralRoots /= nGraphs;
        pcSemiConnected /= nGraphs;
        pcReentrant /= nGraphs;
        pcCyclic /= nGraphs;
        avgSingletons /= nGraphs;
    }

    /**
     * Reads graphs from the files named on the command line and reports the
     * fraction of semi-connected graphs.
     *
     * @param args names of input files
     * @throws Exception if an input file cannot be read
     */
    public static void main(String[] args) throws Exception {
        Analyzer analyzer = new Analyzer();
        for (String arg : args) {
            GraphReader reader = new GraphReader(arg);
            Graph graph;
            while ((graph = reader.readGraph()) != null) {
                analyzer.update(graph);
            }
            reader.close();
        }
        // BUG FIX: finish() and the summary used to run inside the loop,
        // re-dividing the accumulated sums for every input file.
        analyzer.finish();
        System.out.format("%% semi-connected: %f%n", analyzer.pcSemiConnected);
    }
}
| Compute properties requested by Stephan.
| src/main/java/se/liu/ida/nlp/sdp/tools/Analyzer.java | Compute properties requested by Stephan. |
|
Java | mit | ba22498fab32ee962c2d44266d9c5ad1560afd5c | 0 | BirdBrainTechnologies/BirdBlox-Android-Support | package com.birdbraintechnologies.birdblox.httpservice.RequestHandlers;
import android.app.AlertDialog;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.le.ScanFilter;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Handler;
import android.os.ParcelUuid;
import android.util.Log;
import android.widget.Toast;
import com.birdbraintechnologies.birdblox.Bluetooth.BluetoothHelper;
import com.birdbraintechnologies.birdblox.Bluetooth.UARTConnection;
import com.birdbraintechnologies.birdblox.Bluetooth.UARTSettings;
import com.birdbraintechnologies.birdblox.Robots.Hummingbird;
import com.birdbraintechnologies.birdblox.Robots.Hummingbit;
import com.birdbraintechnologies.birdblox.Robots.Microbit;
import com.birdbraintechnologies.birdblox.Robots.Robot;
import com.birdbraintechnologies.birdblox.Robots.RobotType;
import com.birdbraintechnologies.birdblox.httpservice.HttpService;
import com.birdbraintechnologies.birdblox.httpservice.RequestHandler;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import fi.iki.elonen.NanoHTTPD;
import static com.birdbraintechnologies.birdblox.MainWebView.bbxEncode;
import static com.birdbraintechnologies.birdblox.MainWebView.mainWebViewContext;
import static com.birdbraintechnologies.birdblox.MainWebView.runJavascript;
import static com.birdbraintechnologies.birdblox.Robots.RobotType.robotTypeFromString;
import static fi.iki.elonen.NanoHTTPD.MIME_PLAINTEXT;
/**
* @author AppyFizz (Shreyan Bakshi)
* @author Zhendong Yuan (yzd1998111)
*/
public class RobotRequestHandler implements RequestHandler {
    // Log tag derived from the runtime class name.
    private final String TAG = this.getClass().getName();

    private static final String FIRMWARE_UPDATE_URL = "http://www.hummingbirdkit.com/learning/installing-birdblox#BurnFirmware";

    /* UUIDs for different Hummingbird features */
    private static final String DEVICE_UUID = "6E400001-B5A3-F393-E0A9-E50E24DCCA9E";
    private static final UUID HB_UART_UUID = UUID.fromString("6E400001-B5A3-F393-E0A9-E50E24DCCA9E");
    private static final UUID HB_TX_UUID = UUID.fromString("6E400002-B5A3-F393-E0A9-E50E24DCCA9E");
    private static final UUID HB_RX_UUID = UUID.fromString("6E400003-B5A3-F393-E0A9-E50E24DCCA9E");
    // TODO: Remove this, it is the same across devices
    private static final UUID RX_CONFIG_UUID = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb");

    // Device ids with a requested-but-not-completed connection; the connect
    // helpers remove an id once its connection attempt starts. Presumably
    // populated by the scan/UI layer — TODO confirm against callers.
    public static HashSet<String> hummingbirdsToConnect = new HashSet<>();
    public static HashSet<String> hummingbitsToConnect = new HashSet<>();
    public static HashSet<String> microbitsToConnect = new HashSet<>();

    HttpService service;
    private static BluetoothHelper btHelper;
    // In-flight connection threads, keyed by device id; a new attempt for
    // the same id interrupts the previous thread.
    private static HashMap<String, Thread> threadMap;
    // Per-type UART settings and maps of currently connected robots,
    // keyed by device id.
    private static UARTSettings HBUARTSettings;
    private static HashMap<String, Hummingbird> connectedHummingbirds;
    private static UARTSettings HBitUARTSettings;
    private static HashMap<String, Hummingbit> connectedHummingbits;
    private static UARTSettings MBitUARTSettings;
    private static HashMap<String, Microbit> connectedMicrobits;

    // Dialog state for showRobotInfo().
    private AlertDialog.Builder builder;
    private AlertDialog robotInfoDialog;
    // Raw GATT handles for devices without a completed UART connection,
    // keyed by device id; closed by the disconnect helpers.
    public static HashMap<String, BluetoothGatt> deviceGatt;
public RobotRequestHandler(HttpService service) {
this.service = service;
btHelper = service.getBluetoothHelper();
threadMap = new HashMap<>();
connectedHummingbirds = new HashMap<>();
connectedHummingbits = new HashMap<>();
connectedMicrobits = new HashMap<>();
deviceGatt = new HashMap<>();
// Build Hummingbird UART settings
HBUARTSettings = (new UARTSettings.Builder())
.setUARTServiceUUID(HB_UART_UUID)
.setRxCharacteristicUUID(HB_RX_UUID)
.setTxCharacteristicUUID(HB_TX_UUID)
.setRxConfigUUID(RX_CONFIG_UUID)
.build();
HBitUARTSettings = (new UARTSettings.Builder())
.setUARTServiceUUID(HB_UART_UUID)
.setRxCharacteristicUUID(HB_RX_UUID)
.setTxCharacteristicUUID(HB_TX_UUID)
.setRxConfigUUID(RX_CONFIG_UUID)
.build();
MBitUARTSettings = (new UARTSettings.Builder())
.setUARTServiceUUID(HB_UART_UUID)
.setRxCharacteristicUUID(HB_RX_UUID)
.setTxCharacteristicUUID(HB_TX_UUID)
.setRxConfigUUID(RX_CONFIG_UUID)
.build();
}
    /**
     * Dispatches a robot-related HTTP request to the matching Bluetooth
     * operation. The first path segment selects the command; the query
     * parameters carry the robot type, id, and command arguments. The
     * "out"/"in" commands also push a connection-status update to the
     * frontend via CallbackManager before returning.
     *
     * @param session the HTTP session carrying the query parameters
     * @param args    args.get(0) is the sub-path routed to this handler
     * @return 200 with a result body on success; 404 when the robot is not
     *         connected, 417 on a failed write, 204 on a failed sensor read
     */
    @Override
    public NanoHTTPD.Response handleRequest(NanoHTTPD.IHTTPSession session, List<String> args) {
        String[] path = args.get(0).split("/");
        Map<String, List<String>> m = session.getParameters();

        // Generate response body
        String responseBody = "";
        Robot robot;
        switch (path[0]) {
            case "startDiscover":
                responseBody = startScan();
                break;
            case "stopDiscover":
                responseBody = stopDiscover();
                break;
            case "totalStatus":
                responseBody = getTotalStatus(robotTypeFromString(m.get("type").get(0)));
                break;
            case "connect":
                responseBody = connectToRobot(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
                break;
            case "disconnect":
                responseBody = disconnectFromRobot(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
                break;
            case "out":
                // Write an output command (path[1]) to the robot's peripherals.
                robot = getRobotFromId(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
                if (robot == null) {
                    runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', false);");
                    return NanoHTTPD.newFixedLengthResponse(
                            NanoHTTPD.Response.Status.NOT_FOUND, MIME_PLAINTEXT, "Robot " + m.get("id").get(0) + " was not found.");
                } else if (!robot.setOutput(path[1], m)) {
                    runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', false);");
                    return NanoHTTPD.newFixedLengthResponse(
                            NanoHTTPD.Response.Status.EXPECTATION_FAILED, MIME_PLAINTEXT, "Failed to send to robot " + m.get("id").get(0) + ".");
                } else {
                    runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', true);");
                    responseBody = "Sent to robot " + m.get("type").get(0) + " successfully.";
                }
                break;
            case "in":
                // Read a sensor value; "port" and "axis" parameters are optional.
                robot = getRobotFromId(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
                if (robot == null) {
                    runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', false);");
                    return NanoHTTPD.newFixedLengthResponse(
                            NanoHTTPD.Response.Status.NOT_FOUND, MIME_PLAINTEXT, "Robot " + m.get("id").get(0) + " was not found.");
                } else {
                    String sensorPort = null;
                    String sensorAxis = null;
                    if (m.get("port") != null) {
                        sensorPort = m.get("port").get(0);
                    }
                    if (m.get("axis") != null) {
                        sensorAxis = m.get("axis").get(0);
                    }
                    String sensorValue = robot.readSensor(m.get("sensor").get(0), sensorPort, sensorAxis);
                    if (sensorValue == null) {
                        runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', false);");
                        return NanoHTTPD.newFixedLengthResponse(
                                NanoHTTPD.Response.Status.NO_CONTENT, MIME_PLAINTEXT, "Failed to read sensors from robot " + m.get("id").get(0) + ".");
                    } else {
                        runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', true);");
                        responseBody = sensorValue;
                    }
                }
                break;
            case "showInfo":
                responseBody = showRobotInfo(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
                break;
            case "showUpdateInstructions":
                showFirmwareUpdateInstructions();
                break;
            case "stopAll":
                stopAll();
                break;
        }
        // Unknown commands fall through and return an empty 200 response.
        return NanoHTTPD.newFixedLengthResponse(
                NanoHTTPD.Response.Status.OK, MIME_PLAINTEXT, responseBody);
    }
// TODO: Properly define Robot Object
// TODO: Synchronization of below functions
// TODO: Finish implementing new Robot commands and callbacks
private static String startScan() {
final List deviceFilter = generateDeviceFilter();
if (BluetoothHelper.currentlyScanning) {
return "";
}
if (BluetoothHelper.currentlyScanning) {
stopDiscover();
}
new Thread() {
@Override
public void run() {
btHelper.scanDevices(deviceFilter);
}
}.start();
return "";
}
/**
* Finds a robotId in the list of connected robots. Null if it does not exist.
*
* @param robotType The type of the robot to be found. Must be 'hummingbird' or 'hummingbit' or 'microbit'.
* @param robotId Robot ID to find.
* @return The connected Robot if it exists, null otherwise.
*/
private static Robot getRobotFromId(RobotType robotType, String robotId) {
if (robotType == RobotType.Hummingbird) {
return connectedHummingbirds.get(robotId);
} else if (robotType == RobotType.Hummingbit) {
return connectedHummingbits.get(robotId);
} else {
return connectedMicrobits.get(robotId);
}
}
/**
* Creates a Bluetooth scan Robot filter that only matches the required 'type' of Robot.
*
* @return List of scan filters.
*/
private static List<ScanFilter> generateDeviceFilter() {
String ROBOT_UUID = DEVICE_UUID;
ScanFilter scanFilter = (new ScanFilter.Builder())
.setServiceUuid(ParcelUuid.fromString(ROBOT_UUID))
.build();
List<ScanFilter> robotFilters = new ArrayList<>();
robotFilters.add(scanFilter);
return robotFilters;
}
/**
* @param robotType
* @param robotId
* @return
*/
public static String connectToRobot(RobotType robotType, String robotId) {
if (robotType == RobotType.Hummingbird) {
connectToHummingbird(robotId);
} else if (robotType == RobotType.Hummingbit) {
connectToHummingbit(robotId);
} else {
connectToMicrobit(robotId);
}
return "";
}
private static void connectToHummingbird(final String hummingbirdId) {
if (connectedHummingbirds.containsKey(hummingbirdId) == false) {
final UARTSettings HBUART = HBUARTSettings;
if (hummingbirdsToConnect.contains(hummingbirdId)) {
hummingbirdsToConnect.remove(hummingbirdId);
}
try {
Thread hbConnectionThread = new Thread() {
@Override
public void run() {
UARTConnection hbConn = btHelper.connectToDeviceUART(hummingbirdId, HBUART);
if (hbConn != null && hbConn.isConnected() && connectedHummingbirds != null) {
Hummingbird hummingbird = new Hummingbird(hbConn);
connectedHummingbirds.put(hummingbirdId, hummingbird);
hummingbird.setConnected();
}
}
};
hbConnectionThread.start();
final Thread oldThread = threadMap.put(hummingbirdId, hbConnectionThread);
if (oldThread != null) {
new Thread() {
@Override
public void run() {
super.run();
oldThread.interrupt();
}
}.start();
}
} catch (Exception e) {
Log.e("ConnectHB", " Error while connecting to HB " + e.getMessage());
}
}
}
private static void connectToHummingbit(final String hummingbitId) {
if (connectedHummingbits.containsKey(hummingbitId) == false) {
final UARTSettings HBitUART = HBitUARTSettings;
if (hummingbitsToConnect.contains(hummingbitId)) {
hummingbitsToConnect.remove(hummingbitId);
}
try {
Thread hbitConnectionThread = new Thread() {
@Override
public void run() {
UARTConnection hbitConn = btHelper.connectToDeviceUART(hummingbitId, HBitUART);
if (hbitConn != null && hbitConn.isConnected() && connectedHummingbits != null) {
Hummingbit hummingbit = new Hummingbit(hbitConn);
connectedHummingbits.put(hummingbitId, hummingbit);
hummingbit.setConnected();
}
}
};
hbitConnectionThread.start();
final Thread oldThread = threadMap.put(hummingbitId, hbitConnectionThread);
if (oldThread != null) {
new Thread() {
@Override
public void run() {
super.run();
oldThread.interrupt();
}
}.start();
}
} catch (Exception e) {
Log.e("ConnectHBit", " Error while connecting to HBit " + e.getMessage());
}
}
}
private static void connectToMicrobit(final String microbitId) {
if (connectedMicrobits.containsKey(microbitId) == false) {
final UARTSettings MBitUART = MBitUARTSettings;
if (microbitsToConnect.contains(microbitId)) {
microbitsToConnect.remove(microbitId);
}
try {
Thread mbitConnectionThread = new Thread() {
@Override
public void run() {
UARTConnection mbitConn = btHelper.connectToDeviceUART(microbitId, MBitUART);
if (mbitConn != null && mbitConn.isConnected() && connectedMicrobits != null) {
Microbit microbit = new Microbit(mbitConn);
connectedMicrobits.put(microbitId, microbit);
microbit.setConnected();
}
}
};
mbitConnectionThread.start();
final Thread oldThread = threadMap.put(microbitId, mbitConnectionThread);
if (oldThread != null) {
new Thread() {
@Override
public void run() {
super.run();
oldThread.interrupt();
}
}.start();
}
} catch (Exception e) {
Log.e("ConnectHBit", " Error while connecting to HBit " + e.getMessage());
}
}
}
/**
* @param robotType
* @param robotId
* @return
*/
private String disconnectFromRobot(RobotType robotType, final String robotId) {
new Thread() {
@Override
public void run() {
super.run();
Thread connThread = threadMap.get(robotId);
if (connThread != null) connThread.interrupt();
}
}.start();
if (robotType == RobotType.Hummingbird) {
disconnectFromHummingbird(robotId);
} else if (robotType == RobotType.Hummingbit) {
disconnectFromHummingbit(robotId);
} else {
disconnectFromMicrobit(robotId);
}
hummingbirdsToConnect = new HashSet<>();
hummingbitsToConnect = new HashSet<>();
microbitsToConnect = new HashSet<>();
btHelper.stopScan();
runJavascript("CallbackManager.robot.updateStatus('" + bbxEncode(robotId) + "', false);");
Log.d("TotStat", "Connected Hummingbirds: " + connectedHummingbirds.toString());
Log.d("TotStat", "Connected Hummingbits: " + connectedHummingbits.toString());
Log.d("TotStat", "Connected Hummingbits: " + connectedMicrobits.toString());
return robotType.toString() + " disconnected successfully.";
}
/**
* @param hummingbirdId
*/
public static void disconnectFromHummingbird(String hummingbirdId) {
try {
Hummingbird hummingbird = (Hummingbird) getRobotFromId(RobotType.Hummingbird, hummingbirdId);
if (hummingbird != null) {
hummingbird.disconnect();
if (hummingbird.getDisconnected()) {
connectedHummingbirds.remove(hummingbirdId);
}
Log.d("TotStat", "Removing hummingbird: " + hummingbirdId);
} else {
BluetoothGatt curDeviceGatt = deviceGatt.get(hummingbirdId);
if (curDeviceGatt != null) {
curDeviceGatt.disconnect();
curDeviceGatt.close();
curDeviceGatt = null;
if (deviceGatt.containsKey(hummingbirdId)) {
deviceGatt.remove(hummingbirdId);
}
}
}
} catch (Exception e) {
Log.e("ConnectHB", " Error while disconnecting from HB " + e.getMessage());
}
}
/**
* @param hummingbitId
*/
public static void disconnectFromHummingbit(String hummingbitId) {
try {
Hummingbit hummingbit = (Hummingbit) getRobotFromId(RobotType.Hummingbit, hummingbitId);
if (hummingbit != null) {
hummingbit.disconnect();
if (hummingbit.getDisconnected()) {
connectedHummingbits.remove(hummingbitId);
}
Log.d("TotStat", "Removing hummingbit: " + hummingbitId);
} else {
BluetoothGatt curDeviceGatt = deviceGatt.get(hummingbitId);
if (curDeviceGatt != null) {
curDeviceGatt.disconnect();
curDeviceGatt.close();
curDeviceGatt = null;
if (deviceGatt.containsKey(hummingbitId)) {
deviceGatt.remove(hummingbitId);
}
}
}
} catch (Exception e) {
Log.e("ConnectHB", " Error while disconnecting from HB " + e.getMessage());
}
}
/**
* @param microbitId
*/
public static void disconnectFromMicrobit(String microbitId) {
try {
Microbit microbit = (Microbit) getRobotFromId(RobotType.Microbit, microbitId);
if (microbit != null) {
microbit.disconnect();
if (microbit.getDisconnected()) {
connectedMicrobits.remove(microbitId);
}
Log.d("TotStat", "Removing microbit: " + microbitId);
} else {
BluetoothGatt curDeviceGatt = deviceGatt.get(microbitId);
if (curDeviceGatt != null) {
curDeviceGatt.disconnect();
curDeviceGatt.close();
curDeviceGatt = null;
if (deviceGatt.containsKey(microbitId)) {
deviceGatt.remove(microbitId);
}
}
}
} catch (Exception e) {
Log.e("ConnectHB", " Error while disconnecting from MB " + e.getMessage());
}
}
public static void disconnectAll() {
hummingbirdsToConnect = null;
hummingbitsToConnect = null;
microbitsToConnect = null;
if (connectedHummingbirds != null) {
for (String individualHummingBird : connectedHummingbirds.keySet()) {
disconnectFromHummingbird(individualHummingBird);
}
}
if (connectedHummingbits != null) {
for (String individualHummingBit : connectedHummingbits.keySet()) {
disconnectFromHummingbit(individualHummingBit);
}
}
if (connectedMicrobits != null) {
for (String individualMicroBit : connectedMicrobits.keySet()) {
disconnectFromMicrobit(individualMicroBit);
}
}
}
/**
* @param robotType
* @return
*/
private String getTotalStatus(RobotType robotType) {
if (robotType == RobotType.Hummingbird) {
return getTotalHBStatus();
} else if (robotType == RobotType.Hummingbit) {
return getTotalHBitStatus();
} else {
return getTotalMBitStatus();
}
}
    /**
     * Aggregate status of all Hummingbird connections.
     *
     * @return "2" if no hummingbirds are connected, "0" if any hummingbird
     *         has dropped its connection, "1" if all are connected
     */
    private String getTotalHBStatus() {
        Log.d("TotStat", "Connected Hummingbirds: " + connectedHummingbirds.toString());
        if (connectedHummingbirds.size() == 0) {
            return "2"; // No hummingbirds connected
        }
        for (Hummingbird hummingbird : connectedHummingbirds.values()) {
            if (!hummingbird.isConnected()) {
                return "0"; // Some hummingbird is disconnected
            }
        }
        return "1"; // All hummingbirds are OK
    }

    /**
     * Aggregate status of all Hummingbit connections.
     *
     * @return "2" if no hummingbits are connected, "0" if any hummingbit
     *         has dropped its connection, "1" if all are connected
     */
    private String getTotalHBitStatus() {
        Log.d("TotStat", "Connected Hummingbits: " + connectedHummingbits.toString());
        if (connectedHummingbits.size() == 0) {
            return "2"; // No hummingbits connected
        }
        for (Hummingbit hummingbit : connectedHummingbits.values()) {
            if (!hummingbit.isConnected()) {
                return "0"; // Some hummingbit is disconnected
            }
        }
        return "1"; // All hummingbits are OK
    }

    /**
     * Aggregate status of all micro:bit connections.
     *
     * @return "2" if no microbits are connected, "0" if any microbit has
     *         dropped its connection, "1" if all are connected
     */
    private String getTotalMBitStatus() {
        Log.d("TotStat", "Connected Microbits: " + connectedMicrobits.toString());
        if (connectedMicrobits.size() == 0) {
            return "2"; // No microbits connected
        }
        for (Microbit microbit : connectedMicrobits.values()) {
            if (!microbit.isConnected()) {
                return "0"; // Some microbit is disconnected
            }
        }
        return "1"; // All microbits are OK
    }
private static String stopDiscover() {
if (btHelper != null)
btHelper.stopScan();
runJavascript("CallbackManager.robot.stopDiscover();");
return "Bluetooth discovery stopped.";
}
private String showRobotInfo(RobotType robotType, String robotId) {
builder = new AlertDialog.Builder(mainWebViewContext);
// Get details
Robot robot = getRobotFromId(robotType, robotId);
String name = robot.getName();
String macAddress = robot.getMacAddress();
String gapName = robot.getGAPName();
String hardwareVersion = "";
String firmwareVersion = "";
if (robotType == RobotType.Hummingbird) {
hardwareVersion = ((Hummingbird) robot).getHardwareVersion();
firmwareVersion = ((Hummingbird) robot).getFirmwareVersion();
} else if (robotType == RobotType.Hummingbit) {
hardwareVersion = ((Hummingbit) robot).getHardwareVersion();
firmwareVersion = "microBit: " + ((Hummingbit) robot).getMicroBitVersion() + "SMD: " + ((Hummingbit) robot).getSMDVersion();
} else if (robotType == RobotType.Microbit) {
hardwareVersion = ((Microbit) robot).getHardwareVersion();
firmwareVersion = "microBit: " + ((Microbit) robot).getMicroBitVersion();
}
builder.setTitle(robotType.toString() + " Peripheral");
String message = "";
if (name != null)
message += ("Name: " + name + "\n");
if (macAddress != null)
message += ("MAC Address: " + macAddress + "\n");
if (gapName != null)
message += ("Bluetooth Name: " + gapName + "\n");
if (hardwareVersion != null)
message += ("Hardware Version: " + hardwareVersion + "\n");
if (firmwareVersion != null)
message += ("Firmware Version: " + firmwareVersion + "\n");
if (!robot.hasLatestFirmware())
message += ("\nFirmware update available.");
builder.setMessage(message);
builder.setCancelable(true);
if (!robot.hasLatestFirmware()) {
builder.setPositiveButton(
"Update Firmware",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
showFirmwareUpdateInstructions();
}
});
builder.setNegativeButton(
"Dismiss",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
} else {
builder.setNeutralButton(
"Dismiss",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
}
new Thread() {
@Override
public void run() {
super.run();
new Handler(mainWebViewContext.getMainLooper()).post(new Runnable() {
@Override
public void run() {
robotInfoDialog = builder.create();
robotInfoDialog.show();
}
});
}
}.start();
return "Successfully showed robot info.";
}
private static void showFirmwareUpdateInstructions() {
mainWebViewContext.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(FIRMWARE_UPDATE_URL)));
}
/**
* Resets the values of the peripherals of all connected hummingbirds
* and microbits and hummingbits to their default values.
*/
private void stopAll() {
for (Hummingbird hummingbird : connectedHummingbirds.values())
hummingbird.stopAll();
for (Hummingbit hummingbit : connectedHummingbits.values())
hummingbit.stopAll();
for (Microbit microbit : connectedMicrobits.values())
microbit.stopAll();
}
} | app/src/main/java/com/birdbraintechnologies/birdblox/httpservice/RequestHandlers/RobotRequestHandler.java | package com.birdbraintechnologies.birdblox.httpservice.RequestHandlers;
import android.app.AlertDialog;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.le.ScanFilter;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Handler;
import android.os.ParcelUuid;
import android.util.Log;
import android.widget.Toast;
import com.birdbraintechnologies.birdblox.Bluetooth.BluetoothHelper;
import com.birdbraintechnologies.birdblox.Bluetooth.UARTConnection;
import com.birdbraintechnologies.birdblox.Bluetooth.UARTSettings;
import com.birdbraintechnologies.birdblox.Robots.Hummingbird;
import com.birdbraintechnologies.birdblox.Robots.Hummingbit;
import com.birdbraintechnologies.birdblox.Robots.Microbit;
import com.birdbraintechnologies.birdblox.Robots.Robot;
import com.birdbraintechnologies.birdblox.Robots.RobotType;
import com.birdbraintechnologies.birdblox.httpservice.HttpService;
import com.birdbraintechnologies.birdblox.httpservice.RequestHandler;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import fi.iki.elonen.NanoHTTPD;
import static com.birdbraintechnologies.birdblox.MainWebView.bbxEncode;
import static com.birdbraintechnologies.birdblox.MainWebView.mainWebViewContext;
import static com.birdbraintechnologies.birdblox.MainWebView.runJavascript;
import static com.birdbraintechnologies.birdblox.Robots.RobotType.robotTypeFromString;
import static fi.iki.elonen.NanoHTTPD.MIME_PLAINTEXT;
/**
* @author AppyFizz (Shreyan Bakshi)
* @author Zhendong Yuan (yzd1998111)
*/
public class RobotRequestHandler implements RequestHandler {
private final String TAG = this.getClass().getName();
private static final String FIRMWARE_UPDATE_URL = "http://www.hummingbirdkit.com/learning/installing-birdblox#BurnFirmware";
/* UUIDs for different Hummingbird features */
private static final String DEVICE_UUID = "6E400001-B5A3-F393-E0A9-E50E24DCCA9E";
private static final UUID HB_UART_UUID = UUID.fromString("6E400001-B5A3-F393-E0A9-E50E24DCCA9E");
private static final UUID HB_TX_UUID = UUID.fromString("6E400002-B5A3-F393-E0A9-E50E24DCCA9E");
private static final UUID HB_RX_UUID = UUID.fromString("6E400003-B5A3-F393-E0A9-E50E24DCCA9E");
// TODO: Remove this, it is the same across devices
private static final UUID RX_CONFIG_UUID = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb");
public static HashSet<String> hummingbirdsToConnect = new HashSet<>();
public static HashSet<String> hummingbitsToConnect = new HashSet<>();
public static HashSet<String> microbitsToConnect = new HashSet<>();
HttpService service;
private static BluetoothHelper btHelper;
private static HashMap<String, Thread> threadMap;
private static UARTSettings HBUARTSettings;
private static HashMap<String, Hummingbird> connectedHummingbirds;
private static UARTSettings HBitUARTSettings;
private static HashMap<String, Hummingbit> connectedHummingbits;
private static UARTSettings MBitUARTSettings;
private static HashMap<String, Microbit> connectedMicrobits;
public static String lastScanType;
private AlertDialog.Builder builder;
private AlertDialog robotInfoDialog;
public static HashMap<String, BluetoothGatt> deviceGatt;
public RobotRequestHandler(HttpService service) {
this.service = service;
btHelper = service.getBluetoothHelper();
threadMap = new HashMap<>();
connectedHummingbirds = new HashMap<>();
connectedHummingbits = new HashMap<>();
connectedMicrobits = new HashMap<>();
deviceGatt = new HashMap<>();
// Build Hummingbird UART settings
HBUARTSettings = (new UARTSettings.Builder())
.setUARTServiceUUID(HB_UART_UUID)
.setRxCharacteristicUUID(HB_RX_UUID)
.setTxCharacteristicUUID(HB_TX_UUID)
.setRxConfigUUID(RX_CONFIG_UUID)
.build();
HBitUARTSettings = (new UARTSettings.Builder())
.setUARTServiceUUID(HB_UART_UUID)
.setRxCharacteristicUUID(HB_RX_UUID)
.setTxCharacteristicUUID(HB_TX_UUID)
.setRxConfigUUID(RX_CONFIG_UUID)
.build();
MBitUARTSettings = (new UARTSettings.Builder())
.setUARTServiceUUID(HB_UART_UUID)
.setRxCharacteristicUUID(HB_RX_UUID)
.setTxCharacteristicUUID(HB_TX_UUID)
.setRxConfigUUID(RX_CONFIG_UUID)
.build();
}
@Override
public NanoHTTPD.Response handleRequest(NanoHTTPD.IHTTPSession session, List<String> args) {
String[] path = args.get(0).split("/");
Map<String, List<String>> m = session.getParameters();
// Generate response body
String responseBody = "";
Robot robot;
switch (path[0]) {
case "startDiscover":
responseBody = startScan();
break;
case "stopDiscover":
responseBody = stopDiscover();
break;
case "totalStatus":
responseBody = getTotalStatus(robotTypeFromString(m.get("type").get(0)));
break;
case "connect":
responseBody = connectToRobot(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
break;
case "disconnect":
responseBody = disconnectFromRobot(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
break;
case "out":
robot = getRobotFromId(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
if (robot == null) {
runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', false);");
return NanoHTTPD.newFixedLengthResponse(
NanoHTTPD.Response.Status.NOT_FOUND, MIME_PLAINTEXT, "Robot " + m.get("id").get(0) + " was not found.");
} else if (!robot.setOutput(path[1], m)) {
runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', false);");
return NanoHTTPD.newFixedLengthResponse(
NanoHTTPD.Response.Status.EXPECTATION_FAILED, MIME_PLAINTEXT, "Failed to send to robot " + m.get("id").get(0) + ".");
} else {
runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', true);");
responseBody = "Sent to robot " + m.get("type").get(0) + " successfully.";
}
break;
case "in":
robot = getRobotFromId(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
if (robot == null) {
runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', false);");
return NanoHTTPD.newFixedLengthResponse(
NanoHTTPD.Response.Status.NOT_FOUND, MIME_PLAINTEXT, "Robot " + m.get("id").get(0) + " was not found.");
} else {
String sensorPort = null;
String sensorAxis = null;
if (m.get("port") != null) {
sensorPort = m.get("port").get(0);
}
if (m.get("axis") != null) {
sensorAxis = m.get("axis").get(0);
}
String sensorValue = robot.readSensor(m.get("sensor").get(0), sensorPort, sensorAxis);
if (sensorValue == null) {
runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', false);");
return NanoHTTPD.newFixedLengthResponse(
NanoHTTPD.Response.Status.NO_CONTENT, MIME_PLAINTEXT, "Failed to read sensors from robot " + m.get("id").get(0) + ".");
} else {
runJavascript("CallbackManager.robot.updateStatus('" + m.get("id").get(0) + "', true);");
responseBody = sensorValue;
}
}
break;
case "showInfo":
responseBody = showRobotInfo(robotTypeFromString(m.get("type").get(0)), m.get("id").get(0));
break;
case "showUpdateInstructions":
showFirmwareUpdateInstructions();
break;
case "stopAll":
stopAll();
break;
}
return NanoHTTPD.newFixedLengthResponse(
NanoHTTPD.Response.Status.OK, MIME_PLAINTEXT, responseBody);
}
// TODO: Properly define Robot Object
// TODO: Synchronization of below functions
// TODO: Finish implementing new Robot commands and callbacks
private static String startScan() {
final List deviceFilter = generateDeviceFilter();
if (BluetoothHelper.currentlyScanning) {
return "";
}
if (BluetoothHelper.currentlyScanning) {
stopDiscover();
}
new Thread() {
@Override
public void run() {
btHelper.scanDevices(deviceFilter);
}
}.start();
return "";
}
/**
* Finds a robotId in the list of connected robots. Null if it does not exist.
*
* @param robotType The type of the robot to be found. Must be 'hummingbird' or 'hummingbit' or 'microbit'.
* @param robotId Robot ID to find.
* @return The connected Robot if it exists, null otherwise.
*/
private static Robot getRobotFromId(RobotType robotType, String robotId) {
if (robotType == RobotType.Hummingbird) {
return connectedHummingbirds.get(robotId);
} else if (robotType == RobotType.Hummingbit) {
return connectedHummingbits.get(robotId);
} else {
return connectedMicrobits.get(robotId);
}
}
/**
* Creates a Bluetooth scan Robot filter that only matches the required 'type' of Robot.
*
* @return List of scan filters.
*/
private static List<ScanFilter> generateDeviceFilter() {
String ROBOT_UUID = DEVICE_UUID;
ScanFilter scanFilter = (new ScanFilter.Builder())
.setServiceUuid(ParcelUuid.fromString(ROBOT_UUID))
.build();
List<ScanFilter> robotFilters = new ArrayList<>();
robotFilters.add(scanFilter);
return robotFilters;
}
/**
* @param robotType
* @param robotId
* @return
*/
public static String connectToRobot(RobotType robotType, String robotId) {
if (robotType == RobotType.Hummingbird) {
connectToHummingbird(robotId);
} else if (robotType == RobotType.Hummingbit) {
connectToHummingbit(robotId);
} else {
connectToMicrobit(robotId);
}
return "";
}
private static void connectToHummingbird(final String hummingbirdId) {
if (connectedHummingbirds.containsKey(hummingbirdId) == false) {
final UARTSettings HBUART = HBUARTSettings;
if (hummingbirdsToConnect.contains(hummingbirdId)) {
hummingbirdsToConnect.remove(hummingbirdId);
}
try {
Thread hbConnectionThread = new Thread() {
@Override
public void run() {
UARTConnection hbConn = btHelper.connectToDeviceUART(hummingbirdId, HBUART);
if (hbConn != null && hbConn.isConnected() && connectedHummingbirds != null) {
Hummingbird hummingbird = new Hummingbird(hbConn);
connectedHummingbirds.put(hummingbirdId, hummingbird);
hummingbird.setConnected();
}
}
};
hbConnectionThread.start();
final Thread oldThread = threadMap.put(hummingbirdId, hbConnectionThread);
if (oldThread != null) {
new Thread() {
@Override
public void run() {
super.run();
oldThread.interrupt();
}
}.start();
}
} catch (Exception e) {
Log.e("ConnectHB", " Error while connecting to HB " + e.getMessage());
}
}
}
private static void connectToHummingbit(final String hummingbitId) {
if (connectedHummingbits.containsKey(hummingbitId) == false) {
final UARTSettings HBitUART = HBitUARTSettings;
if (hummingbitsToConnect.contains(hummingbitId)) {
hummingbitsToConnect.remove(hummingbitId);
}
try {
Thread hbitConnectionThread = new Thread() {
@Override
public void run() {
UARTConnection hbitConn = btHelper.connectToDeviceUART(hummingbitId, HBitUART);
if (hbitConn != null && hbitConn.isConnected() && connectedHummingbits != null) {
Hummingbit hummingbit = new Hummingbit(hbitConn);
connectedHummingbits.put(hummingbitId, hummingbit);
hummingbit.setConnected();
}
}
};
hbitConnectionThread.start();
final Thread oldThread = threadMap.put(hummingbitId, hbitConnectionThread);
if (oldThread != null) {
new Thread() {
@Override
public void run() {
super.run();
oldThread.interrupt();
}
}.start();
}
} catch (Exception e) {
Log.e("ConnectHBit", " Error while connecting to HBit " + e.getMessage());
}
}
}
private static void connectToMicrobit(final String microbitId) {
if (connectedMicrobits.containsKey(microbitId) == false) {
final UARTSettings MBitUART = MBitUARTSettings;
if (microbitsToConnect.contains(microbitId)) {
microbitsToConnect.remove(microbitId);
}
try {
Thread mbitConnectionThread = new Thread() {
@Override
public void run() {
UARTConnection mbitConn = btHelper.connectToDeviceUART(microbitId, MBitUART);
if (mbitConn != null && mbitConn.isConnected() && connectedMicrobits != null) {
Microbit microbit = new Microbit(mbitConn);
connectedMicrobits.put(microbitId, microbit);
microbit.setConnected();
}
}
};
mbitConnectionThread.start();
final Thread oldThread = threadMap.put(microbitId, mbitConnectionThread);
if (oldThread != null) {
new Thread() {
@Override
public void run() {
super.run();
oldThread.interrupt();
}
}.start();
}
} catch (Exception e) {
Log.e("ConnectHBit", " Error while connecting to HBit " + e.getMessage());
}
}
}
/**
* @param robotType
* @param robotId
* @return
*/
private String disconnectFromRobot(RobotType robotType, final String robotId) {
new Thread() {
@Override
public void run() {
super.run();
Thread connThread = threadMap.get(robotId);
if (connThread != null) connThread.interrupt();
}
}.start();
if (robotType == RobotType.Hummingbird) {
disconnectFromHummingbird(robotId);
} else if (robotType == RobotType.Hummingbit) {
disconnectFromHummingbit(robotId);
} else {
disconnectFromMicrobit(robotId);
}
hummingbirdsToConnect = new HashSet<>();
hummingbitsToConnect = new HashSet<>();
microbitsToConnect = new HashSet<>();
btHelper.stopScan();
runJavascript("CallbackManager.robot.updateStatus('" + bbxEncode(robotId) + "', false);");
Log.d("TotStat", "Connected Hummingbirds: " + connectedHummingbirds.toString());
Log.d("TotStat", "Connected Hummingbits: " + connectedHummingbits.toString());
Log.d("TotStat", "Connected Hummingbits: " + connectedMicrobits.toString());
return robotType.toString() + " disconnected successfully.";
}
/**
* @param hummingbirdId
*/
public static void disconnectFromHummingbird(String hummingbirdId) {
try {
Hummingbird hummingbird = (Hummingbird) getRobotFromId(RobotType.Hummingbird, hummingbirdId);
if (hummingbird != null) {
hummingbird.disconnect();
if (hummingbird.getDisconnected()) {
connectedHummingbirds.remove(hummingbirdId);
}
Log.d("TotStat", "Removing hummingbird: " + hummingbirdId);
} else {
BluetoothGatt curDeviceGatt = deviceGatt.get(hummingbirdId);
if (curDeviceGatt != null) {
curDeviceGatt.disconnect();
curDeviceGatt.close();
curDeviceGatt = null;
if (deviceGatt.containsKey(hummingbirdId)) {
deviceGatt.remove(hummingbirdId);
}
}
}
} catch (Exception e) {
Log.e("ConnectHB", " Error while disconnecting from HB " + e.getMessage());
}
}
/**
* @param hummingbitId
*/
public static void disconnectFromHummingbit(String hummingbitId) {
try {
Hummingbit hummingbit = (Hummingbit) getRobotFromId(RobotType.Hummingbit, hummingbitId);
if (hummingbit != null) {
hummingbit.disconnect();
if (hummingbit.getDisconnected()) {
connectedHummingbits.remove(hummingbitId);
}
Log.d("TotStat", "Removing hummingbit: " + hummingbitId);
} else {
BluetoothGatt curDeviceGatt = deviceGatt.get(hummingbitId);
if (curDeviceGatt != null) {
curDeviceGatt.disconnect();
curDeviceGatt.close();
curDeviceGatt = null;
if (deviceGatt.containsKey(hummingbitId)) {
deviceGatt.remove(hummingbitId);
}
}
}
} catch (Exception e) {
Log.e("ConnectHB", " Error while disconnecting from HB " + e.getMessage());
}
}
/**
* @param microbitId
*/
public static void disconnectFromMicrobit(String microbitId) {
try {
Microbit microbit = (Microbit) getRobotFromId(RobotType.Microbit, microbitId);
if (microbit != null) {
microbit.disconnect();
if (microbit.getDisconnected()) {
connectedMicrobits.remove(microbitId);
}
Log.d("TotStat", "Removing microbit: " + microbitId);
} else {
BluetoothGatt curDeviceGatt = deviceGatt.get(microbitId);
if (curDeviceGatt != null) {
curDeviceGatt.disconnect();
curDeviceGatt.close();
curDeviceGatt = null;
if (deviceGatt.containsKey(microbitId)) {
deviceGatt.remove(microbitId);
}
}
}
} catch (Exception e) {
Log.e("ConnectHB", " Error while disconnecting from MB " + e.getMessage());
}
}
public static void disconnectAll() {
hummingbirdsToConnect = null;
hummingbitsToConnect = null;
microbitsToConnect = null;
if (connectedHummingbirds != null) {
for (String individualHummingBird : connectedHummingbirds.keySet()) {
disconnectFromHummingbird(individualHummingBird);
}
}
if (connectedHummingbits != null) {
for (String individualHummingBit : connectedHummingbits.keySet()) {
disconnectFromHummingbit(individualHummingBit);
}
}
if (connectedMicrobits != null) {
for (String individualMicroBit : connectedMicrobits.keySet()) {
disconnectFromMicrobit(individualMicroBit);
}
}
}
/**
* @param robotType
* @return
*/
private String getTotalStatus(RobotType robotType) {
if (robotType == RobotType.Hummingbird) {
return getTotalHBStatus();
} else if (robotType == RobotType.Hummingbit) {
return getTotalHBitStatus();
} else {
return getTotalMBitStatus();
}
}
/**
* @return
*/
private String getTotalHBStatus() {
Log.d("TotStat", "Connected Hummingbirds: " + connectedHummingbirds.toString());
if (connectedHummingbirds.size() == 0) {
return "2"; // No hummingbirds connected
}
for (Hummingbird hummingbird : connectedHummingbirds.values()) {
if (!hummingbird.isConnected()) {
return "0"; // Some hummingbird is disconnected
}
}
return "1"; // All hummingbirds are OK
}
/**
* @return
*/
private String getTotalHBitStatus() {
Log.d("TotStat", "Connected Hummingbits: " + connectedHummingbits.toString());
if (connectedHummingbits.size() == 0) {
return "2"; // No hummingbits connected
}
for (Hummingbit hummingbit : connectedHummingbits.values()) {
if (!hummingbit.isConnected()) {
return "0"; // Some hummingbit is disconnected
}
}
return "1"; // All hummingbits are OK
}
/**
* @return
*/
private String getTotalMBitStatus() {
Log.d("TotStat", "Connected Microbits: " + connectedMicrobits.toString());
if (connectedMicrobits.size() == 0) {
return "2"; // No hummingbits connected
}
for (Microbit microbit : connectedMicrobits.values()) {
if (!microbit.isConnected()) {
return "0"; // Some hummingbit is disconnected
}
}
return "1"; // All hummingbits are OK
}
private static String stopDiscover() {
if (btHelper != null)
btHelper.stopScan();
runJavascript("CallbackManager.robot.stopDiscover();");
return "Bluetooth discovery stopped.";
}
private String showRobotInfo(RobotType robotType, String robotId) {
builder = new AlertDialog.Builder(mainWebViewContext);
// Get details
Robot robot = getRobotFromId(robotType, robotId);
String name = robot.getName();
String macAddress = robot.getMacAddress();
String gapName = robot.getGAPName();
String hardwareVersion = "";
String firmwareVersion = "";
if (robotType == RobotType.Hummingbird) {
hardwareVersion = ((Hummingbird) robot).getHardwareVersion();
firmwareVersion = ((Hummingbird) robot).getFirmwareVersion();
} else if (robotType == RobotType.Hummingbit) {
hardwareVersion = ((Hummingbit) robot).getHardwareVersion();
firmwareVersion = "microBit: " + ((Hummingbit) robot).getMicroBitVersion() + "SMD: " + ((Hummingbit) robot).getSMDVersion();
} else if (robotType == RobotType.Microbit) {
hardwareVersion = ((Microbit) robot).getHardwareVersion();
firmwareVersion = "microBit: " + ((Microbit) robot).getMicroBitVersion();
}
builder.setTitle(robotType.toString() + " Peripheral");
String message = "";
if (name != null)
message += ("Name: " + name + "\n");
if (macAddress != null)
message += ("MAC Address: " + macAddress + "\n");
if (gapName != null)
message += ("Bluetooth Name: " + gapName + "\n");
if (hardwareVersion != null)
message += ("Hardware Version: " + hardwareVersion + "\n");
if (firmwareVersion != null)
message += ("Firmware Version: " + firmwareVersion + "\n");
if (!robot.hasLatestFirmware())
message += ("\nFirmware update available.");
builder.setMessage(message);
builder.setCancelable(true);
if (!robot.hasLatestFirmware()) {
builder.setPositiveButton(
"Update Firmware",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
showFirmwareUpdateInstructions();
}
});
builder.setNegativeButton(
"Dismiss",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
} else {
builder.setNeutralButton(
"Dismiss",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
}
new Thread() {
@Override
public void run() {
super.run();
new Handler(mainWebViewContext.getMainLooper()).post(new Runnable() {
@Override
public void run() {
robotInfoDialog = builder.create();
robotInfoDialog.show();
}
});
}
}.start();
return "Successfully showed robot info.";
}
private static void showFirmwareUpdateInstructions() {
mainWebViewContext.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(FIRMWARE_UPDATE_URL)));
}
/**
* Resets the values of the peripherals of all connected hummingbirds
* and microbits and hummingbits to their default values.
*/
private void stopAll() {
for (Hummingbird hummingbird : connectedHummingbirds.values())
hummingbird.stopAll();
for (Hummingbit hummingbit : connectedHummingbits.values())
hummingbit.stopAll();
for (Microbit microbit : connectedMicrobits.values())
microbit.stopAll();
}
}